mirror of https://github.com/sasjs/adapter.git synced 2025-12-11 09:24:35 +00:00

fix: get the complete log after job execution

Saad Jutt
2021-03-22 19:21:01 +05:00
parent 2bdcbda54c
commit b3474b6dfb
5 changed files with 74 additions and 18 deletions
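
Summary of the change (derived from the diff below): previously, SASViyaApiClient fetched a job's log with a single request capped at 10,000 lines (`${logLink.href}/content?limit=10000`), so longer logs were truncated. This commit adds a fetchLogFileContentByChunks utility that pages through the log endpoint 10,000 lines at a time, bounded by the job's logStatistics.lineCount (falling back to 1,000,000 lines when the statistics are unavailable), and wires it into both the normal log path and the NotFoundError recovery path. A LogStatistics interface is added and referenced from the Job type, and the new utility is re-exported from the utils barrel.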


@@ -1,4 +1,10 @@
-import { convertToCSV, isRelativePath, isUri, isUrl } from './utils'
+import {
+  convertToCSV,
+  isRelativePath,
+  isUri,
+  isUrl,
+  fetchLogFileContentByChunks
+} from './utils'
 import * as NodeFormData from 'form-data'
 import {
   Job,
@@ -420,19 +426,19 @@ export class SASViyaApiClient {
     })

     let jobResult
-    let log
+    let log = ''

     const logLink = currentJob.links.find((l) => l.rel === 'log')

     if (debug && logLink) {
-      log = await this.requestClient
-        .get<any>(`${logLink.href}/content?limit=10000`, accessToken)
-        .then((res: any) =>
-          res.result.items.map((i: any) => i.line).join('\n')
-        )
-        .catch((err) => {
-          throw prefixMessage(err, 'Error while getting log. ')
-        })
+      const logUrl = `${logLink.href}/content`
+      const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+      log = await fetchLogFileContentByChunks(
+        this.requestClient,
+        accessToken!,
+        logUrl,
+        logCount
+      )
     }

     if (jobStatus === 'failed' || jobStatus === 'error') {
@@ -453,14 +459,14 @@
       .catch(async (e) => {
         if (e instanceof NotFoundError) {
           if (logLink) {
-            log = await this.requestClient
-              .get<any>(`${logLink.href}/content?limit=10000`, accessToken)
-              .then((res: any) =>
-                res.result.items.map((i: any) => i.line).join('\n')
-              )
-              .catch((err) => {
-                throw prefixMessage(err, 'Error while getting log. ')
-              })
+            const logUrl = `${logLink.href}/content`
+            const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+            log = await fetchLogFileContentByChunks(
+              this.requestClient,
+              accessToken!,
+              logUrl,
+              logCount
+            )

             return Promise.reject({
               status: 500,


@@ -1,5 +1,6 @@
 import { Link } from './Link'
 import { JobResult } from './JobResult'
+import { LogStatistics } from './LogStatistics'

 export interface Job {
   id: string
@@ -10,4 +11,5 @@ export interface Job {
   links: Link[]
   results: JobResult
   error?: any
+  logStatistics: LogStatistics
 }


@@ -0,0 +1,4 @@
+export interface LogStatistics {
+  lineCount: number
+  modifiedTimeStamp: string
+}


@@ -0,0 +1,43 @@
+import { RequestClient } from '../request/RequestClient'
+import { prefixMessage } from '@sasjs/utils/error'
+
+/**
+ * Fetches the content of a log file in chunks.
+ * @param {object} requestClient - the RequestClient instance used to make the requests.
+ * @param {string} accessToken - an access token for an authorized user.
+ * @param {string} logUrl - the URL of the log file.
+ * @param {number} logCount - the total number of log lines in the file.
+ * @returns a string containing the log lines.
+ */
+export const fetchLogFileContentByChunks = async (
+  requestClient: RequestClient,
+  accessToken: string,
+  logUrl: string,
+  logCount: number
+): Promise<string> => {
+  let log: string = ''
+
+  const loglimit = logCount < 10000 ? logCount : 10000
+  let start = 0
+
+  do {
+    console.log(
+      `Fetching logs from line no: ${start + 1} to ${
+        start + loglimit
+      } of ${logCount}.`
+    )
+    const logChunkJson = await requestClient!
+      .get<any>(`${logUrl}?start=${start}&limit=${loglimit}`, accessToken)
+      .then((res: any) => res.result)
+      .catch((err) => {
+        throw prefixMessage(err, 'Error while getting log. ')
+      })
+
+    if (logChunkJson.items.length === 0) break
+    const logChunk = logChunkJson.items.map((i: any) => i.line).join('\n')
+    log += logChunk
+
+    start += loglimit
+  } while (start < logCount)
+
+  return log
+}
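
For illustration, a minimal sketch of how the new helper can be driven from job-polling code. downloadJobLog is a hypothetical wrapper that is not part of this commit; the import paths, the requestClient, and the accessToken are assumptions, mirroring the call sites in SASViyaApiClient above.

  import { RequestClient } from '../request/RequestClient'
  import { fetchLogFileContentByChunks } from '../utils'

  // Hypothetical wrapper: fetch the complete log for a finished job.
  export async function downloadJobLog(
    requestClient: RequestClient,
    accessToken: string,
    job: {
      links: { rel: string; href: string }[]
      logStatistics?: { lineCount: number }
    }
  ): Promise<string> {
    const logLink = job.links.find((l) => l.rel === 'log')
    if (!logLink) return ''

    // The line count bounds the paging loop; fall back to a large number
    // when the job does not report log statistics.
    const logUrl = `${logLink.href}/content`
    const logCount = job.logStatistics?.lineCount ?? 1000000

    return fetchLogFileContentByChunks(requestClient, accessToken, logUrl, logCount)
  }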


@@ -11,3 +11,4 @@ export * from './parseSasViyaLog'
 export * from './serialize'
 export * from './splitChunks'
 export * from './parseWeboutResponse'
+export * from './fetchLogFileContentByChunks'