mirror of https://github.com/sasjs/adapter.git
fix: after job executing get complete log
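The diff below replaces the previous single log request, which was capped at 10000 lines via `?limit=10000`, with a helper that pages through the Viya log endpoint using `start` and `limit` query parameters until the job's full `logStatistics.lineCount` has been read. A minimal sketch of that paging pattern follows; it is an assumption-laden standalone version using the global fetch API and a bearer token rather than the adapter's RequestClient, with the `{ items: [{ line }] }` response shape taken from the diff:

// Sketch only: a standalone version of the chunked log fetch this commit
// introduces. Assumes Node 18+ global fetch and a bearer token; the adapter
// itself goes through its RequestClient instead.
const fetchWholeLog = async (
  logUrl: string,
  accessToken: string,
  lineCount: number
): Promise<string> => {
  // Same 10000-line page size the adapter uses.
  const limit = Math.min(lineCount, 10000)
  const lines: string[] = []
  let start = 0

  while (start < lineCount) {
    const res = await fetch(`${logUrl}?start=${start}&limit=${limit}`, {
      headers: { Authorization: `Bearer ${accessToken}` }
    })
    const chunk: { items: { line: string }[] } = await res.json()

    // Stop early if the server returns fewer lines than expected.
    if (chunk.items.length === 0) break

    lines.push(...chunk.items.map((item) => item.line))
    start += limit
  }

  return lines.join('\n')
}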
@@ -1,4 +1,10 @@
-import { convertToCSV, isRelativePath, isUri, isUrl } from './utils'
+import {
+  convertToCSV,
+  isRelativePath,
+  isUri,
+  isUrl,
+  fetchLogFileContentByChunks
+} from './utils'
 import * as NodeFormData from 'form-data'
 import {
   Job,
@@ -420,19 +426,19 @@ export class SASViyaApiClient {
       })

     let jobResult
-    let log
+    let log = ''

     const logLink = currentJob.links.find((l) => l.rel === 'log')

     if (debug && logLink) {
-      log = await this.requestClient
-        .get<any>(`${logLink.href}/content?limit=10000`, accessToken)
-        .then((res: any) =>
-          res.result.items.map((i: any) => i.line).join('\n')
-        )
-        .catch((err) => {
-          throw prefixMessage(err, 'Error while getting log. ')
-        })
+      const logUrl = `${logLink.href}/content`
+      const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+      log = await fetchLogFileContentByChunks(
+        this.requestClient,
+        accessToken!,
+        logUrl,
+        logCount
+      )
     }

     if (jobStatus === 'failed' || jobStatus === 'error') {
@@ -453,14 +459,14 @@ export class SASViyaApiClient {
       .catch(async (e) => {
         if (e instanceof NotFoundError) {
           if (logLink) {
-            log = await this.requestClient
-              .get<any>(`${logLink.href}/content?limit=10000`, accessToken)
-              .then((res: any) =>
-                res.result.items.map((i: any) => i.line).join('\n')
-              )
-              .catch((err) => {
-                throw prefixMessage(err, 'Error while getting log. ')
-              })
+            const logUrl = `${logLink.href}/content`
+            const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+            log = await fetchLogFileContentByChunks(
+              this.requestClient,
+              accessToken!,
+              logUrl,
+              logCount
+            )

             return Promise.reject({
               status: 500,
@@ -1,5 +1,6 @@
 import { Link } from './Link'
 import { JobResult } from './JobResult'
+import { LogStatistics } from './LogStatistics'

 export interface Job {
   id: string
@@ -10,4 +11,5 @@ export interface Job {
   links: Link[]
   results: JobResult
   error?: any
+  logStatistics: LogStatistics
 }
src/types/LogStatistics.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
+export interface LogStatistics {
+  lineCount: number
+  modifiedTimeStamp: string
+}
src/utils/fetchLogFileContentByChunks.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
+import { RequestClient } from '../request/RequestClient'
+import { prefixMessage } from '@sasjs/utils/error'
+
+/**
+ * Fetches the content of the log file.
+ * @param {object} requestClient - client object of Request Client.
+ * @param {string} accessToken - an access token for an authorized user.
+ * @param {string} logUrl - URL of the log file.
+ * @param {number} logCount - total number of log lines in the file.
+ * @returns a string containing the log lines.
+ */
+export const fetchLogFileContentByChunks = async (
+  requestClient: RequestClient,
+  accessToken: string,
+  logUrl: string,
+  logCount: number
+): Promise<string> => {
+  let log: string = ''
+
+  const loglimit = logCount < 10000 ? logCount : 10000
+  let start = 0
+  do {
+    console.log(
+      `Fetching logs from line no: ${start + 1} to ${
+        start + loglimit
+      } of ${logCount}.`
+    )
+    const logChunkJson = await requestClient!
+      .get<any>(`${logUrl}?start=${start}&limit=${loglimit}`, accessToken)
+      .then((res: any) => res.result)
+      .catch((err) => {
+        throw prefixMessage(err, 'Error while getting log. ')
+      })
+
+    if (logChunkJson.items.length === 0) break
+
+    const logChunk = logChunkJson.items.map((i: any) => i.line).join('\n')
+    log += logChunk
+
+    start += loglimit
+  } while (start < logCount)
+  return log
+}
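For reference, a hypothetical call site for the new helper could look like the sketch below. The `RequestClient` and `fetchLogFileContentByChunks` import paths and the job-log lookup mirror the files and hunks in this commit, while the wrapper function itself and the `./types/Job` import path are assumptions:

import { Job } from './types/Job'
import { RequestClient } from './request/RequestClient'
import { fetchLogFileContentByChunks } from './utils'

// Hypothetical wrapper: given a finished job, assemble its complete log.
const getCompleteJobLog = async (
  requestClient: RequestClient,
  accessToken: string,
  job: Job
): Promise<string> => {
  const logLink = job.links.find((l) => l.rel === 'log')
  if (!logLink) return ''

  const logUrl = `${logLink.href}/content`
  // Fall back to a large line count when logStatistics is missing,
  // mirroring the SASViyaApiClient hunk above.
  const logCount = job.logStatistics?.lineCount ?? 1000000

  return fetchLogFileContentByChunks(requestClient, accessToken, logUrl, logCount)
}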
@@ -11,3 +11,4 @@ export * from './parseSasViyaLog'
 export * from './serialize'
 export * from './splitChunks'
 export * from './parseWeboutResponse'
+export * from './fetchLogFileContentByChunks'