1
0
mirror of https://github.com/sasjs/adapter.git synced 2026-01-04 03:00:05 +00:00

chore(git): Merge branch 'master' into fixing-sas9-tests

This commit is contained in:
2021-07-26 14:53:52 +02:00
23 changed files with 291 additions and 138 deletions

View File

@@ -12,9 +12,9 @@ What code changes have been made to achieve the intent.
## Checks ## Checks
No PR (that involves a non-trivial code change) should be merged, unless all four of the items below are confirmed! If an urgent fix is needed - use a tar file. No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.
- [ ] Code is formatted correctly (`npm run lint:fix`).
- [ ] All unit tests are passing (`npm test`).
- [ ] All `sasjs-cli` unit tests are passing (`npm test`). - [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)). - [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

View File

@@ -1,4 +1,4 @@
import { isUrl } from './utils' import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
import { UploadFile } from './types/UploadFile' import { UploadFile } from './types/UploadFile'
import { ErrorResponse, LoginRequiredError } from './types/errors' import { ErrorResponse, LoginRequiredError } from './types/errors'
import { RequestClient } from './request/RequestClient' import { RequestClient } from './request/RequestClient'
@@ -63,13 +63,28 @@ export class FileUploader {
return this.requestClient return this.requestClient
.post(uploadUrl, formData, undefined, 'application/json', headers) .post(uploadUrl, formData, undefined, 'application/json', headers)
.then((res) => { .then(async (res) => {
let result // for web approach on Viya
if (
this.sasjsConfig.debug &&
(this.sasjsConfig.useComputeApi === null ||
this.sasjsConfig.useComputeApi === undefined) &&
this.sasjsConfig.serverType === ServerType.SasViya
) {
const jsonResponse = await parseSasViyaDebugResponse(
res.result as string,
this.requestClient,
this.sasjsConfig.serverUrl
)
return typeof jsonResponse === 'string'
? getValidJson(jsonResponse)
: jsonResponse
}
result = return typeof res.result === 'string'
typeof res.result === 'string' ? JSON.parse(res.result) : res.result ? getValidJson(res.result)
: res.result
return result
//TODO: append to SASjs requests //TODO: append to SASjs requests
}) })
.catch((err: Error) => { .catch((err: Error) => {

View File

@@ -544,11 +544,22 @@ export default class SASjs {
* Process). Is prepended at runtime with the value of `appLoc`. * Process). Is prepended at runtime with the value of `appLoc`.
* @param files - array of files to be uploaded, including File object and file name. * @param files - array of files to be uploaded, including File object and file name.
* @param params - request URL parameters. * @param params - request URL parameters.
* @param overrideSasjsConfig - object to override existing config (optional)
*/ */
public uploadFile(sasJob: string, files: UploadFile[], params: any) { public uploadFile(
const fileUploader = sasJob: string,
this.fileUploader || files: UploadFile[],
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!) params: any,
overrideSasjsConfig?: any
) {
const fileUploader = overrideSasjsConfig
? new FileUploader(
{ ...this.sasjsConfig, ...overrideSasjsConfig },
this.jobsPath,
this.requestClient!
)
: this.fileUploader ||
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
return fileUploader.uploadFile(sasJob, files, params) return fileUploader.uploadFile(sasJob, files, params)
} }

View File

@@ -0,0 +1,17 @@
import { isFolder } from '@sasjs/utils/file'
import { generateTimestamp } from '@sasjs/utils/time'
import { Job } from '../../types'
/**
 * Creates a write stream for a job's log output.
 * If `filePath` points to a folder (or is omitted, defaulting to the current
 * working directory), a unique `<job name>-<timestamp>.log` file is created
 * inside it; if it points to a file, that file is written to directly.
 * @param job - the job whose log is being streamed; `job.name` seeds the
 * generated file name (falls back to 'job').
 * @param filePath - optional target file or folder path.
 * @returns a promise resolving to the created write stream.
 */
export const getFileStream = async (job: Job, filePath?: string) => {
  const { createWriteStream } = require('@sasjs/utils/file')
  // Default to the current working directory when no path is supplied.
  const logPath = filePath || process.cwd()
  const isFolderPath = await isFolder(logPath)

  if (isFolderPath) {
    // Folder given: generate a timestamped log file name inside it.
    const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
    const path = require('path')
    // Reuse `logPath` here — it already equals `filePath || process.cwd()`.
    const logFilePath = path.join(logPath, logFileName)
    return createWriteStream(logFilePath)
  } else {
    // File given: write straight to it.
    return createWriteStream(logPath)
  }
}

View File

@@ -3,11 +3,8 @@ import { Job, PollOptions } from '../..'
import { getTokens } from '../../auth/getTokens' import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors' import { JobStatePollError } from '../../types/errors'
import { generateTimestamp } from '@sasjs/utils/time' import { Link, WriteStream } from '../../types'
import { saveLog } from './saveLog' import { isNode } from '../../utils'
import { createWriteStream, isFolder } from '@sasjs/utils/file'
import { WriteStream } from 'fs'
import { Link } from '../../types'
export async function pollJobState( export async function pollJobState(
requestClient: RequestClient, requestClient: RequestClient,
@@ -21,11 +18,14 @@ export async function pollJobState(
let pollInterval = 300 let pollInterval = 300
let maxPollCount = 1000 let maxPollCount = 1000
if (pollOptions) { const defaultPollOptions: PollOptions = {
pollInterval = pollOptions.pollInterval || pollInterval maxPollCount,
maxPollCount = pollOptions.maxPollCount || maxPollCount pollInterval,
streamLog: false
} }
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
const stateLink = postedJob.links.find((l: any) => l.rel === 'state') const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
if (!stateLink) { if (!stateLink) {
throw new Error(`Job state link was not found.`) throw new Error(`Job state link was not found.`)
@@ -52,23 +52,12 @@ export async function pollJobState(
} }
let logFileStream let logFileStream
if (pollOptions?.streamLog) { if (pollOptions.streamLog && isNode()) {
const logPath = pollOptions?.logFolderPath || process.cwd() const { getFileStream } = require('./getFileStream')
const isFolderPath = await isFolder(logPath) logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
if (isFolderPath) {
const logFileName = `${
postedJob.name || 'job'
}-${generateTimestamp()}.log`
const logFilePath = `${
pollOptions?.logFolderPath || process.cwd()
}/${logFileName}`
logFileStream = await createWriteStream(logFilePath)
} else {
logFileStream = await createWriteStream(logPath)
}
} }
// Poll up to the first 100 times with the specified poll interval
let result = await doPoll( let result = await doPoll(
requestClient, requestClient,
postedJob, postedJob,
@@ -76,14 +65,18 @@ export async function pollJobState(
debug, debug,
pollCount, pollCount,
authConfig, authConfig,
pollOptions, {
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream logFileStream
) )
currentState = result.state currentState = result.state
pollCount = result.pollCount pollCount = result.pollCount
if (!needsRetry(currentState) || pollCount >= maxPollCount) { if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
return currentState return currentState
} }
@@ -192,7 +185,7 @@ const doPoll = async (
throw new Error(`Job state link was not found.`) throw new Error(`Job state link was not found.`)
} }
while (needsRetry(state) && pollCount <= 100 && pollCount <= maxPollCount) { while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState( state = await getJobState(
requestClient, requestClient,
postedJob, postedJob,
@@ -222,14 +215,17 @@ const doPoll = async (
const endLogLine = job.logStatistics?.lineCount ?? 1000000 const endLogLine = job.logStatistics?.lineCount ?? 1000000
await saveLog( const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
postedJob, if (saveLog) {
requestClient, await saveLog(
startLogLine, postedJob,
endLogLine, requestClient,
logStream, startLogLine,
authConfig?.access_token endLogLine,
) logStream,
authConfig?.access_token
)
}
startLogLine += endLogLine startLogLine += endLogLine
} }

View File

@@ -1,7 +1,7 @@
import { Job } from '../..' import { Job } from '../..'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { fetchLog } from '../../utils' import { fetchLog } from '../../utils'
import { WriteStream } from 'fs' import { WriteStream } from '../../types'
import { writeStream } from './writeStream' import { writeStream } from './writeStream'
/** /**

View File

@@ -0,0 +1,41 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as path from 'path'
import * as fileModule from '@sasjs/utils/file'
import { getFileStream } from '../getFileStream'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'
// Unit tests for getFileStream: verifies how the target log path is resolved
// when the given path points to an existing file vs a folder.
describe('getFileStream', () => {
  beforeEach(() => {
    // Silence logging during tests and (re)install the file-module mocks.
    ;(process as any).logger = new Logger(LogLevel.Off)
    setupMocks()
  })

  it('should use the given log path if it points to a file', async () => {
    // Re-require so we grab the mocked createWriteStream instance.
    const { createWriteStream } = require('@sasjs/utils/file')
    await getFileStream(mockJob, path.join(__dirname, 'test.log'))

    // A file path must be used verbatim, with no generated name.
    expect(createWriteStream).toHaveBeenCalledWith(
      path.join(__dirname, 'test.log')
    )
  })

  it('should generate a log file path with a timestamp if it points to a folder', async () => {
    const { createWriteStream } = require('@sasjs/utils/file')
    await getFileStream(mockJob, __dirname)

    // The folder itself must not be the write target; a name of the form
    // `<job name>-<timestamp>.log` is generated inside the folder.
    // NOTE(review): assumes mockJob.name is 'test job' and the timestamp
    // starts with '20' — confirm against mockResponses.
    expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
    expect(createWriteStream).toHaveBeenCalledWith(
      expect.stringContaining(path.join(__dirname, 'test job-20'))
    )
  })
})
// Resets all mocks and stubs out createWriteStream so the tests never touch
// the real file system.
const setupMocks = () => {
  jest.restoreAllMocks()
  jest.mock('@sasjs/utils/file/file')

  jest
    .spyOn(fileModule, 'createWriteStream')
    // Resolve with an empty stub standing in for a real WriteStream.
    .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
}

View File

@@ -1,11 +1,11 @@
import { Logger, LogLevel } from '@sasjs/utils' import { Logger, LogLevel } from '@sasjs/utils'
import * as path from 'path'
import * as fileModule from '@sasjs/utils/file'
import { RequestClient } from '../../../request/RequestClient' import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses' import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState' import { pollJobState } from '../pollJobState'
import * as getTokensModule from '../../../auth/getTokens' import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog' import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types' import { PollOptions } from '../../../types'
import { WriteStream } from 'fs' import { WriteStream } from 'fs'
@@ -77,42 +77,43 @@ describe('pollJobState', () => {
it('should attempt to fetch and save the log after each poll when streamLog is true', async () => { it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
mockSimplePoll() mockSimplePoll()
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollOptions,
streamLog: true streamLog: true
}) })
expect(saveLogModule.saveLog).toHaveBeenCalledTimes(2) expect(saveLog).toHaveBeenCalledTimes(2)
}) })
it('should use the given log path if it points to a file', async () => { it('should create a write stream in Node.js environment when streamLog is true', async () => {
mockSimplePoll() mockSimplePoll()
const { getFileStream } = require('../getFileStream')
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollOptions,
streamLog: true, streamLog: true
logFolderPath: path.join(__dirname, 'test.log')
}) })
expect(fileModule.createWriteStream).toHaveBeenCalledWith( expect(getFileStream).toHaveBeenCalled()
path.join(__dirname, 'test.log') expect(saveLog).toHaveBeenCalledTimes(2)
)
}) })
it('should generate a log file path with a timestamp if it points to a folder', async () => { it('should not create a write stream in a non-Node.js environment', async () => {
mockSimplePoll() mockSimplePoll()
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
const { saveLog } = require('../saveLog')
const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollOptions,
streamLog: true, streamLog: true
logFolderPath: path.join(__dirname)
}) })
expect(fileModule.createWriteStream).not.toHaveBeenCalledWith(__dirname) expect(getFileStream).not.toHaveBeenCalled()
expect(fileModule.createWriteStream).toHaveBeenCalledWith( expect(saveLog).not.toHaveBeenCalled()
expect.stringContaining(__dirname + '/test job-20')
)
}) })
it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => { it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {
@@ -247,7 +248,8 @@ const setupMocks = () => {
jest.mock('../../../request/RequestClient') jest.mock('../../../request/RequestClient')
jest.mock('../../../auth/getTokens') jest.mock('../../../auth/getTokens')
jest.mock('../saveLog') jest.mock('../saveLog')
jest.mock('@sasjs/utils/file') jest.mock('../getFileStream')
jest.mock('../../../utils/isNode')
jest jest
.spyOn(requestClient, 'get') .spyOn(requestClient, 'get')
@@ -261,8 +263,9 @@ const setupMocks = () => {
.spyOn(saveLogModule, 'saveLog') .spyOn(saveLogModule, 'saveLog')
.mockImplementation(() => Promise.resolve()) .mockImplementation(() => Promise.resolve())
jest jest
.spyOn(fileModule, 'createWriteStream') .spyOn(getFileStreamModule, 'getFileStream')
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream)) .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
} }
const mockSimplePoll = (runningCount = 2) => { const mockSimplePoll = (runningCount = 2) => {
@@ -308,7 +311,7 @@ const mockLongPoll = () => {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: count <= 101 ? 'running' : 'completed', result: count <= 102 ? 'running' : 'completed',
etag: '', etag: '',
status: 200 status: 200
}) })

View File

@@ -4,7 +4,7 @@ import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream' import * as writeStreamModule from '../writeStream'
import { saveLog } from '../saveLog' import { saveLog } from '../saveLog'
import { mockJob } from './mockResponses' import { mockJob } from './mockResponses'
import { WriteStream } from 'fs' import { WriteStream } from '../../../types'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)() const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const stream = {} as unknown as WriteStream const stream = {} as unknown as WriteStream

View File

@@ -0,0 +1,25 @@
import { WriteStream } from '../../../types'
import { writeStream } from '../writeStream'
import 'jest-extended'
// Unit tests for writeStream: the returned promise must resolve when the
// underlying stream's write callback reports success and reject when it
// reports an error.
describe('writeStream', () => {
  // Minimal WriteStream stub; each test installs its own `write` behaviour.
  const stream: WriteStream = {
    write: jest.fn(),
    path: 'test'
  }

  it('should resolve when the stream is written successfully', async () => {
    // Invoke the callback with no error so the promise under test settles —
    // a bare jest.fn() never calls the callback and the promise would hang.
    jest.spyOn(stream, 'write').mockImplementation((_, callback) => callback())

    // `toResolve` (jest-extended) returns a promise assertion; it must be
    // awaited, otherwise the check is fire-and-forget and never enforced.
    await expect(writeStream(stream, 'test')).toResolve()
    expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
  })

  it('should reject when the write errors out', async () => {
    jest
      .spyOn(stream, 'write')
      .mockImplementation((_, callback) => callback(new Error('Test Error')))

    const error = await writeStream(stream, 'test').catch((e) => e)
    expect(error.message).toEqual('Test Error')
  })
})

View File

@@ -1,11 +1,11 @@
import { WriteStream } from 'fs' import { WriteStream } from '../../types'
export const writeStream = async ( export const writeStream = async (
stream: WriteStream, stream: WriteStream,
content: string content: string
): Promise<void> => { ): Promise<void> => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
stream.write(content + '\n\nnext chunk\n\n', (e) => { stream.write(content + '\n', (e) => {
if (e) { if (e) {
return reject(e) return reject(e)
} }

View File

@@ -1,9 +1,9 @@
import { import {
AuthConfig,
isAccessTokenExpiring, isAccessTokenExpiring,
isRefreshTokenExpiring, isRefreshTokenExpiring,
hasTokenExpired hasTokenExpired
} from '@sasjs/utils' } from '@sasjs/utils/auth'
import { AuthConfig } from '@sasjs/utils/types'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { refreshTokens } from './refreshTokens' import { refreshTokens } from './refreshTokens'

View File

@@ -8,7 +8,11 @@ import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { generateTableUploadForm } from '../file/generateTableUploadForm' import { generateTableUploadForm } from '../file/generateTableUploadForm'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { SASViyaApiClient } from '../SASViyaApiClient' import { SASViyaApiClient } from '../SASViyaApiClient'
import { isRelativePath, isValidJson } from '../utils' import {
isRelativePath,
getValidJson,
parseSasViyaDebugResponse
} from '../utils'
import { BaseJobExecutor } from './JobExecutor' import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse' import { parseWeboutResponse } from '../utils/parseWeboutResponse'
@@ -95,15 +99,17 @@ export class WebJobExecutor extends BaseJobExecutor {
this.requestClient!.post(apiUrl, formData, undefined) this.requestClient!.post(apiUrl, formData, undefined)
.then(async (res) => { .then(async (res) => {
if (this.serverType === ServerType.SasViya && config.debug) { if (this.serverType === ServerType.SasViya && config.debug) {
const jsonResponse = await this.parseSasViyaDebugResponse( const jsonResponse = await parseSasViyaDebugResponse(
res.result as string res.result as string,
this.requestClient,
this.serverUrl
) )
this.appendRequest(res, sasJob, config.debug) this.appendRequest(res, sasJob, config.debug)
resolve(jsonResponse) resolve(jsonResponse)
} }
this.appendRequest(res, sasJob, config.debug) this.appendRequest(res, sasJob, config.debug)
isValidJson(res.result as string) getValidJson(res.result as string)
resolve(res.result) resolve(res.result)
}) })
.catch(async (e: Error) => { .catch(async (e: Error) => {
@@ -140,20 +146,6 @@ export class WebJobExecutor extends BaseJobExecutor {
return requestPromise return requestPromise
} }
private parseSasViyaDebugResponse = async (response: string) => {
const iframeStart = response.split(
'<iframe style="width: 99%; height: 500px" src="'
)[1]
const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
if (!jsonUrl) {
throw new Error('Unable to find webout file URL.')
}
return this.requestClient
.get(this.serverUrl + jsonUrl, undefined)
.then((res) => res.result)
}
private async getJobUri(sasJob: string) { private async getJobUri(sasJob: string) {
if (!this.sasViyaApiClient) return '' if (!this.sasViyaApiClient) return ''
let uri = '' let uri = ''

View File

@@ -11,7 +11,7 @@ import {
import { parseWeboutResponse } from '../utils/parseWeboutResponse' import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error' import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError' import { SAS9AuthError } from '../types/errors/SAS9AuthError'
import { isValidJson } from '../utils' import { getValidJson } from '../utils'
export interface HttpClient { export interface HttpClient {
get<T>( get<T>(
@@ -434,7 +434,7 @@ export class RequestClient implements HttpClient {
throw new Error('Valid JSON could not be extracted from response.') throw new Error('Valid JSON could not be extracted from response.')
} }
const jsonResponse = isValidJson(weboutResponse) const jsonResponse = getValidJson(weboutResponse)
parsedResponse = jsonResponse parsedResponse = jsonResponse
} catch { } catch {
parsedResponse = response.data parsedResponse = response.data

View File

@@ -0,0 +1,41 @@
import { getValidJson } from '../../utils'
// Unit tests for getValidJson: objects pass through, valid JSON strings are
// parsed, and invalid strings or arrays cause an error to be thrown.
describe('jsonValidator', () => {
  it('should not throw an error with a valid json', () => {
    const json = {
      test: 'test'
    }
    // Plain objects are returned by reference, unmodified.
    expect(getValidJson(json)).toBe(json)
  })

  it('should not throw an error with a valid json string', () => {
    const json = {
      test: 'test'
    }
    expect(getValidJson(JSON.stringify(json))).toStrictEqual(json)
  })

  it('should throw an error with an invalid json', () => {
    // Missing comma between the two properties makes this invalid JSON.
    const json = `{\"test\":\"test\"\"test2\":\"test\"}`
    // Use jest's toThrow instead of a try/catch boolean flag — clearer and
    // reports a proper assertion failure message when nothing is thrown.
    expect(() => getValidJson(json)).toThrow()
  })

  it('should throw an error when an array is passed', () => {
    const array = ['hello', 'world']
    expect(() => getValidJson(array)).toThrow()
  })
})

View File

@@ -1,31 +0,0 @@
import { isValidJson } from '../../utils'
describe('jsonValidator', () => {
it('should not throw an error with an valid json', () => {
const json = {
test: 'test'
}
expect(isValidJson(json)).toBe(json)
})
it('should not throw an error with an valid json string', () => {
const json = {
test: 'test'
}
expect(isValidJson(JSON.stringify(json))).toStrictEqual(json)
})
it('should throw an error with an invalid json', () => {
const json = `{\"test\":\"test\"\"test2\":\"test\"}`
expect(() => {
try {
isValidJson(json)
} catch (err) {
throw new Error()
}
}).toThrowError
})
})

4
src/types/WriteStream.ts Normal file
View File

@@ -0,0 +1,4 @@
/**
 * Minimal write-stream contract used when streaming job logs. Replaces the
 * previous direct dependency on Node's `fs.WriteStream` type so that
 * non-Node (browser) builds do not need the `fs` module's typings.
 */
export interface WriteStream {
  // Writes `content`; the callback receives an Error if the write failed.
  write: (content: string, callback: (err?: Error) => any) => void
  // Destination path of the stream.
  path: string
}

View File

@@ -11,3 +11,4 @@ export * from './SASjsRequest'
export * from './Session' export * from './Session'
export * from './UploadFile' export * from './UploadFile'
export * from './PollOptions' export * from './PollOptions'
export * from './WriteStream'

16
src/utils/getValidJson.ts Normal file
View File

@@ -0,0 +1,16 @@
/**
 * Returns the given object as-is, or parses the given string into a JSON
 * object. Throws if an array is passed, or if the string is not valid JSON.
 * @param str - a JSON object, or a string to be parsed as JSON.
 * @returns the (parsed) JSON object.
 * @throws Error when given an array or an unparseable string.
 */
export const getValidJson = (str: string | object) => {
  // Reject arrays outside the try block; previously this throw was caught
  // below and its specific message was replaced by the generic parse error.
  if (Array.isArray(str)) {
    throw new Error('Can not parse array object to json.')
  }
  if (typeof str === 'object') return str
  try {
    return JSON.parse(str)
  } catch (e) {
    throw new Error('Invalid JSON response.')
  }
}

View File

@@ -1,6 +1,7 @@
export * from './asyncForEach' export * from './asyncForEach'
export * from './compareTimestamps' export * from './compareTimestamps'
export * from './convertToCsv' export * from './convertToCsv'
export * from './isNode'
export * from './isRelativePath' export * from './isRelativePath'
export * from './isUri' export * from './isUri'
export * from './isUrl' export * from './isUrl'
@@ -12,4 +13,5 @@ export * from './serialize'
export * from './splitChunks' export * from './splitChunks'
export * from './parseWeboutResponse' export * from './parseWeboutResponse'
export * from './fetchLogByChunks' export * from './fetchLogByChunks'
export * from './isValidJson' export * from './getValidJson'
export * from './parseViyaDebugResponse'

4
src/utils/isNode.ts Normal file
View File

@@ -0,0 +1,4 @@
/**
 * Detects whether the code is executing in a Node.js runtime (as opposed to
 * a browser), by checking for the `process.versions.node` global.
 */
export const isNode = (): boolean => {
  if (typeof process === 'undefined') return false
  const versions = process.versions
  return versions != null && versions.node != null
}

View File

@@ -1,13 +0,0 @@
/**
* Checks if string is in valid JSON format else throw error.
* @param str - string to check.
*/
export const isValidJson = (str: string | object) => {
try {
if (typeof str === 'object') return str
return JSON.parse(str)
} catch (e) {
throw new Error('Invalid JSON response.')
}
}

View File

@@ -0,0 +1,29 @@
import { RequestClient } from '../request/RequestClient'
/**
 * When a Viya job is run via the Web approach (as opposed to the APIs) with
 * _DEBUG enabled, the first response is a log page that embeds the actual
 * (_webout) content in an iframe. This helper extracts the iframe's URL from
 * that page and fetches its content.
 * @param response - the first (HTML) response from the Viya job.
 * @param requestClient - client used to fetch the iframe content.
 * @param serverUrl - server root prepended to the iframe's relative URL.
 * @returns the content of the webout file referenced by the iframe.
 */
export const parseSasViyaDebugResponse = async (
  response: string,
  requestClient: RequestClient,
  serverUrl: string
) => {
  // The iframe markup is emitted verbatim by the server, so a literal split
  // on its opening tag locates the embedded URL.
  const iframePrefix = '<iframe style="width: 99%; height: 500px" src="'
  const iframeSuffix = '"></iframe>'

  const afterPrefix = response.split(iframePrefix)[1]
  const jsonUrl = afterPrefix ? afterPrefix.split(iframeSuffix)[0] : null

  if (!jsonUrl) {
    throw new Error('Unable to find webout file URL.')
  }

  const res = await requestClient.get(serverUrl + jsonUrl, undefined)
  return res.result
}