Mirror of https://github.com/sasjs/adapter.git (synced 2025-12-24 06:21:20 +00:00)

Compare commits: v2.9.0...fixing-sas (35 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 0bd156141c |  |
|  | 0ea66f6d37 |  |
|  | a615c5fdb6 |  |
|  | ca7ee83f7f |  |
|  | eac9da22bf |  |
|  | 626fc2e15f |  |
|  | 87e2edbd6c |  |
|  | 7cf681bea3 |  |
|  | 281a145bef |  |
|  | 15d5f9ec91 |  |
|  | 0a6c5a0ec4 |  |
|  | 2a9526d056 |  |
|  | c2ff28c323 |  |
|  | 50710ee1df |  |
|  | 062ba91c17 |  |
|  | 97a530cc66 |  |
|  | 317c8c81a0 |  |
|  | c87776ca1b |  |
|  | 04032831c3 |  |
|  | 9600fa2512 |  |
|  | 7951817480 |  |
|  | 405eea1d6c |  |
|  | e3f189eed4 |  |
|  | 0bb42c5e3c |  |
|  | c02eac196e |  |
|  | 3fb0d863e9 |  |
|  | 6d573d3897 |  |
|  | 33280d7a5b |  |
|  | 85e5ade93a |  |
|  | 4a61fb8f7f |  |
|  | 5347aeba09 |  |
|  | 7ac7c5e52b |  |
|  | 5098342dfe |  |
|  | c69be8ffc3 |  |
|  | 69999d8e8b |  |

24  .all-contributorsrc  Normal file

@@ -0,0 +1,24 @@
{
  "files": [
    "README.md"
  ],
  "imageSize": 100,
  "commit": false,
  "contributors": [
    {
      "login": "medjedovicm",
      "name": "Mihajlo Medjedovic",
      "avatar_url": "https://avatars.githubusercontent.com/u/18329105?v=4",
      "profile": "https://github.com/medjedovicm",
      "contributions": [
        "code"
      ]
    }
  ],
  "contributorsPerLine": 7,
  "projectName": "adapter",
  "projectOwner": "sasjs",
  "repoType": "github",
  "repoHost": "https://github.com",
  "skipCi": true
}

2  .github/workflows/build.yml  vendored

@@ -13,7 +13,7 @@ jobs:
    strategy:
      matrix:
        node-version: [12.x]
        node-version: [15.x]

    steps:
      - uses: actions/checkout@v2

@@ -12,9 +12,9 @@ What code changes have been made to achieve the intent.

## Checks

No PR (that involves a non-trivial code change) should be merged, unless all four of the items below are confirmed! If an urgent fix is needed - use a tar file.
No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.

- [ ] Code is formatted correctly (`npm run lint:fix`).
- [ ] All unit tests are passing (`npm test`).
- [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

20  README.md

@@ -234,3 +234,23 @@ If you are a SAS 9 or SAS Viya customer you can also request a copy of [Data Con
If you find this library useful, help us grow our star graph!

![](https://starchart.cc/sasjs/adapter.svg)

## Contributors ✨

Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):

<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
  <tr>
    <td align="center"><a href="https://github.com/medjedovicm"><img src="https://avatars.githubusercontent.com/u/18329105?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Mihajlo Medjedovic</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Code">💻</a></td>
  </tr>
</table>

<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->

<!-- ALL-CONTRIBUTORS-LIST:END -->

This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!

58  package-lock.json  generated

@@ -7,7 +7,7 @@
"name": "@sasjs/adapter",
"license": "ISC",
"dependencies": {
"@sasjs/utils": "^2.25.4",
"@sasjs/utils": "^2.27.1",
"axios": "^0.21.1",
"axios-cookiejar-support": "^1.0.1",
"form-data": "^4.0.0",

@@ -41,6 +41,9 @@
"typescript": "^4.3.5",
"webpack": "^5.44.0",
"webpack-cli": "^4.7.2"
},
"engines": {
"node": ">=15"
}
},
"node_modules/@babel/code-frame": {

@@ -1187,9 +1190,9 @@
}
},
"node_modules/@sasjs/utils": {
"version": "2.25.4",
"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.25.4.tgz",
"integrity": "sha512-LTWExtHp4g3VcLLCUMyeeyTXEAZawSQngmJ3/2Z93ysxpeu2/NS7lGG/ERGCQb2snbqmXK8dkZmfg44Tn4Qebw==",
"version": "2.27.1",
"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.27.1.tgz",
"integrity": "sha512-CYTQwEj89cc7H3tGiQQcyDkZYaWRc1HZJpOF8o2RHYS37fIAOy0SyyJdq6mcQ74Nb1u5AmFXPFIvnRCMEcTYeQ==",
"dependencies": {
"@types/fs-extra": "^9.0.11",
"@types/prompts": "^2.0.13",

@@ -1199,7 +1202,11 @@
"fs-extra": "^10.0.0",
"jwt-decode": "^3.1.2",
"prompts": "^2.4.1",
"rimraf": "^3.0.2",
"valid-url": "^1.0.9"
},
"engines": {
"node": ">=15"
}
},
"node_modules/@semantic-release/commit-analyzer": {

@@ -2157,7 +2164,6 @@
},
"node_modules/balanced-match": {
"version": "1.0.2",
"dev": true,
"license": "MIT"
},
"node_modules/base": {

@@ -2259,7 +2265,6 @@
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"dev": true,
"license": "MIT",
"dependencies": {
"balanced-match": "^1.0.0",

@@ -2741,7 +2746,6 @@
},
"node_modules/concat-map": {
"version": "0.0.1",
"dev": true,
"license": "MIT"
},
"node_modules/consola": {

@@ -4113,7 +4117,6 @@
},
"node_modules/fs.realpath": {
"version": "1.0.0",
"dev": true,
"license": "ISC"
},
"node_modules/fsevents": {

@@ -4243,7 +4246,6 @@
},
"node_modules/glob": {
"version": "7.1.7",
"dev": true,
"license": "ISC",
"dependencies": {
"fs.realpath": "^1.0.0",

@@ -4657,7 +4659,6 @@
},
"node_modules/inflight": {
"version": "1.0.6",
"dev": true,
"license": "ISC",
"dependencies": {
"once": "^1.3.0",

@@ -4666,7 +4667,6 @@
},
"node_modules/inherits": {
"version": "2.0.4",
"dev": true,
"license": "ISC"
},
"node_modules/ini": {

@@ -7610,7 +7610,6 @@
},
"node_modules/minimatch": {
"version": "3.0.4",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^1.1.7"

@@ -11007,7 +11006,6 @@
},
"node_modules/once": {
"version": "1.4.0",
"dev": true,
"license": "ISC",
"dependencies": {
"wrappy": "1"

@@ -11242,7 +11240,6 @@
},
"node_modules/path-is-absolute": {
"version": "1.0.1",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=0.10.0"

@@ -11776,7 +11773,6 @@
},
"node_modules/rimraf": {
"version": "3.0.2",
"dev": true,
"license": "ISC",
"dependencies": {
"glob": "^7.1.3"

@@ -13853,7 +13849,6 @@
},
"node_modules/wrappy": {
"version": "1.0.2",
"dev": true,
"license": "ISC"
},
"node_modules/write-file-atomic": {

@@ -14791,9 +14786,9 @@
}
},
"@sasjs/utils": {
"version": "2.25.4",
"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.25.4.tgz",
"integrity": "sha512-LTWExtHp4g3VcLLCUMyeeyTXEAZawSQngmJ3/2Z93ysxpeu2/NS7lGG/ERGCQb2snbqmXK8dkZmfg44Tn4Qebw==",
"version": "2.27.1",
"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.27.1.tgz",
"integrity": "sha512-CYTQwEj89cc7H3tGiQQcyDkZYaWRc1HZJpOF8o2RHYS37fIAOy0SyyJdq6mcQ74Nb1u5AmFXPFIvnRCMEcTYeQ==",
"requires": {
"@types/fs-extra": "^9.0.11",
"@types/prompts": "^2.0.13",

@@ -14803,6 +14798,7 @@
"fs-extra": "^10.0.0",
"jwt-decode": "^3.1.2",
"prompts": "^2.4.1",
"rimraf": "^3.0.2",
"valid-url": "^1.0.9"
}
},

@@ -15504,8 +15500,7 @@
}
},
"balanced-match": {
"version": "1.0.2",
"dev": true
"version": "1.0.2"
},
"base": {
"version": "0.11.2",

@@ -15570,7 +15565,6 @@
},
"brace-expansion": {
"version": "1.1.11",
"dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"

@@ -15892,8 +15886,7 @@
"dev": true
},
"concat-map": {
"version": "0.0.1",
"dev": true
"version": "0.0.1"
},
"consola": {
"version": "2.15.3"

@@ -16846,8 +16839,7 @@
}
},
"fs.realpath": {
"version": "1.0.0",
"dev": true
"version": "1.0.0"
},
"fsevents": {
"version": "2.3.2",

@@ -16938,7 +16930,6 @@
},
"glob": {
"version": "7.1.7",
"dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",

@@ -17186,15 +17177,13 @@
},
"inflight": {
"version": "1.0.6",
"dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
}
},
"inherits": {
"version": "2.0.4",
"dev": true
"version": "2.0.4"
},
"ini": {
"version": "1.3.8",

@@ -19219,7 +19208,6 @@
},
"minimatch": {
"version": "3.0.4",
"dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}

@@ -21534,7 +21522,6 @@
},
"once": {
"version": "1.4.0",
"dev": true,
"requires": {
"wrappy": "1"
}

@@ -21688,8 +21675,7 @@
"dev": true
},
"path-is-absolute": {
"version": "1.0.1",
"dev": true
"version": "1.0.1"
},
"path-key": {
"version": "3.1.1",

@@ -22020,7 +22006,6 @@
},
"rimraf": {
"version": "3.0.2",
"dev": true,
"requires": {
"glob": "^7.1.3"
}

@@ -23403,8 +23388,7 @@
}
},
"wrappy": {
"version": "1.0.2",
"dev": true
"version": "1.0.2"
},
"write-file-atomic": {
"version": "3.0.3",

@@ -67,11 +67,14 @@
},
"main": "index.js",
"dependencies": {
"@sasjs/utils": "^2.25.4",
"@sasjs/utils": "^2.27.1",
"axios": "^0.21.1",
"axios-cookiejar-support": "^1.0.1",
"form-data": "^4.0.0",
"https": "^1.0.0",
"tough-cookie": "^4.0.0"
},
"engines": {
"node": ">=15"
}
}

@@ -47,7 +47,9 @@ export const basicTests = (
'Should fail on first attempt and should log the user in on second attempt',
test: async () => {
await adapter.logOut()
await sleep(1000)
await adapter.logIn('invalid', 'invalid')
await sleep(1000)
return adapter.logIn(userName, password)
},
assertion: (response: any) =>

@@ -151,6 +153,9 @@ export const basicTests = (
description:
'Should complete successful request with extra attributes present in response',
test: async () => {
if (adapter.getSasjsConfig().serverType !== 'SASVIYA')
return Promise.resolve('skip')

const config = {
useComputeApi: false
}

@@ -165,9 +170,15 @@ export const basicTests = (
)
},
assertion: (response: any) => {
if (response === 'skip') return true

const responseKeys: any = Object.keys(response)
return responseKeys.includes('file') && responseKeys.includes('data')
}
}
]
})

const sleep = (ms: number) => {
return new Promise((resolve) => setTimeout(resolve, ms))
}

@@ -1,4 +1,4 @@
import { isUrl } from './utils'
import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
import { UploadFile } from './types/UploadFile'
import { ErrorResponse, LoginRequiredError } from './types/errors'
import { RequestClient } from './request/RequestClient'

@@ -63,13 +63,28 @@ export class FileUploader {
return this.requestClient
.post(uploadUrl, formData, undefined, 'application/json', headers)
.then((res) => {
let result
.then(async (res) => {
// for web approach on Viya
if (
this.sasjsConfig.debug &&
(this.sasjsConfig.useComputeApi === null ||
this.sasjsConfig.useComputeApi === undefined) &&
this.sasjsConfig.serverType === ServerType.SasViya
) {
const jsonResponse = await parseSasViyaDebugResponse(
res.result as string,
this.requestClient,
this.sasjsConfig.serverUrl
)
return typeof jsonResponse === 'string'
? getValidJson(jsonResponse)
: jsonResponse
}

result =
typeof res.result === 'string' ? JSON.parse(res.result) : res.result
return typeof res.result === 'string'
? getValidJson(res.result)
: res.result

return result
//TODO: append to SASjs requests
})
.catch((err: Error) => {

19  src/SASjs.ts

@@ -544,11 +544,22 @@ export default class SASjs {
 * Process). Is prepended at runtime with the value of `appLoc`.
 * @param files - array of files to be uploaded, including File object and file name.
 * @param params - request URL parameters.
 * @param overrideSasjsConfig - object to override existing config (optional)
 */
public uploadFile(sasJob: string, files: UploadFile[], params: any) {
const fileUploader =
this.fileUploader ||
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
public uploadFile(
sasJob: string,
files: UploadFile[],
params: any,
overrideSasjsConfig?: any
) {
const fileUploader = overrideSasjsConfig
? new FileUploader(
{ ...this.sasjsConfig, ...overrideSasjsConfig },
this.jobsPath,
this.requestClient!
)
: this.fileUploader ||
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)

return fileUploader.uploadFile(sasJob, files, params)
}

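The reworked `uploadFile` signature adds an optional `overrideSasjsConfig` argument that is spread over the adapter's existing config for that call only. A minimal usage sketch, assuming an already-configured `SASjs` instance and a browser `File` object; the service path, file contents and the `{ debug: true }` override are illustrative, and the `{ file, fileName }` shape is assumed from the `UploadFile` doc comment above:

```ts
import SASjs from '@sasjs/adapter'

// Assumed: an adapter instance configured elsewhere in the application.
declare const adapter: SASjs

// Hypothetical CSV file to send with the request (browser File API).
const files = [
  { file: new File(['col1,col2\n1,2'], 'data.csv'), fileName: 'data.csv' }
]

// The fourth argument is merged over the existing config for this call only,
// so the upload runs with debug enabled without mutating the adapter state.
adapter
  .uploadFile('services/common/sendData', files, null, { debug: true })
  .then((response) => console.log('upload result', response))
  .catch((err) => console.error(err))
```
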
17  src/api/viya/getFileStream.ts  Normal file

@@ -0,0 +1,17 @@
import { isFolder } from '@sasjs/utils/file'
import { generateTimestamp } from '@sasjs/utils/time'
import { Job } from '../../types'

export const getFileStream = async (job: Job, filePath?: string) => {
  const { createWriteStream } = require('@sasjs/utils/file')
  const logPath = filePath || process.cwd()
  const isFolderPath = await isFolder(logPath)
  if (isFolderPath) {
    const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
    const path = require('path')
    const logFilePath = path.join(filePath || process.cwd(), logFileName)
    return await createWriteStream(logFilePath)
  } else {
    return await createWriteStream(logPath)
  }
}

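A small usage sketch of `getFileStream` (import paths relative to `src/api/viya`; the job object and folder path are hypothetical): when the path is a folder, a timestamped `<jobName>-<timestamp>.log` file is created inside it, otherwise the given file is streamed to directly.

```ts
import { getFileStream } from './getFileStream'
import { Job } from '../../types'

// Hypothetical job object; only the name feeds into the log file name.
const job = { name: 'demo job' } as unknown as Job

const example = async () => {
  // '/tmp/logs' is a folder, so something like
  // '/tmp/logs/demo job-<timestamp>.log' is created and its stream returned.
  const stream = await getFileStream(job, '/tmp/logs')
  stream.write('first log chunk\n', () => {})
}

example()
```
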
@@ -3,11 +3,8 @@ import { Job, PollOptions } from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors'
import { generateTimestamp } from '@sasjs/utils/time'
import { saveLog } from './saveLog'
import { createWriteStream } from '@sasjs/utils/file'
import { WriteStream } from 'fs'
import { Link } from '../../types'
import { Link, WriteStream } from '../../types'
import { isNode } from '../../utils'

export async function pollJobState(
requestClient: RequestClient,

@@ -21,11 +18,14 @@
let pollInterval = 300
let maxPollCount = 1000

if (pollOptions) {
pollInterval = pollOptions.pollInterval || pollInterval
maxPollCount = pollOptions.maxPollCount || maxPollCount
const defaultPollOptions: PollOptions = {
maxPollCount,
pollInterval,
streamLog: false
}

pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }

const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)

@@ -52,15 +52,12 @@
}

let logFileStream
if (pollOptions?.streamLog) {
const logFileName = `${postedJob.name || 'job'}-${generateTimestamp()}.log`
const logFilePath = `${
pollOptions?.logFolderPath || process.cwd()
}/${logFileName}`

logFileStream = await createWriteStream(logFilePath)
if (pollOptions.streamLog && isNode()) {
const { getFileStream } = require('./getFileStream')
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
}

// Poll up to the first 100 times with the specified poll interval
let result = await doPoll(
requestClient,
postedJob,

@@ -68,14 +65,18 @@
debug,
pollCount,
authConfig,
pollOptions,
{
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream
)

currentState = result.state
pollCount = result.pollCount

if (!needsRetry(currentState) || pollCount >= maxPollCount) {
if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
return currentState
}

@@ -184,7 +185,7 @@ const doPoll = async (
throw new Error(`Job state link was not found.`)
}

while (needsRetry(state) && pollCount <= 100 && pollCount <= maxPollCount) {
while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState(
requestClient,
postedJob,

@@ -214,14 +215,17 @@
const endLogLine = job.logStatistics?.lineCount ?? 1000000

await saveLog(
postedJob,
requestClient,
startLogLine,
endLogLine,
logStream,
authConfig?.access_token
)
const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
if (saveLog) {
await saveLog(
postedJob,
requestClient,
startLogLine,
endLogLine,
logStream,
authConfig?.access_token
)
}

startLogLine += endLogLine
}

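With this rework, callers pass only the poll options they want to override; `pollJobState` merges them over `defaultPollOptions` (`pollInterval: 300`, `maxPollCount: 1000`, `streamLog: false`) and only opens a log file stream under Node.js. A minimal sketch of such an options object (import path relative to `src/api/viya`; the folder path is hypothetical):

```ts
import { PollOptions } from '../../types'

// Only overrides need to be supplied; pollJobState fills in the defaults
// (pollInterval: 300, maxPollCount: 1000, streamLog: false).
const pollOptions: PollOptions = {
  pollInterval: 1000,        // poll every second
  maxPollCount: 200,         // give up after 200 polls
  streamLog: true,           // stream the job log while polling (Node.js only)
  logFolderPath: '/tmp/logs' // hypothetical folder for the streamed log file
}
```

As the `doPoll` call above shows, the first polling phase is still capped at 100 iterations before continuing up to the configured `maxPollCount`.
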
@@ -1,7 +1,7 @@
import { Job } from '../..'
import { RequestClient } from '../../request/RequestClient'
import { fetchLog } from '../../utils'
import { WriteStream } from 'fs'
import { WriteStream } from '../../types'
import { writeStream } from './writeStream'

/**

41  src/api/viya/spec/getFileStream.spec.ts  Normal file

@@ -0,0 +1,41 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as path from 'path'
import * as fileModule from '@sasjs/utils/file'
import { getFileStream } from '../getFileStream'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'

describe('getFileStream', () => {
  beforeEach(() => {
    ;(process as any).logger = new Logger(LogLevel.Off)
    setupMocks()
  })

  it('should use the given log path if it points to a file', async () => {
    const { createWriteStream } = require('@sasjs/utils/file')

    await getFileStream(mockJob, path.join(__dirname, 'test.log'))

    expect(createWriteStream).toHaveBeenCalledWith(
      path.join(__dirname, 'test.log')
    )
  })

  it('should generate a log file path with a timestamp if it points to a folder', async () => {
    const { createWriteStream } = require('@sasjs/utils/file')

    await getFileStream(mockJob, __dirname)

    expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
    expect(createWriteStream).toHaveBeenCalledWith(
      expect.stringContaining(path.join(__dirname, 'test job-20'))
    )
  })
})

const setupMocks = () => {
  jest.restoreAllMocks()
  jest.mock('@sasjs/utils/file/file')
  jest
    .spyOn(fileModule, 'createWriteStream')
    .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
}

@@ -1,10 +1,11 @@
import { Logger, LogLevel } from '@sasjs/utils'
import * as fileModule from '@sasjs/utils/file'
import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState'
import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types'
import { WriteStream } from 'fs'

@@ -76,13 +77,43 @@ describe('pollJobState', () => {
it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
mockSimplePoll()
const { saveLog } = require('../saveLog')

await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})

expect(saveLogModule.saveLog).toHaveBeenCalledTimes(2)
expect(saveLog).toHaveBeenCalledTimes(2)
})

it('should create a write stream in Node.js environment when streamLog is true', async () => {
mockSimplePoll()
const { getFileStream } = require('../getFileStream')
const { saveLog } = require('../saveLog')

await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})

expect(getFileStream).toHaveBeenCalled()
expect(saveLog).toHaveBeenCalledTimes(2)
})

it('should not create a write stream in a non-Node.js environment', async () => {
mockSimplePoll()
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
const { saveLog } = require('../saveLog')
const { getFileStream } = require('../getFileStream')

await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})

expect(getFileStream).not.toHaveBeenCalled()
expect(saveLog).not.toHaveBeenCalled()
})

it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {

@@ -217,7 +248,8 @@ const setupMocks = () => {
jest.mock('../../../request/RequestClient')
jest.mock('../../../auth/getTokens')
jest.mock('../saveLog')
jest.mock('@sasjs/utils/file')
jest.mock('../getFileStream')
jest.mock('../../../utils/isNode')

jest
.spyOn(requestClient, 'get')

@@ -231,8 +263,9 @@
.spyOn(saveLogModule, 'saveLog')
.mockImplementation(() => Promise.resolve())
jest
.spyOn(fileModule, 'createWriteStream')
.spyOn(getFileStreamModule, 'getFileStream')
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
}

const mockSimplePoll = (runningCount = 2) => {

@@ -278,7 +311,7 @@ const mockLongPoll = () => {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count <= 101 ? 'running' : 'completed',
result: count <= 102 ? 'running' : 'completed',
etag: '',
status: 200
})

@@ -4,7 +4,7 @@ import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'
import { saveLog } from '../saveLog'
import { mockJob } from './mockResponses'
import { WriteStream } from 'fs'
import { WriteStream } from '../../../types'

const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const stream = {} as unknown as WriteStream

25  src/api/viya/spec/writeStream.spec.ts  Normal file

@@ -0,0 +1,25 @@
import { WriteStream } from '../../../types'
import { writeStream } from '../writeStream'
import 'jest-extended'

describe('writeStream', () => {
  const stream: WriteStream = {
    write: jest.fn(),
    path: 'test'
  }

  it('should resolve when the stream is written successfully', async () => {
    expect(writeStream(stream, 'test')).toResolve()

    expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
  })

  it('should reject when the write errors out', async () => {
    jest
      .spyOn(stream, 'write')
      .mockImplementation((_, callback) => callback(new Error('Test Error')))
    const error = await writeStream(stream, 'test').catch((e) => e)

    expect(error.message).toEqual('Test Error')
  })
})

@@ -1,11 +1,11 @@
import { WriteStream } from 'fs'
import { WriteStream } from '../../types'

export const writeStream = async (
  stream: WriteStream,
  content: string
): Promise<void> => {
  return new Promise((resolve, reject) => {
    stream.write(content + '\n\nnext chunk\n\n', (e) => {
    stream.write(content + '\n', (e) => {
      if (e) {
        return reject(e)
      }

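`writeStream` now appends a single trailing newline per chunk and is typed against the package's own `WriteStream` interface rather than `fs.WriteStream`. A small sketch of its contract; the in-memory stream below is hypothetical, just enough to satisfy the interface:

```ts
import { WriteStream } from '../../types'
import { writeStream } from './writeStream'

// Hypothetical in-memory implementation of the WriteStream interface.
const chunks: string[] = []
const stream: WriteStream = {
  path: 'in-memory.log',
  write: (content, callback) => {
    chunks.push(content)
    callback() // signal success; passing an Error here makes writeStream reject
  }
}

writeStream(stream, 'hello').then(() => {
  console.log(chunks) // ['hello\n']
})
```

Decoupling from `fs.WriteStream` is what allows non-Node environments to supply their own stream object, matching the `isNode()` guards added in `pollJobState` above.
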
@@ -1,9 +1,9 @@
import {
  AuthConfig,
  isAccessTokenExpiring,
  isRefreshTokenExpiring,
  hasTokenExpired
} from '@sasjs/utils'
} from '@sasjs/utils/auth'
import { AuthConfig } from '@sasjs/utils/types'
import { RequestClient } from '../request/RequestClient'
import { refreshTokens } from './refreshTokens'

@@ -8,7 +8,11 @@ import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { generateTableUploadForm } from '../file/generateTableUploadForm'
import { RequestClient } from '../request/RequestClient'
import { SASViyaApiClient } from '../SASViyaApiClient'
import { isRelativePath, isValidJson } from '../utils'
import {
isRelativePath,
getValidJson,
parseSasViyaDebugResponse
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'

@@ -95,26 +99,17 @@ export class WebJobExecutor extends BaseJobExecutor {
this.requestClient!.post(apiUrl, formData, undefined)
.then(async (res) => {
if (this.serverType === ServerType.SasViya && config.debug) {
const jsonResponse = await this.parseSasViyaDebugResponse(
res.result as string
const jsonResponse = await parseSasViyaDebugResponse(
res.result as string,
this.requestClient,
this.serverUrl
)
this.appendRequest(res, sasJob, config.debug)
resolve(jsonResponse)
}
if (this.serverType === ServerType.Sas9 && config.debug) {
const jsonResponse = parseWeboutResponse(res.result as string)
if (jsonResponse === '') {
throw new Error(
'Valid JSON could not be extracted from response.'
)
}

isValidJson(jsonResponse)
this.appendRequest(res, sasJob, config.debug)
resolve(res.result)
}
isValidJson(res.result as string)

this.appendRequest(res, sasJob, config.debug)
getValidJson(res.result as string)
resolve(res.result)
})
.catch(async (e: Error) => {

@@ -151,20 +146,6 @@ export class WebJobExecutor extends BaseJobExecutor {
return requestPromise
}

private parseSasViyaDebugResponse = async (response: string) => {
const iframeStart = response.split(
'<iframe style="width: 99%; height: 500px" src="'
)[1]
const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
if (!jsonUrl) {
throw new Error('Unable to find webout file URL.')
}

return this.requestClient
.get(this.serverUrl + jsonUrl, undefined)
.then((res) => res.result)
}

private async getJobUri(sasJob: string) {
if (!this.sasViyaApiClient) return ''
let uri = ''

@@ -11,7 +11,7 @@ import {
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError'
import { isValidJson } from '../utils'
import { getValidJson } from '../utils'

export interface HttpClient {
get<T>(

@@ -434,7 +434,7 @@ export class RequestClient implements HttpClient {
throw new Error('Valid JSON could not be extracted from response.')
}

const jsonResponse = isValidJson(weboutResponse)
const jsonResponse = getValidJson(weboutResponse)
parsedResponse = jsonResponse
} catch {
parsedResponse = response.data

41  src/test/utils/getValidJson.spec.ts  Normal file

@@ -0,0 +1,41 @@
import { getValidJson } from '../../utils'

describe('jsonValidator', () => {
  it('should not throw an error with a valid json', () => {
    const json = {
      test: 'test'
    }

    expect(getValidJson(json)).toBe(json)
  })

  it('should not throw an error with a valid json string', () => {
    const json = {
      test: 'test'
    }

    expect(getValidJson(JSON.stringify(json))).toStrictEqual(json)
  })

  it('should throw an error with an invalid json', () => {
    const json = `{\"test\":\"test\"\"test2\":\"test\"}`
    let errorThrown = false
    try {
      getValidJson(json)
    } catch (error) {
      errorThrown = true
    }
    expect(errorThrown).toBe(true)
  })

  it('should throw an error when an array is passed', () => {
    const array = ['hello', 'world']
    let errorThrown = false
    try {
      getValidJson(array)
    } catch (error) {
      errorThrown = true
    }
    expect(errorThrown).toBe(true)
  })
})

@@ -1,31 +0,0 @@
import { isValidJson } from '../../utils'

describe('jsonValidator', () => {
  it('should not throw an error with an valid json', () => {
    const json = {
      test: 'test'
    }

    expect(isValidJson(json)).toBe(json)
  })

  it('should not throw an error with an valid json string', () => {
    const json = {
      test: 'test'
    }

    expect(isValidJson(JSON.stringify(json))).toStrictEqual(json)
  })

  it('should throw an error with an invalid json', () => {
    const json = `{\"test\":\"test\"\"test2\":\"test\"}`

    expect(() => {
      try {
        isValidJson(json)
      } catch (err) {
        throw new Error()
      }
    }).toThrowError
  })
})

4  src/types/WriteStream.ts  Normal file

@@ -0,0 +1,4 @@
export interface WriteStream {
  write: (content: string, callback: (err?: Error) => any) => void
  path: string
}

@@ -11,3 +11,4 @@ export * from './SASjsRequest'
export * from './Session'
export * from './UploadFile'
export * from './PollOptions'
export * from './WriteStream'

16  src/utils/getValidJson.ts  Normal file

@@ -0,0 +1,16 @@
/**
 * if string passed then parse the string to json else if throw error for all other types unless it is not a valid json object.
 * @param str - string to check.
 */
export const getValidJson = (str: string | object) => {
  try {
    if (Array.isArray(str)) {
      throw new Error('Can not parse array object to json.')
    }
    if (typeof str === 'object') return str

    return JSON.parse(str)
  } catch (e) {
    throw new Error('Invalid JSON response.')
  }
}

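`getValidJson` replaces the old `isValidJson` helper: it returns the parsed (or already-parsed) JSON object and throws for arrays and for strings that do not parse. A quick usage sketch based on the behaviour above:

```ts
import { getValidJson } from './getValidJson'

// A JSON string is parsed and returned as an object.
const parsed = getValidJson('{"status":"ok"}') // { status: 'ok' }

// An object is returned unchanged.
const same = getValidJson({ status: 'ok' })

// Arrays and malformed strings throw (the array error is rethrown
// by the surrounding catch as 'Invalid JSON response.').
try {
  getValidJson(['not', 'allowed'])
} catch (e) {
  console.error((e as Error).message) // 'Invalid JSON response.'
}
```
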
@@ -1,6 +1,7 @@
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './isNode'
export * from './isRelativePath'
export * from './isUri'
export * from './isUrl'

@@ -12,4 +13,5 @@ export * from './serialize'
export * from './splitChunks'
export * from './parseWeboutResponse'
export * from './fetchLogByChunks'
export * from './isValidJson'
export * from './getValidJson'
export * from './parseViyaDebugResponse'

4  src/utils/isNode.ts  Normal file

@@ -0,0 +1,4 @@
export const isNode = () =>
  typeof process !== 'undefined' &&
  process.versions != null &&
  process.versions.node != null

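`isNode` is the environment guard used in `pollJobState` above to keep `fs`-backed code out of browser bundles. A trivial sketch of the check:

```ts
import { isNode } from './isNode'

if (isNode()) {
  console.log(`Running under Node.js ${process.versions.node}`)
} else {
  console.log('Running in a browser (or other non-Node) environment')
}
```
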
@@ -1,13 +0,0 @@
/**
 * Checks if string is in valid JSON format else throw error.
 * @param str - string to check.
 */
export const isValidJson = (str: string | object) => {
  try {
    if (typeof str === 'object') return str

    return JSON.parse(str)
  } catch (e) {
    throw new Error('Invalid JSON response.')
  }
}

29  src/utils/parseViyaDebugResponse.ts  Normal file

@@ -0,0 +1,29 @@
import { RequestClient } from '../request/RequestClient'

/**
 * When querying a Viya job using the Web approach (as opposed to using the APIs) with _DEBUG enabled,
 * the first response contains the log with the content in an iframe. Therefore when debug is enabled,
 * and the serverType is VIYA, and useComputeApi is null (WEB), we call this function to extract the
 * (_webout) content from the iframe.
 * @param response - first response from viya job
 * @param requestClient
 * @param serverUrl
 * @returns
 */
export const parseSasViyaDebugResponse = async (
  response: string,
  requestClient: RequestClient,
  serverUrl: string
) => {
  const iframeStart = response.split(
    '<iframe style="width: 99%; height: 500px" src="'
  )[1]
  const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
  if (!jsonUrl) {
    throw new Error('Unable to find webout file URL.')
  }

  return requestClient
    .get(serverUrl + jsonUrl, undefined)
    .then((res) => res.result)
}

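A hedged sketch of calling the extracted helper directly (import paths relative to `src/utils`; the server URL, iframe snippet and file URL are illustrative, and the request client is assumed to come from the surrounding adapter code, as in `FileUploader` and `WebJobExecutor` above):

```ts
import { RequestClient } from '../request/RequestClient'
import { parseSasViyaDebugResponse } from './parseViyaDebugResponse'

// Assumed to be available from the surrounding adapter code.
declare const requestClient: RequestClient

const example = async () => {
  const serverUrl = 'https://viya.example.com' // illustrative

  // First (debug) response from a Viya job executed via the Web approach:
  // the _webout content sits behind the iframe's src URL.
  const debugHtml =
    '<iframe style="width: 99%; height: 500px" src="/files/files/abc123/content"></iframe>'

  // The helper extracts the src URL and GETs serverUrl + that URL,
  // resolving with the fetched _webout content.
  const webout = await parseSasViyaDebugResponse(debugHtml, requestClient, serverUrl)
  console.log(webout)
}
```
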