mirror of https://github.com/sasjs/adapter.git synced 2026-01-06 20:10:05 +00:00

Compare commits


35 Commits

0bd156141c  chore(git): Merge branch 'master' into fixing-sas9-tests  (2021-07-26 14:53:52 +02:00)
0ea66f6d37  Merge pull request #494 from sasjs/fix-browser-issue: fix(browser): only import file I/O functions when running in Node.js environments  (Allan Bowe, 2021-07-25 10:00:51 +03:00)
a615c5fdb6  style: lint  (2021-07-24 18:07:17 +02:00)
ca7ee83f7f  chore: fixing multiple login attempts by adding pause between calling functions  (2021-07-24 18:06:15 +02:00)
eac9da22bf  chore(test): fix assertion  (Krishna Acondy, 2021-07-24 10:27:31 +01:00)
626fc2e15f  fix(path): make log file path platform-agnostic  (Krishna Acondy, 2021-07-24 09:53:39 +01:00)
87e2edbd6c  chore(test): fix long poll count  (Krishna Acondy, 2021-07-24 00:12:11 +01:00)
7cf681bea3  chore(tests): fix tests  (Krishna Acondy, 2021-07-23 22:24:48 +01:00)
281a145bef  fix(node): only create and write file stream if running in node  (Krishna Acondy, 2021-07-23 22:24:41 +01:00)
15d5f9ec91  chore(paths): fix import paths  (Krishna Acondy, 2021-07-23 22:24:21 +01:00)
0a6c5a0ec4  fix(fs): replace fs imports with locally defined WriteStream interface  (Krishna Acondy, 2021-07-23 22:24:04 +01:00)
2a9526d056  fix(node): add util to check if running in node  (Krishna Acondy, 2021-07-23 22:23:05 +01:00)
c2ff28c323  Update PULL_REQUEST_TEMPLATE.md  (Allan Bowe, 2021-07-23 13:04:38 +03:00)
50710ee1df  Merge pull request #476 from sasjs/issue-170: fix: file upload with debug enabled  (Allan Bowe, 2021-07-23 11:41:06 +03:00)
062ba91c17  Merge pull request #486 from sasjs/fix-poll-logic: fix(poll): add default poll options  (Krishna Acondy, 2021-07-22 14:53:03 +01:00)
97a530cc66  style: lint  (2021-07-22 14:44:13 +02:00)
317c8c81a0  chore: JES test disable on SAS9  (2021-07-22 13:48:11 +02:00)
c87776ca1b  chore(git): Merge branch 'master' into fixing-sas9-tests  (2021-07-22 13:44:23 +02:00)
04032831c3  fix: debug on test & make error and parse log test  (2021-07-22 13:43:50 +02:00)
9600fa2512  fix(poll): add default poll options  (Krishna Acondy, 2021-07-22 11:31:10 +01:00)
7951817480  Merge pull request #485 from sasjs/log-file-paths: fix(stream-log): use filepath if provided  (Krishna Acondy, 2021-07-22 09:57:31 +01:00)
405eea1d6c  chore(infra): set minimum node version to 15  (Krishna Acondy, 2021-07-22 09:41:30 +01:00)
e3f189eed4  chore(test): fix test  (Krishna Acondy, 2021-07-22 09:31:32 +01:00)
0bb42c5e3c  fix(streamlog): use filepath if provided  (Krishna Acondy, 2021-07-22 09:25:55 +01:00)
c02eac196e  Merge pull request #483 from sasjs/all-contributors/add-medjedovicm: docs: add medjedovicm as a contributor for code  (Allan Bowe, 2021-07-21 18:55:43 +03:00)
3fb0d863e9  Update README.md  (Allan Bowe, 2021-07-21 18:55:01 +03:00)
6d573d3897  docs: create .all-contributorsrc [skip ci]  (allcontributors[bot], 2021-07-21 15:53:39 +00:00)
33280d7a5b  docs: update README.md [skip ci]  (allcontributors[bot], 2021-07-21 15:53:38 +00:00)
85e5ade93a  fix: handle the case when array is passed in getValidJson method  (2021-07-19 13:01:18 +05:00)
4a61fb8f7f  chore: update variable name from config to ovverrideSasjsConfig  (2021-07-19 13:00:06 +05:00)
5347aeba09  fix: replace isValidJson with getValidJson  (2021-07-18 23:24:22 +05:00)
7ac7c5e52b  Merge branch 'master' into issue-170  (Sabir Hassan, 2021-07-18 21:56:33 +05:00)
5098342dfe  fix: retrieve content from the iframe in first response when viya Web approach used with debug enabled  (2021-07-18 21:39:57 +05:00)
c69be8ffc3  fix: move parseSasViyaDebugResponse method to utils folder  (2021-07-18 21:37:08 +05:00)
69999d8e8b  fix: update fileUpload method to override existing config  (2021-07-18 21:34:16 +05:00)
29 changed files with 388 additions and 166 deletions

.all-contributorsrc (new file)

@@ -0,0 +1,24 @@
{
"files": [
"README.md"
],
"imageSize": 100,
"commit": false,
"contributors": [
{
"login": "medjedovicm",
"name": "Mihajlo Medjedovic",
"avatar_url": "https://avatars.githubusercontent.com/u/18329105?v=4",
"profile": "https://github.com/medjedovicm",
"contributions": [
"code"
]
}
],
"contributorsPerLine": 7,
"projectName": "adapter",
"projectOwner": "sasjs",
"repoType": "github",
"repoHost": "https://github.com",
"skipCi": true
}

@@ -13,7 +13,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [12.x]
+        node-version: [15.x]
     steps:
       - uses: actions/checkout@v2

@@ -12,9 +12,9 @@ What code changes have been made to achieve the intent.
 ## Checks
-No PR (that involves a non-trivial code change) should be merged, unless all four of the items below are confirmed! If an urgent fix is needed - use a tar file.
-- [ ] Code is formatted correctly (`npm run lint:fix`).
-- [ ] All unit tests are passing (`npm test`).
+No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.
 - [ ] All `sasjs-cli` unit tests are passing (`npm test`).
 - [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
+- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

@@ -234,3 +234,23 @@ If you are a SAS 9 or SAS Viya customer you can also request a copy of [Data Con
 If you find this library useful, help us grow our star graph!
 ![](https://starchart.cc/sasjs/adapter.svg)
+## Contributors ✨
+Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
+<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
+<!-- prettier-ignore-start -->
+<!-- markdownlint-disable -->
+<table>
+  <tr>
+    <td align="center"><a href="https://github.com/medjedovicm"><img src="https://avatars.githubusercontent.com/u/18329105?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Mihajlo Medjedovic</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Code">💻</a></td>
+  </tr>
+</table>
+<!-- markdownlint-restore -->
+<!-- prettier-ignore-end -->
+<!-- ALL-CONTRIBUTORS-LIST:END -->
+This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!

package-lock.json (generated)

@@ -7,7 +7,7 @@
 "name": "@sasjs/adapter",
 "license": "ISC",
 "dependencies": {
-"@sasjs/utils": "^2.25.4",
+"@sasjs/utils": "^2.27.1",
 "axios": "^0.21.1",
 "axios-cookiejar-support": "^1.0.1",
 "form-data": "^4.0.0",
@@ -41,6 +41,9 @@
 "typescript": "^4.3.5",
 "webpack": "^5.44.0",
 "webpack-cli": "^4.7.2"
+},
+"engines": {
+"node": ">=15"
 }
 },
 "node_modules/@babel/code-frame": {
@@ -1187,9 +1190,9 @@
 }
 },
 "node_modules/@sasjs/utils": {
-"version": "2.25.4",
-"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.25.4.tgz",
-"integrity": "sha512-LTWExtHp4g3VcLLCUMyeeyTXEAZawSQngmJ3/2Z93ysxpeu2/NS7lGG/ERGCQb2snbqmXK8dkZmfg44Tn4Qebw==",
+"version": "2.27.1",
+"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.27.1.tgz",
+"integrity": "sha512-CYTQwEj89cc7H3tGiQQcyDkZYaWRc1HZJpOF8o2RHYS37fIAOy0SyyJdq6mcQ74Nb1u5AmFXPFIvnRCMEcTYeQ==",
 "dependencies": {
 "@types/fs-extra": "^9.0.11",
 "@types/prompts": "^2.0.13",
@@ -1199,7 +1202,11 @@
 "fs-extra": "^10.0.0",
 "jwt-decode": "^3.1.2",
 "prompts": "^2.4.1",
+"rimraf": "^3.0.2",
 "valid-url": "^1.0.9"
+},
+"engines": {
+"node": ">=15"
 }
 },
 "node_modules/@semantic-release/commit-analyzer": {
@@ -2157,7 +2164,6 @@
 },
 "node_modules/balanced-match": {
 "version": "1.0.2",
-"dev": true,
 "license": "MIT"
 },
 "node_modules/base": {
@@ -2259,7 +2265,6 @@
 },
 "node_modules/brace-expansion": {
 "version": "1.1.11",
-"dev": true,
 "license": "MIT",
 "dependencies": {
 "balanced-match": "^1.0.0",
@@ -2741,7 +2746,6 @@
 },
 "node_modules/concat-map": {
 "version": "0.0.1",
-"dev": true,
 "license": "MIT"
 },
 "node_modules/consola": {
@@ -4113,7 +4117,6 @@
 },
 "node_modules/fs.realpath": {
 "version": "1.0.0",
-"dev": true,
 "license": "ISC"
 },
 "node_modules/fsevents": {
@@ -4243,7 +4246,6 @@
 },
 "node_modules/glob": {
 "version": "7.1.7",
-"dev": true,
 "license": "ISC",
 "dependencies": {
 "fs.realpath": "^1.0.0",
@@ -4657,7 +4659,6 @@
 },
 "node_modules/inflight": {
 "version": "1.0.6",
-"dev": true,
 "license": "ISC",
 "dependencies": {
 "once": "^1.3.0",
@@ -4666,7 +4667,6 @@
 },
 "node_modules/inherits": {
 "version": "2.0.4",
-"dev": true,
 "license": "ISC"
 },
 "node_modules/ini": {
@@ -7610,7 +7610,6 @@
 },
 "node_modules/minimatch": {
 "version": "3.0.4",
-"dev": true,
 "license": "ISC",
 "dependencies": {
 "brace-expansion": "^1.1.7"
@@ -11007,7 +11006,6 @@
 },
 "node_modules/once": {
 "version": "1.4.0",
-"dev": true,
 "license": "ISC",
 "dependencies": {
 "wrappy": "1"
@@ -11242,7 +11240,6 @@
 },
 "node_modules/path-is-absolute": {
 "version": "1.0.1",
-"dev": true,
 "license": "MIT",
 "engines": {
 "node": ">=0.10.0"
@@ -11776,7 +11773,6 @@
 },
 "node_modules/rimraf": {
 "version": "3.0.2",
-"dev": true,
 "license": "ISC",
 "dependencies": {
 "glob": "^7.1.3"
@@ -13853,7 +13849,6 @@
 },
 "node_modules/wrappy": {
 "version": "1.0.2",
-"dev": true,
 "license": "ISC"
 },
 "node_modules/write-file-atomic": {
@@ -14791,9 +14786,9 @@
 }
 },
 "@sasjs/utils": {
-"version": "2.25.4",
-"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.25.4.tgz",
-"integrity": "sha512-LTWExtHp4g3VcLLCUMyeeyTXEAZawSQngmJ3/2Z93ysxpeu2/NS7lGG/ERGCQb2snbqmXK8dkZmfg44Tn4Qebw==",
+"version": "2.27.1",
+"resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.27.1.tgz",
+"integrity": "sha512-CYTQwEj89cc7H3tGiQQcyDkZYaWRc1HZJpOF8o2RHYS37fIAOy0SyyJdq6mcQ74Nb1u5AmFXPFIvnRCMEcTYeQ==",
 "requires": {
 "@types/fs-extra": "^9.0.11",
 "@types/prompts": "^2.0.13",
@@ -14803,6 +14798,7 @@
 "fs-extra": "^10.0.0",
 "jwt-decode": "^3.1.2",
 "prompts": "^2.4.1",
+"rimraf": "^3.0.2",
 "valid-url": "^1.0.9"
 }
 },
@@ -15504,8 +15500,7 @@
 }
 },
 "balanced-match": {
-"version": "1.0.2",
-"dev": true
+"version": "1.0.2"
 },
 "base": {
 "version": "0.11.2",
@@ -15570,7 +15565,6 @@
 },
 "brace-expansion": {
 "version": "1.1.11",
-"dev": true,
 "requires": {
 "balanced-match": "^1.0.0",
 "concat-map": "0.0.1"
@@ -15892,8 +15886,7 @@
 "dev": true
 },
 "concat-map": {
-"version": "0.0.1",
-"dev": true
+"version": "0.0.1"
 },
 "consola": {
 "version": "2.15.3"
@@ -16846,8 +16839,7 @@
 }
 },
 "fs.realpath": {
-"version": "1.0.0",
-"dev": true
+"version": "1.0.0"
 },
 "fsevents": {
 "version": "2.3.2",
@@ -16938,7 +16930,6 @@
 },
 "glob": {
 "version": "7.1.7",
-"dev": true,
 "requires": {
 "fs.realpath": "^1.0.0",
 "inflight": "^1.0.4",
@@ -17186,15 +17177,13 @@
 },
 "inflight": {
 "version": "1.0.6",
-"dev": true,
 "requires": {
 "once": "^1.3.0",
 "wrappy": "1"
 }
 },
 "inherits": {
-"version": "2.0.4",
-"dev": true
+"version": "2.0.4"
 },
 "ini": {
 "version": "1.3.8",
@@ -19219,7 +19208,6 @@
 },
 "minimatch": {
 "version": "3.0.4",
-"dev": true,
 "requires": {
 "brace-expansion": "^1.1.7"
 }
@@ -21534,7 +21522,6 @@
 },
 "once": {
 "version": "1.4.0",
-"dev": true,
 "requires": {
 "wrappy": "1"
 }
@@ -21688,8 +21675,7 @@
 "dev": true
 },
 "path-is-absolute": {
-"version": "1.0.1",
-"dev": true
+"version": "1.0.1"
 },
 "path-key": {
 "version": "3.1.1",
@@ -22020,7 +22006,6 @@
 },
 "rimraf": {
 "version": "3.0.2",
-"dev": true,
 "requires": {
 "glob": "^7.1.3"
 }
@@ -23403,8 +23388,7 @@
 }
 },
 "wrappy": {
-"version": "1.0.2",
-"dev": true
+"version": "1.0.2"
 },
 "write-file-atomic": {
 "version": "3.0.3",

@@ -67,11 +67,14 @@
 },
 "main": "index.js",
 "dependencies": {
-"@sasjs/utils": "^2.25.4",
+"@sasjs/utils": "^2.27.1",
 "axios": "^0.21.1",
 "axios-cookiejar-support": "^1.0.1",
 "form-data": "^4.0.0",
 "https": "^1.0.0",
 "tough-cookie": "^4.0.0"
+},
+"engines": {
+"node": ">=15"
 }
 }

@@ -47,7 +47,9 @@ export const basicTests = (
 'Should fail on first attempt and should log the user in on second attempt',
 test: async () => {
 await adapter.logOut()
+await sleep(1000)
 await adapter.logIn('invalid', 'invalid')
+await sleep(1000)
 return adapter.logIn(userName, password)
 },
 assertion: (response: any) =>
@@ -151,6 +153,9 @@
 description:
 'Should complete successful request with extra attributes present in response',
 test: async () => {
+if (adapter.getSasjsConfig().serverType !== 'SASVIYA')
+  return Promise.resolve('skip')
 const config = {
 useComputeApi: false
 }
@@ -165,9 +170,15 @@
 )
 },
 assertion: (response: any) => {
+if (response === 'skip') return true
 const responseKeys: any = Object.keys(response)
 return responseKeys.includes('file') && responseKeys.includes('data')
 }
 }
 ]
 })
+const sleep = (ms: number) => {
+  return new Promise((resolve) => setTimeout(resolve, ms))
+}

@@ -1,4 +1,4 @@
-import { isUrl } from './utils'
+import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
 import { UploadFile } from './types/UploadFile'
 import { ErrorResponse, LoginRequiredError } from './types/errors'
 import { RequestClient } from './request/RequestClient'
@@ -63,13 +63,28 @@ export class FileUploader {
 return this.requestClient
 .post(uploadUrl, formData, undefined, 'application/json', headers)
-.then((res) => {
-let result
-result =
-typeof res.result === 'string' ? JSON.parse(res.result) : res.result
-return result
+.then(async (res) => {
+// for web approach on Viya
+if (
+  this.sasjsConfig.debug &&
+  (this.sasjsConfig.useComputeApi === null ||
+    this.sasjsConfig.useComputeApi === undefined) &&
+  this.sasjsConfig.serverType === ServerType.SasViya
+) {
+  const jsonResponse = await parseSasViyaDebugResponse(
+    res.result as string,
+    this.requestClient,
+    this.sasjsConfig.serverUrl
+  )
+  return typeof jsonResponse === 'string'
+    ? getValidJson(jsonResponse)
+    : jsonResponse
+}
+return typeof res.result === 'string'
+  ? getValidJson(res.result)
+  : res.result
 //TODO: append to SASjs requests
 })
 .catch((err: Error) => {

@@ -544,11 +544,22 @@ export default class SASjs {
 * Process). Is prepended at runtime with the value of `appLoc`.
 * @param files - array of files to be uploaded, including File object and file name.
 * @param params - request URL parameters.
+* @param overrideSasjsConfig - object to override existing config (optional)
 */
-public uploadFile(sasJob: string, files: UploadFile[], params: any) {
-const fileUploader =
-this.fileUploader ||
-new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
+public uploadFile(
+  sasJob: string,
+  files: UploadFile[],
+  params: any,
+  overrideSasjsConfig?: any
+) {
+const fileUploader = overrideSasjsConfig
+  ? new FileUploader(
+      { ...this.sasjsConfig, ...overrideSasjsConfig },
+      this.jobsPath,
+      this.requestClient!
+    )
+  : this.fileUploader ||
+    new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
 return fileUploader.uploadFile(sasJob, files, params)
 }


@@ -0,0 +1,17 @@
import { isFolder } from '@sasjs/utils/file'
import { generateTimestamp } from '@sasjs/utils/time'
import { Job } from '../../types'
export const getFileStream = async (job: Job, filePath?: string) => {
const { createWriteStream } = require('@sasjs/utils/file')
const logPath = filePath || process.cwd()
const isFolderPath = await isFolder(logPath)
if (isFolderPath) {
const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
const path = require('path')
const logFilePath = path.join(filePath || process.cwd(), logFileName)
return await createWriteStream(logFilePath)
} else {
return await createWriteStream(logPath)
}
}
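A brief usage sketch for the helper above (paths and the job name are illustrative): a folder path gets a generated <job name>-<timestamp>.log file inside it, any other path is used as the log file itself, and omitting the path falls back to process.cwd().

// Assuming job.name is 'myJob' and ./logs exists as a folder:
const folderStream = await getFileStream(job, './logs')        // -> ./logs/myJob-<timestamp>.log
const fileStream = await getFileStream(job, './logs/run1.log') // -> ./logs/run1.log, used as given
const defaultStream = await getFileStream(job)                 // -> timestamped log in process.cwd()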

@@ -3,11 +3,8 @@
 import { Job, PollOptions } from '../..'
 import { getTokens } from '../../auth/getTokens'
 import { RequestClient } from '../../request/RequestClient'
 import { JobStatePollError } from '../../types/errors'
-import { generateTimestamp } from '@sasjs/utils/time'
-import { saveLog } from './saveLog'
-import { createWriteStream } from '@sasjs/utils/file'
-import { WriteStream } from 'fs'
-import { Link } from '../../types'
+import { Link, WriteStream } from '../../types'
+import { isNode } from '../../utils'
 export async function pollJobState(
 requestClient: RequestClient,
@@ -21,11 +18,14 @@
 let pollInterval = 300
 let maxPollCount = 1000
-if (pollOptions) {
-pollInterval = pollOptions.pollInterval || pollInterval
-maxPollCount = pollOptions.maxPollCount || maxPollCount
+const defaultPollOptions: PollOptions = {
+maxPollCount,
+pollInterval,
+streamLog: false
 }
+pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
 const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
 if (!stateLink) {
 throw new Error(`Job state link was not found.`)
@@ -52,15 +52,12 @@
 }
 let logFileStream
-if (pollOptions?.streamLog) {
-const logFileName = `${postedJob.name || 'job'}-${generateTimestamp()}.log`
-const logFilePath = `${
-pollOptions?.logFolderPath || process.cwd()
-}/${logFileName}`
-logFileStream = await createWriteStream(logFilePath)
+if (pollOptions.streamLog && isNode()) {
+const { getFileStream } = require('./getFileStream')
+logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
 }
+// Poll up to the first 100 times with the specified poll interval
 let result = await doPoll(
 requestClient,
 postedJob,
@@ -68,14 +65,18 @@
 debug,
 pollCount,
 authConfig,
-pollOptions,
+{
+...pollOptions,
+maxPollCount:
+pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
+},
 logFileStream
 )
 currentState = result.state
 pollCount = result.pollCount
-if (!needsRetry(currentState) || pollCount >= maxPollCount) {
+if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
 return currentState
 }
@@ -184,7 +185,7 @@
 throw new Error(`Job state link was not found.`)
 }
-while (needsRetry(state) && pollCount <= 100 && pollCount <= maxPollCount) {
+while (needsRetry(state) && pollCount <= maxPollCount) {
 state = await getJobState(
 requestClient,
 postedJob,
@@ -214,14 +215,17 @@
 const endLogLine = job.logStatistics?.lineCount ?? 1000000
-await saveLog(
-postedJob,
-requestClient,
-startLogLine,
-endLogLine,
-logStream,
-authConfig?.access_token
-)
+const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
+if (saveLog) {
+await saveLog(
+postedJob,
+requestClient,
+startLogLine,
+endLogLine,
+logStream,
+authConfig?.access_token
+)
+}
 startLogLine += endLogLine
 }

@@ -1,7 +1,7 @@
 import { Job } from '../..'
 import { RequestClient } from '../../request/RequestClient'
 import { fetchLog } from '../../utils'
-import { WriteStream } from 'fs'
+import { WriteStream } from '../../types'
 import { writeStream } from './writeStream'
 /**


@@ -0,0 +1,41 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as path from 'path'
import * as fileModule from '@sasjs/utils/file'
import { getFileStream } from '../getFileStream'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'
describe('getFileStream', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should use the given log path if it points to a file', async () => {
const { createWriteStream } = require('@sasjs/utils/file')
await getFileStream(mockJob, path.join(__dirname, 'test.log'))
expect(createWriteStream).toHaveBeenCalledWith(
path.join(__dirname, 'test.log')
)
})
it('should generate a log file path with a timestamp if it points to a folder', async () => {
const { createWriteStream } = require('@sasjs/utils/file')
await getFileStream(mockJob, __dirname)
expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
expect(createWriteStream).toHaveBeenCalledWith(
expect.stringContaining(path.join(__dirname, 'test job-20'))
)
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('@sasjs/utils/file/file')
jest
.spyOn(fileModule, 'createWriteStream')
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
}

@@ -1,10 +1,11 @@
 import { Logger, LogLevel } from '@sasjs/utils'
-import * as fileModule from '@sasjs/utils/file'
 import { RequestClient } from '../../../request/RequestClient'
 import { mockAuthConfig, mockJob } from './mockResponses'
 import { pollJobState } from '../pollJobState'
 import * as getTokensModule from '../../../auth/getTokens'
 import * as saveLogModule from '../saveLog'
+import * as getFileStreamModule from '../getFileStream'
+import * as isNodeModule from '../../../utils/isNode'
 import { PollOptions } from '../../../types'
 import { WriteStream } from 'fs'
@@ -76,13 +77,43 @@
 it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
 mockSimplePoll()
+const { saveLog } = require('../saveLog')
 await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
 ...defaultPollOptions,
 streamLog: true
 })
-expect(saveLogModule.saveLog).toHaveBeenCalledTimes(2)
+expect(saveLog).toHaveBeenCalledTimes(2)
 })
+it('should create a write stream in Node.js environment when streamLog is true', async () => {
+mockSimplePoll()
+const { getFileStream } = require('../getFileStream')
+const { saveLog } = require('../saveLog')
+await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
+...defaultPollOptions,
+streamLog: true
+})
+expect(getFileStream).toHaveBeenCalled()
+expect(saveLog).toHaveBeenCalledTimes(2)
+})
+it('should not create a write stream in a non-Node.js environment', async () => {
+mockSimplePoll()
+jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
+const { saveLog } = require('../saveLog')
+const { getFileStream } = require('../getFileStream')
+await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
+...defaultPollOptions,
+streamLog: true
+})
+expect(getFileStream).not.toHaveBeenCalled()
+expect(saveLog).not.toHaveBeenCalled()
+})
 it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {
@@ -217,7 +248,8 @@
 jest.mock('../../../request/RequestClient')
 jest.mock('../../../auth/getTokens')
 jest.mock('../saveLog')
-jest.mock('@sasjs/utils/file')
+jest.mock('../getFileStream')
+jest.mock('../../../utils/isNode')
 jest
 .spyOn(requestClient, 'get')
@@ -231,8 +263,9 @@
 .spyOn(saveLogModule, 'saveLog')
 .mockImplementation(() => Promise.resolve())
 jest
-.spyOn(fileModule, 'createWriteStream')
+.spyOn(getFileStreamModule, 'getFileStream')
 .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
+jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
 }
 const mockSimplePoll = (runningCount = 2) => {
@@ -278,7 +311,7 @@
 return Promise.resolve({ result: mockJob, etag: '', status: 200 })
 }
 return Promise.resolve({
-result: count <= 101 ? 'running' : 'completed',
+result: count <= 102 ? 'running' : 'completed',
 etag: '',
 status: 200
 })

@@ -4,7 +4,7 @@
 import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
 import * as writeStreamModule from '../writeStream'
 import { saveLog } from '../saveLog'
 import { mockJob } from './mockResponses'
-import { WriteStream } from 'fs'
+import { WriteStream } from '../../../types'
 const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
 const stream = {} as unknown as WriteStream


@@ -0,0 +1,25 @@
import { WriteStream } from '../../../types'
import { writeStream } from '../writeStream'
import 'jest-extended'
describe('writeStream', () => {
const stream: WriteStream = {
write: jest.fn(),
path: 'test'
}
it('should resolve when the stream is written successfully', async () => {
expect(writeStream(stream, 'test')).toResolve()
expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
})
it('should reject when the write errors out', async () => {
jest
.spyOn(stream, 'write')
.mockImplementation((_, callback) => callback(new Error('Test Error')))
const error = await writeStream(stream, 'test').catch((e) => e)
expect(error.message).toEqual('Test Error')
})
})

@@ -1,11 +1,11 @@
-import { WriteStream } from 'fs'
+import { WriteStream } from '../../types'
 export const writeStream = async (
 stream: WriteStream,
 content: string
 ): Promise<void> => {
 return new Promise((resolve, reject) => {
-stream.write(content + '\n\nnext chunk\n\n', (e) => {
+stream.write(content + '\n', (e) => {
 if (e) {
 return reject(e)
 }
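A short usage sketch (the stream and chunk names are placeholders): each call now appends the content plus a single trailing newline, replacing the old '\n\nnext chunk\n\n' separator, and the returned promise rejects if the underlying write callback reports an error.

await writeStream(logFileStream, 'NOTE: polling started') // appends 'NOTE: polling started\n'
await writeStream(logFileStream, nextLogChunk)            // appends the next chunk on its own line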

@@ -1,9 +1,9 @@
 import {
-AuthConfig,
 isAccessTokenExpiring,
 isRefreshTokenExpiring,
 hasTokenExpired
-} from '@sasjs/utils'
+} from '@sasjs/utils/auth'
+import { AuthConfig } from '@sasjs/utils/types'
 import { RequestClient } from '../request/RequestClient'
 import { refreshTokens } from './refreshTokens'

@@ -8,7 +8,11 @@ import { generateFileUploadForm } from '../file/generateFileUploadForm'
 import { generateTableUploadForm } from '../file/generateTableUploadForm'
 import { RequestClient } from '../request/RequestClient'
 import { SASViyaApiClient } from '../SASViyaApiClient'
-import { isRelativePath, isValidJson } from '../utils'
+import {
+  isRelativePath,
+  getValidJson,
+  parseSasViyaDebugResponse
+} from '../utils'
 import { BaseJobExecutor } from './JobExecutor'
 import { parseWeboutResponse } from '../utils/parseWeboutResponse'
@@ -95,26 +99,17 @@
 this.requestClient!.post(apiUrl, formData, undefined)
 .then(async (res) => {
 if (this.serverType === ServerType.SasViya && config.debug) {
-const jsonResponse = await this.parseSasViyaDebugResponse(
-res.result as string
+const jsonResponse = await parseSasViyaDebugResponse(
+res.result as string,
+this.requestClient,
+this.serverUrl
 )
 this.appendRequest(res, sasJob, config.debug)
 resolve(jsonResponse)
 }
-if (this.serverType === ServerType.Sas9 && config.debug) {
-const jsonResponse = parseWeboutResponse(res.result as string)
-if (jsonResponse === '') {
-throw new Error(
-'Valid JSON could not be extracted from response.'
-)
-}
-isValidJson(jsonResponse)
-this.appendRequest(res, sasJob, config.debug)
-resolve(res.result)
-}
-isValidJson(res.result as string)
 this.appendRequest(res, sasJob, config.debug)
+getValidJson(res.result as string)
 resolve(res.result)
 })
 .catch(async (e: Error) => {
@@ -151,20 +146,6 @@
 return requestPromise
 }
-private parseSasViyaDebugResponse = async (response: string) => {
-const iframeStart = response.split(
-'<iframe style="width: 99%; height: 500px" src="'
-)[1]
-const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
-if (!jsonUrl) {
-throw new Error('Unable to find webout file URL.')
-}
-return this.requestClient
-.get(this.serverUrl + jsonUrl, undefined)
-.then((res) => res.result)
-}
 private async getJobUri(sasJob: string) {
 if (!this.sasViyaApiClient) return ''
 let uri = ''

@@ -11,7 +11,7 @@ import {
 import { parseWeboutResponse } from '../utils/parseWeboutResponse'
 import { prefixMessage } from '@sasjs/utils/error'
 import { SAS9AuthError } from '../types/errors/SAS9AuthError'
-import { isValidJson } from '../utils'
+import { getValidJson } from '../utils'
 export interface HttpClient {
 get<T>(
@@ -434,7 +434,7 @@
 throw new Error('Valid JSON could not be extracted from response.')
 }
-const jsonResponse = isValidJson(weboutResponse)
+const jsonResponse = getValidJson(weboutResponse)
 parsedResponse = jsonResponse
 } catch {
 parsedResponse = response.data


@@ -0,0 +1,41 @@
import { getValidJson } from '../../utils'
describe('jsonValidator', () => {
it('should not throw an error with a valid json', () => {
const json = {
test: 'test'
}
expect(getValidJson(json)).toBe(json)
})
it('should not throw an error with a valid json string', () => {
const json = {
test: 'test'
}
expect(getValidJson(JSON.stringify(json))).toStrictEqual(json)
})
it('should throw an error with an invalid json', () => {
const json = `{\"test\":\"test\"\"test2\":\"test\"}`
let errorThrown = false
try {
getValidJson(json)
} catch (error) {
errorThrown = true
}
expect(errorThrown).toBe(true)
})
it('should throw an error when an array is passed', () => {
const array = ['hello', 'world']
let errorThrown = false
try {
getValidJson(array)
} catch (error) {
errorThrown = true
}
expect(errorThrown).toBe(true)
})
})


@@ -1,31 +0,0 @@
import { isValidJson } from '../../utils'
describe('jsonValidator', () => {
it('should not throw an error with an valid json', () => {
const json = {
test: 'test'
}
expect(isValidJson(json)).toBe(json)
})
it('should not throw an error with an valid json string', () => {
const json = {
test: 'test'
}
expect(isValidJson(JSON.stringify(json))).toStrictEqual(json)
})
it('should throw an error with an invalid json', () => {
const json = `{\"test\":\"test\"\"test2\":\"test\"}`
expect(() => {
try {
isValidJson(json)
} catch (err) {
throw new Error()
}
}).toThrowError
})
})

src/types/WriteStream.ts (new file)

@@ -0,0 +1,4 @@
export interface WriteStream {
write: (content: string, callback: (err?: Error) => any) => void
path: string
}
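Because the interface only demands write and path, tests and non-Node environments can satisfy it without touching fs; a minimal in-memory sketch (not from the changeset):

const chunks: string[] = []
// Collects written content in memory; the callback is invoked with no error on success.
const memoryStream: WriteStream = {
  path: 'in-memory.log',
  write: (content, callback) => {
    chunks.push(content)
    callback()
  }
}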

@@ -11,3 +11,4 @@ export * from './SASjsRequest'
 export * from './Session'
 export * from './UploadFile'
 export * from './PollOptions'
+export * from './WriteStream'

src/utils/getValidJson.ts (new file)

@@ -0,0 +1,16 @@
/**
* if string passed then parse the string to json else if throw error for all other types unless it is not a valid json object.
* @param str - string to check.
*/
export const getValidJson = (str: string | object) => {
try {
if (Array.isArray(str)) {
throw new Error('Can not parse array object to json.')
}
if (typeof str === 'object') return str
return JSON.parse(str)
} catch (e) {
throw new Error('Invalid JSON response.')
}
}
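Behaviour of the helper at a glance, derived only from the body above:

getValidJson('{ "status": "success" }') // -> { status: 'success' } (string parsed)
getValidJson({ status: 'success' })     // -> same object, returned unchanged
getValidJson(['a', 'b'])                // throws Error('Invalid JSON response.') since arrays are rejected
getValidJson('not json at all')         // throws Error('Invalid JSON response.')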

@@ -1,6 +1,7 @@
 export * from './asyncForEach'
 export * from './compareTimestamps'
 export * from './convertToCsv'
+export * from './isNode'
 export * from './isRelativePath'
 export * from './isUri'
 export * from './isUrl'
@@ -12,4 +13,5 @@ export * from './serialize'
 export * from './splitChunks'
 export * from './parseWeboutResponse'
 export * from './fetchLogByChunks'
-export * from './isValidJson'
+export * from './getValidJson'
+export * from './parseViyaDebugResponse'

src/utils/isNode.ts (new file)

@@ -0,0 +1,4 @@
export const isNode = () =>
typeof process !== 'undefined' &&
process.versions != null &&
process.versions.node != null
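This guard is what keeps file-system code out of browser bundles elsewhere in the changeset; a condensed sketch of the pattern as used in pollJobState (the surrounding variables are assumed from that file):

let logFileStream
if (pollOptions.streamLog && isNode()) {
  // Deferred require so web bundlers never have to resolve the fs-backed module.
  const { getFileStream } = require('./getFileStream')
  logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
}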


@@ -1,13 +0,0 @@
/**
* Checks if string is in valid JSON format else throw error.
* @param str - string to check.
*/
export const isValidJson = (str: string | object) => {
try {
if (typeof str === 'object') return str
return JSON.parse(str)
} catch (e) {
throw new Error('Invalid JSON response.')
}
}


@@ -0,0 +1,29 @@
import { RequestClient } from '../request/RequestClient'
/**
* When querying a Viya job using the Web approach (as opposed to using the APIs) with _DEBUG enabled,
* the first response contains the log with the content in an iframe. Therefore when debug is enabled,
* and the serverType is VIYA, and useComputeApi is null (WEB), we call this function to extract the
* (_webout) content from the iframe.
* @param response - first response from viya job
* @param requestClient
* @param serverUrl
* @returns
*/
export const parseSasViyaDebugResponse = async (
response: string,
requestClient: RequestClient,
serverUrl: string
) => {
const iframeStart = response.split(
'<iframe style="width: 99%; height: 500px" src="'
)[1]
const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
if (!jsonUrl) {
throw new Error('Unable to find webout file URL.')
}
return requestClient
.get(serverUrl + jsonUrl, undefined)
.then((res) => res.result)
}