Mirror of https://github.com/sasjs/adapter.git (synced 2025-12-15 18:54:36 +00:00)

Compare commits: v4.4.0...dependabot (30 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 65ae198c27 | |
| | 76bf5b88e9 | |
| | a97ac4eaa6 | |
| | 37cfea6ca7 | |
| | f74c8aca57 | |
| | 77baaabfcd | |
| | 510ba771f0 | |
| | 6fce65f4c8 | |
| | fe03faa59f | |
| | 6272eeda23 | |
| | 104d1b88b3 | |
| | 0d9ba36de8 | |
| | 4e7a845d99 | |
| | 716cc513ff | |
| | 22edcb0a8e | |
| | aedf5c1734 | |
| | 4440e5d1f9 | |
| | f484a5a6a1 | |
| | 5c74186bab | |
| | ea68c3dff3 | |
| | 153b285670 | |
| | f9f4aa5aa6 | |
| | bd02656b3c | |
| | 991519a13d | |
| | 615c9d012e | |
| | d166231c12 | |
| | 4cb150e951 | |
| | fc8598473f | |
| | 367e0ae25a | |
| | 85dde61baf | |
.github/reviewer-lottery.yml (vendored): 4 lines changed

@@ -5,7 +5,3 @@ groups:
- YuryShkoda
- medjedovicm
- sabhas
- name: SASjs QA
reviewers: 1
usernames:
- VladislavParhomchik
.github/workflows/generateDocs.yml (vendored): 2 lines changed

@@ -11,7 +11,7 @@ jobs:

strategy:
matrix:
node-version: [lts/fermium]
node-version: [lts/hydrogen]

steps:
- name: Checkout
.github/workflows/npmpublish.yml (vendored): 2 lines changed

@@ -14,7 +14,7 @@ jobs:

strategy:
matrix:
node-version: [lts/fermium]
node-version: [lts/hydrogen]

steps:
- uses: actions/checkout@v2
package-lock.json (generated): 96 lines changed
@@ -13,7 +13,7 @@
|
||||
"axios-cookiejar-support": "1.0.1",
|
||||
"form-data": "4.0.0",
|
||||
"https": "1.0.0",
|
||||
"tough-cookie": "4.0.0"
|
||||
"tough-cookie": "4.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@cypress/webpack-preprocessor": "5.9.1",
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/jest": "27.4.0",
|
||||
"@types/mime": "2.0.3",
|
||||
"@types/pem": "1.9.6",
|
||||
"@types/tough-cookie": "4.0.1",
|
||||
"@types/tough-cookie": "4.0.2",
|
||||
"copyfiles": "2.4.1",
|
||||
"cp": "0.2.0",
|
||||
"cypress": "7.7.0",
|
||||
@@ -3440,9 +3440,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/tough-cookie": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
|
||||
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
|
||||
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
|
||||
},
|
||||
"node_modules/@types/yargs": {
|
||||
"version": "16.0.5",
|
||||
@@ -14110,6 +14110,11 @@
|
||||
"node": ">=0.4.x"
|
||||
}
|
||||
},
|
||||
"node_modules/querystringify": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
|
||||
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
@@ -14457,6 +14462,11 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/requires-port": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
|
||||
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
|
||||
},
|
||||
"node_modules/resolve": {
|
||||
"version": "1.22.1",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
|
||||
@@ -15702,22 +15712,23 @@
|
||||
}
|
||||
},
|
||||
"node_modules/tough-cookie": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
|
||||
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
|
||||
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
|
||||
"dependencies": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/tough-cookie/node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
|
||||
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
}
|
||||
@@ -16351,6 +16362,15 @@
|
||||
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/url-parse": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/url/node_modules/punycode": {
|
||||
"version": "1.3.2",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
|
||||
@@ -16780,9 +16800,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
|
||||
"integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
@@ -19536,9 +19556,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"@types/tough-cookie": {
|
||||
"version": "4.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
|
||||
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
|
||||
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
|
||||
},
|
||||
"@types/yargs": {
|
||||
"version": "16.0.5",
|
||||
@@ -27552,6 +27572,11 @@
|
||||
"integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
|
||||
"dev": true
|
||||
},
|
||||
"querystringify": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
|
||||
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
|
||||
},
|
||||
"queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
@@ -27833,6 +27858,11 @@
|
||||
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
||||
"dev": true
|
||||
},
|
||||
"requires-port": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
|
||||
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
|
||||
},
|
||||
"resolve": {
|
||||
"version": "1.22.1",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
|
||||
@@ -28799,19 +28829,20 @@
|
||||
"dev": true
|
||||
},
|
||||
"tough-cookie": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
|
||||
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
|
||||
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
|
||||
"requires": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
|
||||
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -29269,6 +29300,15 @@
|
||||
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
|
||||
"dev": true
|
||||
},
|
||||
"url-parse": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"requires": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"util": {
|
||||
"version": "0.12.5",
|
||||
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
|
||||
@@ -29586,9 +29626,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
"integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
|
||||
"integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
|
||||
"dev": true
|
||||
},
|
||||
"wordwrap": {
|
||||

@@ -49,7 +49,7 @@
"@types/jest": "27.4.0",
"@types/mime": "2.0.3",
"@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.1",
"@types/tough-cookie": "4.0.2",
"copyfiles": "2.4.1",
"cp": "0.2.0",
"cypress": "7.7.0",
@@ -82,6 +82,6 @@
"axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0",
"https": "1.0.0",
"tough-cookie": "4.0.0"
"tough-cookie": "4.1.3"
}
}
sasjs-tests/package-lock.json (generated): 14136 lines changed
File diff suppressed because it is too large.
@@ -29,6 +29,12 @@ import { executeScript } from './api/viya/executeScript'
import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
import { refreshTokensForViya } from './auth/refreshTokensForViya'

interface JobExecutionResult {
result?: { result: object }
log?: string
error?: object
}

/**
* A client for interfacing with the SAS Viya REST API.
*
@@ -270,7 +276,7 @@ export class SASViyaApiClient {
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -621,7 +627,7 @@ export class SASViyaApiClient {
* @param accessToken - an optional access token for an authorized user.
* @param waitForResult - a boolean indicating if the function should wait for a result.
* @param expectWebout - a boolean indicating whether to expect a _webout response.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -732,11 +738,13 @@ export class SASViyaApiClient {
debug: boolean,
data?: any,
authConfig?: AuthConfig
) {
): Promise<JobExecutionResult> {
let access_token = (authConfig || {}).access_token

if (authConfig) {
;({ access_token } = await getTokens(this.requestClient, authConfig))
}

if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
'Relative paths cannot be used without specifying a root folder name.'
@@ -749,6 +757,7 @@ export class SASViyaApiClient {
const fullFolderPath = isRelativePath(sasJob)
? `${this.rootFolderName}/${folderPath}`
: folderPath

await this.populateFolderMap(fullFolderPath, access_token)

const jobFolder = this.folderMap.get(fullFolderPath)
@@ -765,9 +774,8 @@ export class SASViyaApiClient {
files = await this.uploadTables(data, access_token)
}

if (!jobToExecute) {
throw new Error(`Job was not found.`)
}
if (!jobToExecute) throw new Error(`Job was not found.`)

const jobDefinitionLink = jobToExecute?.links.find(
(l) => l.rel === 'getResource'
)?.href
@@ -807,16 +815,19 @@ export class SASViyaApiClient {
jobDefinition,
arguments: jobArguments
}

const { result: postedJob } = await this.requestClient.post<Job>(
`${this.serverUrl}/jobExecution/jobs?_action=wait`,
postJobRequestBody,
access_token
)

const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
(err) => {
throw prefixMessage(err, 'Error while polling job status. ')
}
)

const { result: currentJob } = await this.requestClient.get<Job>(
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
access_token
@@ -827,6 +838,7 @@ export class SASViyaApiClient {

const resultLink = currentJob.results['_webout.json']
const logLink = currentJob.links.find((l) => l.rel === 'log')

if (resultLink) {
jobResult = await this.requestClient.get<any>(
`${this.serverUrl}${resultLink}/content`,
@@ -834,11 +846,13 @@ export class SASViyaApiClient {
'text/plain'
)
}

if (debug && logLink) {
log = await this.requestClient
.get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
.then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
}

if (jobStatus === 'failed') {
throw new JobExecutionError(
currentJob.error?.errorCode,
@@ -846,7 +860,16 @@ export class SASViyaApiClient {
log
)
}
return { result: jobResult?.result, log }

const executionResult: JobExecutionResult = {
result: jobResult?.result,
log
}

const { error } = currentJob
if (error) executionResult.error = error

return executionResult
}

private async populateFolderMap(folderPath: string, accessToken?: string) {
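The new JobExecutionResult interface makes the return shape of the job execution method explicit: result and log as before, plus an optional error copied from the finished job. Below is a minimal sketch of consuming a value of that shape; runComputeJob is a hypothetical stand-in, not an adapter API.

interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

// Hypothetical stand-in for a call that resolves with the shape above,
// e.g. the adapter's compute job execution; not part of the public API.
async function runComputeJob(jobPath: string): Promise<JobExecutionResult> {
  return { result: { result: { jobPath } }, log: 'NOTE: job completed' }
}

async function printOutcome(jobPath: string) {
  const execution = await runComputeJob(jobPath)

  // A completed job can still carry an error object; it is now surfaced on
  // the result instead of being dropped.
  if (execution.error) {
    console.warn('Job completed with an error attached:', execution.error)
  }

  console.log(execution.result?.result ?? 'no _webout content returned')
  if (execution.log) console.log(execution.log)
}

printOutcome('/Public/app/services/common/appinit').catch(console.error)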
@@ -851,7 +851,7 @@ export default class SASjs {
* @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
* The access token is not required when the user is authenticated via the browser.
* @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -12,7 +12,7 @@ import { RequestClient } from '../../request/RequestClient'
import { SessionManager } from '../../SessionManager'
import { isRelativePath, fetchLogByChunks } from '../../utils'
import { formatDataForRequest } from '../../utils/formatDataForRequest'
import { pollJobState } from './pollJobState'
import { pollJobState, JobState } from './pollJobState'
import { uploadTables } from './uploadTables'

/**
@@ -25,7 +25,7 @@ import { uploadTables } from './uploadTables'
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -228,7 +228,7 @@ export async function executeScript(
)
}

if (jobStatus === 'failed' || jobStatus === 'error') {
if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
throw new ComputeJobExecutionError(currentJob, log)
}
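The guard above now compares against the JobState enum exported from pollJobState (defined in full in the next file) instead of raw strings. A reduced, self-contained sketch of what the typed comparison buys, using only the members needed here:

// Reduced copy of the JobState enum for illustration; the adapter exports the
// full enum (Completed, Running, Pending, Unavailable, NoState, Failed, Error).
enum JobState {
  Completed = 'completed',
  Failed = 'failed',
  Error = 'error'
}

// A typo such as 'errored' is now a compile-time error, and editors can
// autocomplete the valid states.
function jobSucceeded(jobStatus: JobState): boolean {
  return !(jobStatus === JobState.Failed || jobStatus === JobState.Error)
}

console.log(jobSucceeded(JobState.Completed)) // true
console.log(jobSucceeded(JobState.Error)) // false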
@@ -1,29 +1,88 @@
import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions } from '../..'
import { Job, PollOptions, PollStrategy } from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types'
import { delay, isNode } from '../../utils'

export enum JobState {
Completed = 'completed',
Running = 'running',
Pending = 'pending',
Unavailable = 'unavailable',
NoState = '',
Failed = 'failed',
Error = 'error'
}

/**
* Polls job status using default or provided poll options.
* @param requestClient - the pre-configured HTTP request client.
* @param postedJob - the relative or absolute path to the job.
* @param debug - sets the _debug flag in the job arguments.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
* @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
* Example pollOptions:
* {
* maxPollCount: 200,
* pollInterval: 300,
* streamLog: true, // optional, equals to false by default.
* pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
* }
* Example pollStrategy (values used from default poll strategy):
* [
* { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
* { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
* { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
* { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
* ]
* @returns - a promise which resolves with a job state
*/
export async function pollJobState(
requestClient: RequestClient,
postedJob: Job,
debug: boolean,
authConfig?: AuthConfig,
pollOptions?: PollOptions
) {
): Promise<JobState> {
const logger = process.logger || console

let pollInterval = 300
let maxPollCount = 1000
const streamLog = pollOptions?.streamLog || false

const defaultPollOptions: PollOptions = {
maxPollCount,
pollInterval,
streamLog: false
const defaultPollStrategy: PollStrategy = [
{ maxPollCount: 200, pollInterval: 300 },
{ maxPollCount: 300, pollInterval: 3000 },
{ maxPollCount: 500, pollInterval: 30000 },
{ maxPollCount: 3400, pollInterval: 60000 }
]

let pollStrategy: PollStrategy

if (pollOptions !== undefined) {
pollStrategy = [pollOptions]

let { pollStrategy: providedPollStrategy } = pollOptions

if (providedPollStrategy !== undefined) {
validatePollStrategies(providedPollStrategy)

// INFO: sort by 'maxPollCount'
providedPollStrategy = providedPollStrategy.sort(
(strategyA: PollOptions, strategyB: PollOptions) =>
strategyA.maxPollCount - strategyB.maxPollCount
)

pollStrategy = [...pollStrategy, ...providedPollStrategy]
} else {
pollStrategy = [...pollStrategy, ...defaultPollStrategy]
}
} else {
pollStrategy = defaultPollStrategy
}

let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]

pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }

const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -31,10 +90,10 @@ export async function pollJobState(
throw new Error(`Job state link was not found.`)
}

let currentState = await getJobState(
let currentState: JobState = await getJobState(
requestClient,
postedJob,
'',
JobState.NoState,
debug,
authConfig
).catch((err) => {
@@ -42,73 +101,71 @@ export async function pollJobState(
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
err
)
return 'unavailable'

return JobState.Unavailable
})

let pollCount = 0

if (currentState === 'completed') {
if (currentState === JobState.Completed) {
return Promise.resolve(currentState)
}

let logFileStream
if (pollOptions.streamLog && isNode()) {
if (streamLog && isNode()) {
const { getFileStream } = require('./getFileStream')

logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
}

// Poll up to the first 100 times with the specified poll interval
let result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
pollOptions,
authConfig,
{
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
streamLog,
logFileStream
)

currentState = result.state
pollCount = result.pollCount

if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
if (
!needsRetry(currentState) ||
(pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
) {
return currentState
}

// If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval of 1 minute
let longJobPollOptions: PollOptions = {
maxPollCount: 24 * 60,
pollInterval: 60000,
streamLog: false
}
if (pollOptions) {
longJobPollOptions.streamLog = pollOptions.streamLog
longJobPollOptions.logFolderPath = pollOptions.logFolderPath
// INFO: If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval according to the next polling strategy
while (pollStrategy.length && needsRetry(currentState)) {
defaultPollOptions = pollStrategy.splice(0, 1)[0]

if (pollOptions) {
defaultPollOptions.logFolderPath = pollOptions.logFolderPath
}

result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
defaultPollOptions,
authConfig,
streamLog,
logFileStream
)

currentState = result.state
pollCount = result.pollCount
}

result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
longJobPollOptions,
logFileStream
)

currentState = result.state
pollCount = result.pollCount

if (logFileStream) {
logFileStream.end()
}
if (logFileStream) logFileStream.end()

return currentState
}
@@ -119,17 +176,13 @@ const getJobState = async (
currentState: string,
debug: boolean,
authConfig?: AuthConfig
) => {
const stateLink = job.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
): Promise<JobState> => {
const stateLink = job.links.find((l: any) => l.rel === 'state')!

if (needsRetry(currentState)) {
let tokens
if (authConfig) {
tokens = await getTokens(requestClient, authConfig)
}

if (authConfig) tokens = await getTokens(requestClient, authConfig)

const { result: jobState } = await requestClient
.get<string>(
@@ -143,48 +196,38 @@ const getJobState = async (
throw new JobStatePollError(job.id, err)
})

return jobState.trim()
return jobState.trim() as JobState
} else {
return currentState
return currentState as JobState
}
}

const needsRetry = (state: string) =>
state === 'running' ||
state === '' ||
state === 'pending' ||
state === 'unavailable'
state === JobState.Running ||
state === JobState.NoState ||
state === JobState.Pending ||
state === JobState.Unavailable

const doPoll = async (
requestClient: RequestClient,
postedJob: Job,
currentState: string,
currentState: JobState,
debug: boolean,
pollCount: number,
pollOptions: PollOptions,
authConfig?: AuthConfig,
pollOptions?: PollOptions,
streamLog?: boolean,
logStream?: WriteStream
): Promise<{ state: string; pollCount: number }> => {
let pollInterval = 300
let maxPollCount = 1000
): Promise<{ state: JobState; pollCount: number }> => {
const { maxPollCount, pollInterval } = pollOptions
const logger = process.logger || console
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
let maxErrorCount = 5
let errorCount = 0
let state = currentState
let printedState = ''
let printedState = JobState.NoState
let startLogLine = 0

const logger = process.logger || console

if (pollOptions) {
pollInterval = pollOptions.pollInterval || pollInterval
maxPollCount = pollOptions.maxPollCount || maxPollCount
}

const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}

while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState(
requestClient,
@@ -194,21 +237,24 @@ const doPoll = async (
authConfig
).catch((err) => {
errorCount++

if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
throw err
}

logger.error(
`Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
err
)
return 'unavailable'

return JobState.Unavailable
})

pollCount++

const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href

if (pollOptions?.streamLog) {
if (streamLog) {
const { result: job } = await requestClient.get<Job>(
jobHref,
authConfig?.access_token
@@ -238,12 +284,45 @@ const doPoll = async (
printedState = state
}

if (state != 'unavailable' && errorCount > 0) {
if (state !== JobState.Unavailable && errorCount > 0) {
errorCount = 0
}

await delay(pollInterval)
if (state !== JobState.Completed) {
await delay(pollInterval)
}
}

return { state, pollCount }
}

const validatePollStrategies = (strategy: PollStrategy) => {
const throwError = (message?: string, pollOptions?: PollOptions) => {
throw new Error(
`Poll strategies are not valid.${message ? ` ${message}` : ''}${
pollOptions
? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
: ''
}`
)
}

strategy.forEach((pollOptions: PollOptions, i: number) => {
const { maxPollCount, pollInterval } = pollOptions

if (maxPollCount < 1) {
throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
} else if (i !== 0) {
const previousPollOptions = strategy[i - 1]

if (maxPollCount <= previousPollOptions.maxPollCount) {
throwError(
`'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
pollOptions
)
}
} else if (pollInterval < 1) {
throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
}
})
}
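The rewritten pollJobState consumes an ordered list of { maxPollCount, pollInterval } stages: it spends the first stage's budget, then, while the job still needs a retry, pulls the next stage off the list and polls more slowly. A stripped-down sketch of that escalation pattern, with a stub getState in place of the Viya job state request:

interface PollStage {
  maxPollCount: number
  pollInterval: number // milliseconds
}

const delay = (ms: number) =>
  new Promise<void>((resolve) => setTimeout(resolve, ms))

// Stub for the job state request; the real code calls the Viya REST API.
let requests = 0
const getState = async (): Promise<string> =>
  ++requests > 8 ? 'completed' : 'running'

const needsRetry = (state: string) =>
  state === 'running' || state === 'pending' || state === 'unavailable' || state === ''

async function pollWithStrategy(stages: PollStage[]): Promise<string> {
  let state = ''
  let pollCount = 0

  // maxPollCount is a cumulative threshold, so each stage must be larger than
  // the previous one; pollInterval grows as the stages progress.
  for (const { maxPollCount, pollInterval } of stages) {
    while (needsRetry(state) && pollCount <= maxPollCount) {
      state = await getState()
      pollCount++
      if (needsRetry(state)) await delay(pollInterval)
    }
    if (!needsRetry(state)) break
  }

  return state
}

pollWithStrategy([
  { maxPollCount: 5, pollInterval: 300 },
  { maxPollCount: 20, pollInterval: 3000 }
]).then((state) => console.log(state, 'after', requests, 'requests'))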
@@ -15,8 +15,7 @@ const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
maxPollCount: 100,
pollInterval: 500,
streamLog: false
pollInterval: 500
}

describe('executeScript', () => {
@@ -452,7 +451,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has failed', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('failed'))
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Failed)
)

const error: ComputeJobExecutionError = await executeScript(
requestClient,
@@ -485,7 +486,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('error'))
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Error)
)

const error: ComputeJobExecutionError = await executeScript(
requestClient,
@@ -654,7 +657,9 @@ const setupMocks = () => {
.mockImplementation(() => Promise.resolve(mockAuthConfig))
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('completed'))
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Completed)
)
jest
.spyOn(sessionManager, 'getVariable')
.mockImplementation(() =>
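Since pollJobState now resolves with a JobState member, the mocks above could equally be written with jest's mockResolvedValue, which keeps the resolved value type-checked. A self-contained sketch using a stand-in module object rather than the adapter's own modules:

enum JobState {
  Completed = 'completed',
  Failed = 'failed'
}

// Stand-in for the module that executeScript imports pollJobState from.
const pollJobStateModule = {
  pollJobState: async (): Promise<JobState> => JobState.Completed
}

describe('typed pollJobState mocks', () => {
  it('resolves with a JobState member instead of a bare string', async () => {
    jest
      .spyOn(pollJobStateModule, 'pollJobState')
      .mockResolvedValue(JobState.Failed)

    // Passing a plain 'failed' string here would no longer type-check.
    await expect(pollJobStateModule.pollJobState()).resolves.toBe(JobState.Failed)
  })
})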
@@ -6,17 +6,18 @@ import * as getTokensModule from '../../../auth/getTokens'
|
||||
import * as saveLogModule from '../saveLog'
|
||||
import * as getFileStreamModule from '../getFileStream'
|
||||
import * as isNodeModule from '../../../utils/isNode'
|
||||
import { PollOptions } from '../../../types'
|
||||
import * as delayModule from '../../../utils/delay'
|
||||
import { PollOptions, PollStrategy } from '../../../types'
|
||||
import { WriteStream } from 'fs'
|
||||
|
||||
const baseUrl = 'http://localhost'
|
||||
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
|
||||
requestClient['httpClient'].defaults.baseURL = baseUrl
|
||||
|
||||
const defaultPollOptions: PollOptions = {
|
||||
const defaultStreamLog = false
|
||||
const defaultPollStrategy: PollOptions = {
|
||||
maxPollCount: 100,
|
||||
pollInterval: 500,
|
||||
streamLog: false
|
||||
pollInterval: 500
|
||||
}
|
||||
|
||||
describe('pollJobState', () => {
|
||||
@@ -26,13 +27,10 @@ describe('pollJobState', () => {
|
||||
})
|
||||
|
||||
it('should get valid tokens if the authConfig has been provided', async () => {
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
)
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollStrategy,
|
||||
streamLog: defaultStreamLog
|
||||
})
|
||||
|
||||
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
@@ -46,7 +44,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
|
||||
@@ -58,7 +56,7 @@ describe('pollJobState', () => {
|
||||
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
).catch((e: any) => e)
|
||||
|
||||
expect((error as Error).message).toContain('Job state link was not found.')
|
||||
@@ -72,7 +70,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
|
||||
@@ -83,7 +81,7 @@ describe('pollJobState', () => {
|
||||
const { saveLog } = require('../saveLog')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
...defaultPollStrategy,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
@@ -96,7 +94,7 @@ describe('pollJobState', () => {
|
||||
const { saveLog } = require('../saveLog')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
...defaultPollStrategy,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
@@ -111,7 +109,7 @@ describe('pollJobState', () => {
|
||||
const { getFileStream } = require('../getFileStream')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
...defaultPollStrategy,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
@@ -127,7 +125,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect(saveLogModule.saveLog).not.toHaveBeenCalled()
|
||||
@@ -136,15 +134,18 @@ describe('pollJobState', () => {
|
||||
it('should return the current status when the max poll count is reached', async () => {
|
||||
mockRunningPoll()
|
||||
|
||||
const pollOptions: PollOptions = {
|
||||
...defaultPollStrategy,
|
||||
maxPollCount: 1,
|
||||
pollStrategy: []
|
||||
}
|
||||
|
||||
const state = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
{
|
||||
...defaultPollOptions,
|
||||
maxPollCount: 1
|
||||
}
|
||||
pollOptions
|
||||
)
|
||||
|
||||
expect(state).toEqual('running')
|
||||
@@ -159,7 +160,7 @@ describe('pollJobState', () => {
|
||||
false,
|
||||
mockAuthConfig,
|
||||
{
|
||||
...defaultPollOptions,
|
||||
...defaultPollStrategy,
|
||||
maxPollCount: 200,
|
||||
pollInterval: 10
|
||||
}
|
||||
@@ -176,7 +177,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledTimes(2)
|
||||
@@ -192,7 +193,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
true,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect((process as any).logger.info).toHaveBeenCalledTimes(4)
|
||||
@@ -222,7 +223,7 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledTimes(2)
|
||||
@@ -237,13 +238,119 @@ describe('pollJobState', () => {
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
defaultPollStrategy
|
||||
).catch((e: any) => e)
|
||||
|
||||
expect(error.message).toEqual(
|
||||
'Error while polling job state for job j0b: Status Error'
|
||||
)
|
||||
})
|
||||
|
||||
it('should change poll strategies', async () => {
|
||||
mockSimplePoll(6)
|
||||
|
||||
const delays: number[] = []
|
||||
|
||||
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
|
||||
delays.push(ms)
|
||||
|
||||
return Promise.resolve()
|
||||
})
|
||||
|
||||
const pollIntervals = [3, 4, 5, 6]
|
||||
|
||||
const pollStrategy = [
|
||||
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
|
||||
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
|
||||
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
|
||||
]
|
||||
|
||||
const pollOptions: PollOptions = {
|
||||
maxPollCount: 1,
|
||||
pollInterval: pollIntervals[0],
|
||||
pollStrategy: pollStrategy
|
||||
}
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
|
||||
|
||||
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
|
||||
})
|
||||
|
||||
it('should throw an error if not valid poll strategies provided', async () => {
|
||||
// INFO: 'maxPollCount' has to be > 0
|
||||
let invalidPollStrategy = {
|
||||
maxPollCount: 0,
|
||||
pollInterval: 3
|
||||
}
|
||||
|
||||
let pollStrategy: PollStrategy = [invalidPollStrategy]
|
||||
|
||||
let expectedError = new Error(
|
||||
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
|
||||
invalidPollStrategy,
|
||||
null,
|
||||
2
|
||||
)}`
|
||||
)
|
||||
|
||||
await expect(
|
||||
pollJobState(requestClient, mockJob, false, undefined, {
|
||||
...defaultPollStrategy,
|
||||
pollStrategy: pollStrategy
|
||||
})
|
||||
).rejects.toThrow(expectedError)
|
||||
|
||||
// INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
|
||||
const validPollStrategy = {
|
||||
maxPollCount: 5,
|
||||
pollInterval: 2
|
||||
}
|
||||
|
||||
invalidPollStrategy = {
|
||||
maxPollCount: validPollStrategy.maxPollCount,
|
||||
pollInterval: 3
|
||||
}
|
||||
|
||||
pollStrategy = [validPollStrategy, invalidPollStrategy]
|
||||
|
||||
expectedError = new Error(
|
||||
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
|
||||
invalidPollStrategy,
|
||||
null,
|
||||
2
|
||||
)}`
|
||||
)
|
||||
|
||||
await expect(
|
||||
pollJobState(requestClient, mockJob, false, undefined, {
|
||||
...defaultPollStrategy,
|
||||
pollStrategy: pollStrategy
|
||||
})
|
||||
).rejects.toThrow(expectedError)
|
||||
|
||||
// INFO: invalid 'pollInterval'
|
||||
invalidPollStrategy = {
|
||||
maxPollCount: 1,
|
||||
pollInterval: 0
|
||||
}
|
||||
|
||||
pollStrategy = [invalidPollStrategy]
|
||||
|
||||
expectedError = new Error(
|
||||
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
|
||||
invalidPollStrategy,
|
||||
null,
|
||||
2
|
||||
)}`
|
||||
)
|
||||
|
||||
await expect(
|
||||
pollJobState(requestClient, mockJob, false, undefined, {
|
||||
...defaultPollStrategy,
|
||||
pollStrategy: pollStrategy
|
||||
})
|
||||
).rejects.toThrow(expectedError)
|
||||
})
|
||||
})
|
||||
|
||||
const setupMocks = () => {
|
||||
@@ -273,11 +380,14 @@ const setupMocks = () => {
|
||||
|
||||
const mockSimplePoll = (runningCount = 2) => {
|
||||
let count = 0
|
||||
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
result:
|
||||
count === 0
|
||||
@@ -293,11 +403,14 @@ const mockSimplePoll = (runningCount = 2) => {
|
||||
|
||||
const mockRunningPoll = () => {
|
||||
let count = 0
|
||||
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
result: count === 0 ? 'pending' : 'running',
|
||||
etag: '',
|
||||
@@ -308,11 +421,14 @@ const mockRunningPoll = () => {
|
||||
|
||||
const mockLongPoll = () => {
|
||||
let count = 0
|
||||
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
result: count <= 102 ? 'running' : 'completed',
|
||||
etag: '',
|
||||
@@ -323,14 +439,18 @@ const mockLongPoll = () => {
|
||||
|
||||
const mockPollWithSingleError = () => {
|
||||
let count = 0
|
||||
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
|
||||
if (count === 1) {
|
||||
return Promise.reject('Status Error')
|
||||
}
|
||||
|
||||
return Promise.resolve({
|
||||
result: count === 0 ? 'pending' : 'completed',
|
||||
etag: '',
|
||||
@@ -344,6 +464,7 @@ const mockErroredPoll = () => {
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
|
||||
return Promise.reject('Status Error')
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'

/**
* One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@ export const generateFileUploadForm = (
)
}

if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
formData.append(name, csv, {
// INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
if (isNode()) {
// INFO: environment is Node and formData is instance of NodeFormData
;(formData as NodeFormData).append(name, csv, {
filename: `${name}.csv`,
contentType: 'application/csv'
})
} else {
// INFO: environment is Browser and formData is instance of FormData
const file = new Blob([csv], {
type: 'application/csv'
})
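Because instanceof cannot tell the browser FormData and the form-data package apart here, the code above branches on the runtime instead. A self-contained sketch of the same branching; isNode is a local stand-in for the adapter's own helper, and the append calls mirror the diff:

import * as NodeFormData from 'form-data'

const isNode = () =>
  typeof process !== 'undefined' && !!process.versions && !!process.versions.node

export const appendCsv = (
  formData: FormData | NodeFormData,
  name: string,
  csv: string
) => {
  if (isNode()) {
    // Node: the form-data package takes the filename and content type as options.
    ;(formData as NodeFormData).append(name, csv, {
      filename: `${name}.csv`,
      contentType: 'application/csv'
    })
  } else {
    // Browser: wrap the CSV in a Blob and pass the filename as the third argument.
    const file = new Blob([csv], { type: 'application/csv' })
    ;(formData as FormData).append(name, file, `${name}.csv`)
  }

  return formData
}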
@@ -1,4 +1,7 @@
|
||||
import { generateFileUploadForm } from '../generateFileUploadForm'
|
||||
import { convertToCSV } from '../../utils/convertToCsv'
|
||||
import * as NodeFormData from 'form-data'
|
||||
import * as isNodeModule from '../../utils/isNode'
|
||||
|
||||
describe('generateFileUploadForm', () => {
|
||||
beforeAll(() => {
|
||||
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
|
||||
;(global as any).Blob = BlobMock
|
||||
})
|
||||
|
||||
it('should generate file upload form from data', () => {
|
||||
const formData = new FormData()
|
||||
const testTable = 'sometable'
|
||||
const testTableWithNullVars: { [key: string]: any } = {
|
||||
[testTable]: [
|
||||
{ var1: 'string', var2: 232, nullvar: 'A' },
|
||||
{ var1: 'string', var2: 232, nullvar: 'B' },
|
||||
{ var1: 'string', var2: 232, nullvar: '_' },
|
||||
{ var1: 'string', var2: 232, nullvar: 0 },
|
||||
{ var1: 'string', var2: 232, nullvar: 'z' },
|
||||
{ var1: 'string', var2: 232, nullvar: null }
|
||||
],
|
||||
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
|
||||
}
|
||||
const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
|
||||
Array.isArray(testTableWithNullVars[key])
|
||||
)[0]
|
||||
describe('browser', () => {
|
||||
afterAll(() => {
|
||||
jest.restoreAllMocks()
|
||||
})
|
||||
|
||||
jest.spyOn(formData, 'append').mockImplementation(() => {})
|
||||
it('should generate file upload form from data', () => {
|
||||
const formData = new FormData()
|
||||
const testTable = 'sometable'
|
||||
const testTableWithNullVars: { [key: string]: any } = {
|
||||
[testTable]: [
|
||||
{ var1: 'string', var2: 232, nullvar: 'A' },
|
||||
{ var1: 'string', var2: 232, nullvar: 'B' },
|
||||
{ var1: 'string', var2: 232, nullvar: '_' },
|
||||
{ var1: 'string', var2: 232, nullvar: 0 },
|
||||
{ var1: 'string', var2: 232, nullvar: 'z' },
|
||||
{ var1: 'string', var2: 232, nullvar: null }
|
||||
],
|
||||
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
|
||||
}
|
||||
const tableName = Object.keys(testTableWithNullVars).filter(
|
||||
(key: string) => Array.isArray(testTableWithNullVars[key])
|
||||
)[0]
|
||||
|
||||
generateFileUploadForm(formData, testTableWithNullVars)
|
||||
jest.spyOn(formData, 'append').mockImplementation(() => {})
|
||||
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
|
||||
|
||||
expect(formData.append).toHaveBeenCalledOnce()
|
||||
expect(formData.append).toHaveBeenCalledWith(
|
||||
tableName,
|
||||
{},
|
||||
`${tableName}.csv`
|
||||
)
|
||||
generateFileUploadForm(formData, testTableWithNullVars)
|
||||
|
||||
expect(formData.append).toHaveBeenCalledOnce()
|
||||
expect(formData.append).toHaveBeenCalledWith(
|
||||
tableName,
|
||||
{},
|
||||
`${tableName}.csv`
|
||||
)
|
||||
})
|
||||
|
||||
it('should throw an error if too large string was provided', () => {
|
||||
const formData = new FormData()
|
||||
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
|
||||
|
||||
expect(() => generateFileUploadForm(formData, data)).toThrow(
|
||||
new Error(
|
||||
'The max length of a string value in SASjs is 32765 characters.'
|
||||
)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw an error if too large string was provided', () => {
|
||||
const formData = new FormData()
|
||||
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
|
||||
describe('node', () => {
|
||||
it('should generate file upload form from data', () => {
|
||||
const formData = new NodeFormData()
|
||||
const testTable = 'sometable'
|
||||
const testTableWithNullVars: { [key: string]: any } = {
|
||||
[testTable]: [
|
||||
{ var1: 'string', var2: 232, nullvar: 'A' },
|
||||
{ var1: 'string', var2: 232, nullvar: 'B' },
|
||||
{ var1: 'string', var2: 232, nullvar: '_' },
|
||||
{ var1: 'string', var2: 232, nullvar: 0 },
|
||||
{ var1: 'string', var2: 232, nullvar: 'z' },
|
||||
{ var1: 'string', var2: 232, nullvar: null }
|
||||
],
|
||||
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
|
||||
}
|
||||
const tableName = Object.keys(testTableWithNullVars).filter(
|
||||
(key: string) => Array.isArray(testTableWithNullVars[key])
|
||||
)[0]
|
||||
const csv = convertToCSV(testTableWithNullVars, tableName)
|
||||
|
||||
expect(() => generateFileUploadForm(formData, data)).toThrow(
|
||||
new Error(
|
||||
'The max length of a string value in SASjs is 32765 characters.'
|
||||
jest.spyOn(formData, 'append').mockImplementation(() => {})
|
||||
|
||||
generateFileUploadForm(formData, testTableWithNullVars)
|
||||
|
||||
expect(formData.append).toHaveBeenCalledOnce()
|
||||
expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
|
||||
contentType: 'application/csv',
|
||||
filename: `${tableName}.csv`
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw an error if too large string was provided', () => {
|
||||
const formData = new NodeFormData()
|
||||
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
|
||||
|
||||
expect(() => generateFileUploadForm(formData, data)).toThrow(
|
||||
new Error(
|
||||
'The max length of a string value in SASjs is 32765 characters.'
|
||||
)
|
||||
)
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -10,8 +10,8 @@ import {
LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'

import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'

import {
isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in
* Node)
*/
let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
let formData = getFormData()

if (data) {
// file upload approach
@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
isRelativePath,
parseSasViyaDebugResponse,
appendExtraResponseAttributes
appendExtraResponseAttributes,
parseWeboutResponse,
getFormData
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'

export interface WaitingRequstPromise {
promise: Promise<any> | null
@@ -112,8 +113,7 @@ export class WebJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in
* Node)
*/
let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
let formData = getFormData()

if (data) {
const stringifiedData = JSON.stringify(data)
@@ -1,6 +1,9 @@
export interface PollOptions {
maxPollCount: number
pollInterval: number
streamLog: boolean
pollInterval: number // milliseconds
pollStrategy?: PollStrategy
streamLog?: boolean
logFolderPath?: string
}

export type PollStrategy = PollOptions[]
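With the revised interface, one options object carries both the initial poll cadence and an optional escalation plan. A sketch of a literal that satisfies the type; the stage values are the defaults quoted in pollJobState.ts, and validatePollStrategies rejects strategies whose maxPollCount values do not strictly increase or whose numbers drop below 1:

interface PollOptions {
  maxPollCount: number
  pollInterval: number // milliseconds
  pollStrategy?: PollStrategy
  streamLog?: boolean
  logFolderPath?: string
}

type PollStrategy = PollOptions[]

const pollOptions: PollOptions = {
  // First stage: check roughly every 300ms, for at most 200 polls in total.
  maxPollCount: 200,
  pollInterval: 300,
  streamLog: false,
  // Later stages: each maxPollCount must be greater than the previous one.
  pollStrategy: [
    { maxPollCount: 300, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 },
    { maxPollCount: 3400, pollInterval: 60000 }
  ]
}

console.log(pollOptions)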
src/utils/getFormData.ts (new file): 5 lines

@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'

export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()
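The new helper removes the typeof FormData === 'undefined' ternary from the job executors. A small usage sketch; the field name and payload are illustrative, and the inline isNode/getFormData definitions mirror the new util rather than importing it from the package:

import * as NodeFormData from 'form-data'

const isNode = () =>
  typeof process !== 'undefined' && !!process.versions && !!process.versions.node

// Same shape as src/utils/getFormData.ts: pick the implementation per runtime.
const getFormData = () => (isNode() ? new NodeFormData() : new FormData())

// Illustrative payload; the executors append stringified table data here.
const formData = getFormData()
formData.append('sasjsTables', JSON.stringify({ mytable: [{ col1: 42 }] }))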
@@ -20,3 +20,4 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
export * from './getFormData'
src/utils/spec/getFormData.spec.ts (new file): 20 lines

@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'

describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)

expect(getFormData() instanceof NodeFormData).toEqual(true)
})

it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock

jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)

expect(getFormData() instanceof FormData).toEqual(true)
})
})