mirror of https://github.com/sasjs/adapter.git synced 2026-01-08 04:50:06 +00:00

Compare commits


33 Commits

Author SHA1 Message Date
Allan Bowe
d744ee12a3 Merge pull request #823 from sasjs/@sasjs/server-response-fix
feat(sasjs-request-client): improved parseResponse method
2023-07-26 11:45:04 +01:00
Yury Shkoda
5f15226cd9 test(sasjs-request-client): removed unnecessary part of the log 2023-07-25 17:31:39 +03:00
Yury Shkoda
f31ea28b9c refactor(sasjs-request-client): used SASJS_LOGS_SEPARATOR const 2023-07-25 16:08:16 +03:00
Yury Shkoda
e315e4a619 feat(sasjs-request-client): improved parseResponse method 2023-07-25 16:01:35 +03:00
Yury Shkoda
76bf5b88e9 Merge pull request #818 from sasjs/deps-bump
Dependencies bump
2023-07-12 09:50:50 +03:00
Yury Shkoda
a97ac4eaa6 chore: commiting changes 2023-07-11 15:36:13 +03:00
Yury Shkoda
37cfea6ca7 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/loader-utils-2.0.4' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:53:19 +03:00
Yury Shkoda
f74c8aca57 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/json5-1.0.2' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:52:28 +03:00
Yury Shkoda
77baaabfcd chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/http-cache-semantics-4.1.1' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:51:32 +03:00
Yury Shkoda
510ba771f0 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/webpack-5.76.3' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:50:38 +03:00
Allan Bowe
6fce65f4c8 Merge pull request #817 from sasjs/request-sasjs-fix
fix(file-upload-form): fixed form data for node env
2023-07-11 09:58:57 +01:00
Yury Shkoda
fe03faa59f chore(file-upload-form): left comments 2023-07-11 09:26:36 +03:00
Yury Shkoda
6272eeda23 fix(form-data): fixed formData type check 2023-07-10 19:14:47 +03:00
Yury Shkoda
104d1b88b3 chore(deps): bimped tough-cookie and @types/tough-cookie 2023-07-10 17:07:39 +03:00
Yury Shkoda
0d9ba36de8 fix(file-upload-form): fixed form data for node env 2023-07-06 15:49:24 +03:00
Yury Shkoda
4e7a845d99 Merge pull request #816 from sasjs/ci/cd-workwlows-node-version
chore(ci-cd): used Node lts/hydrogen version
2023-07-06 12:42:12 +03:00
Yury Shkoda
716cc513ff chore(ci-cd): used Node lts/hydrogen version 2023-07-05 16:10:45 +03:00
Yury Shkoda
22edcb0a8e Merge pull request #810 from sasjs/pollJobState-improvements
Poll job state improvements
2023-07-05 11:15:42 +03:00
Yury Shkoda
aedf5c1734 chore: Merge branch 'master' of github.com:sasjs/adapter into pollJobState-improvements 2023-07-05 10:49:12 +03:00
Yury Shkoda
4440e5d1f9 fix(types): fixed PollOptions exports 2023-05-17 14:10:17 +03:00
Yury Shkoda
f484a5a6a1 refactor(poll-job-state): updated types and func attributes 2023-05-17 11:16:35 +03:00
Yury Shkoda
5c74186bab feat(poll-strategy): added subsequentStrategies to PollStrategy 2023-05-16 17:48:04 +03:00
Yury Shkoda
ea68c3dff3 docs(poll-job-state): updated docs 2023-05-16 17:42:27 +03:00
Yury Shkoda
153b285670 chore(poll-job-status): renamed PollOptions to PollStrategy and added docs 2023-05-15 16:32:07 +03:00
Yury Shkoda
f9f4aa5aa6 chore(reviewer-lottery): removed QA group 2023-05-15 14:53:55 +03:00
Yury Shkoda
bd02656b3c docs(poll-job-state): added comments 2023-05-15 14:36:18 +03:00
Yury Shkoda
991519a13d fix(execute-job): added error object if it present 2023-05-15 14:26:24 +03:00
Yury Shkoda
615c9d012e feat(poll-job-state): implemented polling strategies 2023-05-15 14:24:11 +03:00
dependabot[bot]
d166231c12 chore(deps): bump webpack from 5.73.0 to 5.76.3 in /sasjs-tests
Bumps [webpack](https://github.com/webpack/webpack) from 5.73.0 to 5.76.3.
- [Release notes](https://github.com/webpack/webpack/releases)
- [Commits](https://github.com/webpack/webpack/compare/v5.73.0...v5.76.3)

---
updated-dependencies:
- dependency-name: webpack
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-27 12:12:06 +00:00
dependabot[bot]
4cb150e951 chore(deps): bump http-cache-semantics in /sasjs-tests
Bumps [http-cache-semantics](https://github.com/kornelski/http-cache-semantics) from 4.1.0 to 4.1.1.
- [Release notes](https://github.com/kornelski/http-cache-semantics/releases)
- [Commits](https://github.com/kornelski/http-cache-semantics/compare/v4.1.0...v4.1.1)

---
updated-dependencies:
- dependency-name: http-cache-semantics
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-04 13:53:04 +00:00
dependabot[bot]
fc8598473f chore(deps): bump json5 from 1.0.1 to 1.0.2 in /sasjs-tests
Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2.
- [Release notes](https://github.com/json5/json5/releases)
- [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md)
- [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2)

---
updated-dependencies:
- dependency-name: json5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-08 11:36:38 +00:00
dependabot[bot]
367e0ae25a chore(deps): bump loader-utils from 2.0.2 to 2.0.4 in /sasjs-tests
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.2 to 2.0.4.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.2...v2.0.4)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-16 06:56:29 +00:00
dependabot[bot]
85dde61baf chore(deps): bump semver-regex from 3.1.3 to 3.1.4
Bumps [semver-regex](https://github.com/sindresorhus/semver-regex) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/sindresorhus/semver-regex/releases)
- [Commits](https://github.com/sindresorhus/semver-regex/commits/v3.1.4)

---
updated-dependencies:
- dependency-name: semver-regex
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-03 23:43:27 +00:00
24 changed files with 14476 additions and 618 deletions

View File

@@ -5,7 +5,3 @@ groups:
       - YuryShkoda
       - medjedovicm
       - sabhas
-  - name: SASjs QA
-    reviewers: 1
-    usernames:
-      - VladislavParhomchik

View File

@@ -11,7 +11,7 @@ jobs:
strategy: strategy:
matrix: matrix:
node-version: [lts/fermium] node-version: [lts/hydrogen]
steps: steps:
- name: Checkout - name: Checkout

View File

@@ -14,7 +14,7 @@ jobs:
strategy: strategy:
matrix: matrix:
node-version: [lts/fermium] node-version: [lts/hydrogen]
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2

package-lock.json (generated, 84 changed lines)
View File

@@ -13,7 +13,7 @@
"axios-cookiejar-support": "1.0.1", "axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0", "form-data": "4.0.0",
"https": "1.0.0", "https": "1.0.0",
"tough-cookie": "4.0.0" "tough-cookie": "4.1.3"
}, },
"devDependencies": { "devDependencies": {
"@cypress/webpack-preprocessor": "5.9.1", "@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
"@types/jest": "27.4.0", "@types/jest": "27.4.0",
"@types/mime": "2.0.3", "@types/mime": "2.0.3",
"@types/pem": "1.9.6", "@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.1", "@types/tough-cookie": "4.0.2",
"copyfiles": "2.4.1", "copyfiles": "2.4.1",
"cp": "0.2.0", "cp": "0.2.0",
"cypress": "7.7.0", "cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
"dev": true "dev": true
}, },
"node_modules/@types/tough-cookie": { "node_modules/@types/tough-cookie": {
"version": "4.0.1", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==" "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
}, },
"node_modules/@types/yargs": { "node_modules/@types/yargs": {
"version": "16.0.5", "version": "16.0.5",
@@ -14110,6 +14110,11 @@
"node": ">=0.4.x" "node": ">=0.4.x"
} }
}, },
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"node_modules/queue-microtask": { "node_modules/queue-microtask": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14457,6 +14462,11 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"node_modules/resolve": { "node_modules/resolve": {
"version": "1.22.1", "version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15702,22 +15712,23 @@
} }
}, },
"node_modules/tough-cookie": { "node_modules/tough-cookie": {
"version": "4.0.0", "version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"dependencies": { "dependencies": {
"psl": "^1.1.33", "psl": "^1.1.33",
"punycode": "^2.1.1", "punycode": "^2.1.1",
"universalify": "^0.1.2" "universalify": "^0.2.0",
"url-parse": "^1.5.3"
}, },
"engines": { "engines": {
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/tough-cookie/node_modules/universalify": { "node_modules/tough-cookie/node_modules/universalify": {
"version": "0.1.2", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
"engines": { "engines": {
"node": ">= 4.0.0" "node": ">= 4.0.0"
} }
@@ -16351,6 +16362,15 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true "dev": true
}, },
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/url/node_modules/punycode": { "node_modules/url/node_modules/punycode": {
"version": "1.3.2", "version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -19536,9 +19556,9 @@
"dev": true "dev": true
}, },
"@types/tough-cookie": { "@types/tough-cookie": {
"version": "4.0.1", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==" "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
}, },
"@types/yargs": { "@types/yargs": {
"version": "16.0.5", "version": "16.0.5",
@@ -27552,6 +27572,11 @@
"integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
"dev": true "dev": true
}, },
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"queue-microtask": { "queue-microtask": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27833,6 +27858,11 @@
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true "dev": true
}, },
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"resolve": { "resolve": {
"version": "1.22.1", "version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28799,19 +28829,20 @@
"dev": true "dev": true
}, },
"tough-cookie": { "tough-cookie": {
"version": "4.0.0", "version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"requires": { "requires": {
"psl": "^1.1.33", "psl": "^1.1.33",
"punycode": "^2.1.1", "punycode": "^2.1.1",
"universalify": "^0.1.2" "universalify": "^0.2.0",
"url-parse": "^1.5.3"
}, },
"dependencies": { "dependencies": {
"universalify": { "universalify": {
"version": "0.1.2", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
} }
} }
}, },
@@ -29269,6 +29300,15 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true "dev": true
}, },
"url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"util": { "util": {
"version": "0.12.5", "version": "0.12.5",
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",

View File

@@ -49,7 +49,7 @@
     "@types/jest": "27.4.0",
     "@types/mime": "2.0.3",
     "@types/pem": "1.9.6",
-    "@types/tough-cookie": "4.0.1",
+    "@types/tough-cookie": "4.0.2",
     "copyfiles": "2.4.1",
     "cp": "0.2.0",
     "cypress": "7.7.0",
@@ -82,6 +82,6 @@
     "axios-cookiejar-support": "1.0.1",
     "form-data": "4.0.0",
     "https": "1.0.0",
-    "tough-cookie": "4.0.0"
+    "tough-cookie": "4.1.3"
   }
 }

File diff suppressed because it is too large.

View File

@@ -29,6 +29,12 @@ import { executeScript } from './api/viya/executeScript'
 import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
 import { refreshTokensForViya } from './auth/refreshTokensForViya'
+
+interface JobExecutionResult {
+  result?: { result: object }
+  log?: string
+  error?: object
+}
 /**
  * A client for interfacing with the SAS Viya REST API.
  *
@@ -270,7 +276,7 @@ export class SASViyaApiClient {
    * @param debug - when set to true, the log will be returned.
    * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
    * @param waitForResult - when set to true, function will return the session
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -621,7 +627,7 @@ export class SASViyaApiClient {
    * @param accessToken - an optional access token for an authorized user.
    * @param waitForResult - a boolean indicating if the function should wait for a result.
    * @param expectWebout - a boolean indicating whether to expect a _webout response.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -732,11 +738,13 @@ export class SASViyaApiClient {
     debug: boolean,
     data?: any,
     authConfig?: AuthConfig
-  ) {
+  ): Promise<JobExecutionResult> {
     let access_token = (authConfig || {}).access_token
+
     if (authConfig) {
       ;({ access_token } = await getTokens(this.requestClient, authConfig))
     }
+
     if (isRelativePath(sasJob) && !this.rootFolderName) {
       throw new Error(
         'Relative paths cannot be used without specifying a root folder name.'
@@ -749,6 +757,7 @@ export class SASViyaApiClient {
     const fullFolderPath = isRelativePath(sasJob)
       ? `${this.rootFolderName}/${folderPath}`
       : folderPath
+
     await this.populateFolderMap(fullFolderPath, access_token)
     const jobFolder = this.folderMap.get(fullFolderPath)
@@ -765,9 +774,8 @@ export class SASViyaApiClient {
       files = await this.uploadTables(data, access_token)
     }
-    if (!jobToExecute) {
-      throw new Error(`Job was not found.`)
-    }
+    if (!jobToExecute) throw new Error(`Job was not found.`)
     const jobDefinitionLink = jobToExecute?.links.find(
       (l) => l.rel === 'getResource'
     )?.href
@@ -807,16 +815,19 @@ export class SASViyaApiClient {
       jobDefinition,
       arguments: jobArguments
     }
+
     const { result: postedJob } = await this.requestClient.post<Job>(
       `${this.serverUrl}/jobExecution/jobs?_action=wait`,
       postJobRequestBody,
       access_token
     )
+
     const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
       (err) => {
         throw prefixMessage(err, 'Error while polling job status. ')
       }
     )
+
     const { result: currentJob } = await this.requestClient.get<Job>(
       `${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
       access_token
@@ -827,6 +838,7 @@ export class SASViyaApiClient {
     const resultLink = currentJob.results['_webout.json']
     const logLink = currentJob.links.find((l) => l.rel === 'log')
+
     if (resultLink) {
       jobResult = await this.requestClient.get<any>(
         `${this.serverUrl}${resultLink}/content`,
@@ -834,11 +846,13 @@ export class SASViyaApiClient {
         'text/plain'
       )
     }
+
     if (debug && logLink) {
       log = await this.requestClient
         .get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
         .then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
     }
+
     if (jobStatus === 'failed') {
       throw new JobExecutionError(
         currentJob.error?.errorCode,
@@ -846,7 +860,16 @@ export class SASViyaApiClient {
         log
       )
     }
-    return { result: jobResult?.result, log }
+
+    const executionResult: JobExecutionResult = {
+      result: jobResult?.result,
+      log
+    }
+
+    const { error } = currentJob
+    if (error) executionResult.error = error
+
+    return executionResult
   }
   private async populateFolderMap(folderPath: string, accessToken?: string) {
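
The last hunks above change the job-execution method to return Promise<JobExecutionResult>, attaching the job's error object when one is present instead of returning only the result and log. A minimal consumer sketch, assuming only the JobExecutionResult shape shown in the diff above; the runner function and the way the call is obtained are illustrative, not taken from the adapter:

// Sketch only: mirrors the JobExecutionResult interface added above.
interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

// 'execute' stands in for the adapter's job-execution call, whose name and
// full signature are not reproduced in this excerpt.
async function runAndReport(
  execute: () => Promise<JobExecutionResult>
): Promise<void> {
  const { result, log, error } = await execute()

  // The job may finish with an error object attached rather than throwing.
  if (error) console.error('Job finished with an error:', error)
  if (log) console.log(log)
  if (result) console.log('Webout:', result.result)
}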

View File

@@ -851,7 +851,7 @@ export default class SASjs {
    * @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
    * The access token is not required when the user is authenticated via the browser.
    * @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
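
The JSDoc update above replaces the old MAX_POLL_COUNT / POLL_INTERVAL example keys with the camelCase maxPollCount / pollInterval names that the polling code actually reads. A hedged sketch of such a pollOptions object; the PollOptions shape below is inferred from the documentation and diffs in this comparison, not copied from the package's type definitions:

// Assumed shape, based on the documented example above.
interface PollOptions {
  maxPollCount: number
  pollInterval: number // milliseconds between state checks
  streamLog?: boolean
  logFolderPath?: string
}

// Poll roughly once per second for up to 24 hours, as in the documented example.
const pollOptions: PollOptions = {
  maxPollCount: 24 * 60 * 60,
  pollInterval: 1000
}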

View File

@@ -12,7 +12,7 @@ import { RequestClient } from '../../request/RequestClient'
 import { SessionManager } from '../../SessionManager'
 import { isRelativePath, fetchLogByChunks } from '../../utils'
 import { formatDataForRequest } from '../../utils/formatDataForRequest'
-import { pollJobState } from './pollJobState'
+import { pollJobState, JobState } from './pollJobState'
 import { uploadTables } from './uploadTables'
 /**
@@ -25,7 +25,7 @@ import { uploadTables } from './uploadTables'
  * @param debug - when set to true, the log will be returned.
  * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
  * @param waitForResult - when set to true, function will return the session
- * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+ * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
  * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
  * @param variables - an object that represents macro variables.
  */
@@ -228,7 +228,7 @@ export async function executeScript(
     )
   }
-  if (jobStatus === 'failed' || jobStatus === 'error') {
+  if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
     throw new ComputeJobExecutionError(currentJob, log)
   }
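
executeScript now compares the polled status against the exported JobState enum instead of bare strings, so misspelled state names become compile-time errors. A small sketch of the pattern, re-declaring the enum values that appear in the pollJobState.ts diff further below; the helper function is illustrative, while the adapter itself throws a ComputeJobExecutionError at this point:

// JobState values as introduced in pollJobState.ts (see that diff below).
enum JobState {
  Completed = 'completed',
  Running = 'running',
  Pending = 'pending',
  Unavailable = 'unavailable',
  NoState = '',
  Failed = 'failed',
  Error = 'error'
}

// Illustrative helper; not part of the adapter.
function assertJobSucceeded(state: JobState): void {
  if (state === JobState.Failed || state === JobState.Error) {
    throw new Error(`Job did not complete successfully: state '${state}'`)
  }
}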

View File

@@ -1,29 +1,88 @@
 import { AuthConfig } from '@sasjs/utils/types'
-import { Job, PollOptions } from '../..'
+import { Job, PollOptions, PollStrategy } from '../..'
 import { getTokens } from '../../auth/getTokens'
 import { RequestClient } from '../../request/RequestClient'
 import { JobStatePollError } from '../../types/errors'
 import { Link, WriteStream } from '../../types'
 import { delay, isNode } from '../../utils'
+
+export enum JobState {
+  Completed = 'completed',
+  Running = 'running',
+  Pending = 'pending',
+  Unavailable = 'unavailable',
+  NoState = '',
+  Failed = 'failed',
+  Error = 'error'
+}
+
+/**
+ * Polls job status using default or provided poll options.
+ * @param requestClient - the pre-configured HTTP request client.
+ * @param postedJob - the relative or absolute path to the job.
+ * @param debug - sets the _debug flag in the job arguments.
+ * @param authConfig - an access token, refresh token, client and secret for an authorized user.
+ * @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
+ * Example pollOptions:
+ * {
+ *   maxPollCount: 200,
+ *   pollInterval: 300,
+ *   streamLog: true, // optional, equals to false by default.
+ *   pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
+ * }
+ * Example pollStrategy (values used from default poll strategy):
+ * [
+ *   { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
+ *   { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
+ *   { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
+ *   { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
+ * ]
+ * @returns - a promise which resolves with a job state
+ */
 export async function pollJobState(
   requestClient: RequestClient,
   postedJob: Job,
   debug: boolean,
   authConfig?: AuthConfig,
   pollOptions?: PollOptions
-) {
+): Promise<JobState> {
   const logger = process.logger || console
-  let pollInterval = 300
-  let maxPollCount = 1000
-  const defaultPollOptions: PollOptions = {
-    maxPollCount,
-    pollInterval,
-    streamLog: false
+  const streamLog = pollOptions?.streamLog || false
+
+  const defaultPollStrategy: PollStrategy = [
+    { maxPollCount: 200, pollInterval: 300 },
+    { maxPollCount: 300, pollInterval: 3000 },
+    { maxPollCount: 500, pollInterval: 30000 },
+    { maxPollCount: 3400, pollInterval: 60000 }
+  ]
+
+  let pollStrategy: PollStrategy
+
+  if (pollOptions !== undefined) {
+    pollStrategy = [pollOptions]
+
+    let { pollStrategy: providedPollStrategy } = pollOptions
+
+    if (providedPollStrategy !== undefined) {
+      validatePollStrategies(providedPollStrategy)
+
+      // INFO: sort by 'maxPollCount'
+      providedPollStrategy = providedPollStrategy.sort(
+        (strategyA: PollOptions, strategyB: PollOptions) =>
+          strategyA.maxPollCount - strategyB.maxPollCount
+      )
+
+      pollStrategy = [...pollStrategy, ...providedPollStrategy]
+    } else {
+      pollStrategy = [...pollStrategy, ...defaultPollStrategy]
+    }
+  } else {
+    pollStrategy = defaultPollStrategy
   }
+
+  let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]
 
   pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
 
   const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -31,10 +90,10 @@ export async function pollJobState(
     throw new Error(`Job state link was not found.`)
   }
 
-  let currentState = await getJobState(
+  let currentState: JobState = await getJobState(
     requestClient,
     postedJob,
-    '',
+    JobState.NoState,
     debug,
     authConfig
   ).catch((err) => {
@@ -42,73 +101,71 @@ export async function pollJobState(
       `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
       err
     )
-    return 'unavailable'
+
+    return JobState.Unavailable
   })
 
   let pollCount = 0
 
-  if (currentState === 'completed') {
+  if (currentState === JobState.Completed) {
     return Promise.resolve(currentState)
   }
 
   let logFileStream
-  if (pollOptions.streamLog && isNode()) {
+  if (streamLog && isNode()) {
     const { getFileStream } = require('./getFileStream')
     logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
   }
 
-  // Poll up to the first 100 times with the specified poll interval
   let result = await doPoll(
     requestClient,
     postedJob,
     currentState,
     debug,
     pollCount,
+    pollOptions,
     authConfig,
-    {
-      ...pollOptions,
-      maxPollCount:
-        pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
-    },
+    streamLog,
     logFileStream
   )
 
   currentState = result.state
   pollCount = result.pollCount
 
-  if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
+  if (
+    !needsRetry(currentState) ||
+    (pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
+  ) {
     return currentState
   }
 
-  // If we get to this point, this is a long-running job that needs longer polling.
-  // We will resume polling with a bigger interval of 1 minute
-  let longJobPollOptions: PollOptions = {
-    maxPollCount: 24 * 60,
-    pollInterval: 60000,
-    streamLog: false
-  }
-  if (pollOptions) {
-    longJobPollOptions.streamLog = pollOptions.streamLog
-    longJobPollOptions.logFolderPath = pollOptions.logFolderPath
+  // INFO: If we get to this point, this is a long-running job that needs longer polling.
+  // We will resume polling with a bigger interval according to the next polling strategy
+  while (pollStrategy.length && needsRetry(currentState)) {
+    defaultPollOptions = pollStrategy.splice(0, 1)[0]
+
+    if (pollOptions) {
+      defaultPollOptions.logFolderPath = pollOptions.logFolderPath
+    }
+
+    result = await doPoll(
+      requestClient,
+      postedJob,
+      currentState,
+      debug,
+      pollCount,
+      defaultPollOptions,
+      authConfig,
+      streamLog,
+      logFileStream
+    )
+
+    currentState = result.state
+    pollCount = result.pollCount
   }
 
-  result = await doPoll(
-    requestClient,
-    postedJob,
-    currentState,
-    debug,
-    pollCount,
-    authConfig,
-    longJobPollOptions,
-    logFileStream
-  )
-
-  currentState = result.state
-  pollCount = result.pollCount
-
-  if (logFileStream) {
-    logFileStream.end()
-  }
+  if (logFileStream) logFileStream.end()
 
   return currentState
 }
@@ -119,17 +176,13 @@ const getJobState = async (
   currentState: string,
   debug: boolean,
   authConfig?: AuthConfig
-) => {
-  const stateLink = job.links.find((l: any) => l.rel === 'state')
-  if (!stateLink) {
-    throw new Error(`Job state link was not found.`)
-  }
+): Promise<JobState> => {
+  const stateLink = job.links.find((l: any) => l.rel === 'state')!
 
   if (needsRetry(currentState)) {
     let tokens
-    if (authConfig) {
-      tokens = await getTokens(requestClient, authConfig)
-    }
+
+    if (authConfig) tokens = await getTokens(requestClient, authConfig)
 
     const { result: jobState } = await requestClient
       .get<string>(
@@ -143,48 +196,38 @@ const getJobState = async (
         throw new JobStatePollError(job.id, err)
       })
 
-    return jobState.trim()
+    return jobState.trim() as JobState
   } else {
-    return currentState
+    return currentState as JobState
   }
 }
 
 const needsRetry = (state: string) =>
-  state === 'running' ||
-  state === '' ||
-  state === 'pending' ||
-  state === 'unavailable'
+  state === JobState.Running ||
+  state === JobState.NoState ||
+  state === JobState.Pending ||
+  state === JobState.Unavailable
 
 const doPoll = async (
   requestClient: RequestClient,
   postedJob: Job,
-  currentState: string,
+  currentState: JobState,
   debug: boolean,
   pollCount: number,
+  pollOptions: PollOptions,
   authConfig?: AuthConfig,
-  pollOptions?: PollOptions,
+  streamLog?: boolean,
   logStream?: WriteStream
-): Promise<{ state: string; pollCount: number }> => {
-  let pollInterval = 300
-  let maxPollCount = 1000
+): Promise<{ state: JobState; pollCount: number }> => {
+  const { maxPollCount, pollInterval } = pollOptions
+  const logger = process.logger || console
+  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
+
   let maxErrorCount = 5
   let errorCount = 0
   let state = currentState
-  let printedState = ''
+  let printedState = JobState.NoState
   let startLogLine = 0
-  const logger = process.logger || console
-
-  if (pollOptions) {
-    pollInterval = pollOptions.pollInterval || pollInterval
-    maxPollCount = pollOptions.maxPollCount || maxPollCount
-  }
-
-  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
-  if (!stateLink) {
-    throw new Error(`Job state link was not found.`)
-  }
 
   while (needsRetry(state) && pollCount <= maxPollCount) {
     state = await getJobState(
       requestClient,
@@ -194,21 +237,24 @@ const doPoll = async (
       authConfig
     ).catch((err) => {
       errorCount++
+
       if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
         throw err
       }
+
       logger.error(
         `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
         err
       )
-      return 'unavailable'
+
+      return JobState.Unavailable
     })
 
     pollCount++
 
     const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href
 
-    if (pollOptions?.streamLog) {
+    if (streamLog) {
       const { result: job } = await requestClient.get<Job>(
         jobHref,
         authConfig?.access_token
@@ -238,12 +284,45 @@ const doPoll = async (
       printedState = state
     }
 
-    if (state != 'unavailable' && errorCount > 0) {
+    if (state !== JobState.Unavailable && errorCount > 0) {
       errorCount = 0
     }
 
-    await delay(pollInterval)
+    if (state !== JobState.Completed) {
+      await delay(pollInterval)
+    }
   }
 
   return { state, pollCount }
 }
+
+const validatePollStrategies = (strategy: PollStrategy) => {
+  const throwError = (message?: string, pollOptions?: PollOptions) => {
+    throw new Error(
+      `Poll strategies are not valid.${message ? ` ${message}` : ''}${
+        pollOptions
+          ? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
+          : ''
+      }`
+    )
+  }
+
+  strategy.forEach((pollOptions: PollOptions, i: number) => {
+    const { maxPollCount, pollInterval } = pollOptions
+
+    if (maxPollCount < 1) {
+      throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
+    } else if (i !== 0) {
+      const previousPollOptions = strategy[i - 1]
+
+      if (maxPollCount <= previousPollOptions.maxPollCount) {
+        throwError(
+          `'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
          pollOptions
+        )
+      }
+    } else if (pollInterval < 1) {
+      throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
    }
+  })
+}
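
The documentation block above describes how a caller can override the first polling phase and optionally supply a pollStrategy array for the later phases of a long-running job. A usage sketch, assuming PollStrategy is an array of PollOptions as the code above implies; the numbers are arbitrary examples, not recommended values:

// Assumed shapes, consistent with the pollJobState.ts diff above.
interface PollOptions {
  maxPollCount: number
  pollInterval: number
  streamLog?: boolean
  logFolderPath?: string
  pollStrategy?: PollOptions[]
}
type PollStrategy = PollOptions[]

// First phase: 10 polls at 300 ms. After that the pollStrategy entries are
// applied in order; within the array each maxPollCount must exceed the
// previous one, otherwise validatePollStrategies throws.
const pollOptions: PollOptions = {
  maxPollCount: 10,
  pollInterval: 300,
  streamLog: false,
  pollStrategy: [
    { maxPollCount: 100, pollInterval: 3000 },
    { maxPollCount: 1000, pollInterval: 30000 }
  ]
}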

View File

@@ -15,8 +15,7 @@ const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
 const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
 const defaultPollOptions: PollOptions = {
   maxPollCount: 100,
-  pollInterval: 500,
-  streamLog: false
+  pollInterval: 500
 }
 describe('executeScript', () => {
@@ -452,7 +451,9 @@ describe('executeScript', () => {
   it('should throw a ComputeJobExecutionError if the job has failed', async () => {
     jest
       .spyOn(pollJobStateModule, 'pollJobState')
-      .mockImplementation(() => Promise.resolve('failed'))
+      .mockImplementation(() =>
+        Promise.resolve(pollJobStateModule.JobState.Failed)
+      )
     const error: ComputeJobExecutionError = await executeScript(
       requestClient,
@@ -485,7 +486,9 @@ describe('executeScript', () => {
   it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
     jest
       .spyOn(pollJobStateModule, 'pollJobState')
-      .mockImplementation(() => Promise.resolve('error'))
+      .mockImplementation(() =>
+        Promise.resolve(pollJobStateModule.JobState.Error)
+      )
     const error: ComputeJobExecutionError = await executeScript(
       requestClient,
@@ -654,7 +657,9 @@ const setupMocks = () => {
     .mockImplementation(() => Promise.resolve(mockAuthConfig))
   jest
     .spyOn(pollJobStateModule, 'pollJobState')
-    .mockImplementation(() => Promise.resolve('completed'))
+    .mockImplementation(() =>
+      Promise.resolve(pollJobStateModule.JobState.Completed)
+    )
   jest
     .spyOn(sessionManager, 'getVariable')
     .mockImplementation(() =>

View File

@@ -6,17 +6,18 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog' import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream' import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode' import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types' import * as delayModule from '../../../utils/delay'
import { PollOptions, PollStrategy } from '../../../types'
import { WriteStream } from 'fs' import { WriteStream } from 'fs'
const baseUrl = 'http://localhost' const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)() const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl requestClient['httpClient'].defaults.baseURL = baseUrl
const defaultPollOptions: PollOptions = { const defaultStreamLog = false
const defaultPollStrategy: PollOptions = {
maxPollCount: 100, maxPollCount: 100,
pollInterval: 500, pollInterval: 500
streamLog: false
} }
describe('pollJobState', () => { describe('pollJobState', () => {
@@ -26,13 +27,10 @@ describe('pollJobState', () => {
}) })
it('should get valid tokens if the authConfig has been provided', async () => { it('should get valid tokens if the authConfig has been provided', async () => {
await pollJobState( await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
requestClient, ...defaultPollStrategy,
mockJob, streamLog: defaultStreamLog
false, })
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith( expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient, requestClient,
@@ -46,7 +44,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(getTokensModule.getTokens).not.toHaveBeenCalled() expect(getTokensModule.getTokens).not.toHaveBeenCalled()
@@ -58,7 +56,7 @@ describe('pollJobState', () => {
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') }, { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
).catch((e: any) => e) ).catch((e: any) => e)
expect((error as Error).message).toContain('Job state link was not found.') expect((error as Error).message).toContain('Job state link was not found.')
@@ -72,7 +70,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollOptions defaultPollStrategy
) )
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3) expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
@@ -83,7 +81,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -96,7 +94,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -111,7 +109,7 @@ describe('pollJobState', () => {
const { getFileStream } = require('../getFileStream') const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -127,7 +125,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollOptions defaultPollStrategy
) )
expect(saveLogModule.saveLog).not.toHaveBeenCalled() expect(saveLogModule.saveLog).not.toHaveBeenCalled()
@@ -136,15 +134,18 @@ describe('pollJobState', () => {
it('should return the current status when the max poll count is reached', async () => { it('should return the current status when the max poll count is reached', async () => {
mockRunningPoll() mockRunningPoll()
const pollOptions: PollOptions = {
...defaultPollStrategy,
maxPollCount: 1,
pollStrategy: []
}
const state = await pollJobState( const state = await pollJobState(
requestClient, requestClient,
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
{ pollOptions
...defaultPollOptions,
maxPollCount: 1
}
) )
expect(state).toEqual('running') expect(state).toEqual('running')
@@ -159,7 +160,7 @@ describe('pollJobState', () => {
false, false,
mockAuthConfig, mockAuthConfig,
{ {
...defaultPollOptions, ...defaultPollStrategy,
maxPollCount: 200, maxPollCount: 200,
pollInterval: 10 pollInterval: 10
} }
@@ -176,7 +177,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -192,7 +193,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
true, true,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect((process as any).logger.info).toHaveBeenCalledTimes(4) expect((process as any).logger.info).toHaveBeenCalledTimes(4)
@@ -222,7 +223,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -237,13 +238,119 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
).catch((e: any) => e) ).catch((e: any) => e)
expect(error.message).toEqual( expect(error.message).toEqual(
'Error while polling job state for job j0b: Status Error' 'Error while polling job state for job j0b: Status Error'
) )
}) })
it('should change poll strategies', async () => {
mockSimplePoll(6)
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const pollIntervals = [3, 4, 5, 6]
const pollStrategy = [
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
]
const pollOptions: PollOptions = {
maxPollCount: 1,
pollInterval: pollIntervals[0],
pollStrategy: pollStrategy
}
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
})
it('should throw an error if not valid poll strategies provided', async () => {
// INFO: 'maxPollCount' has to be > 0
let invalidPollStrategy = {
maxPollCount: 0,
pollInterval: 3
}
let pollStrategy: PollStrategy = [invalidPollStrategy]
let expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
const validPollStrategy = {
maxPollCount: 5,
pollInterval: 2
}
invalidPollStrategy = {
maxPollCount: validPollStrategy.maxPollCount,
pollInterval: 3
}
pollStrategy = [validPollStrategy, invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: invalid 'pollInterval'
invalidPollStrategy = {
maxPollCount: 1,
pollInterval: 0
}
pollStrategy = [invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
})
}) })
const setupMocks = () => { const setupMocks = () => {
@@ -273,11 +380,14 @@ const setupMocks = () => {
const mockSimplePoll = (runningCount = 2) => { const mockSimplePoll = (runningCount = 2) => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: result:
count === 0 count === 0
@@ -293,11 +403,14 @@ const mockSimplePoll = (runningCount = 2) => {
const mockRunningPoll = () => { const mockRunningPoll = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: count === 0 ? 'pending' : 'running', result: count === 0 ? 'pending' : 'running',
etag: '', etag: '',
@@ -308,11 +421,14 @@ const mockRunningPoll = () => {
const mockLongPoll = () => { const mockLongPoll = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: count <= 102 ? 'running' : 'completed', result: count <= 102 ? 'running' : 'completed',
etag: '', etag: '',
@@ -323,14 +439,18 @@ const mockLongPoll = () => {
const mockPollWithSingleError = () => { const mockPollWithSingleError = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
if (count === 1) { if (count === 1) {
return Promise.reject('Status Error') return Promise.reject('Status Error')
} }
return Promise.resolve({ return Promise.resolve({
result: count === 0 ? 'pending' : 'completed', result: count === 0 ? 'pending' : 'completed',
etag: '', etag: '',
@@ -344,6 +464,7 @@ const mockErroredPoll = () => {
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.reject('Status Error') return Promise.reject('Status Error')
}) })
} }
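
The new 'should throw an error if not valid poll strategies provided' test above encodes three rules: maxPollCount must be positive, each entry's maxPollCount must exceed the previous entry's, and pollInterval must be positive. A compact sketch of those checks, assuming PollStrategy is PollOptions[]; this is a re-statement of the rules the tests exercise, not the adapter's exact validatePollStrategies implementation:

// Illustrative only.
interface PollOptions {
  maxPollCount: number
  pollInterval: number
}

function validatePollStrategy(strategy: PollOptions[]): void {
  strategy.forEach((options, i) => {
    if (options.maxPollCount < 1) {
      throw new Error(`'maxPollCount' has to be greater than 0.`)
    }
    if (i > 0 && options.maxPollCount <= strategy[i - 1].maxPollCount) {
      throw new Error(
        `'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`
      )
    }
    if (options.pollInterval < 1) {
      throw new Error(`'pollInterval' has to be greater than 0.`)
    }
  })
}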

View File

@@ -1,5 +1,6 @@
 import * as NodeFormData from 'form-data'
 import { convertToCSV } from '../utils/convertToCsv'
+import { isNode } from '../utils'
 /**
  * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@
     )
   }
-  if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
-    formData.append(name, csv, {
+  // INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
+  if (isNode()) {
+    // INFO: environment is Node and formData is instance of NodeFormData
+    ;(formData as NodeFormData).append(name, csv, {
       filename: `${name}.csv`,
       contentType: 'application/csv'
     })
   } else {
+    // INFO: environment is Browser and formData is instance of FormData
    const file = new Blob([csv], {
      type: 'application/csv'
    })
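
The fix above stops relying on an instanceof check, which returns true for both the browser FormData and the form-data package's class once both are in scope, and branches on the runtime environment instead. A self-contained sketch of that branching; isNode here is a simplified stand-in for the adapter's own utility, and appendCsv is an illustrative wrapper rather than the adapter's function:

import * as NodeFormData from 'form-data'

// Simplified environment check; the adapter ships its own isNode() utility.
const isNode = (): boolean =>
  typeof process !== 'undefined' && !!process.versions?.node

function appendCsv(
  formData: FormData | NodeFormData,
  name: string,
  csv: string
): void {
  if (isNode()) {
    // Node: form-data's append() accepts an options object with filename and contentType.
    ;(formData as NodeFormData).append(name, csv, {
      filename: `${name}.csv`,
      contentType: 'application/csv'
    })
  } else {
    // Browser: wrap the CSV in a Blob and pass the filename as the third argument.
    const file = new Blob([csv], { type: 'application/csv' })
    ;(formData as FormData).append(name, file, `${name}.csv`)
  }
}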

View File

@@ -1,4 +1,7 @@
import { generateFileUploadForm } from '../generateFileUploadForm'
+import { convertToCSV } from '../../utils/convertToCsv'
+import * as NodeFormData from 'form-data'
+import * as isNodeModule from '../../utils/isNode'

describe('generateFileUploadForm', () => {
  beforeAll(() => {
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
    ;(global as any).Blob = BlobMock
  })

-  it('should generate file upload form from data', () => {
-    const formData = new FormData()
-    const testTable = 'sometable'
-    const testTableWithNullVars: { [key: string]: any } = {
-      [testTable]: [
-        { var1: 'string', var2: 232, nullvar: 'A' },
-        { var1: 'string', var2: 232, nullvar: 'B' },
-        { var1: 'string', var2: 232, nullvar: '_' },
-        { var1: 'string', var2: 232, nullvar: 0 },
-        { var1: 'string', var2: 232, nullvar: 'z' },
-        { var1: 'string', var2: 232, nullvar: null }
-      ],
-      [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
-    }
-
-    const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
-      Array.isArray(testTableWithNullVars[key])
-    )[0]
-
-    jest.spyOn(formData, 'append').mockImplementation(() => {})
-
-    generateFileUploadForm(formData, testTableWithNullVars)
-
-    expect(formData.append).toHaveBeenCalledOnce()
-    expect(formData.append).toHaveBeenCalledWith(
-      tableName,
-      {},
-      `${tableName}.csv`
-    )
-  })
-
-  it('should throw an error if too large string was provided', () => {
-    const formData = new FormData()
-    const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
-
-    expect(() => generateFileUploadForm(formData, data)).toThrow(
-      new Error(
-        'The max length of a string value in SASjs is 32765 characters.'
-      )
-    )
-  })
+  describe('browser', () => {
+    afterAll(() => {
+      jest.restoreAllMocks()
+    })
+
+    it('should generate file upload form from data', () => {
+      const formData = new FormData()
+      const testTable = 'sometable'
+      const testTableWithNullVars: { [key: string]: any } = {
+        [testTable]: [
+          { var1: 'string', var2: 232, nullvar: 'A' },
+          { var1: 'string', var2: 232, nullvar: 'B' },
+          { var1: 'string', var2: 232, nullvar: '_' },
+          { var1: 'string', var2: 232, nullvar: 0 },
+          { var1: 'string', var2: 232, nullvar: 'z' },
+          { var1: 'string', var2: 232, nullvar: null }
+        ],
+        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+      }
+
+      const tableName = Object.keys(testTableWithNullVars).filter(
+        (key: string) => Array.isArray(testTableWithNullVars[key])
+      )[0]
+
+      jest.spyOn(formData, 'append').mockImplementation(() => {})
+      jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
+
+      generateFileUploadForm(formData, testTableWithNullVars)
+
+      expect(formData.append).toHaveBeenCalledOnce()
+      expect(formData.append).toHaveBeenCalledWith(
+        tableName,
+        {},
+        `${tableName}.csv`
+      )
+    })
+
+    it('should throw an error if too large string was provided', () => {
+      const formData = new FormData()
+      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }

+      expect(() => generateFileUploadForm(formData, data)).toThrow(
+        new Error(
+          'The max length of a string value in SASjs is 32765 characters.'
+        )
+      )
+    })
+  })
+
+  describe('node', () => {
+    it('should generate file upload form from data', () => {
+      const formData = new NodeFormData()
+      const testTable = 'sometable'
+      const testTableWithNullVars: { [key: string]: any } = {
+        [testTable]: [
+          { var1: 'string', var2: 232, nullvar: 'A' },
+          { var1: 'string', var2: 232, nullvar: 'B' },
+          { var1: 'string', var2: 232, nullvar: '_' },
+          { var1: 'string', var2: 232, nullvar: 0 },
+          { var1: 'string', var2: 232, nullvar: 'z' },
+          { var1: 'string', var2: 232, nullvar: null }
+        ],
+        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+      }
+
+      const tableName = Object.keys(testTableWithNullVars).filter(
+        (key: string) => Array.isArray(testTableWithNullVars[key])
+      )[0]
+      const csv = convertToCSV(testTableWithNullVars, tableName)
+
+      jest.spyOn(formData, 'append').mockImplementation(() => {})
+
+      generateFileUploadForm(formData, testTableWithNullVars)
+
+      expect(formData.append).toHaveBeenCalledOnce()
+      expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
+        contentType: 'application/csv',
+        filename: `${tableName}.csv`
+      })
+    })
+
+    it('should throw an error if too large string was provided', () => {
+      const formData = new NodeFormData()
+      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
+
+      expect(() => generateFileUploadForm(formData, data)).toThrow(
+        new Error(
+          'The max length of a string value in SASjs is 32765 characters.'
+        )
+      )
+    })
+  })
})

View File

@@ -1,8 +1,7 @@
import {
  getValidJson,
  parseSasViyaDebugResponse,
-  parseWeboutResponse,
-  SASJS_LOGS_SEPARATOR
+  parseWeboutResponse
} from '../utils'
import { UploadFile } from '../types/UploadFile'
import {

View File

@@ -10,8 +10,8 @@ import {
  LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { RequestClient } from '../request/RequestClient'
+import { getFormData } from '../utils'
import {
  isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
   * Use the available form data object (FormData in Browser, NodeFormData in
   * Node)
   */
-  let formData =
-    typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+  let formData = getFormData()

  if (data) {
    // file upload approach

View File

@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
  isRelativePath,
  parseSasViyaDebugResponse,
-  appendExtraResponseAttributes
+  appendExtraResponseAttributes,
+  parseWeboutResponse,
+  getFormData
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
-import { parseWeboutResponse } from '../utils/parseWeboutResponse'

export interface WaitingRequstPromise {
  promise: Promise<any> | null
@@ -112,8 +113,7 @@ export class WebJobExecutor extends BaseJobExecutor {
   * Use the available form data object (FormData in Browser, NodeFormData in
   * Node)
   */
-  let formData =
-    typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+  let formData = getFormData()

  if (data) {
    const stringifiedData = JSON.stringify(data)

View File

@@ -1,8 +1,7 @@
import { RequestClient } from './RequestClient'
import { AxiosResponse } from 'axios'
-import { SASJS_LOGS_SEPARATOR } from '../utils'

-interface SasjsParsedResponse<T> {
+export interface SasjsParsedResponse<T> {
  result: T
  log: string
  etag: string
@@ -45,13 +44,30 @@ export class SasjsRequestClient extends RequestClient {
      }
    } catch {
      if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
-        const splittedResponse = response.data.split(SASJS_LOGS_SEPARATOR)
+        const { data } = response
+        const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)

-        webout = splittedResponse[0]
+        webout = splittedResponse.splice(0, 1)[0]
        if (webout !== undefined) parsedResponse = webout

-        log = splittedResponse[1]
-        printOutput = splittedResponse[2]
+        // log can contain nested logs
+        const logs = splittedResponse.splice(0, splittedResponse.length - 1)
+
+        // tests if string ends with SASJS_LOGS_SEPARATOR
+        const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)
+
+        // at this point splittedResponse can contain only one item
+        const lastChunk = splittedResponse[0]
+
+        if (lastChunk) {
+          // if the last chunk doesn't end with SASJS_LOGS_SEPARATOR, then it is a printOutput
+          // else the last chunk is part of the log and has to be joined
+          if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
+          else if (logs.length > 1) logs.push(lastChunk)
+        }
+
+        // join logs into single log with SASJS_LOGS_SEPARATOR
+        log = logs.join(SASJS_LOGS_SEPARATOR)
      } else {
        parsedResponse = response.data
      }
@@ -59,7 +75,7 @@ export class SasjsRequestClient extends RequestClient {
    const returnResult: SasjsParsedResponse<T> = {
      result: parsedResponse as T,
-      log,
+      log: log || '',
      etag,
      status: response.status
    }
@@ -69,3 +85,6 @@ export class SasjsRequestClient extends RequestClient {
    return returnResult
  }
}
+
+export const SASJS_LOGS_SEPARATOR =
+  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
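
To make the new splitting behaviour concrete, here is a hedged sketch of the payload shape parseResponse now handles; the strings are made up, only SASJS_LOGS_SEPARATOR and the relative import path come from the code above:

import { SASJS_LOGS_SEPARATOR } from './SasjsRequestClient'

// Webout first, then the log (which may itself contain nested separator-delimited
// chunks from child requests), then an optional printOutput after the final separator.
const data = [
  'webout content',   // becomes result
  'outer log start',
  'nested log',       // a separator emitted inside the log stays part of the log
  'outer log end',
  ''                  // data ends with the separator, so there is no printOutput
].join(SASJS_LOGS_SEPARATOR)

// parseResponse() would return result 'webout content' and rejoin the three log
// chunks into a single log string, keeping SASJS_LOGS_SEPARATOR between them.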

View File

@@ -0,0 +1,172 @@
import {
SASJS_LOGS_SEPARATOR,
SasjsRequestClient,
SasjsParsedResponse
} from '../SasjsRequestClient'
import { AxiosResponse } from 'axios'
describe('SasjsRequestClient', () => {
const requestClient = new SasjsRequestClient('')
const etag = 'etag'
const status = 200
const webout = `hello`
const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00
PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html
NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds
`
const printOutput = 'printOutPut'
describe('parseResponse', () => {})
it('should parse response with 1 log', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with 1 log and printOutput', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs and printOutput', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
})
describe('SASJS_LOGS_SEPARATOR', () => {
it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
expect(SASJS_LOGS_SEPARATOR).toEqual(
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
)
})
})

View File

@@ -1,6 +1,9 @@
export interface PollOptions {
  maxPollCount: number
-  pollInterval: number
-  streamLog: boolean
+  pollInterval: number // milliseconds
+  pollStrategy?: PollStrategy
+  streamLog?: boolean
  logFolderPath?: string
}
+
+export type PollStrategy = PollOptions[]
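
A hedged example built from the interface above; the values are illustrative, and how the stages are applied is decided by the polling code, not by this type. Since PollStrategy is just an array of PollOptions, a caller can describe fast initial polling followed by slower follow-up stages:

const pollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 300, // milliseconds
  pollStrategy: [
    { maxPollCount: 200, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 }
  ]
}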

View File

@@ -1,2 +0,0 @@
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'

src/utils/getFormData.ts Normal file
View File

@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'
export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()
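
A note on the design, hedged: the typeof FormData === 'undefined' check previously used by the executors above presumably stops distinguishing environments once Node exposes a global FormData, which is why this helper leans on the runtime isNode() check instead. A short consumption sketch (paths and field names illustrative):

import { isNode, getFormData } from '../utils'
import * as NodeFormData from 'form-data'

const formData = getFormData()

if (isNode()) {
  // Node: NodeFormData.append() accepts an options object with filename/contentType
  ;(formData as NodeFormData).append('example', 'content', { filename: 'example.txt' })
} else {
  // Browser: the native FormData append() takes just a name and a value
  ;(formData as FormData).append('example', 'content')
}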

View File

@@ -2,7 +2,6 @@ export * from './appendExtraResponseAttributes'
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
-export * from './constants'
export * from './createAxiosInstance'
export * from './delay'
export * from './fetchLogByChunks'
@@ -20,3 +19,4 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
+export * from './getFormData'

View File

@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'
describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
expect(getFormData() instanceof NodeFormData).toEqual(true)
})
it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
expect(getFormData() instanceof FormData).toEqual(true)
})
})