mirror of https://github.com/sasjs/adapter.git synced 2026-01-03 10:40:06 +00:00

Compare commits


47 Commits

Author SHA1 Message Date
dependabot[bot]
65ae198c27 chore(deps-dev): bump word-wrap from 1.2.3 to 1.2.4
Bumps [word-wrap](https://github.com/jonschlinkert/word-wrap) from 1.2.3 to 1.2.4.
- [Release notes](https://github.com/jonschlinkert/word-wrap/releases)
- [Commits](https://github.com/jonschlinkert/word-wrap/compare/1.2.3...1.2.4)

---
updated-dependencies:
- dependency-name: word-wrap
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-19 01:03:33 +00:00
Yury Shkoda
76bf5b88e9 Merge pull request #818 from sasjs/deps-bump
Dependencies bump
2023-07-12 09:50:50 +03:00
Yury Shkoda
a97ac4eaa6 chore: committing changes 2023-07-11 15:36:13 +03:00
Yury Shkoda
37cfea6ca7 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/loader-utils-2.0.4' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:53:19 +03:00
Yury Shkoda
f74c8aca57 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/json5-1.0.2' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:52:28 +03:00
Yury Shkoda
77baaabfcd chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/http-cache-semantics-4.1.1' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:51:32 +03:00
Yury Shkoda
510ba771f0 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/webpack-5.76.3' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:50:38 +03:00
Allan Bowe
6fce65f4c8 Merge pull request #817 from sasjs/request-sasjs-fix
fix(file-upload-form): fixed form data for node env
2023-07-11 09:58:57 +01:00
Yury Shkoda
fe03faa59f chore(file-upload-form): left comments 2023-07-11 09:26:36 +03:00
Yury Shkoda
6272eeda23 fix(form-data): fixed formData type check 2023-07-10 19:14:47 +03:00
Yury Shkoda
104d1b88b3 chore(deps): bumped tough-cookie and @types/tough-cookie 2023-07-10 17:07:39 +03:00
Yury Shkoda
0d9ba36de8 fix(file-upload-form): fixed form data for node env 2023-07-06 15:49:24 +03:00
Yury Shkoda
4e7a845d99 Merge pull request #816 from sasjs/ci/cd-workwlows-node-version
chore(ci-cd): used Node lts/hydrogen version
2023-07-06 12:42:12 +03:00
Yury Shkoda
716cc513ff chore(ci-cd): used Node lts/hydrogen version 2023-07-05 16:10:45 +03:00
Yury Shkoda
22edcb0a8e Merge pull request #810 from sasjs/pollJobState-improvements
Poll job state improvements
2023-07-05 11:15:42 +03:00
Yury Shkoda
aedf5c1734 chore: Merge branch 'master' of github.com:sasjs/adapter into pollJobState-improvements 2023-07-05 10:49:12 +03:00
Yury Shkoda
784bd20ee0 Merge pull request #814 from sasjs/issue-811-fixed
Issue 811 fixed
2023-07-05 10:27:04 +03:00
Yury Shkoda
61db1e0609 test: fixed unit tests 2023-06-23 18:04:48 +03:00
Yury Shkoda
5c589a6af3 chore: reverted dev changes to build.yml 2023-06-23 17:52:46 +03:00
Yury Shkoda
275cd6dbd3 chore: debugging 2023-06-23 17:20:16 +03:00
Yury Shkoda
d874e07889 fix(file-uploader): fixed parsing response for SASJS 2023-06-23 16:37:25 +03:00
Yury Shkoda
1648cf28d5 chore: Merge branch 'master' of github.com:sasjs/adapter into issue-811-fixed 2023-06-23 15:26:01 +03:00
Yury Shkoda
6bf68a315c fix(sasjs-utils): fixed imports 2023-06-22 13:37:07 +03:00
Yury Shkoda
e0aebc169f chore: debugging 2023-06-21 18:28:39 +03:00
Yury Shkoda
9a50e5cb63 chore: debugging 2023-06-21 18:16:20 +03:00
Yury Shkoda
a51923dad7 chore: debugging 2023-06-21 18:01:21 +03:00
Yury Shkoda
9aee77f0e3 chore: debugging 2023-06-21 17:44:24 +03:00
Yury Shkoda
c32d037063 chore: debugging 2023-06-21 17:34:16 +03:00
Yury Shkoda
94f7492c31 chore: debugging 2023-06-21 17:19:13 +03:00
Yury Shkoda
d29e0a0f57 chore(sasjs-tests): bumped node-sass 2023-06-21 16:36:22 +03:00
Yury Shkoda
8d7cc11db5 chore(sasjs-tests): bumped node-sass 2023-06-21 16:35:33 +03:00
Yury Shkoda
28e9d1cc6b test(get-token): covered getTokenRequestErrorPrefix 2023-06-21 16:34:26 +03:00
Yury Shkoda
375cec48ca feat(get-token): improved error prefix 2023-06-21 16:33:45 +03:00
Yury Shkoda
4440e5d1f9 fix(types): fixed PollOptions exports 2023-05-17 14:10:17 +03:00
Yury Shkoda
f484a5a6a1 refactor(poll-job-state): updated types and func attributes 2023-05-17 11:16:35 +03:00
Yury Shkoda
5c74186bab feat(poll-strategy): added subsequentStrategies to PollStrategy 2023-05-16 17:48:04 +03:00
Yury Shkoda
ea68c3dff3 docs(poll-job-state): updated docs 2023-05-16 17:42:27 +03:00
Yury Shkoda
153b285670 chore(poll-job-status): renamed PollOptions to PollStrategy and added docs 2023-05-15 16:32:07 +03:00
Yury Shkoda
f9f4aa5aa6 chore(reviewer-lottery): removed QA group 2023-05-15 14:53:55 +03:00
Yury Shkoda
bd02656b3c docs(poll-job-state): added comments 2023-05-15 14:36:18 +03:00
Yury Shkoda
991519a13d fix(execute-job): added error object if it present 2023-05-15 14:26:24 +03:00
Yury Shkoda
615c9d012e feat(poll-job-state): implemented polling strategies 2023-05-15 14:24:11 +03:00
dependabot[bot]
d166231c12 chore(deps): bump webpack from 5.73.0 to 5.76.3 in /sasjs-tests
Bumps [webpack](https://github.com/webpack/webpack) from 5.73.0 to 5.76.3.
- [Release notes](https://github.com/webpack/webpack/releases)
- [Commits](https://github.com/webpack/webpack/compare/v5.73.0...v5.76.3)

---
updated-dependencies:
- dependency-name: webpack
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-27 12:12:06 +00:00
dependabot[bot]
4cb150e951 chore(deps): bump http-cache-semantics in /sasjs-tests
Bumps [http-cache-semantics](https://github.com/kornelski/http-cache-semantics) from 4.1.0 to 4.1.1.
- [Release notes](https://github.com/kornelski/http-cache-semantics/releases)
- [Commits](https://github.com/kornelski/http-cache-semantics/compare/v4.1.0...v4.1.1)

---
updated-dependencies:
- dependency-name: http-cache-semantics
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-04 13:53:04 +00:00
dependabot[bot]
fc8598473f chore(deps): bump json5 from 1.0.1 to 1.0.2 in /sasjs-tests
Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2.
- [Release notes](https://github.com/json5/json5/releases)
- [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md)
- [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2)

---
updated-dependencies:
- dependency-name: json5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-08 11:36:38 +00:00
dependabot[bot]
367e0ae25a chore(deps): bump loader-utils from 2.0.2 to 2.0.4 in /sasjs-tests
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.2 to 2.0.4.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.2...v2.0.4)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-16 06:56:29 +00:00
dependabot[bot]
85dde61baf chore(deps): bump semver-regex from 3.1.3 to 3.1.4
Bumps [semver-regex](https://github.com/sindresorhus/semver-regex) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/sindresorhus/semver-regex/releases)
- [Commits](https://github.com/sindresorhus/semver-regex/commits/v3.1.4)

---
updated-dependencies:
- dependency-name: semver-regex
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-03 23:43:27 +00:00
40 changed files with 4198 additions and 4622 deletions

View File

@@ -5,7 +5,3 @@ groups:
      - YuryShkoda
      - medjedovicm
      - sabhas
-  - name: SASjs QA
-    reviewers: 1
-    usernames:
-      - VladislavParhomchik

View File

@@ -12,7 +12,7 @@ jobs:
    strategy:
      matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
    steps:
      - uses: actions/checkout@v2

View File

@@ -11,7 +11,7 @@ jobs:
    strategy:
      matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
    steps:
      - name: Checkout

View File

@@ -14,7 +14,7 @@ jobs:
    strategy:
      matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
    steps:
      - uses: actions/checkout@v2

package-lock.json (generated)
View File

@@ -13,7 +13,7 @@
         "axios-cookiejar-support": "1.0.1",
         "form-data": "4.0.0",
         "https": "1.0.0",
-        "tough-cookie": "4.0.0"
+        "tough-cookie": "4.1.3"
       },
       "devDependencies": {
         "@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
         "@types/jest": "27.4.0",
         "@types/mime": "2.0.3",
         "@types/pem": "1.9.6",
-        "@types/tough-cookie": "4.0.1",
+        "@types/tough-cookie": "4.0.2",
         "copyfiles": "2.4.1",
         "cp": "0.2.0",
         "cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
       "dev": true
     },
     "node_modules/@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "node_modules/@types/yargs": {
       "version": "16.0.5",
@@ -14110,6 +14110,11 @@
         "node": ">=0.4.x"
       }
     },
+    "node_modules/querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14457,6 +14462,11 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "node_modules/resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15702,22 +15712,23 @@
       }
     },
     "node_modules/tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "dependencies": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "engines": {
         "node": ">=6"
       }
     },
     "node_modules/tough-cookie/node_modules/universalify": {
-      "version": "0.1.2",
-      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-      "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "engines": {
         "node": ">= 4.0.0"
       }
@@ -16351,6 +16362,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "node_modules/url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "dependencies": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "node_modules/url/node_modules/punycode": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -16780,9 +16800,9 @@
       "dev": true
     },
     "node_modules/word-wrap": {
-      "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
-      "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
+      "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
       "dev": true,
       "engines": {
         "node": ">=0.10.0"
@@ -19536,9 +19556,9 @@
       "dev": true
     },
     "@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "@types/yargs": {
       "version": "16.0.5",
@@ -27552,6 +27572,11 @@
       "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
       "dev": true
     },
+    "querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27833,6 +27858,11 @@
       "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true
     },
+    "requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28799,19 +28829,20 @@
       "dev": true
     },
     "tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "requires": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "dependencies": {
         "universalify": {
-          "version": "0.1.2",
-          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-          "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
+          "version": "0.2.0",
+          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+          "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
         }
       }
     },
@@ -29269,6 +29300,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "requires": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "util": {
       "version": "0.12.5",
       "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
@@ -29586,9 +29626,9 @@
       "dev": true
     },
     "word-wrap": {
-      "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
-      "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
+      "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
+      "dev": true
     },
     "wordwrap": {

View File

@@ -49,7 +49,7 @@
     "@types/jest": "27.4.0",
     "@types/mime": "2.0.3",
     "@types/pem": "1.9.6",
-    "@types/tough-cookie": "4.0.1",
+    "@types/tough-cookie": "4.0.2",
     "copyfiles": "2.4.1",
     "cp": "0.2.0",
     "cypress": "7.7.0",
@@ -82,6 +82,6 @@
     "axios-cookiejar-support": "1.0.1",
     "form-data": "4.0.0",
     "https": "1.0.0",
-    "tough-cookie": "4.0.0"
+    "tough-cookie": "4.1.3"
   }
 }

File diff suppressed because it is too large.

View File

@@ -4,15 +4,14 @@
   "homepage": ".",
   "private": true,
   "dependencies": {
-    "@sasjs/adapter": "file:../build/sasjs-adapter-5.0.0.tgz",
     "@sasjs/test-framework": "1.5.7",
     "@types/jest": "^26.0.20",
     "@types/node": "^14.14.41",
-    "@types/react": "^17.0.1",
-    "@types/react-dom": "^17.0.0",
+    "@types/react": "^16.0.1",
+    "@types/react-dom": "^16.0.0",
     "@types/react-router-dom": "^5.1.7",
-    "react": "^17.0.1",
-    "react-dom": "^17.0.1",
+    "react": "^16.0.1",
+    "react-dom": "^16.0.1",
     "react-router-dom": "^5.2.0",
     "react-scripts": "^5.0.1",
     "typescript": "^4.1.3"
@@ -22,7 +21,7 @@
     "build": "react-scripts build",
     "test": "react-scripts test",
     "eject": "react-scripts eject",
-    "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz --legacy-peer-deps",
+    "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
     "deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
     "deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
     "deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
@@ -43,6 +42,6 @@
     ]
   },
   "devDependencies": {
-    "node-sass": "7.0.3"
+    "node-sass": "9.0.0"
   }
 }

View File

@@ -29,6 +29,12 @@ import { executeScript } from './api/viya/executeScript'
 import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
 import { refreshTokensForViya } from './auth/refreshTokensForViya'
 
+interface JobExecutionResult {
+  result?: { result: object }
+  log?: string
+  error?: object
+}
+
 /**
  * A client for interfacing with the SAS Viya REST API.
  *
@@ -270,7 +276,7 @@ export class SASViyaApiClient {
    * @param debug - when set to true, the log will be returned.
    * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
    * @param waitForResult - when set to true, function will return the session
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -621,7 +627,7 @@
    * @param accessToken - an optional access token for an authorized user.
    * @param waitForResult - a boolean indicating if the function should wait for a result.
    * @param expectWebout - a boolean indicating whether to expect a _webout response.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -732,11 +738,13 @@
     debug: boolean,
     data?: any,
     authConfig?: AuthConfig
-  ) {
+  ): Promise<JobExecutionResult> {
     let access_token = (authConfig || {}).access_token
+
     if (authConfig) {
       ;({ access_token } = await getTokens(this.requestClient, authConfig))
     }
+
     if (isRelativePath(sasJob) && !this.rootFolderName) {
       throw new Error(
         'Relative paths cannot be used without specifying a root folder name.'
@@ -749,6 +757,7 @@
     const fullFolderPath = isRelativePath(sasJob)
       ? `${this.rootFolderName}/${folderPath}`
       : folderPath
+
     await this.populateFolderMap(fullFolderPath, access_token)
 
     const jobFolder = this.folderMap.get(fullFolderPath)
@@ -765,9 +774,8 @@
       files = await this.uploadTables(data, access_token)
     }
 
-    if (!jobToExecute) {
-      throw new Error(`Job was not found.`)
-    }
+    if (!jobToExecute) throw new Error(`Job was not found.`)
+
     const jobDefinitionLink = jobToExecute?.links.find(
       (l) => l.rel === 'getResource'
     )?.href
@@ -807,16 +815,19 @@
       jobDefinition,
       arguments: jobArguments
     }
+
     const { result: postedJob } = await this.requestClient.post<Job>(
       `${this.serverUrl}/jobExecution/jobs?_action=wait`,
       postJobRequestBody,
       access_token
     )
+
     const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
       (err) => {
         throw prefixMessage(err, 'Error while polling job status. ')
       }
     )
+
     const { result: currentJob } = await this.requestClient.get<Job>(
       `${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
       access_token
@@ -827,6 +838,7 @@
     const resultLink = currentJob.results['_webout.json']
+
     const logLink = currentJob.links.find((l) => l.rel === 'log')
 
     if (resultLink) {
       jobResult = await this.requestClient.get<any>(
         `${this.serverUrl}${resultLink}/content`,
@@ -834,11 +846,13 @@
         'text/plain'
       )
     }
+
     if (debug && logLink) {
       log = await this.requestClient
         .get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
         .then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
     }
+
     if (jobStatus === 'failed') {
       throw new JobExecutionError(
         currentJob.error?.errorCode,
@@ -846,7 +860,16 @@
         log
       )
     }
-    return { result: jobResult?.result, log }
+
+    const executionResult: JobExecutionResult = {
+      result: jobResult?.result,
+      log
+    }
+
+    const { error } = currentJob
+    if (error) executionResult.error = error
+
+    return executionResult
   }
 
   private async populateFolderMap(folderPath: string, accessToken?: string) {
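With the JobExecutionResult interface above, executeJob now surfaces the job's error object alongside result and log instead of dropping it. A minimal sketch of how calling code might consume that shape (the helper below is illustrative only and is not part of the adapter):

// Hypothetical helper: it relies only on the JobExecutionResult shape introduced in this diff.
interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

const summariseExecution = (execution: JobExecutionResult): object | undefined => {
  if (execution.error) {
    // The job's error object is now surfaced to the caller rather than discarded.
    console.error('Job reported an error:', execution.error)
  }

  if (execution.log) console.log(execution.log)

  return execution.result?.result
}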

View File

@@ -851,7 +851,7 @@ export default class SASjs {
    * @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
    * The access token is not required when the user is authenticated via the browser.
    * @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */

View File

@@ -12,7 +12,7 @@ import { RequestClient } from '../../request/RequestClient'
 import { SessionManager } from '../../SessionManager'
 import { isRelativePath, fetchLogByChunks } from '../../utils'
 import { formatDataForRequest } from '../../utils/formatDataForRequest'
-import { pollJobState } from './pollJobState'
+import { pollJobState, JobState } from './pollJobState'
 import { uploadTables } from './uploadTables'
 
 /**
@@ -25,7 +25,7 @@ import { uploadTables } from './uploadTables'
  * @param debug - when set to true, the log will be returned.
  * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
  * @param waitForResult - when set to true, function will return the session
- * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+ * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
 * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
 * @param variables - an object that represents macro variables.
 */
@@ -228,7 +228,7 @@ export async function executeScript(
     )
   }
 
-  if (jobStatus === 'failed' || jobStatus === 'error') {
+  if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
     throw new ComputeJobExecutionError(currentJob, log)
   }
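Since every JobState member keeps its original string value, the enum comparison above behaves exactly like the old 'failed'/'error' string checks. A small standalone sketch (it redeclares only the members it needs, so it is not the adapter's own enum):

// Sketch only: a subset of the JobState enum added in pollJobState.ts.
enum JobState {
  Completed = 'completed',
  Failed = 'failed',
  Error = 'error'
}

// Equivalent to the old `state === 'failed' || state === 'error'` check,
// because each member's value is still the original string.
const isJobFailure = (state: JobState): boolean =>
  state === JobState.Failed || state === JobState.Error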

View File

@@ -1,29 +1,88 @@
 import { AuthConfig } from '@sasjs/utils/types'
-import { Job, PollOptions } from '../..'
+import { Job, PollOptions, PollStrategy } from '../..'
 import { getTokens } from '../../auth/getTokens'
 import { RequestClient } from '../../request/RequestClient'
 import { JobStatePollError } from '../../types/errors'
 import { Link, WriteStream } from '../../types'
 import { delay, isNode } from '../../utils'
 
+export enum JobState {
+  Completed = 'completed',
+  Running = 'running',
+  Pending = 'pending',
+  Unavailable = 'unavailable',
+  NoState = '',
+  Failed = 'failed',
+  Error = 'error'
+}
+
+/**
+ * Polls job status using default or provided poll options.
+ * @param requestClient - the pre-configured HTTP request client.
+ * @param postedJob - the relative or absolute path to the job.
+ * @param debug - sets the _debug flag in the job arguments.
+ * @param authConfig - an access token, refresh token, client and secret for an authorized user.
+ * @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
+ * Example pollOptions:
+ * {
+ *   maxPollCount: 200,
+ *   pollInterval: 300,
+ *   streamLog: true, // optional, equals to false by default.
+ *   pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
+ * }
+ * Example pollStrategy (values used from default poll strategy):
+ * [
+ *   { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
+ *   { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
+ *   { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
+ *   { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
+ * ]
+ * @returns - a promise which resolves with a job state
+ */
 export async function pollJobState(
   requestClient: RequestClient,
   postedJob: Job,
   debug: boolean,
   authConfig?: AuthConfig,
   pollOptions?: PollOptions
-) {
+): Promise<JobState> {
   const logger = process.logger || console
 
-  let pollInterval = 300
-  let maxPollCount = 1000
-
-  const defaultPollOptions: PollOptions = {
-    maxPollCount,
-    pollInterval,
-    streamLog: false
+  const streamLog = pollOptions?.streamLog || false
+
+  const defaultPollStrategy: PollStrategy = [
+    { maxPollCount: 200, pollInterval: 300 },
+    { maxPollCount: 300, pollInterval: 3000 },
+    { maxPollCount: 500, pollInterval: 30000 },
+    { maxPollCount: 3400, pollInterval: 60000 }
+  ]
+
+  let pollStrategy: PollStrategy
+
+  if (pollOptions !== undefined) {
+    pollStrategy = [pollOptions]
+
+    let { pollStrategy: providedPollStrategy } = pollOptions
+
+    if (providedPollStrategy !== undefined) {
+      validatePollStrategies(providedPollStrategy)
+
+      // INFO: sort by 'maxPollCount'
+      providedPollStrategy = providedPollStrategy.sort(
+        (strategyA: PollOptions, strategyB: PollOptions) =>
+          strategyA.maxPollCount - strategyB.maxPollCount
+      )
+
+      pollStrategy = [...pollStrategy, ...providedPollStrategy]
+    } else {
+      pollStrategy = [...pollStrategy, ...defaultPollStrategy]
+    }
+  } else {
+    pollStrategy = defaultPollStrategy
   }
 
+  let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]
+
   pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
 
   const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -31,10 +90,10 @@ export async function pollJobState(
     throw new Error(`Job state link was not found.`)
   }
 
-  let currentState = await getJobState(
+  let currentState: JobState = await getJobState(
     requestClient,
     postedJob,
-    '',
+    JobState.NoState,
     debug,
     authConfig
   ).catch((err) => {
@@ -42,73 +101,71 @@
       `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
       err
     )
-    return 'unavailable'
+
+    return JobState.Unavailable
   })
 
   let pollCount = 0
 
-  if (currentState === 'completed') {
+  if (currentState === JobState.Completed) {
     return Promise.resolve(currentState)
   }
 
   let logFileStream
-  if (pollOptions.streamLog && isNode()) {
+  if (streamLog && isNode()) {
    const { getFileStream } = require('./getFileStream')
     logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
   }
 
-  // Poll up to the first 100 times with the specified poll interval
   let result = await doPoll(
     requestClient,
     postedJob,
     currentState,
     debug,
     pollCount,
+    pollOptions,
     authConfig,
-    {
-      ...pollOptions,
-      maxPollCount:
-        pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
-    },
+    streamLog,
     logFileStream
   )
 
   currentState = result.state
   pollCount = result.pollCount
 
-  if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
+  if (
+    !needsRetry(currentState) ||
+    (pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
+  ) {
    return currentState
   }
 
-  // If we get to this point, this is a long-running job that needs longer polling.
-  // We will resume polling with a bigger interval of 1 minute
-  let longJobPollOptions: PollOptions = {
-    maxPollCount: 24 * 60,
-    pollInterval: 60000,
-    streamLog: false
-  }
-  if (pollOptions) {
-    longJobPollOptions.streamLog = pollOptions.streamLog
-    longJobPollOptions.logFolderPath = pollOptions.logFolderPath
-  }
-
-  result = await doPoll(
-    requestClient,
-    postedJob,
-    currentState,
-    debug,
-    pollCount,
-    authConfig,
-    longJobPollOptions,
-    logFileStream
-  )
-
-  currentState = result.state
-  pollCount = result.pollCount
-
-  if (logFileStream) {
-    logFileStream.end()
-  }
+  // INFO: If we get to this point, this is a long-running job that needs longer polling.
+  // We will resume polling with a bigger interval according to the next polling strategy
+  while (pollStrategy.length && needsRetry(currentState)) {
+    defaultPollOptions = pollStrategy.splice(0, 1)[0]
+
+    if (pollOptions) {
+      defaultPollOptions.logFolderPath = pollOptions.logFolderPath
+    }
+
+    result = await doPoll(
+      requestClient,
+      postedJob,
+      currentState,
+      debug,
+      pollCount,
+      defaultPollOptions,
+      authConfig,
+      streamLog,
+      logFileStream
+    )
+
+    currentState = result.state
+    pollCount = result.pollCount
+  }
+
+  if (logFileStream) logFileStream.end()
 
   return currentState
 }
@@ -119,17 +176,13 @@
   currentState: string,
   debug: boolean,
   authConfig?: AuthConfig
-) => {
-  const stateLink = job.links.find((l: any) => l.rel === 'state')
-  if (!stateLink) {
-    throw new Error(`Job state link was not found.`)
-  }
+): Promise<JobState> => {
+  const stateLink = job.links.find((l: any) => l.rel === 'state')!
 
   if (needsRetry(currentState)) {
     let tokens
-    if (authConfig) {
-      tokens = await getTokens(requestClient, authConfig)
-    }
+
+    if (authConfig) tokens = await getTokens(requestClient, authConfig)
 
     const { result: jobState } = await requestClient
       .get<string>(
@@ -143,48 +196,38 @@
        throw new JobStatePollError(job.id, err)
       })
 
-    return jobState.trim()
+    return jobState.trim() as JobState
   } else {
-    return currentState
+    return currentState as JobState
   }
 }
 
 const needsRetry = (state: string) =>
-  state === 'running' ||
-  state === '' ||
-  state === 'pending' ||
-  state === 'unavailable'
+  state === JobState.Running ||
+  state === JobState.NoState ||
+  state === JobState.Pending ||
+  state === JobState.Unavailable
 
 const doPoll = async (
   requestClient: RequestClient,
   postedJob: Job,
-  currentState: string,
+  currentState: JobState,
   debug: boolean,
   pollCount: number,
+  pollOptions: PollOptions,
   authConfig?: AuthConfig,
-  pollOptions?: PollOptions,
+  streamLog?: boolean,
   logStream?: WriteStream
-): Promise<{ state: string; pollCount: number }> => {
-  let pollInterval = 300
-  let maxPollCount = 1000
+): Promise<{ state: JobState; pollCount: number }> => {
+  const { maxPollCount, pollInterval } = pollOptions
+  const logger = process.logger || console
+  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
 
   let maxErrorCount = 5
   let errorCount = 0
   let state = currentState
-  let printedState = ''
+  let printedState = JobState.NoState
   let startLogLine = 0
-  const logger = process.logger || console
-
-  if (pollOptions) {
-    pollInterval = pollOptions.pollInterval || pollInterval
-    maxPollCount = pollOptions.maxPollCount || maxPollCount
-  }
-
-  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
-  if (!stateLink) {
-    throw new Error(`Job state link was not found.`)
-  }
 
   while (needsRetry(state) && pollCount <= maxPollCount) {
     state = await getJobState(
       requestClient,
@@ -194,21 +237,24 @@
       authConfig
     ).catch((err) => {
       errorCount++
+
       if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
         throw err
       }
+
       logger.error(
         `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
         err
       )
-      return 'unavailable'
+
+      return JobState.Unavailable
     })
 
     pollCount++
 
     const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href
 
-    if (pollOptions?.streamLog) {
+    if (streamLog) {
       const { result: job } = await requestClient.get<Job>(
         jobHref,
         authConfig?.access_token
@@ -238,12 +284,45 @@
       printedState = state
     }
 
-    if (state != 'unavailable' && errorCount > 0) {
+    if (state !== JobState.Unavailable && errorCount > 0) {
       errorCount = 0
     }
 
-    await delay(pollInterval)
+    if (state !== JobState.Completed) {
+      await delay(pollInterval)
+    }
   }
 
   return { state, pollCount }
 }
+
+const validatePollStrategies = (strategy: PollStrategy) => {
+  const throwError = (message?: string, pollOptions?: PollOptions) => {
+    throw new Error(
+      `Poll strategies are not valid.${message ? ` ${message}` : ''}${
+        pollOptions
+          ? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
+          : ''
+      }`
+    )
+  }
+
+  strategy.forEach((pollOptions: PollOptions, i: number) => {
+    const { maxPollCount, pollInterval } = pollOptions
+
+    if (maxPollCount < 1) {
+      throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
+    } else if (i !== 0) {
+      const previousPollOptions = strategy[i - 1]
+
+      if (maxPollCount <= previousPollOptions.maxPollCount) {
+        throwError(
+          `'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
+          pollOptions
+        )
+      }
+    } else if (pollInterval < 1) {
+      throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
+    }
+  })
+}
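In summary, pollJobState now takes a single PollOptions object whose optional pollStrategy array takes over once the initial maxPollCount is exhausted; validatePollStrategies requires each entry's maxPollCount to be strictly greater than the previous one and every pollInterval to be positive. A hedged sketch of the kind of object a caller might pass (the interval and count values are made up, not adapter defaults):

// Illustrative only: this mirrors the PollOptions/PollStrategy shape from the diff above;
// the concrete numbers are invented for the example.
interface SketchPollOptions {
  maxPollCount: number
  pollInterval: number // milliseconds
  streamLog?: boolean
  logFolderPath?: string
  pollStrategy?: SketchPollOptions[]
}

const examplePollOptions: SketchPollOptions = {
  // First phase: poll every 300 ms, at most 200 times.
  maxPollCount: 200,
  pollInterval: 300,
  streamLog: false,
  // Later phases are applied in order once the first maxPollCount is reached;
  // each entry must have a strictly larger maxPollCount than the one before it,
  // otherwise validatePollStrategies throws.
  pollStrategy: [
    { maxPollCount: 500, pollInterval: 5000 },
    { maxPollCount: 1000, pollInterval: 60000 }
  ]
}

// A call would then look like (identifiers assumed from the diff):
// const state = await pollJobState(requestClient, postedJob, false, authConfig, examplePollOptions)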

View File

@@ -9,14 +9,13 @@ import * as formatDataModule from '../../../utils/formatDataForRequest'
 import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
 import { PollOptions } from '../../../types'
 import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
-import { Logger, LogLevel } from '@sasjs/utils'
+import { Logger, LogLevel } from '@sasjs/utils/logger'
 
 const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
 const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
 const defaultPollOptions: PollOptions = {
   maxPollCount: 100,
-  pollInterval: 500,
-  streamLog: false
+  pollInterval: 500
 }
 
 describe('executeScript', () => {
@@ -452,7 +451,9 @@
   it('should throw a ComputeJobExecutionError if the job has failed', async () => {
     jest
       .spyOn(pollJobStateModule, 'pollJobState')
-      .mockImplementation(() => Promise.resolve('failed'))
+      .mockImplementation(() =>
+        Promise.resolve(pollJobStateModule.JobState.Failed)
+      )
 
     const error: ComputeJobExecutionError = await executeScript(
       requestClient,
@@ -485,7 +486,9 @@
   it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
     jest
       .spyOn(pollJobStateModule, 'pollJobState')
-      .mockImplementation(() => Promise.resolve('error'))
+      .mockImplementation(() =>
+        Promise.resolve(pollJobStateModule.JobState.Error)
+      )
 
     const error: ComputeJobExecutionError = await executeScript(
       requestClient,
@@ -654,7 +657,9 @@ const setupMocks = () => {
     .mockImplementation(() => Promise.resolve(mockAuthConfig))
   jest
     .spyOn(pollJobStateModule, 'pollJobState')
-    .mockImplementation(() => Promise.resolve('completed'))
+    .mockImplementation(() =>
+      Promise.resolve(pollJobStateModule.JobState.Completed)
+    )
   jest
     .spyOn(sessionManager, 'getVariable')
     .mockImplementation(() =>

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils' import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient' import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses' import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState' import { pollJobState } from '../pollJobState'
@@ -6,17 +6,18 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog' import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream' import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode' import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types' import * as delayModule from '../../../utils/delay'
import { PollOptions, PollStrategy } from '../../../types'
import { WriteStream } from 'fs' import { WriteStream } from 'fs'
const baseUrl = 'http://localhost' const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)() const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl requestClient['httpClient'].defaults.baseURL = baseUrl
const defaultPollOptions: PollOptions = { const defaultStreamLog = false
const defaultPollStrategy: PollOptions = {
maxPollCount: 100, maxPollCount: 100,
pollInterval: 500, pollInterval: 500
streamLog: false
} }
describe('pollJobState', () => { describe('pollJobState', () => {
@@ -26,13 +27,10 @@ describe('pollJobState', () => {
}) })
it('should get valid tokens if the authConfig has been provided', async () => { it('should get valid tokens if the authConfig has been provided', async () => {
await pollJobState( await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
requestClient, ...defaultPollStrategy,
mockJob, streamLog: defaultStreamLog
false, })
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith( expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient, requestClient,
@@ -46,7 +44,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(getTokensModule.getTokens).not.toHaveBeenCalled() expect(getTokensModule.getTokens).not.toHaveBeenCalled()
@@ -58,7 +56,7 @@ describe('pollJobState', () => {
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') }, { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
).catch((e: any) => e) ).catch((e: any) => e)
expect((error as Error).message).toContain('Job state link was not found.') expect((error as Error).message).toContain('Job state link was not found.')
@@ -72,7 +70,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollOptions defaultPollStrategy
) )
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3) expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
@@ -83,7 +81,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -96,7 +94,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -111,7 +109,7 @@ describe('pollJobState', () => {
const { getFileStream } = require('../getFileStream') const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions, ...defaultPollStrategy,
streamLog: true streamLog: true
}) })
@@ -127,7 +125,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollOptions defaultPollStrategy
) )
expect(saveLogModule.saveLog).not.toHaveBeenCalled() expect(saveLogModule.saveLog).not.toHaveBeenCalled()
@@ -136,15 +134,18 @@ describe('pollJobState', () => {
it('should return the current status when the max poll count is reached', async () => { it('should return the current status when the max poll count is reached', async () => {
mockRunningPoll() mockRunningPoll()
const pollOptions: PollOptions = {
...defaultPollStrategy,
maxPollCount: 1,
pollStrategy: []
}
const state = await pollJobState( const state = await pollJobState(
requestClient, requestClient,
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
{ pollOptions
...defaultPollOptions,
maxPollCount: 1
}
) )
expect(state).toEqual('running') expect(state).toEqual('running')
@@ -159,7 +160,7 @@ describe('pollJobState', () => {
false, false,
mockAuthConfig, mockAuthConfig,
{ {
...defaultPollOptions, ...defaultPollStrategy,
maxPollCount: 200, maxPollCount: 200,
pollInterval: 10 pollInterval: 10
} }
@@ -176,7 +177,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -192,7 +193,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
true, true,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect((process as any).logger.info).toHaveBeenCalledTimes(4) expect((process as any).logger.info).toHaveBeenCalledTimes(4)
@@ -222,7 +223,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -237,13 +238,119 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollOptions defaultPollStrategy
).catch((e: any) => e) ).catch((e: any) => e)
expect(error.message).toEqual( expect(error.message).toEqual(
'Error while polling job state for job j0b: Status Error' 'Error while polling job state for job j0b: Status Error'
) )
}) })
it('should change poll strategies', async () => {
mockSimplePoll(6)
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const pollIntervals = [3, 4, 5, 6]
const pollStrategy = [
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
]
const pollOptions: PollOptions = {
maxPollCount: 1,
pollInterval: pollIntervals[0],
pollStrategy: pollStrategy
}
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
})
it('should throw an error if not valid poll strategies provided', async () => {
// INFO: 'maxPollCount' has to be > 0
let invalidPollStrategy = {
maxPollCount: 0,
pollInterval: 3
}
let pollStrategy: PollStrategy = [invalidPollStrategy]
let expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
const validPollStrategy = {
maxPollCount: 5,
pollInterval: 2
}
invalidPollStrategy = {
maxPollCount: validPollStrategy.maxPollCount,
pollInterval: 3
}
pollStrategy = [validPollStrategy, invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: invalid 'pollInterval'
invalidPollStrategy = {
maxPollCount: 1,
pollInterval: 0
}
pollStrategy = [invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
})
}) })
const setupMocks = () => { const setupMocks = () => {
@@ -273,11 +380,14 @@ const setupMocks = () => {
const mockSimplePoll = (runningCount = 2) => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result:
count === 0
@@ -293,11 +403,14 @@ const mockSimplePoll = (runningCount = 2) => {
const mockRunningPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'running',
etag: '',
@@ -308,11 +421,14 @@ const mockRunningPoll = () => {
const mockLongPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count <= 102 ? 'running' : 'completed',
etag: '',
@@ -323,14 +439,18 @@ const mockLongPoll = () => {
const mockPollWithSingleError = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
if (count === 1) {
return Promise.reject('Status Error')
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'completed',
etag: '',
@@ -344,6 +464,7 @@ const mockErroredPoll = () => {
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.reject('Status Error')
})
}

View File

@@ -1,4 +1,4 @@
-import { Logger, LogLevel } from '@sasjs/utils'
+import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'

View File

@@ -5,7 +5,7 @@ import {
fileExists,
readFile,
deleteFile
-} from '@sasjs/utils'
+} from '@sasjs/utils/file'
describe('writeStream', () => {
const filename = 'test.txt'

View File

@@ -1,5 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'
/**
 * Exchanges the auth code for an access token for the given client.
@@ -31,6 +33,16 @@ export async function getAccessTokenForSasjs(
}
})
.catch((err) => {
-throw prefixMessage(err, 'Error while getting access token. ')
+throw prefixMessage(
err,
getTokenRequestErrorPrefix(
'fetching access token',
'getAccessTokenForSasjs',
ServerType.Sasjs,
url,
data,
clientId
)
)
})
}

View File

@@ -1,11 +1,12 @@
-import { SasAuthResponse } from '@sasjs/utils/types'
+import { SasAuthResponse, ServerType } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { CertificateError } from '../types/errors'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
/**
- * Exchanges the auth code for an access token for the given client.
+ * Exchange the auth code for access / refresh tokens for the given client / secret pair.
- * @param requestClient - the pre-configured HTTP request client
+ * @param requestClient - the pre-configured HTTP request client.
 * @param clientId - the client ID to authenticate with.
 * @param clientSecret - the client secret to authenticate with.
 * @param authCode - the auth code received from the server.
@@ -16,29 +17,44 @@ export async function getAccessTokenForViya(
clientSecret: string,
authCode: string
): Promise<SasAuthResponse> {
const url = '/SASLogon/oauth/token'
let token
if (typeof Buffer === 'undefined') {
token = btoa(clientId + ':' + clientSecret)
} else {
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
}
const url = '/SASLogon/oauth/token'
const headers = {
Authorization: 'Basic ' + token,
Accept: 'application/json'
}
-const data = new URLSearchParams({
+const dataJson = new URLSearchParams({
grant_type: 'authorization_code',
code: authCode
})
const data = new URLSearchParams(dataJson)
const authResponse = await requestClient
.post(url, data, undefined, 'application/x-www-form-urlencoded', headers)
.then((res) => res.result as SasAuthResponse)
.catch((err) => {
if (err instanceof CertificateError) throw err
-throw prefixMessage(err, 'Error while getting access token. ')
+throw prefixMessage(
err,
getTokenRequestErrorPrefix(
'fetching access token',
'getAccessTokenForViya',
ServerType.SasViya,
url,
dataJson,
headers,
clientId,
clientSecret
)
)
})
return authResponse

View File

@@ -0,0 +1,88 @@
import { ServerType } from '@sasjs/utils/types'
type Server = ServerType.SasViya | ServerType.Sasjs
type Operation = 'fetching access token' | 'refreshing tokens'
const getServerName = (server: Server) =>
server === ServerType.SasViya ? 'Viya' : 'Sasjs'
const getResponseTitle = (server: Server) =>
`Response from ${getServerName(server)} is below.`
/**
* Forms error prefix for requests related to token operations.
* @param operation - string describing operation ('fetching access token' or 'refreshing tokens').
* @param funcName - name of the function that sent the request.
* @param server - server type (SASVIYA or SASJS).
* @param url - endpoint used to send the request.
* @param data - request payload.
* @param headers - request headers.
* @param clientId - client ID to authenticate with.
* @param clientSecret - client secret to authenticate with.
* @returns - string containing request information. Example:
* Error while fetching access token from /SASLogon/oauth/token
* Thrown by the @sasjs/adapter getAccessTokenForViya function.
* Payload:
* {
* "grant_type": "authorization_code",
* "code": "example_code"
* }
* Headers:
* {
* "Authorization": "Basic NEdMQXBwOjRHTEFwcDE=",
* "Accept": "application/json"
* }
* ClientId: exampleClientId
* ClientSecret: exampleClientSecret
*
* Response from Viya is below.
* Auth error: {
* "error": "invalid_token",
* "error_description": "No scopes were granted"
* }
*/
export const getTokenRequestErrorPrefix = (
operation: Operation,
funcName: string,
server: Server,
url: string,
data?: {},
headers?: {},
clientId?: string,
clientSecret?: string
) => {
const stringify = (obj: {}) => JSON.stringify(obj, null, 2)
const lines = [
`Error while ${operation} from ${url}`,
`Thrown by the @sasjs/adapter ${funcName} function.`
]
if (data) {
lines.push('Payload:')
lines.push(stringify(data))
}
if (headers) {
lines.push('Headers:')
lines.push(stringify(headers))
}
if (clientId) lines.push(`ClientId: ${clientId}`)
if (clientSecret) lines.push(`ClientSecret: ${clientSecret}`)
lines.push('')
lines.push(`${getResponseTitle(server)}`)
lines.push('')
return lines.join(`\n`)
}
/**
* Parse error prefix to get response payload.
* @param prefix - error prefix generated by getTokenRequestErrorPrefix function.
* @param server - server type (SASVIYA or SASJS).
* @returns - response payload.
*/
export const getTokenRequestErrorPrefixResponse = (
prefix: string,
server: ServerType.SasViya | ServerType.Sasjs
) => prefix.split(`${getResponseTitle(server)}\n`).pop() as string
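
For illustration, a minimal sketch of how these two helpers are meant to round-trip (the error text is made up; the import paths mirror the ones used by the specs further down in this diff):

import { ServerType } from '@sasjs/utils/types'
import {
  getTokenRequestErrorPrefix,
  getTokenRequestErrorPrefixResponse
} from './getTokenRequestErrorPrefix'

// Build a prefix for a hypothetical refresh-token failure (no payload or headers).
const prefix = getTokenRequestErrorPrefix(
  'refreshing tokens',
  'refreshTokensForViya',
  ServerType.SasViya,
  '/SASLogon/oauth/token'
)

// A prefixed error message, as produced via prefixMessage(err, prefix):
const prefixedMessage = `${prefix}Auth error: token expired`

// Strips everything up to and including the 'Response from Viya is below.' line,
// leaving only the original server response ('Auth error: token expired').
const response = getTokenRequestErrorPrefixResponse(
  prefixedMessage,
  ServerType.SasViya
)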

View File

@@ -22,6 +22,7 @@ export async function getTokens(
): Promise<AuthConfig> {
const logger = process.logger || console
let { access_token, refresh_token, client, secret } = authConfig
if (
isAccessTokenExpiring(access_token) ||
isRefreshTokenExpiring(refresh_token)
@@ -29,6 +30,7 @@ export async function getTokens(
if (hasTokenExpired(refresh_token)) {
const error =
'Unable to obtain new access token. Your refresh token has expired.'
logger.error(error)
throw new Error(error)
@@ -47,5 +49,6 @@ export async function getTokens(
: await refreshTokensForSasjs(requestClient, refresh_token)
;({ access_token, refresh_token } = tokens)
}
return { access_token, refresh_token, client, secret }
}

View File

@@ -1,5 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'
/**
 * Exchanges the refresh token for an access token for the given client.
@@ -28,7 +30,15 @@ export async function refreshTokensForSasjs(
}
})
.catch((err) => {
-throw prefixMessage(err, 'Error while refreshing tokens: ')
+throw prefixMessage(
err,
getTokenRequestErrorPrefix(
'refreshing tokens',
'refreshTokensForSasjs',
ServerType.Sasjs,
url
)
)
})
return authResponse

View File

@@ -1,8 +1,9 @@
-import { SasAuthResponse } from '@sasjs/utils/types'
+import { SasAuthResponse, ServerType } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import * as NodeFormData from 'form-data'
import { RequestClient } from '../request/RequestClient'
import { isNode } from '../utils'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
/**
 * Exchanges the refresh token for an access token for the given client.
@@ -46,7 +47,19 @@ export async function refreshTokensForViya(
)
.then((res) => res.result)
.catch((err) => {
-throw prefixMessage(err, 'Error while refreshing tokens: ')
+throw prefixMessage(
err,
getTokenRequestErrorPrefix(
'refreshing tokens',
'refreshTokensForViya',
ServerType.SasViya,
url,
formData,
headers,
clientId,
clientSecret
)
)
})
return authResponse

View File

@@ -1,4 +1,4 @@
-import { AuthConfig } from '@sasjs/utils'
+import { AuthConfig } from '@sasjs/utils/types'
import { generateToken, mockSasjsAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs'
@@ -55,7 +55,7 @@ describe('getAccessTokenForSasjs', () => {
authConfig.refresh_token
).catch((e: any) => e)
-expect(error).toContain('Error while getting access token')
+expect(error).toContain('Error while fetching access token')
})
})

View File

@@ -1,4 +1,4 @@
-import { AuthConfig } from '@sasjs/utils'
+import { AuthConfig } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
@@ -66,7 +66,7 @@ describe('getAccessTokenForViya', () => {
authConfig.refresh_token
).catch((e: any) => e)
-expect(error).toContain('Error while getting access token')
+expect(error).toContain('Error while fetching access token')
})
})

View File

@@ -0,0 +1,81 @@
import { ServerType } from '@sasjs/utils/types'
import { getTokenRequestErrorPrefix } from '../getTokenRequestErrorPrefix'
describe('getTokenRequestErrorPrefix', () => {
it('should return error prefix', () => {
// INFO: Viya with only required attributes
let operation: 'fetching access token' = 'fetching access token'
const funcName = 'testFunc'
const url = '/SASjsApi/auth/token'
let expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.
Response from Viya is below.
`
expect(
getTokenRequestErrorPrefix(operation, funcName, ServerType.SasViya, url)
).toEqual(expectedPrefix)
// INFO: Sasjs with data and headers
const data = {
grant_type: 'authorization_code',
code: 'testCode'
}
const headers = {
Authorization: 'Basic test=',
Accept: 'application/json'
}
expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.
Payload:
${JSON.stringify(data, null, 2)}
Headers:
${JSON.stringify(headers, null, 2)}
Response from Sasjs is below.
`
expect(
getTokenRequestErrorPrefix(
operation,
funcName,
ServerType.Sasjs,
url,
data,
headers
)
).toEqual(expectedPrefix)
// INFO: Viya with all attributes
const clientId = 'testId'
const clientSecret = 'testSecret'
expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.
Payload:
${JSON.stringify(data, null, 2)}
Headers:
${JSON.stringify(headers, null, 2)}
ClientId: ${clientId}
ClientSecret: ${clientSecret}
Response from Viya is below.
`
expect(
getTokenRequestErrorPrefix(
operation,
funcName,
ServerType.SasViya,
url,
data,
headers,
clientId,
clientSecret
)
).toEqual(expectedPrefix)
})
})

View File

@@ -1,4 +1,4 @@
-import { AuthConfig } from '@sasjs/utils'
+import { AuthConfig } from '@sasjs/utils/types'
import * as refreshTokensModule from '../refreshTokensForViya'
import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens'

View File

@@ -1,6 +1,8 @@
import { ServerType } from '@sasjs/utils/types'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForSasjs } from '../refreshTokensForSasjs'
import { getTokenRequestErrorPrefixResponse } from '../getTokenRequestErrorPrefix'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
@@ -38,9 +40,9 @@ describe('refreshTokensForSasjs', () => {
const error = await refreshTokensForSasjs(
requestClient,
refresh_token
-).catch((e: any) => e)
+).catch((e: any) => getTokenRequestErrorPrefixResponse(e, ServerType.Sasjs))
-expect(error).toEqual(`Error while refreshing tokens: ${tokenError}`)
+expect(error).toEqual(tokenError)
})
})

View File

@@ -1,9 +1,10 @@
-import { AuthConfig } from '@sasjs/utils'
+import { AuthConfig, ServerType } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForViya } from '../refreshTokensForViya'
import * as IsNodeModule from '../../utils/isNode'
import { getTokenRequestErrorPrefixResponse } from '../getTokenRequestErrorPrefix'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
@@ -67,9 +68,11 @@ describe('refreshTokensForViya', () => {
authConfig.client,
authConfig.secret,
authConfig.refresh_token
-).catch((e: any) => e)
+).catch((e: any) =>
getTokenRequestErrorPrefixResponse(e, ServerType.SasViya)
)
-expect(error).toEqual(`Error while refreshing tokens: ${tokenError}`)
+expect(error).toEqual(tokenError)
})
it('should throw an error if environment is not Node', async () => {

View File

@@ -1,5 +1,6 @@
import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'
/**
 * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@ export const generateFileUploadForm = (
)
}
-if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
-formData.append(name, csv, {
+// INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
+if (isNode()) {
// INFO: environment is Node and formData is instance of NodeFormData
;(formData as NodeFormData).append(name, csv, {
filename: `${name}.csv`,
contentType: 'application/csv'
})
} else {
// INFO: environment is Browser and formData is instance of FormData
const file = new Blob([csv], {
type: 'application/csv'
})

View File

@@ -1,4 +1,7 @@
import { generateFileUploadForm } from '../generateFileUploadForm'
import { convertToCSV } from '../../utils/convertToCsv'
import * as NodeFormData from 'form-data'
import * as isNodeModule from '../../utils/isNode'
describe('generateFileUploadForm', () => {
beforeAll(() => {
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
;(global as any).Blob = BlobMock
})
-it('should generate file upload form from data', () => {
-const formData = new FormData()
-const testTable = 'sometable'
-const testTableWithNullVars: { [key: string]: any } = {
-[testTable]: [
-{ var1: 'string', var2: 232, nullvar: 'A' },
-{ var1: 'string', var2: 232, nullvar: 'B' },
-{ var1: 'string', var2: 232, nullvar: '_' },
-{ var1: 'string', var2: 232, nullvar: 0 },
-{ var1: 'string', var2: 232, nullvar: 'z' },
-{ var1: 'string', var2: 232, nullvar: null }
-],
-[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
-}
-const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
-Array.isArray(testTableWithNullVars[key])
-)[0]
-jest.spyOn(formData, 'append').mockImplementation(() => {})
-generateFileUploadForm(formData, testTableWithNullVars)
-expect(formData.append).toHaveBeenCalledOnce()
-expect(formData.append).toHaveBeenCalledWith(
-tableName,
-{},
-`${tableName}.csv`
-)
-})
-it('should throw an error if too large string was provided', () => {
-const formData = new FormData()
-const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
-expect(() => generateFileUploadForm(formData, data)).toThrow(
-new Error(
-'The max length of a string value in SASjs is 32765 characters.'
-)
-)
-})
+describe('browser', () => {
+afterAll(() => {
+jest.restoreAllMocks()
+})
+it('should generate file upload form from data', () => {
+const formData = new FormData()
+const testTable = 'sometable'
+const testTableWithNullVars: { [key: string]: any } = {
+[testTable]: [
+{ var1: 'string', var2: 232, nullvar: 'A' },
+{ var1: 'string', var2: 232, nullvar: 'B' },
+{ var1: 'string', var2: 232, nullvar: '_' },
+{ var1: 'string', var2: 232, nullvar: 0 },
+{ var1: 'string', var2: 232, nullvar: 'z' },
+{ var1: 'string', var2: 232, nullvar: null }
+],
+[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+}
+const tableName = Object.keys(testTableWithNullVars).filter(
+(key: string) => Array.isArray(testTableWithNullVars[key])
+)[0]
+jest.spyOn(formData, 'append').mockImplementation(() => {})
+jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
+generateFileUploadForm(formData, testTableWithNullVars)
+expect(formData.append).toHaveBeenCalledOnce()
+expect(formData.append).toHaveBeenCalledWith(
+tableName,
+{},
+`${tableName}.csv`
+)
+})
+it('should throw an error if too large string was provided', () => {
+const formData = new FormData()
+const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
+expect(() => generateFileUploadForm(formData, data)).toThrow(
+new Error(
+'The max length of a string value in SASjs is 32765 characters.'
+)
+)
+})
+})
+describe('node', () => {
+it('should generate file upload form from data', () => {
+const formData = new NodeFormData()
+const testTable = 'sometable'
+const testTableWithNullVars: { [key: string]: any } = {
+[testTable]: [
+{ var1: 'string', var2: 232, nullvar: 'A' },
+{ var1: 'string', var2: 232, nullvar: 'B' },
+{ var1: 'string', var2: 232, nullvar: '_' },
+{ var1: 'string', var2: 232, nullvar: 0 },
+{ var1: 'string', var2: 232, nullvar: 'z' },
+{ var1: 'string', var2: 232, nullvar: null }
+],
+[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+}
+const tableName = Object.keys(testTableWithNullVars).filter(
+(key: string) => Array.isArray(testTableWithNullVars[key])
+)[0]
+const csv = convertToCSV(testTableWithNullVars, tableName)
+jest.spyOn(formData, 'append').mockImplementation(() => {})
+generateFileUploadForm(formData, testTableWithNullVars)
+expect(formData.append).toHaveBeenCalledOnce()
+expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
+contentType: 'application/csv',
+filename: `${tableName}.csv`
+})
+})
+it('should throw an error if too large string was provided', () => {
+const formData = new NodeFormData()
+const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
+expect(() => generateFileUploadForm(formData, data)).toThrow(
+new Error(
+'The max length of a string value in SASjs is 32765 characters.'
+)
+)
+})
+})
})

View File

@@ -93,12 +93,21 @@ export class FileUploader extends BaseJobExecutor {
this.requestClient,
config.serverUrl
)
break
case ServerType.Sas9:
jsonResponse =
typeof res.result === 'string'
? parseWeboutResponse(res.result, uploadUrl)
: res.result
break
case ServerType.Sasjs:
jsonResponse =
typeof res.result === 'string'
? getValidJson(res.result)
: res.result
break
}
} else {

View File

@@ -10,8 +10,8 @@ import {
LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'
import {
isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
 * Use the available form data object (FormData in Browser, NodeFormData in
 * Node)
 */
-let formData =
-typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+let formData = getFormData()
if (data) {
// file upload approach

View File

@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
isRelativePath,
parseSasViyaDebugResponse,
-appendExtraResponseAttributes
+appendExtraResponseAttributes,
parseWeboutResponse,
getFormData
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
export interface WaitingRequstPromise {
promise: Promise<any> | null
@@ -112,8 +113,7 @@ export class WebJobExecutor extends BaseJobExecutor {
 * Use the available form data object (FormData in Browser, NodeFormData in
 * Node)
 */
-let formData =
-typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+let formData = getFormData()
if (data) {
const stringifiedData = JSON.stringify(data)

View File

@@ -2,7 +2,7 @@ import * as pem from 'pem'
import * as http from 'http'
import * as https from 'https'
import { app, mockedAuthResponse } from './SAS_server_app'
-import { ServerType } from '@sasjs/utils'
+import { ServerType } from '@sasjs/utils/types'
import SASjs from '../SASjs'
import * as axiosModules from '../utils/createAxiosInstance'
import {
@@ -11,8 +11,8 @@ import {
NotFoundError,
InternalServerError
} from '../types/errors'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
const axiosActual = jest.requireActual('axios')
@@ -66,14 +66,18 @@ describe('RequestClient', () => {
})
it('should response the POST method with Unauthorized', async () => {
-await expect(
-adapter.getAccessToken('clientId', 'clientSecret', 'incorrect')
-).rejects.toEqual(
-prefixMessage(
-new LoginRequiredError(incorrectAuthCodeErr),
-'Error while getting access token. '
-)
-)
+const expectedError = new LoginRequiredError({
+error: 'unauthorized',
+error_description: 'Bad credentials'
+})
+const rejectionErrorMessage = await adapter
+.getAccessToken('clientId', 'clientSecret', 'incorrect')
+.catch((err) =>
+getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
+)
+expect(rejectionErrorMessage).toEqual(expectedError.message)
})
describe('handleError', () => {
@@ -209,15 +213,15 @@ describe('RequestClient - Self Signed Server', () => {
serverType: ServerType.SasViya
})
-await expect(
-adapterWithoutCertificate.getAccessToken(
-'clientId',
-'clientSecret',
-'authCode'
-)
-).rejects.toThrow(
-`Error while getting access token. ${ERROR_MESSAGES.selfSigned}`
-)
+const expectedError = 'self-signed certificate'
+const rejectionErrorMessage = await adapterWithoutCertificate
+.getAccessToken('clientId', 'clientSecret', 'authCode')
+.catch((err) =>
+getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
+)
+expect(rejectionErrorMessage).toEqual(expectedError)
})
it('should response the POST method using insecure flag', async () => {
@@ -247,14 +251,18 @@ describe('RequestClient - Self Signed Server', () => {
})
it('should response the POST method with Unauthorized', async () => {
-await expect(
-adapter.getAccessToken('clientId', 'clientSecret', 'incorrect')
-).rejects.toEqual(
-prefixMessage(
-new LoginRequiredError(incorrectAuthCodeErr),
-'Error while getting access token. '
-)
-)
+const expectedError = new LoginRequiredError({
+error: 'unauthorized',
+error_description: 'Bad credentials'
+})
+const rejectionErrorMessage = await adapter
+.getAccessToken('clientId', 'clientSecret', 'incorrect')
+.catch((err) =>
+getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
+)
+expect(rejectionErrorMessage).toEqual(expectedError.message)
})
})

View File

@@ -2,7 +2,7 @@ import { SessionManager } from '../SessionManager'
import { RequestClient } from '../request/RequestClient'
import * as dotenv from 'dotenv'
import axios from 'axios'
-import { Logger, LogLevel } from '@sasjs/utils'
+import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Session, Context } from '../types'
jest.mock('axios')

View File

@@ -1,6 +1,9 @@
export interface PollOptions {
maxPollCount: number
-pollInterval: number
+pollInterval: number // milliseconds
-streamLog: boolean
+pollStrategy?: PollStrategy
streamLog?: boolean
logFolderPath?: string
}
export type PollStrategy = PollOptions[]
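
For illustration, a sketch of the new shape, assuming PollOptions and PollStrategy are imported from the adapter's types as defined above; the counts and intervals are made up, but they follow the rules exercised by the pollJobState tests earlier in this diff (each maxPollCount greater than the previous one, every pollInterval greater than 0):

// Each pollStrategy entry takes over once the previous entry's maxPollCount
// has been reached, so later stages can poll less frequently.
const pollStrategy: PollStrategy = [
  { maxPollCount: 200, pollInterval: 3000 },
  { maxPollCount: 1000, pollInterval: 30000 },
  { maxPollCount: 4000, pollInterval: 60000 }
]

const pollOptions: PollOptions = {
  maxPollCount: 24, // first stage
  pollInterval: 1000, // milliseconds, per the comment added above
  pollStrategy
}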

View File

@@ -7,7 +7,7 @@ describe('RootFolderNotFoundError', () => {
const error = new RootFolderNotFoundError(
'/myProject',
-'https://analytium.co.uk',
+'https://sas.4gl.io',
token
)
@@ -19,7 +19,7 @@ describe('RootFolderNotFoundError', () => {
it('when access token is not provided, error message should not contain scopes', () => {
const error = new RootFolderNotFoundError(
'/myProject',
-'https://analytium.co.uk'
+'https://sas.4gl.io'
)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
@@ -30,7 +30,7 @@ describe('RootFolderNotFoundError', () => {
it('should include the folder path and SASDrive URL in the message', () => {
const folderPath = '/myProject'
-const serverUrl = 'https://analytium.co.uk'
+const serverUrl = 'https://sas.4gl.io'
const error = new RootFolderNotFoundError(folderPath, serverUrl)
expect(error).toBeInstanceOf(RootFolderNotFoundError)

src/utils/getFormData.ts Normal file
View File

@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'
export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()
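
For illustration, a sketch of the intended call site (the table payload is made up and the import paths are indicative only, mirroring how the job executors below use the helper); the environment branch now lives in one place instead of at every call site:

import { getFormData } from '../utils'
import { generateFileUploadForm } from '../file/generateFileUploadForm'

// Same call in Node (where this returns NodeFormData) and in the browser (native FormData).
const formData = getFormData()

// generateFileUploadForm appends each table as a CSV part using the
// environment-appropriate append(), as shown earlier in this diff.
generateFileUploadForm(formData, {
  sometable: [{ var1: 'string', var2: 232 }]
})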

View File

@@ -20,3 +20,4 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
export * from './getFormData'

View File

@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'
describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
expect(getFormData() instanceof NodeFormData).toEqual(true)
})
it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
expect(getFormData() instanceof FormData).toEqual(true)
})
})