Mirror of https://github.com/sasjs/adapter.git, synced 2025-12-11 01:14:36 +00:00

Compare commits

63 Commits
SHA1: 5756638dc2, e511cd613c, 2119c81ebb, ea4b30d6ef, f1e1b33571, ccb8599f00, 5bcd17096b,
d744ee12a3, 5f15226cd9, f31ea28b9c, e315e4a619, 76bf5b88e9, a97ac4eaa6, 37cfea6ca7,
f74c8aca57, 77baaabfcd, 510ba771f0, 6fce65f4c8, fe03faa59f, 6272eeda23, 104d1b88b3,
0d9ba36de8, 4e7a845d99, 716cc513ff, 22edcb0a8e, aedf5c1734, 784bd20ee0, 61db1e0609,
5c589a6af3, 275cd6dbd3, d874e07889, 1648cf28d5, a4aaeba31c, 6bf68a315c, c0f78d0c1e,
e0aebc169f, 9a50e5cb63, a51923dad7, 9aee77f0e3, c32d037063, 94f7492c31, d29e0a0f57,
8d7cc11db5, 28e9d1cc6b, 375cec48ca, 7d826685f7, f42f6bca00, 4440e5d1f9, f484a5a6a1,
5c74186bab, ea68c3dff3, 153b285670, f9f4aa5aa6, bd02656b3c, 991519a13d, 615c9d012e,
bd872e0e75, a14a1663fc, d166231c12, 4cb150e951, fc8598473f, 367e0ae25a, 85dde61baf
.github/reviewer-lottery.yml (vendored, 4 lines changed)

@@ -5,7 +5,3 @@ groups:
      - YuryShkoda
      - medjedovicm
      - sabhas
  - name: SASjs QA
    reviewers: 1
    usernames:
      - VladislavParhomchik
.github/workflows/build.yml (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ jobs:

    strategy:
      matrix:
        node-version: [lts/fermium]
        node-version: [lts/hydrogen]

    steps:
      - uses: actions/checkout@v2
.github/workflows/generateDocs.yml (vendored, 2 lines changed)

@@ -11,7 +11,7 @@ jobs:

    strategy:
      matrix:
        node-version: [lts/fermium]
        node-version: [lts/hydrogen]

    steps:
      - name: Checkout
.github/workflows/npmpublish.yml (vendored, 2 lines changed)

@@ -14,7 +14,7 @@ jobs:

    strategy:
      matrix:
        node-version: [lts/fermium]
        node-version: [lts/hydrogen]

    steps:
      - uses: actions/checkout@v2
@@ -151,7 +151,11 @@ The `request()` method also has optional parameters such as a config object and

The response object will contain returned tables and columns. Table names are always lowercase, and column names uppercase.

The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.

### Verbose Mode

Set `verbose` to `true` to enable verbose mode that logs a summary of every HTTP response. Verbose mode can be disabled by calling `disableVerboseMode` method or enabled by `enableVerboseMode` method. Verbose mode can also be enabled/disabled by `startComputeJob` method.

### Session Manager
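To make the verbose mode described above concrete, here is a minimal, hedged usage sketch in TypeScript. The `appLoc` value is a placeholder borrowed from the example page further down in this diff, and the instance name is illustrative; `enableVerboseMode` and `disableVerboseMode` are the methods added to `SASjs` later in this changeset.

```ts
import SASjs from '@sasjs/adapter'

// Minimal sketch: appLoc is a placeholder path, not a value from this repository's deployment
const sasjs = new SASjs({ appLoc: '/Public/app/readme', debug: false, verbose: true })

// Log a summary of every HTTP response from now on...
sasjs.enableVerboseMode()

// ...and switch that logging off again when it gets too noisy
sasjs.disableVerboseMode()
```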
@@ -273,6 +277,7 @@ Configuration on the client side involves passing an object on startup, which ca

* `serverType` - either `SAS9`, `SASVIYA` or `SASJS`. The `SASJS` server type is for use with [sasjs/server](https://github.com/sasjs/server).
* `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
* `debug` - if `true` then SAS Logs and extra debug information is returned.
* `verbose` - optional, if `true` then a summary of every HTTP response is logged.
* `loginMechanism` - either `Default` or `Redirected`. See [SAS Logon](#sas-logon) section.
* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
* `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
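The option list above corresponds to the object passed to the `SASjs` constructor on startup. The sketch below is illustrative only: every value is a placeholder rather than a setting from this repository, and the trailing cast is there simply because some fields (such as `serverType`) are typed as enums in the published TypeScript definitions.

```ts
import SASjs from '@sasjs/adapter'

// Illustrative configuration only; none of these values come from this repository
const sasjs = new SASjs({
  appLoc: '/Public/app/readme',
  serverType: 'SASVIYA',        // or 'SAS9' / 'SASJS'
  serverUrl: 'https://sas.example.com:443',
  debug: false,                 // true returns SAS logs with each response
  verbose: true,                // log a summary of every HTTP response
  loginMechanism: 'Default',    // or 'Redirected'
  useComputeApi: true,          // SASVIYA only: Compute API (true), JES API (false), Web approach (undefined)
  contextName: 'SAS Job Execution compute context'
} as any)
```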
@@ -1,7 +1,7 @@

<!DOCTYPE html>
<html>
<head>
<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@1"></script>
<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@4"></script>
<script>
var sasJs = new SASjs.default({
appLoc: "/Public/app/readme"
package-lock.json (generated, 84 lines changed)

@@ -13,7 +13,7 @@
"axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0",
"https": "1.0.0",
"tough-cookie": "4.0.0"
"tough-cookie": "4.1.3"
},
"devDependencies": {
"@cypress/webpack-preprocessor": "5.9.1",

@@ -21,7 +21,7 @@
"@types/jest": "27.4.0",
"@types/mime": "2.0.3",
"@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.1",
"@types/tough-cookie": "4.0.2",
"copyfiles": "2.4.1",
"cp": "0.2.0",
"cypress": "7.7.0",

@@ -3440,9 +3440,9 @@
"dev": true
},
"node_modules/@types/tough-cookie": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
},
"node_modules/@types/yargs": {
"version": "16.0.5",

@@ -14110,6 +14110,11 @@
"node": ">=0.4.x"
}
},
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",

@@ -14457,6 +14462,11 @@
"node": ">=0.10.0"
}
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"node_modules/resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",

@@ -15702,22 +15712,23 @@
}
},
"node_modules/tough-cookie": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"dependencies": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.1.2"
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
},
"engines": {
"node": ">=6"
}
},
"node_modules/tough-cookie/node_modules/universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
"engines": {
"node": ">= 4.0.0"
}

@@ -16351,6 +16362,15 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true
},
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/url/node_modules/punycode": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",

@@ -19536,9 +19556,9 @@
"dev": true
},
"@types/tough-cookie": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
},
"@types/yargs": {
"version": "16.0.5",

@@ -27552,6 +27572,11 @@
"integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
"dev": true
},
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",

@@ -27833,6 +27858,11 @@
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true
},
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",

@@ -28799,19 +28829,20 @@
"dev": true
},
"tough-cookie": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"requires": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.1.2"
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
},
"dependencies": {
"universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
}
}
},

@@ -29269,6 +29300,15 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true
},
"url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"util": {
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
package.json

@@ -49,7 +49,7 @@
"@types/jest": "27.4.0",
"@types/mime": "2.0.3",
"@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.1",
"@types/tough-cookie": "4.0.2",
"copyfiles": "2.4.1",
"cp": "0.2.0",
"cypress": "7.7.0",

@@ -82,6 +82,6 @@
"axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0",
"https": "1.0.0",
"tough-cookie": "4.0.0"
"tough-cookie": "4.1.3"
}
}
sasjs-tests/package-lock.json (generated, 7695 lines changed)
File diff suppressed because it is too large.
sasjs-tests/package.json

@@ -4,15 +4,14 @@
"homepage": ".",
"private": true,
"dependencies": {
"@sasjs/adapter": "file:../build/sasjs-adapter-5.0.0.tgz",
"@sasjs/test-framework": "1.5.7",
"@types/jest": "^26.0.20",
"@types/node": "^14.14.41",
"@types/react": "^17.0.1",
"@types/react-dom": "^17.0.0",
"@types/react": "^16.0.1",
"@types/react-dom": "^16.0.0",
"@types/react-router-dom": "^5.1.7",
"react": "^17.0.1",
"react-dom": "^17.0.1",
"react": "^16.0.1",
"react-dom": "^16.0.1",
"react-router-dom": "^5.2.0",
"react-scripts": "^5.0.1",
"typescript": "^4.1.3"

@@ -22,7 +21,7 @@
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
"update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz --legacy-peer-deps",
"update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
"deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
"deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
"deploy": "npm run update:adapter && npm run build && npm run deploy:tests"

@@ -43,6 +42,6 @@
]
},
"devDependencies": {
"node-sass": "7.0.3"
"node-sass": "9.0.0"
}
}
}
src/SASViyaApiClient.ts

@@ -29,6 +29,12 @@ import { executeScript } from './api/viya/executeScript'
import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
import { refreshTokensForViya } from './auth/refreshTokensForViya'

interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

/**
 * A client for interfacing with the SAS Viya REST API.
 *
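The `JobExecutionResult` interface added above gives the Viya job execution call a predictable return shape. A small sketch of consuming such a value follows; the interface is re-declared locally because it is not exported, and the `handleExecution` helper is purely illustrative.

```ts
// Mirrors the interface introduced in this diff (re-declared locally because it is not exported)
interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

// Illustrative consumer of a job execution result
const handleExecution = (execution: JobExecutionResult) => {
  if (execution.error) console.error('Job reported an error object:', execution.error)
  if (execution.log) console.log(execution.log) // populated when debug was enabled
  return execution.result?.result // the nested _webout payload, if any
}
```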
@@ -270,7 +276,7 @@ export class SASViyaApiClient {
   * @param debug - when set to true, the log will be returned.
   * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
   * @param waitForResult - when set to true, function will return the session
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
   * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
   * @param variables - an object that represents macro variables.
   */

@@ -621,7 +627,7 @@ export class SASViyaApiClient {
   * @param accessToken - an optional access token for an authorized user.
   * @param waitForResult - a boolean indicating if the function should wait for a result.
   * @param expectWebout - a boolean indicating whether to expect a _webout response.
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
   * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
   * @param variables - an object that represents macro variables.
   */

@@ -732,11 +738,13 @@ export class SASViyaApiClient {
    debug: boolean,
    data?: any,
    authConfig?: AuthConfig
  ) {
  ): Promise<JobExecutionResult> {
    let access_token = (authConfig || {}).access_token

    if (authConfig) {
      ;({ access_token } = await getTokens(this.requestClient, authConfig))
    }

    if (isRelativePath(sasJob) && !this.rootFolderName) {
      throw new Error(
        'Relative paths cannot be used without specifying a root folder name.'

@@ -749,6 +757,7 @@ export class SASViyaApiClient {
    const fullFolderPath = isRelativePath(sasJob)
      ? `${this.rootFolderName}/${folderPath}`
      : folderPath

    await this.populateFolderMap(fullFolderPath, access_token)

    const jobFolder = this.folderMap.get(fullFolderPath)

@@ -765,9 +774,8 @@ export class SASViyaApiClient {
      files = await this.uploadTables(data, access_token)
    }

    if (!jobToExecute) {
      throw new Error(`Job was not found.`)
    }
    if (!jobToExecute) throw new Error(`Job was not found.`)

    const jobDefinitionLink = jobToExecute?.links.find(
      (l) => l.rel === 'getResource'
    )?.href

@@ -807,16 +815,19 @@ export class SASViyaApiClient {
      jobDefinition,
      arguments: jobArguments
    }

    const { result: postedJob } = await this.requestClient.post<Job>(
      `${this.serverUrl}/jobExecution/jobs?_action=wait`,
      postJobRequestBody,
      access_token
    )

    const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
      (err) => {
        throw prefixMessage(err, 'Error while polling job status. ')
      }
    )

    const { result: currentJob } = await this.requestClient.get<Job>(
      `${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
      access_token

@@ -827,6 +838,7 @@ export class SASViyaApiClient {
    const resultLink = currentJob.results['_webout.json']
    const logLink = currentJob.links.find((l) => l.rel === 'log')

    if (resultLink) {
      jobResult = await this.requestClient.get<any>(
        `${this.serverUrl}${resultLink}/content`,

@@ -834,11 +846,13 @@ export class SASViyaApiClient {
        'text/plain'
      )
    }

    if (debug && logLink) {
      log = await this.requestClient
        .get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
        .then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
    }

    if (jobStatus === 'failed') {
      throw new JobExecutionError(
        currentJob.error?.errorCode,

@@ -846,7 +860,16 @@ export class SASViyaApiClient {
        log
      )
    }
    return { result: jobResult?.result, log }

    const executionResult: JobExecutionResult = {
      result: jobResult?.result,
      log
    }

    const { error } = currentJob
    if (error) executionResult.error = error

    return executionResult
  }

  private async populateFolderMap(folderPath: string, accessToken?: string) {
src/SASjs.ts (32 lines changed)

@@ -31,6 +31,7 @@ import {
} from './job-execution'
import { ErrorResponse } from './types/errors'
import { LoginOptions, LoginResult } from './types/Login'
import { AxiosResponse } from 'axios'

interface ExecuteScriptParams {
  linesOfCode: string[]

@@ -851,9 +852,10 @@ export default class SASjs {
   * @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
   * The access token is not required when the user is authenticated via the browser.
   * @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
   * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
   * @param variables - an object that represents macro variables.
   * @param verboseMode - boolean to enable verbose mode (log every HTTP response).
   */
  public async startComputeJob(
    sasJob: string,

@@ -863,7 +865,8 @@ export default class SASjs {
    waitForResult?: boolean,
    pollOptions?: PollOptions,
    printPid = false,
    variables?: MacroVar
    variables?: MacroVar,
    verboseMode?: boolean
  ) {
    config = {
      ...this.sasjsConfig,

@@ -877,6 +880,9 @@ export default class SASjs {
      )
    }

    if (verboseMode) this.requestClient?.enableVerboseMode()
    else if (verboseMode === false) this.requestClient?.disableVerboseMode()

    return this.sasViyaApiClient?.executeComputeJob(
      sasJob,
      config.contextName,

@@ -970,7 +976,8 @@ export default class SASjs {
      this.requestClient = new RequestClientClass(
        this.sasjsConfig.serverUrl,
        this.sasjsConfig.httpsAgentOptions,
        this.sasjsConfig.requestHistoryLimit
        this.sasjsConfig.requestHistoryLimit,
        this.sasjsConfig.verbose
      )
    } else {
      this.requestClient.setConfig(

@@ -1134,4 +1141,23 @@ export default class SASjs {
      )
    }
  }

  /**
   * Enables verbose mode that will log a summary of every HTTP response.
   * @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
   * @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
   */
  public enableVerboseMode(
    successCallBack?: (response: AxiosResponse) => AxiosResponse,
    errorCallBack?: (response: AxiosResponse) => AxiosResponse
  ) {
    this.requestClient?.enableVerboseMode(successCallBack, errorCallBack)
  }

  /**
   * Turns off verbose mode to log every HTTP response.
   */
  public disableVerboseMode() {
    this.requestClient?.disableVerboseMode()
  }
}
src/api/viya/executeScript.ts

@@ -12,7 +12,7 @@ import { RequestClient } from '../../request/RequestClient'
import { SessionManager } from '../../SessionManager'
import { isRelativePath, fetchLogByChunks } from '../../utils'
import { formatDataForRequest } from '../../utils/formatDataForRequest'
import { pollJobState } from './pollJobState'
import { pollJobState, JobState } from './pollJobState'
import { uploadTables } from './uploadTables'

/**

@@ -25,7 +25,7 @@ import { uploadTables } from './uploadTables'
 * @param debug - when set to true, the log will be returned.
 * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
 * @param waitForResult - when set to true, function will return the session
 * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
 * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
 * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
 * @param variables - an object that represents macro variables.
 */

@@ -228,7 +228,7 @@ export async function executeScript(
    )
  }

  if (jobStatus === 'failed' || jobStatus === 'error') {
  if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
    throw new ComputeJobExecutionError(currentJob, log)
  }
src/api/viya/pollJobState.ts

@@ -1,29 +1,88 @@
import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions } from '../..'
import { Job, PollOptions, PollStrategy } from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types'
import { delay, isNode } from '../../utils'

export enum JobState {
  Completed = 'completed',
  Running = 'running',
  Pending = 'pending',
  Unavailable = 'unavailable',
  NoState = '',
  Failed = 'failed',
  Error = 'error'
}

/**
 * Polls job status using default or provided poll options.
 * @param requestClient - the pre-configured HTTP request client.
 * @param postedJob - the relative or absolute path to the job.
 * @param debug - sets the _debug flag in the job arguments.
 * @param authConfig - an access token, refresh token, client and secret for an authorized user.
 * @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
 * Example pollOptions:
 * {
 *   maxPollCount: 200,
 *   pollInterval: 300,
 *   streamLog: true, // optional, equals to false by default.
 *   pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
 * }
 * Example pollStrategy (values used from default poll strategy):
 * [
 *   { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
 *   { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
 *   { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
 *   { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
 * ]
 * @returns - a promise which resolves with a job state
 */
export async function pollJobState(
  requestClient: RequestClient,
  postedJob: Job,
  debug: boolean,
  authConfig?: AuthConfig,
  pollOptions?: PollOptions
) {
): Promise<JobState> {
  const logger = process.logger || console

  let pollInterval = 300
  let maxPollCount = 1000
  const streamLog = pollOptions?.streamLog || false

  const defaultPollOptions: PollOptions = {
    maxPollCount,
    pollInterval,
    streamLog: false
  const defaultPollStrategy: PollStrategy = [
    { maxPollCount: 200, pollInterval: 300 },
    { maxPollCount: 300, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 },
    { maxPollCount: 3400, pollInterval: 60000 }
  ]

  let pollStrategy: PollStrategy

  if (pollOptions !== undefined) {
    pollStrategy = [pollOptions]

    let { pollStrategy: providedPollStrategy } = pollOptions

    if (providedPollStrategy !== undefined) {
      validatePollStrategies(providedPollStrategy)

      // INFO: sort by 'maxPollCount'
      providedPollStrategy = providedPollStrategy.sort(
        (strategyA: PollOptions, strategyB: PollOptions) =>
          strategyA.maxPollCount - strategyB.maxPollCount
      )

      pollStrategy = [...pollStrategy, ...providedPollStrategy]
    } else {
      pollStrategy = [...pollStrategy, ...defaultPollStrategy]
    }
  } else {
    pollStrategy = defaultPollStrategy
  }

  let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]

  pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }

  const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
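The JSDoc above documents the new `pollOptions` / `pollStrategy` contract. The sketch below shows one way a caller might describe a back-off schedule; it assumes `PollOptions` and `PollStrategy` are re-exported from the package root (this diff imports them from the src index), and the numbers are illustrative rather than recommendations.

```ts
import { PollOptions, PollStrategy } from '@sasjs/adapter'

// Back off gradually: poll quickly at first, then switch to slower strategies
const pollStrategy: PollStrategy = [
  { maxPollCount: 300, pollInterval: 3000 },
  { maxPollCount: 500, pollInterval: 30000 },
  { maxPollCount: 3400, pollInterval: 60000 }
]

const pollOptions: PollOptions = {
  maxPollCount: 200, // first (fastest) polling phase
  pollInterval: 300, // milliseconds between state checks in that phase
  streamLog: false,  // set true in Node to stream the job log to disk while polling
  pollStrategy       // applied once the maxPollCount above is exhausted
}
```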
@@ -31,10 +90,10 @@ export async function pollJobState(
    throw new Error(`Job state link was not found.`)
  }

  let currentState = await getJobState(
  let currentState: JobState = await getJobState(
    requestClient,
    postedJob,
    '',
    JobState.NoState,
    debug,
    authConfig
  ).catch((err) => {

@@ -42,73 +101,71 @@ export async function pollJobState(
      `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
      err
    )
    return 'unavailable'
    return JobState.Unavailable
  })

  let pollCount = 0

  if (currentState === 'completed') {
  if (currentState === JobState.Completed) {
    return Promise.resolve(currentState)
  }

  let logFileStream
  if (pollOptions.streamLog && isNode()) {
  if (streamLog && isNode()) {
    const { getFileStream } = require('./getFileStream')

    logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
  }

  // Poll up to the first 100 times with the specified poll interval
  let result = await doPoll(
    requestClient,
    postedJob,
    currentState,
    debug,
    pollCount,
    pollOptions,
    authConfig,
    {
      ...pollOptions,
      maxPollCount:
        pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
    },
    streamLog,
    logFileStream
  )

  currentState = result.state
  pollCount = result.pollCount

  if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
  if (
    !needsRetry(currentState) ||
    (pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
  ) {
    return currentState
  }

  // If we get to this point, this is a long-running job that needs longer polling.
  // We will resume polling with a bigger interval of 1 minute
  let longJobPollOptions: PollOptions = {
    maxPollCount: 24 * 60,
    pollInterval: 60000,
    streamLog: false
  }
  if (pollOptions) {
    longJobPollOptions.streamLog = pollOptions.streamLog
    longJobPollOptions.logFolderPath = pollOptions.logFolderPath
  // INFO: If we get to this point, this is a long-running job that needs longer polling.
  // We will resume polling with a bigger interval according to the next polling strategy
  while (pollStrategy.length && needsRetry(currentState)) {
    defaultPollOptions = pollStrategy.splice(0, 1)[0]

    if (pollOptions) {
      defaultPollOptions.logFolderPath = pollOptions.logFolderPath
    }

    result = await doPoll(
      requestClient,
      postedJob,
      currentState,
      debug,
      pollCount,
      defaultPollOptions,
      authConfig,
      streamLog,
      logFileStream
    )

    currentState = result.state
    pollCount = result.pollCount
  }

  result = await doPoll(
    requestClient,
    postedJob,
    currentState,
    debug,
    pollCount,
    authConfig,
    longJobPollOptions,
    logFileStream
  )

  currentState = result.state
  pollCount = result.pollCount

  if (logFileStream) {
    logFileStream.end()
  }
  if (logFileStream) logFileStream.end()

  return currentState
}

@@ -119,17 +176,13 @@ const getJobState = async (
  currentState: string,
  debug: boolean,
  authConfig?: AuthConfig
) => {
  const stateLink = job.links.find((l: any) => l.rel === 'state')
  if (!stateLink) {
    throw new Error(`Job state link was not found.`)
  }
): Promise<JobState> => {
  const stateLink = job.links.find((l: any) => l.rel === 'state')!

  if (needsRetry(currentState)) {
    let tokens
    if (authConfig) {
      tokens = await getTokens(requestClient, authConfig)
    }
    if (authConfig) tokens = await getTokens(requestClient, authConfig)

    const { result: jobState } = await requestClient
      .get<string>(

@@ -143,48 +196,38 @@ const getJobState = async (
        throw new JobStatePollError(job.id, err)
      })

    return jobState.trim()
    return jobState.trim() as JobState
  } else {
    return currentState
    return currentState as JobState
  }
}

const needsRetry = (state: string) =>
  state === 'running' ||
  state === '' ||
  state === 'pending' ||
  state === 'unavailable'
  state === JobState.Running ||
  state === JobState.NoState ||
  state === JobState.Pending ||
  state === JobState.Unavailable

const doPoll = async (
  requestClient: RequestClient,
  postedJob: Job,
  currentState: string,
  currentState: JobState,
  debug: boolean,
  pollCount: number,
  pollOptions: PollOptions,
  authConfig?: AuthConfig,
  pollOptions?: PollOptions,
  streamLog?: boolean,
  logStream?: WriteStream
): Promise<{ state: string; pollCount: number }> => {
  let pollInterval = 300
  let maxPollCount = 1000
): Promise<{ state: JobState; pollCount: number }> => {
  const { maxPollCount, pollInterval } = pollOptions
  const logger = process.logger || console
  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
  let maxErrorCount = 5
  let errorCount = 0
  let state = currentState
  let printedState = ''
  let printedState = JobState.NoState
  let startLogLine = 0

  const logger = process.logger || console

  if (pollOptions) {
    pollInterval = pollOptions.pollInterval || pollInterval
    maxPollCount = pollOptions.maxPollCount || maxPollCount
  }

  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
  if (!stateLink) {
    throw new Error(`Job state link was not found.`)
  }

  while (needsRetry(state) && pollCount <= maxPollCount) {
    state = await getJobState(
      requestClient,

@@ -194,21 +237,24 @@ const doPoll = async (
      authConfig
    ).catch((err) => {
      errorCount++

      if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
        throw err
      }

      logger.error(
        `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
        err
      )
      return 'unavailable'
      return JobState.Unavailable
    })

    pollCount++

    const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href

    if (pollOptions?.streamLog) {
    if (streamLog) {
      const { result: job } = await requestClient.get<Job>(
        jobHref,
        authConfig?.access_token

@@ -238,12 +284,45 @@ const doPoll = async (
      printedState = state
    }

    if (state != 'unavailable' && errorCount > 0) {
    if (state !== JobState.Unavailable && errorCount > 0) {
      errorCount = 0
    }

    await delay(pollInterval)
    if (state !== JobState.Completed) {
      await delay(pollInterval)
    }
  }

  return { state, pollCount }
}

const validatePollStrategies = (strategy: PollStrategy) => {
  const throwError = (message?: string, pollOptions?: PollOptions) => {
    throw new Error(
      `Poll strategies are not valid.${message ? ` ${message}` : ''}${
        pollOptions
          ? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
          : ''
      }`
    )
  }

  strategy.forEach((pollOptions: PollOptions, i: number) => {
    const { maxPollCount, pollInterval } = pollOptions

    if (maxPollCount < 1) {
      throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
    } else if (i !== 0) {
      const previousPollOptions = strategy[i - 1]

      if (maxPollCount <= previousPollOptions.maxPollCount) {
        throwError(
          `'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
          pollOptions
        )
      }
    } else if (pollInterval < 1) {
      throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
    }
  })
}
@@ -9,14 +9,13 @@ import * as formatDataModule from '../../../utils/formatDataForRequest'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import { PollOptions } from '../../../types'
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
import { Logger, LogLevel } from '@sasjs/utils'
import { Logger, LogLevel } from '@sasjs/utils/logger'

const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500,
  streamLog: false
  pollInterval: 500
}

describe('executeScript', () => {

@@ -452,7 +451,9 @@ describe('executeScript', () => {
  it('should throw a ComputeJobExecutionError if the job has failed', async () => {
    jest
      .spyOn(pollJobStateModule, 'pollJobState')
      .mockImplementation(() => Promise.resolve('failed'))
      .mockImplementation(() =>
        Promise.resolve(pollJobStateModule.JobState.Failed)
      )

    const error: ComputeJobExecutionError = await executeScript(
      requestClient,

@@ -485,7 +486,9 @@ describe('executeScript', () => {
  it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
    jest
      .spyOn(pollJobStateModule, 'pollJobState')
      .mockImplementation(() => Promise.resolve('error'))
      .mockImplementation(() =>
        Promise.resolve(pollJobStateModule.JobState.Error)
      )

    const error: ComputeJobExecutionError = await executeScript(
      requestClient,

@@ -654,7 +657,9 @@ const setupMocks = () => {
    .mockImplementation(() => Promise.resolve(mockAuthConfig))
  jest
    .spyOn(pollJobStateModule, 'pollJobState')
    .mockImplementation(() => Promise.resolve('completed'))
    .mockImplementation(() =>
      Promise.resolve(pollJobStateModule.JobState.Completed)
    )
  jest
    .spyOn(sessionManager, 'getVariable')
    .mockImplementation(() =>
@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState'

@@ -6,17 +6,18 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types'
import * as delayModule from '../../../utils/delay'
import { PollOptions, PollStrategy } from '../../../types'
import { WriteStream } from 'fs'

const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl

const defaultPollOptions: PollOptions = {
const defaultStreamLog = false
const defaultPollStrategy: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500,
  streamLog: false
  pollInterval: 500
}

describe('pollJobState', () => {

@@ -26,13 +27,10 @@ describe('pollJobState', () => {
  })

  it('should get valid tokens if the authConfig has been provided', async () => {
    await pollJobState(
      requestClient,
      mockJob,
      false,
      mockAuthConfig,
      defaultPollOptions
    )
    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollStrategy,
      streamLog: defaultStreamLog
    })

    expect(getTokensModule.getTokens).toHaveBeenCalledWith(
      requestClient,

@@ -46,7 +44,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    )

    expect(getTokensModule.getTokens).not.toHaveBeenCalled()

@@ -58,7 +56,7 @@ describe('pollJobState', () => {
      { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
      false,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    ).catch((e: any) => e)

    expect((error as Error).message).toContain('Job state link was not found.')

@@ -72,7 +70,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      mockAuthConfig,
      defaultPollOptions
      defaultPollStrategy
    )

    expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)

@@ -83,7 +81,7 @@ describe('pollJobState', () => {
    const { saveLog } = require('../saveLog')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollOptions,
      ...defaultPollStrategy,
      streamLog: true
    })

@@ -96,7 +94,7 @@ describe('pollJobState', () => {
    const { saveLog } = require('../saveLog')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollOptions,
      ...defaultPollStrategy,
      streamLog: true
    })

@@ -111,7 +109,7 @@ describe('pollJobState', () => {
    const { getFileStream } = require('../getFileStream')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollOptions,
      ...defaultPollStrategy,
      streamLog: true
    })

@@ -127,7 +125,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      mockAuthConfig,
      defaultPollOptions
      defaultPollStrategy
    )

    expect(saveLogModule.saveLog).not.toHaveBeenCalled()

@@ -136,15 +134,18 @@ describe('pollJobState', () => {
  it('should return the current status when the max poll count is reached', async () => {
    mockRunningPoll()

    const pollOptions: PollOptions = {
      ...defaultPollStrategy,
      maxPollCount: 1,
      pollStrategy: []
    }

    const state = await pollJobState(
      requestClient,
      mockJob,
      false,
      mockAuthConfig,
      {
        ...defaultPollOptions,
        maxPollCount: 1
      }
      pollOptions
    )

    expect(state).toEqual('running')

@@ -159,7 +160,7 @@ describe('pollJobState', () => {
      false,
      mockAuthConfig,
      {
        ...defaultPollOptions,
        ...defaultPollStrategy,
        maxPollCount: 200,
        pollInterval: 10
      }

@@ -176,7 +177,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    )

    expect(requestClient.get).toHaveBeenCalledTimes(2)

@@ -192,7 +193,7 @@ describe('pollJobState', () => {
      mockJob,
      true,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    )

    expect((process as any).logger.info).toHaveBeenCalledTimes(4)

@@ -222,7 +223,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    )

    expect(requestClient.get).toHaveBeenCalledTimes(2)

@@ -237,13 +238,119 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollOptions
      defaultPollStrategy
    ).catch((e: any) => e)

    expect(error.message).toEqual(
      'Error while polling job state for job j0b: Status Error'
    )
  })

  it('should change poll strategies', async () => {
    mockSimplePoll(6)

    const delays: number[] = []

    jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
      delays.push(ms)

      return Promise.resolve()
    })

    const pollIntervals = [3, 4, 5, 6]

    const pollStrategy = [
      { maxPollCount: 2, pollInterval: pollIntervals[1] },
      { maxPollCount: 3, pollInterval: pollIntervals[2] },
      { maxPollCount: 4, pollInterval: pollIntervals[3] }
    ]

    const pollOptions: PollOptions = {
      maxPollCount: 1,
      pollInterval: pollIntervals[0],
      pollStrategy: pollStrategy
    }

    await pollJobState(requestClient, mockJob, false, undefined, pollOptions)

    expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
  })

  it('should throw an error if not valid poll strategies provided', async () => {
    // INFO: 'maxPollCount' has to be > 0
    let invalidPollStrategy = {
      maxPollCount: 0,
      pollInterval: 3
    }

    let pollStrategy: PollStrategy = [invalidPollStrategy]

    let expectedError = new Error(
      `Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
        invalidPollStrategy,
        null,
        2
      )}`
    )

    await expect(
      pollJobState(requestClient, mockJob, false, undefined, {
        ...defaultPollStrategy,
        pollStrategy: pollStrategy
      })
    ).rejects.toThrow(expectedError)

    // INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
    const validPollStrategy = {
      maxPollCount: 5,
      pollInterval: 2
    }

    invalidPollStrategy = {
      maxPollCount: validPollStrategy.maxPollCount,
      pollInterval: 3
    }

    pollStrategy = [validPollStrategy, invalidPollStrategy]

    expectedError = new Error(
      `Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
        invalidPollStrategy,
        null,
        2
      )}`
    )

    await expect(
      pollJobState(requestClient, mockJob, false, undefined, {
        ...defaultPollStrategy,
        pollStrategy: pollStrategy
      })
    ).rejects.toThrow(expectedError)

    // INFO: invalid 'pollInterval'
    invalidPollStrategy = {
      maxPollCount: 1,
      pollInterval: 0
    }

    pollStrategy = [invalidPollStrategy]

    expectedError = new Error(
      `Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
        invalidPollStrategy,
        null,
        2
      )}`
    )

    await expect(
      pollJobState(requestClient, mockJob, false, undefined, {
        ...defaultPollStrategy,
        pollStrategy: pollStrategy
      })
    ).rejects.toThrow(expectedError)
  })
})

const setupMocks = () => {

@@ -273,11 +380,14 @@ const setupMocks = () => {

const mockSimplePoll = (runningCount = 2) => {
  let count = 0

  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result:
        count === 0

@@ -293,11 +403,14 @@ const mockSimplePoll = (runningCount = 2) => {

const mockRunningPoll = () => {
  let count = 0

  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result: count === 0 ? 'pending' : 'running',
      etag: '',

@@ -308,11 +421,14 @@ const mockRunningPoll = () => {

const mockLongPoll = () => {
  let count = 0

  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result: count <= 102 ? 'running' : 'completed',
      etag: '',

@@ -323,14 +439,18 @@ const mockLongPoll = () => {

const mockPollWithSingleError = () => {
  let count = 0

  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    if (count === 1) {
      return Promise.reject('Status Error')
    }

    return Promise.resolve({
      result: count === 0 ? 'pending' : 'completed',
      etag: '',

@@ -344,6 +464,7 @@ const mockErroredPoll = () => {
    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.reject('Status Error')
  })
}
@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'

@@ -5,7 +5,7 @@ import {
  fileExists,
  readFile,
  deleteFile
} from '@sasjs/utils'
} from '@sasjs/utils/file'

describe('writeStream', () => {
  const filename = 'test.txt'
src/auth/getAccessTokenForSasjs.ts

@@ -1,5 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'

/**
 * Exchanges the auth code for an access token for the given client.

@@ -31,6 +33,16 @@ export async function getAccessTokenForSasjs(
      }
    })
    .catch((err) => {
      throw prefixMessage(err, 'Error while getting access token. ')
      throw prefixMessage(
        err,
        getTokenRequestErrorPrefix(
          'fetching access token',
          'getAccessTokenForSasjs',
          ServerType.Sasjs,
          url,
          data,
          clientId
        )
      )
    })
}
src/auth/getAccessTokenForViya.ts

@@ -1,11 +1,12 @@
import { SasAuthResponse } from '@sasjs/utils/types'
import { SasAuthResponse, ServerType } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { CertificateError } from '../types/errors'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'

/**
 * Exchanges the auth code for an access token for the given client.
 * @param requestClient - the pre-configured HTTP request client
 * Exchange the auth code for access / refresh tokens for the given client / secret pair.
 * @param requestClient - the pre-configured HTTP request client.
 * @param clientId - the client ID to authenticate with.
 * @param clientSecret - the client secret to authenticate with.
 * @param authCode - the auth code received from the server.

@@ -16,29 +17,44 @@ export async function getAccessTokenForViya(
  clientSecret: string,
  authCode: string
): Promise<SasAuthResponse> {
  const url = '/SASLogon/oauth/token'
  let token

  if (typeof Buffer === 'undefined') {
    token = btoa(clientId + ':' + clientSecret)
  } else {
    token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
  }

  const url = '/SASLogon/oauth/token'
  const headers = {
    Authorization: 'Basic ' + token,
    Accept: 'application/json'
  }

  const data = new URLSearchParams({
  const dataJson = new URLSearchParams({
    grant_type: 'authorization_code',
    code: authCode
  })
  const data = new URLSearchParams(dataJson)

  const authResponse = await requestClient
    .post(url, data, undefined, 'application/x-www-form-urlencoded', headers)
    .then((res) => res.result as SasAuthResponse)
    .catch((err) => {
      if (err instanceof CertificateError) throw err
      throw prefixMessage(err, 'Error while getting access token. ')
      throw prefixMessage(
        err,
        getTokenRequestErrorPrefix(
          'fetching access token',
          'getAccessTokenForViya',
          ServerType.SasViya,
          url,
          dataJson,
          headers,
          clientId,
          clientSecret
        )
      )
    })

  return authResponse
88
src/auth/getTokenRequestErrorPrefix.ts
Normal file
@@ -0,0 +1,88 @@
import { ServerType } from '@sasjs/utils/types'

type Server = ServerType.SasViya | ServerType.Sasjs
type Operation = 'fetching access token' | 'refreshing tokens'

const getServerName = (server: Server) =>
  server === ServerType.SasViya ? 'Viya' : 'Sasjs'

const getResponseTitle = (server: Server) =>
  `Response from ${getServerName(server)} is below.`

/**
 * Forms error prefix for requests related to token operations.
 * @param operation - string describing operation ('fetching access token' or 'refreshing tokens').
 * @param funcName - name of the function sent the request.
 * @param server - server type (SASVIYA or SASJS).
 * @param url - endpoint used to send the request.
 * @param data - request payload.
 * @param headers - request headers.
 * @param clientId - client ID to authenticate with.
 * @param clientSecret - client secret to authenticate with.
 * @returns - string containing request information. Example:
 * Error while fetching access token from /SASLogon/oauth/token
 * Thrown by the @sasjs/adapter getAccessTokenForViya function.
 * Payload:
 * {
 *   "grant_type": "authorization_code",
 *   "code": "example_code"
 * }
 * Headers:
 * {
 *   "Authorization": "Basic NEdMQXBwOjRHTEFwcDE=",
 *   "Accept": "application/json"
 * }
 * ClientId: exampleClientId
 * ClientSecret: exampleClientSecret
 *
 * Response from Viya is below.
 * Auth error: {
 *   "error": "invalid_token",
 *   "error_description": "No scopes were granted"
 * }
 */
export const getTokenRequestErrorPrefix = (
  operation: Operation,
  funcName: string,
  server: Server,
  url: string,
  data?: {},
  headers?: {},
  clientId?: string,
  clientSecret?: string
) => {
  const stringify = (obj: {}) => JSON.stringify(obj, null, 2)

  const lines = [
    `Error while ${operation} from ${url}`,
    `Thrown by the @sasjs/adapter ${funcName} function.`
  ]

  if (data) {
    lines.push('Payload:')
    lines.push(stringify(data))
  }
  if (headers) {
    lines.push('Headers:')
    lines.push(stringify(headers))
  }
  if (clientId) lines.push(`ClientId: ${clientId}`)
  if (clientSecret) lines.push(`ClientSecret: ${clientSecret}`)

  lines.push('')
  lines.push(`${getResponseTitle(server)}`)
  lines.push('')

  return lines.join(`\n`)
}

/**
 * Parse error prefix to get response payload.
 * @param prefix - error prefix generated by getTokenRequestErrorPrefix function.
 * @param server - server type (SASVIYA or SASJS).
 * @returns - response payload.
 */
export const getTokenRequestErrorPrefixResponse = (
  prefix: string,
  server: ServerType.SasViya | ServerType.Sasjs
) => prefix.split(`${getResponseTitle(server)}\n`).pop() as string
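The new helper pair above builds a human-readable prefix and later strips it off again. A minimal sketch of how the two functions fit together, based only on the code in this diff (the import path and sample values are illustrative):

```typescript
import { ServerType } from '@sasjs/utils/types'
import {
  getTokenRequestErrorPrefix,
  getTokenRequestErrorPrefixResponse
} from './getTokenRequestErrorPrefix'

// Build the prefix the way getAccessTokenForViya now does on failure.
const prefix = getTokenRequestErrorPrefix(
  'fetching access token',
  'getAccessTokenForViya',
  ServerType.SasViya,
  '/SASLogon/oauth/token',
  { grant_type: 'authorization_code', code: 'example_code' },
  { Accept: 'application/json' },
  'exampleClientId',
  'exampleClientSecret'
)

// prefixMessage() prepends this prefix to the server error, so everything after
// the "Response from Viya is below." marker is the raw server response again.
const serverError = getTokenRequestErrorPrefixResponse(
  `${prefix}Auth error: no scopes were granted`,
  ServerType.SasViya
)
// serverError === 'Auth error: no scopes were granted'
```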
@@ -22,6 +22,7 @@ export async function getTokens(
): Promise<AuthConfig> {
  const logger = process.logger || console
  let { access_token, refresh_token, client, secret } = authConfig

  if (
    isAccessTokenExpiring(access_token) ||
    isRefreshTokenExpiring(refresh_token)
@@ -29,6 +30,7 @@ export async function getTokens(
    if (hasTokenExpired(refresh_token)) {
      const error =
        'Unable to obtain new access token. Your refresh token has expired.'

      logger.error(error)

      throw new Error(error)
@@ -47,5 +49,6 @@ export async function getTokens(
      : await refreshTokensForSasjs(requestClient, refresh_token)
    ;({ access_token, refresh_token } = tokens)
  }

  return { access_token, refresh_token, client, secret }
}

@@ -1,5 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'

/**
 * Exchanges the refresh token for an access token for the given client.
@@ -28,7 +30,15 @@ export async function refreshTokensForSasjs(
      }
    })
    .catch((err) => {
      throw prefixMessage(err, 'Error while refreshing tokens: ')
      throw prefixMessage(
        err,
        getTokenRequestErrorPrefix(
          'refreshing tokens',
          'refreshTokensForSasjs',
          ServerType.Sasjs,
          url
        )
      )
    })

  return authResponse

@@ -1,8 +1,9 @@
import { SasAuthResponse } from '@sasjs/utils/types'
import { SasAuthResponse, ServerType } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import * as NodeFormData from 'form-data'
import { RequestClient } from '../request/RequestClient'
import { isNode } from '../utils'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'

/**
 * Exchanges the refresh token for an access token for the given client.
@@ -46,7 +47,19 @@ export async function refreshTokensForViya(
    )
    .then((res) => res.result)
    .catch((err) => {
      throw prefixMessage(err, 'Error while refreshing tokens: ')
      throw prefixMessage(
        err,
        getTokenRequestErrorPrefix(
          'refreshing tokens',
          'refreshTokensForViya',
          ServerType.SasViya,
          url,
          formData,
          headers,
          clientId,
          clientSecret
        )
      )
    })

  return authResponse

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils'
import { AuthConfig } from '@sasjs/utils/types'
import { generateToken, mockSasjsAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs'
@@ -55,7 +55,7 @@ describe('getAccessTokenForSasjs', () => {
        authConfig.refresh_token
      ).catch((e: any) => e)

      expect(error).toContain('Error while getting access token')
      expect(error).toContain('Error while fetching access token')
    })
  })

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils'
import { AuthConfig } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
@@ -66,7 +66,7 @@ describe('getAccessTokenForViya', () => {
        authConfig.refresh_token
      ).catch((e: any) => e)

      expect(error).toContain('Error while getting access token')
      expect(error).toContain('Error while fetching access token')
    })
  })

81
src/auth/spec/getTokenRequestErrorPrefix.spec.ts
Normal file
@@ -0,0 +1,81 @@
import { ServerType } from '@sasjs/utils/types'
import { getTokenRequestErrorPrefix } from '../getTokenRequestErrorPrefix'

describe('getTokenRequestErrorPrefix', () => {
  it('should return error prefix', () => {
    // INFO: Viya with only required attributes
    let operation: 'fetching access token' = 'fetching access token'
    const funcName = 'testFunc'
    const url = '/SASjsApi/auth/token'

    let expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.

Response from Viya is below.
`

    expect(
      getTokenRequestErrorPrefix(operation, funcName, ServerType.SasViya, url)
    ).toEqual(expectedPrefix)

    // INFO: Sasjs with data and headers
    const data = {
      grant_type: 'authorization_code',
      code: 'testCode'
    }
    const headers = {
      Authorization: 'Basic test=',
      Accept: 'application/json'
    }

    expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.
Payload:
${JSON.stringify(data, null, 2)}
Headers:
${JSON.stringify(headers, null, 2)}

Response from Sasjs is below.
`

    expect(
      getTokenRequestErrorPrefix(
        operation,
        funcName,
        ServerType.Sasjs,
        url,
        data,
        headers
      )
    ).toEqual(expectedPrefix)

    // INFO: Viya with all attributes
    const clientId = 'testId'
    const clientSecret = 'testSecret'

    expectedPrefix = `Error while ${operation} from ${url}
Thrown by the @sasjs/adapter ${funcName} function.
Payload:
${JSON.stringify(data, null, 2)}
Headers:
${JSON.stringify(headers, null, 2)}
ClientId: ${clientId}
ClientSecret: ${clientSecret}

Response from Viya is below.
`

    expect(
      getTokenRequestErrorPrefix(
        operation,
        funcName,
        ServerType.SasViya,
        url,
        data,
        headers,
        clientId,
        clientSecret
      )
    ).toEqual(expectedPrefix)
  })
})
@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils'
import { AuthConfig } from '@sasjs/utils/types'
import * as refreshTokensModule from '../refreshTokensForViya'
import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens'

@@ -1,6 +1,8 @@
import { ServerType } from '@sasjs/utils/types'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForSasjs } from '../refreshTokensForSasjs'
import { getTokenRequestErrorPrefixResponse } from '../getTokenRequestErrorPrefix'

const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()

@@ -38,9 +40,9 @@ describe('refreshTokensForSasjs', () => {
    const error = await refreshTokensForSasjs(
      requestClient,
      refresh_token
    ).catch((e: any) => e)
    ).catch((e: any) => getTokenRequestErrorPrefixResponse(e, ServerType.Sasjs))

    expect(error).toEqual(`Error while refreshing tokens: ${tokenError}`)
    expect(error).toEqual(tokenError)
  })
})

@@ -1,9 +1,10 @@
import { AuthConfig } from '@sasjs/utils'
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForViya } from '../refreshTokensForViya'
import * as IsNodeModule from '../../utils/isNode'
import { getTokenRequestErrorPrefixResponse } from '../getTokenRequestErrorPrefix'

const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()

@@ -67,9 +68,11 @@ describe('refreshTokensForViya', () => {
      authConfig.client,
      authConfig.secret,
      authConfig.refresh_token
    ).catch((e: any) => e)
    ).catch((e: any) =>
      getTokenRequestErrorPrefixResponse(e, ServerType.SasViya)
    )

    expect(error).toEqual(`Error while refreshing tokens: ${tokenError}`)
    expect(error).toEqual(tokenError)
  })

  it('should throw an error if environment is not Node', async () => {

@@ -1,5 +1,6 @@
import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'

/**
 * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@ export const generateFileUploadForm = (
    )
  }

  if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
    formData.append(name, csv, {
  // INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
  if (isNode()) {
    // INFO: environment is Node and formData is instance of NodeFormData
    ;(formData as NodeFormData).append(name, csv, {
      filename: `${name}.csv`,
      contentType: 'application/csv'
    })
  } else {
    // INFO: environment is Browser and formData is instance of FormData
    const file = new Blob([csv], {
      type: 'application/csv'
    })

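The INFO comment above is the key design note: in a bundled browser build an `instanceof` check cannot tell NodeFormData and the native FormData apart, so the code branches on `isNode()` instead. A short sketch of that pattern as it appears here (the helper name `appendCsv` is hypothetical):

```typescript
import * as NodeFormData from 'form-data'
import { isNode } from '../utils'

// Hypothetical helper mirroring the branching above: choose the append
// signature based on the runtime environment rather than on instanceof.
const appendCsv = (
  formData: FormData | NodeFormData,
  name: string,
  csv: string
) => {
  if (isNode()) {
    // Node: the form-data package accepts an options object
    ;(formData as NodeFormData).append(name, csv, {
      filename: `${name}.csv`,
      contentType: 'application/csv'
    })
  } else {
    // Browser: wrap the CSV in a Blob and pass the file name separately
    const file = new Blob([csv], { type: 'application/csv' })
    ;(formData as FormData).append(name, file, `${name}.csv`)
  }
}
```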
@@ -1,4 +1,7 @@
import { generateFileUploadForm } from '../generateFileUploadForm'
import { convertToCSV } from '../../utils/convertToCsv'
import * as NodeFormData from 'form-data'
import * as isNodeModule from '../../utils/isNode'

describe('generateFileUploadForm', () => {
  beforeAll(() => {
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
    ;(global as any).Blob = BlobMock
  })

  it('should generate file upload form from data', () => {
    const formData = new FormData()
    const testTable = 'sometable'
    const testTableWithNullVars: { [key: string]: any } = {
      [testTable]: [
        { var1: 'string', var2: 232, nullvar: 'A' },
        { var1: 'string', var2: 232, nullvar: 'B' },
        { var1: 'string', var2: 232, nullvar: '_' },
        { var1: 'string', var2: 232, nullvar: 0 },
        { var1: 'string', var2: 232, nullvar: 'z' },
        { var1: 'string', var2: 232, nullvar: null }
      ],
      [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
    }
    const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
      Array.isArray(testTableWithNullVars[key])
    )[0]
  describe('browser', () => {
    afterAll(() => {
      jest.restoreAllMocks()
    })

    jest.spyOn(formData, 'append').mockImplementation(() => {})
    it('should generate file upload form from data', () => {
      const formData = new FormData()
      const testTable = 'sometable'
      const testTableWithNullVars: { [key: string]: any } = {
        [testTable]: [
          { var1: 'string', var2: 232, nullvar: 'A' },
          { var1: 'string', var2: 232, nullvar: 'B' },
          { var1: 'string', var2: 232, nullvar: '_' },
          { var1: 'string', var2: 232, nullvar: 0 },
          { var1: 'string', var2: 232, nullvar: 'z' },
          { var1: 'string', var2: 232, nullvar: null }
        ],
        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
      }
      const tableName = Object.keys(testTableWithNullVars).filter(
        (key: string) => Array.isArray(testTableWithNullVars[key])
      )[0]

      generateFileUploadForm(formData, testTableWithNullVars)
      jest.spyOn(formData, 'append').mockImplementation(() => {})
      jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)

      expect(formData.append).toHaveBeenCalledOnce()
      expect(formData.append).toHaveBeenCalledWith(
        tableName,
        {},
        `${tableName}.csv`
      )
      generateFileUploadForm(formData, testTableWithNullVars)

      expect(formData.append).toHaveBeenCalledOnce()
      expect(formData.append).toHaveBeenCalledWith(
        tableName,
        {},
        `${tableName}.csv`
      )
    })

    it('should throw an error if too large string was provided', () => {
      const formData = new FormData()
      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }

      expect(() => generateFileUploadForm(formData, data)).toThrow(
        new Error(
          'The max length of a string value in SASjs is 32765 characters.'
        )
      )
    })
  })

  it('should throw an error if too large string was provided', () => {
    const formData = new FormData()
    const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
  describe('node', () => {
    it('should generate file upload form from data', () => {
      const formData = new NodeFormData()
      const testTable = 'sometable'
      const testTableWithNullVars: { [key: string]: any } = {
        [testTable]: [
          { var1: 'string', var2: 232, nullvar: 'A' },
          { var1: 'string', var2: 232, nullvar: 'B' },
          { var1: 'string', var2: 232, nullvar: '_' },
          { var1: 'string', var2: 232, nullvar: 0 },
          { var1: 'string', var2: 232, nullvar: 'z' },
          { var1: 'string', var2: 232, nullvar: null }
        ],
        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
      }
      const tableName = Object.keys(testTableWithNullVars).filter(
        (key: string) => Array.isArray(testTableWithNullVars[key])
      )[0]
      const csv = convertToCSV(testTableWithNullVars, tableName)

    expect(() => generateFileUploadForm(formData, data)).toThrow(
      new Error(
        'The max length of a string value in SASjs is 32765 characters.'
      jest.spyOn(formData, 'append').mockImplementation(() => {})

      generateFileUploadForm(formData, testTableWithNullVars)

      expect(formData.append).toHaveBeenCalledOnce()
      expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
        contentType: 'application/csv',
        filename: `${tableName}.csv`
      })
    })

    it('should throw an error if too large string was provided', () => {
      const formData = new NodeFormData()
      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }

      expect(() => generateFileUploadForm(formData, data)).toThrow(
        new Error(
          'The max length of a string value in SASjs is 32765 characters.'
        )
      )
      )
    })
  })
})

@@ -1,8 +1,7 @@
import {
  getValidJson,
  parseSasViyaDebugResponse,
  parseWeboutResponse,
  SASJS_LOGS_SEPARATOR
  parseWeboutResponse
} from '../utils'
import { UploadFile } from '../types/UploadFile'
import {
@@ -93,15 +92,24 @@ export class FileUploader extends BaseJobExecutor {
          this.requestClient,
          config.serverUrl
        )

        break
      case ServerType.Sas9:
        jsonResponse =
          typeof res.result === 'string'
            ? parseWeboutResponse(res.result, uploadUrl)
            : res.result

        break
      case ServerType.Sasjs:
        jsonResponse =
          typeof res.result === 'string'
            ? getValidJson(res.result)
            : res.result

        break
    }
  } else if (this.serverType !== ServerType.Sasjs) {
  } else {
    jsonResponse =
      typeof res.result === 'string'
        ? getValidJson(res.result)

@@ -10,8 +10,8 @@ import {
  LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'

import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'

import {
  isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
     * Use the available form data object (FormData in Browser, NodeFormData in
     * Node)
     */
    let formData =
      typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
    let formData = getFormData()

    if (data) {
      // file upload approach
@@ -93,8 +92,10 @@ export class SasjsJobExecutor extends BaseJobExecutor {
        )
      }

      const { result } = res.result
      if (result && result.trim()) res.result = getValidJson(result)
      const { result } = res

      if (result && typeof result === 'string' && result.trim())
        res.result = getValidJson(result)

      this.requestClient!.appendRequest(res, sasJob, config.debug)

@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
  isRelativePath,
  parseSasViyaDebugResponse,
  appendExtraResponseAttributes
  appendExtraResponseAttributes,
  parseWeboutResponse,
  getFormData
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'

export interface WaitingRequstPromise {
  promise: Promise<any> | null
@@ -112,8 +113,7 @@ export class WebJobExecutor extends BaseJobExecutor {
     * Use the available form data object (FormData in Browser, NodeFormData in
     * Node)
     */
    let formData =
      typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
    let formData = getFormData()

    if (data) {
      const stringifiedData = JSON.stringify(data)

@@ -233,7 +233,8 @@ export default class SASjs {
      this.requestClient = new RequestClient(
        this.sasjsConfig.serverUrl,
        this.sasjsConfig.httpsAgentOptions,
        this.sasjsConfig.requestHistoryLimit
        this.sasjsConfig.requestHistoryLimit,
        this.sasjsConfig.verbose
      )
    } else {
      this.requestClient.setConfig(

@@ -11,7 +11,6 @@ import {
import { RequestClient } from '../../request/RequestClient'
import {
  isRelativePath,
  parseSasViyaDebugResponse,
  appendExtraResponseAttributes,
  convertToCSV
} from '../../utils'

@@ -20,6 +20,7 @@ import {
  createAxiosInstance
} from '../utils'
import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError'
import { inspect } from 'util'

export interface HttpClient {
  get<T>(
@@ -59,6 +60,7 @@ export interface HttpClient {
export class RequestClient implements HttpClient {
  private requests: SASjsRequest[] = []
  private requestsLimit: number = 10
  private httpInterceptor?: number

  protected csrfToken: CsrfToken = { headerName: '', value: '' }
  protected fileUploadCsrfToken: CsrfToken | undefined
@@ -67,10 +69,14 @@ export class RequestClient implements HttpClient {
  constructor(
    protected baseUrl: string,
    httpsAgentOptions?: https.AgentOptions,
    requestsLimit?: number
    requestsLimit?: number,
    verboseMode?: boolean
  ) {
    this.createHttpClient(baseUrl, httpsAgentOptions)

    if (requestsLimit) this.requestsLimit = requestsLimit

    if (verboseMode) this.enableVerboseMode()
  }

  public setConfig(baseUrl: string, httpsAgentOptions?: https.AgentOptions) {
@@ -180,6 +186,7 @@ export class RequestClient implements HttpClient {
      responseType: contentType === 'text/plain' ? 'text' : 'json',
      withCredentials: true
    }

    if (contentType === 'text/plain') {
      requestConfig.transformResponse = undefined
    }
@@ -389,6 +396,105 @@ export class RequestClient implements HttpClient {
    })
  }

  /**
   * Adds colors to the string.
   * @param str - string to be prettified.
   * @returns - prettified string
   */
  private prettifyString = (str: any) => inspect(str, { colors: true })

  /**
   * Formats HTTP request/response body.
   * @param body - HTTP request/response body.
   * @returns - formatted string.
   */
  private parseInterceptedBody = (body: any) => {
    if (!body) return ''

    let parsedBody

    // Tries to parse body into JSON object.
    if (typeof body === 'string') {
      try {
        parsedBody = JSON.parse(body)
      } catch (error) {
        parsedBody = body
      }
    } else {
      parsedBody = body
    }

    const bodyLines = this.prettifyString(parsedBody).split('\n')

    // Leaves first 50 lines
    if (bodyLines.length > 51) {
      bodyLines.splice(50)
      bodyLines.push('...')
    }

    return bodyLines.join('\n')
  }

  private defaultInterceptionCallBack = (response: AxiosResponse) => {
    const { status, config, request, data: resData } = response
    const { data: reqData } = config
    const { _header: reqHeaders, res } = request
    const { rawHeaders } = res

    // Converts an array of strings into a single string with the following format:
    // <headerName>: <headerValue>
    const resHeaders = rawHeaders.reduce(
      (acc: string, value: string, i: number) => {
        if (i % 2 === 0) {
          acc += `${i === 0 ? '' : '\n'}${value}`
        } else {
          acc += `: ${value}`
        }

        return acc
      },
      ''
    )

    const parsedResBody = this.parseInterceptedBody(resData)

    // HTTP response summary.
    process.logger?.info(`HTTP Request (first 50 lines):
${reqHeaders}${this.parseInterceptedBody(reqData)}

HTTP Response Code: ${this.prettifyString(status)}

HTTP Response (first 50 lines):
${resHeaders}${parsedResBody ? `\n\n${parsedResBody}` : ''}
`)

    return response
  }

  /**
   * Turns on verbose mode to log every HTTP response.
   * @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
   * @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
   */
  public enableVerboseMode = (
    successCallBack = this.defaultInterceptionCallBack,
    errorCallBack = this.defaultInterceptionCallBack
  ) => {
    this.httpInterceptor = this.httpClient.interceptors.response.use(
      successCallBack,
      errorCallBack
    )
  }

  /**
   * Turns off verbose mode to log every HTTP response.
   */
  public disableVerboseMode = () => {
    if (this.httpInterceptor) {
      this.httpClient.interceptors.response.eject(this.httpInterceptor)
    }
  }

  protected getHeaders = (
    accessToken: string | undefined,
    contentType: string

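Taken together, the new constructor flag and the two public methods above give callers programmatic control over verbose logging. A minimal usage sketch, assuming an illustrative server URL and a caller that only wants the status line logged:

```typescript
import { AxiosResponse } from 'axios'
import { RequestClient } from './RequestClient'

// verboseMode = true registers the default interceptor at construction time.
const client = new RequestClient('https://sas.server.io', undefined, 10, true)

// A custom callback pair can replace the default summary; each callback
// must return the response so the axios chain keeps working.
const logStatusOnly = (response: AxiosResponse) => {
  console.log(`HTTP ${response.status} ${response.config.url}`)
  return response
}

client.disableVerboseMode() // eject the interceptor added by the constructor
client.enableVerboseMode(logStatusOnly, logStatusOnly)
```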
@@ -1,8 +1,7 @@
import { RequestClient } from './RequestClient'
import { AxiosResponse } from 'axios'
import { SASJS_LOGS_SEPARATOR } from '../utils'

interface SasjsParsedResponse<T> {
export interface SasjsParsedResponse<T> {
  result: T
  log: string
  etag: string
@@ -45,13 +44,30 @@ export class SasjsRequestClient extends RequestClient {
      }
    } catch {
      if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
        const splittedResponse = response.data.split(SASJS_LOGS_SEPARATOR)
        const { data } = response
        const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)

        webout = splittedResponse[0]
        if (webout) parsedResponse = webout
        webout = splittedResponse.splice(0, 1)[0]
        if (webout !== undefined) parsedResponse = webout

        log = splittedResponse[1]
        printOutput = splittedResponse[2]
        // log can contain nested logs
        const logs = splittedResponse.splice(0, splittedResponse.length - 1)

        // tests if string ends with SASJS_LOGS_SEPARATOR
        const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)

        // at this point splittedResponse can contain only one item
        const lastChunk = splittedResponse[0]

        if (lastChunk) {
          // if the last chunk doesn't end with SASJS_LOGS_SEPARATOR, then it is a printOutput
          // else the last chunk is part of the log and has to be joined
          if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
          else if (logs.length > 1) logs.push(lastChunk)
        }

        // join logs into single log with SASJS_LOGS_SEPARATOR
        log = logs.join(SASJS_LOGS_SEPARATOR)
      } else {
        parsedResponse = response.data
      }
@@ -59,7 +75,7 @@ export class SasjsRequestClient extends RequestClient {

    const returnResult: SasjsParsedResponse<T> = {
      result: parsedResponse as T,
      log,
      log: log || '',
      etag,
      status: response.status
    }
@@ -69,3 +85,6 @@ export class SasjsRequestClient extends RequestClient {
    return returnResult
  }
}

export const SASJS_LOGS_SEPARATOR =
  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'

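The splice-based parsing above replaces the old fixed-index lookup so that a log which itself contains nested SASJS_LOGS_SEPARATOR-wrapped logs is reassembled instead of truncated. A contrived sketch of the core split logic under a simple three-part body (webout, log, print output); the import path is illustrative:

```typescript
import { SASJS_LOGS_SEPARATOR } from './SasjsRequestClient'

// Contrived response body: webout, then a log, then print output.
const body = `webout${SASJS_LOGS_SEPARATOR}the SAS log${SASJS_LOGS_SEPARATOR}print output`

const chunks = body.split(SASJS_LOGS_SEPARATOR)
const webout = chunks.splice(0, 1)[0] // 'webout'
const logs = chunks.splice(0, chunks.length - 1) // ['the SAS log']
// The trailing chunk is print output only when the body does not end with the separator.
const printOutput = body.endsWith(SASJS_LOGS_SEPARATOR) ? undefined : chunks[0]
const log = logs.join(SASJS_LOGS_SEPARATOR) // nested separators are preserved
```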
172
src/request/spec/SasjsRequestClient.spec.ts
Normal file
@@ -0,0 +1,172 @@
import {
  SASJS_LOGS_SEPARATOR,
  SasjsRequestClient,
  SasjsParsedResponse
} from '../SasjsRequestClient'
import { AxiosResponse } from 'axios'

describe('SasjsRequestClient', () => {
  const requestClient = new SasjsRequestClient('')
  const etag = 'etag'
  const status = 200

  const webout = `hello`
  const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00


PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html



NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds


`
  const printOutput = 'printOutPut'

  describe('parseResponse', () => {})

  it('should parse response with 1 log', () => {
    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${log}
`,
      etag,
      status
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with 1 log and printOutput', () => {
    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${log}
`,
      etag,
      status,
      printOutput: `
${printOutput}`
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with nested logs', () => {
    const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`

    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${logWithNestedLog}
`,
      etag,
      status
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with nested logs and printOutput', () => {
    const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`

    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${logWithNestedLog}
`,
      etag,
      status,
      printOutput: `
${printOutput}`
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })
})

describe('SASJS_LOGS_SEPARATOR', () => {
  it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
    expect(SASJS_LOGS_SEPARATOR).toEqual(
      'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
    )
  })
})
@@ -2,7 +2,7 @@ import * as pem from 'pem'
import * as http from 'http'
import * as https from 'https'
import { app, mockedAuthResponse } from './SAS_server_app'
import { ServerType } from '@sasjs/utils'
import { ServerType } from '@sasjs/utils/types'
import SASjs from '../SASjs'
import * as axiosModules from '../utils/createAxiosInstance'
import {
@@ -11,8 +11,10 @@ import {
  NotFoundError,
  InternalServerError
} from '../types/errors'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
import { AxiosResponse } from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'

const axiosActual = jest.requireActual('axios')

@@ -25,16 +27,6 @@ jest
const PORT = 8000
const SERVER_URL = `https://localhost:${PORT}/`

const ERROR_MESSAGES = {
  selfSigned: 'self signed certificate',
  CCA: 'unable to verify the first certificate'
}

const incorrectAuthCodeErr = {
  error: 'unauthorized',
  error_description: 'Bad credentials'
}

describe('RequestClient', () => {
  let server: http.Server

@@ -66,14 +58,199 @@ describe('RequestClient', () => {
  })

  it('should response the POST method with Unauthorized', async () => {
    await expect(
      adapter.getAccessToken('clientId', 'clientSecret', 'incorrect')
    ).rejects.toEqual(
      prefixMessage(
        new LoginRequiredError(incorrectAuthCodeErr),
        'Error while getting access token. '
    const expectedError = new LoginRequiredError({
      error: 'unauthorized',
      error_description: 'Bad credentials'
    })

    const rejectionErrorMessage = await adapter
      .getAccessToken('clientId', 'clientSecret', 'incorrect')
      .catch((err) =>
        getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
      )
    )

    expect(rejectionErrorMessage).toEqual(expectedError.message)
  })

  describe('defaultInterceptionCallBack', () => {
    beforeAll(() => {
      ;(process as any).logger = new Logger(LogLevel.Off)
    })

    it('should log parsed response', () => {
      jest.spyOn((process as any).logger, 'info')

      const status = 200
      const reqData = `{
name: 'test_job',
description: 'Powered by SASjs',
code: ['test_code'],
variables: {
SYS_JES_JOB_URI: '',
_program: '/Public/sasjs/jobs/jobs/test_job'
},
arguments: {
_contextName: 'SAS Job Execution compute context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
}`
      const resData = {
        id: 'id_string',
        name: 'name_string',
        uri: 'uri_string',
        createdBy: 'createdBy_string',
        code: 'TEST CODE',
        links: [
          {
            method: 'method_string',
            rel: 'state',
            href: 'state_href_string',
            uri: 'uri_string',
            type: 'type_string'
          },
          {
            method: 'method_string',
            rel: 'state',
            href: 'state_href_string',
            uri: 'uri_string',
            type: 'type_string'
          },
          {
            method: 'method_string',
            rel: 'state',
            href: 'state_href_string',
            uri: 'uri_string',
            type: 'type_string'
          },
          {
            method: 'method_string',
            rel: 'state',
            href: 'state_href_string',
            uri: 'uri_string',
            type: 'type_string'
          },
          {
            method: 'method_string',
            rel: 'state',
            href: 'state_href_string',
            uri: 'uri_string',
            type: 'type_string'
          },
          {
            method: 'method_string',
            rel: 'self',
            href: 'self_href_string',
            uri: 'uri_string',
            type: 'type_string'
          }
        ],
        results: { '_webout.json': '_webout.json_string' },
        logStatistics: {
          lineCount: 1,
          modifiedTimeStamp: 'modifiedTimeStamp_string'
        }
      }
      const reqHeaders = `POST https://sas.server.com/compute/sessions/session_id/jobs HTTP/1.1
Accept: application/json
Content-Type: application/json
User-Agent: axios/0.27.2
Content-Length: 334
host: sas.server.io
Connection: close
`
      const resHeaders = ['content-type', 'application/json']
      const mockedResponse: AxiosResponse = {
        data: resData,
        status,
        statusText: '',
        headers: {},
        config: { data: reqData },
        request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
      }

      const requestClient = new RequestClient('')
      requestClient['defaultInterceptionCallBack'](mockedResponse)

      const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}

HTTP Response Code: ${requestClient['prettifyString'](status)}

HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
        requestClient['parseInterceptedBody'](resData)
          ? `\n\n${requestClient['parseInterceptedBody'](resData)}`
          : ''
      }
`

      expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
    })
  })

  describe('enableVerboseMode', () => {
    it('should add defaultInterceptionCallBack functions to response interceptors', () => {
      const requestClient = new RequestClient('')
      const interceptorSpy = jest.spyOn(
        requestClient['httpClient'].interceptors.response,
        'use'
      )

      requestClient.enableVerboseMode()

      expect(interceptorSpy).toHaveBeenCalledWith(
        requestClient['defaultInterceptionCallBack'],
        requestClient['defaultInterceptionCallBack']
      )
    })

    it('should add callback functions to response interceptors', () => {
      const requestClient = new RequestClient('')
      const interceptorSpy = jest.spyOn(
        requestClient['httpClient'].interceptors.response,
        'use'
      )

      const successCallback = (response: AxiosResponse) => {
        console.log('success')

        return response
      }
      const failureCallback = (response: AxiosResponse) => {
        console.log('failure')

        return response
      }

      requestClient.enableVerboseMode(successCallback, failureCallback)

      expect(interceptorSpy).toHaveBeenCalledWith(
        successCallback,
        failureCallback
      )
    })
  })

  describe('disableVerboseMode', () => {
    it('should eject interceptor', () => {
      const requestClient = new RequestClient('')

      const interceptorSpy = jest.spyOn(
        requestClient['httpClient'].interceptors.response,
        'eject'
      )

      const interceptorId = 100

      requestClient['httpInterceptor'] = interceptorId
      requestClient.disableVerboseMode()

      expect(interceptorSpy).toHaveBeenCalledWith(interceptorId)
    })
  })

  describe('handleError', () => {
@@ -209,15 +386,15 @@ describe('RequestClient - Self Signed Server', () => {
      serverType: ServerType.SasViya
    })

    await expect(
      adapterWithoutCertificate.getAccessToken(
        'clientId',
        'clientSecret',
        'authCode'
    const expectedError = 'self-signed certificate'

    const rejectionErrorMessage = await adapterWithoutCertificate
      .getAccessToken('clientId', 'clientSecret', 'authCode')
      .catch((err) =>
        getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
      )
    ).rejects.toThrow(
      `Error while getting access token. ${ERROR_MESSAGES.selfSigned}`
    )

    expect(rejectionErrorMessage).toEqual(expectedError)
  })

  it('should response the POST method using insecure flag', async () => {
@@ -247,14 +424,18 @@ describe('RequestClient - Self Signed Server', () => {
  })

  it('should response the POST method with Unauthorized', async () => {
    await expect(
      adapter.getAccessToken('clientId', 'clientSecret', 'incorrect')
    ).rejects.toEqual(
      prefixMessage(
        new LoginRequiredError(incorrectAuthCodeErr),
        'Error while getting access token. '
    const expectedError = new LoginRequiredError({
      error: 'unauthorized',
      error_description: 'Bad credentials'
    })

    const rejectionErrorMessage = await adapter
      .getAccessToken('clientId', 'clientSecret', 'incorrect')
      .catch((err) =>
        getTokenRequestErrorPrefixResponse(err.message, ServerType.SasViya)
      )
    )

    expect(rejectionErrorMessage).toEqual(expectedError.message)
  })
})

@@ -2,7 +2,7 @@ import { SessionManager } from '../SessionManager'
import { RequestClient } from '../request/RequestClient'
import * as dotenv from 'dotenv'
import axios from 'axios'
import { Logger, LogLevel } from '@sasjs/utils'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Session, Context } from '../types'

jest.mock('axios')

@@ -1,6 +1,9 @@
export interface PollOptions {
  maxPollCount: number
  pollInterval: number
  streamLog: boolean
  pollInterval: number // milliseconds
  pollStrategy?: PollStrategy
  streamLog?: boolean
  logFolderPath?: string
}

export type PollStrategy = PollOptions[]

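With `pollStrategy` now part of `PollOptions`, a caller can describe how polling should back off over time as an array of further `PollOptions`. A hedged example of what such a configuration might look like (the import path and numbers are illustrative):

```typescript
import { PollOptions, PollStrategy } from './PollOptions'

// Illustrative escalating strategy: poll quickly at first, then back off.
const pollStrategy: PollStrategy = [
  { maxPollCount: 200, pollInterval: 300 }, // intervals are in milliseconds
  { maxPollCount: 1000, pollInterval: 3000 },
  { maxPollCount: 2000, pollInterval: 30000 }
]

const pollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500,
  pollStrategy,
  streamLog: false
}
```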
@@ -45,6 +45,10 @@ export class SASjsConfig {
   * Set to `true` to enable additional debugging.
   */
  debug: boolean = true
  /**
   * Set to `true` to enable verbose mode that will log a summary of every HTTP response.
   */
  verbose?: boolean = true
  /**
   * The name of the compute context to use when calling the Viya services directly.
   * Example value: 'SAS Job Execution compute context'

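The new `verbose` flag sits alongside `debug` in `SASjsConfig` and is what `SASjs` forwards into the `RequestClient` constructor shown earlier. A minimal sketch of switching it on at startup (the server URL and appLoc are placeholders):

```typescript
import SASjs from '@sasjs/adapter'
import { ServerType } from '@sasjs/utils/types'

const adapter = new SASjs({
  appLoc: '/Public/app/myapp', // placeholder
  serverUrl: 'https://sas.server.io', // placeholder
  serverType: ServerType.SasViya,
  debug: false,
  verbose: true // log a summary of every HTTP response
})
```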
@@ -7,7 +7,7 @@ describe('RootFolderNotFoundError', () => {

    const error = new RootFolderNotFoundError(
      '/myProject',
      'https://analytium.co.uk',
      'https://sas.4gl.io',
      token
    )

@@ -19,7 +19,7 @@ describe('RootFolderNotFoundError', () => {
  it('when access token is not provided, error message should not contain scopes', () => {
    const error = new RootFolderNotFoundError(
      '/myProject',
      'https://analytium.co.uk'
      'https://sas.4gl.io'
    )

    expect(error).toBeInstanceOf(RootFolderNotFoundError)
@@ -30,7 +30,7 @@ describe('RootFolderNotFoundError', () => {

  it('should include the folder path and SASDrive URL in the message', () => {
    const folderPath = '/myProject'
    const serverUrl = 'https://analytium.co.uk'
    const serverUrl = 'https://sas.4gl.io'
    const error = new RootFolderNotFoundError(folderPath, serverUrl)

    expect(error).toBeInstanceOf(RootFolderNotFoundError)

@@ -1,2 +0,0 @@
export const SASJS_LOGS_SEPARATOR =
  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
5
src/utils/getFormData.ts
Normal file
@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'

export const getFormData = () =>
  isNode() ? new NodeFormData() : new FormData()
@@ -2,7 +2,6 @@ export * from './appendExtraResponseAttributes'
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './constants'
export * from './createAxiosInstance'
export * from './delay'
export * from './fetchLogByChunks'
@@ -20,3 +19,4 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
export * from './getFormData'

20
src/utils/spec/getFormData.spec.ts
Normal file
@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'

describe('getFormData', () => {
  it('should return NodeFormData if environment is Node', () => {
    jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)

    expect(getFormData() instanceof NodeFormData).toEqual(true)
  })

  it('should return FormData if environment is not Node', () => {
    const formDataMock = () => {}
    ;(global as any).FormData = formDataMock

    jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)

    expect(getFormData() instanceof FormData).toEqual(true)
  })
})