mirror of https://github.com/sasjs/adapter.git synced 2026-01-05 03:30:05 +00:00

Compare commits


26 Commits

Author SHA1 Message Date
Yury Shkoda 76bbb8acf2 chore(sasjsTests): debugging 2023-05-29 18:11:25 +03:00
Yury Shkoda 1ead483921 chore(sasjsTests): debugging 2023-05-29 18:00:57 +03:00
Yury Shkoda e2cb787f89 chore(sasjs-tests): trying to run only sendArrTests 2023-05-29 17:40:40 +03:00
Yury Shkoda 828aef1873 chore(sasjs-tests): debugging 2023-05-29 17:33:01 +03:00
Yury Shkoda f6ee1111c5 chore(cypress): debugging 2023-05-29 17:19:08 +03:00
Yury Shkoda 5f8750a8b6 chore(cypress): debugging 2023-05-29 16:54:21 +03:00
Yury Shkoda d027acacb6 chore(cypress): debugging 2023-05-29 16:45:32 +03:00
Yury Shkoda 3660b9127a chore(sasjsTests): debugging 2023-05-29 16:39:31 +03:00
Yury Shkoda 5ac0f12435 chore(sasjsTests): debugging 2023-05-29 16:28:59 +03:00
Yury Shkoda ee0c4b007b chore(sasjsTests): decreased defaultCommandTimeout for cypress 2023-05-29 15:41:15 +03:00
Yury Shkoda db4a4e6d57 chore(sasjsTests): trying @sasjs/adapter@4.3.5 2023-05-29 15:23:38 +03:00
Yury Shkoda eba30432dd chore(sasjsTests): debugging 2023-05-29 15:07:35 +03:00
Yury Shkoda 6b9cb3af5f chore(sasjsTests): added sleep step 2023-05-29 14:52:06 +03:00
Yury Shkoda afe612925e chore(sasjsTests): debugging 2023-05-29 14:40:26 +03:00
Yury Shkoda 1f9bed0625 chore(sasjsTest): debugging 2023-05-29 14:27:49 +03:00
Yury Shkoda 51fdea46fc chore(sasjs-tests): debugging 2023-05-29 14:07:28 +03:00
Yury Shkoda 007b00565c chore(sasjsTests): removed pm2 log 2023-05-29 13:54:28 +03:00
Yury Shkoda 38eef00216 chore(sasjsTests): using different user 2023-05-29 13:46:53 +03:00
Yury Shkoda f1c67432bf chore(sasjs-tests): debugging 2023-05-26 12:21:27 +03:00
Yury Shkoda 3041a0f4b1 chore(sasjs-tests): debugging 2023-05-26 12:07:42 +03:00
Yury Shkoda 6a5529f3f0 chore: debugging 2023-05-26 11:07:31 +03:00
Yury Shkoda 7758b78a88 chore: debugging sasjs 2023-05-26 10:11:02 +03:00
Yury Shkoda 09c1038cbd chore: debugging sasjs 2023-05-26 10:02:07 +03:00
Yury Shkoda 87e2449b6f chore: debugging sasjs 2023-05-26 09:52:37 +03:00
Yury Shkoda c6b927c525 test: updated unit tests related to tokens operations 2023-05-25 10:35:10 +03:00
Yury Shkoda 4b6445d524 feat: improved error message for requests related to tokens operations 2023-05-25 10:27:54 +03:00
57 changed files with 4847 additions and 5049 deletions

View File

@@ -14,7 +14,7 @@ What code changes have been made to achieve the intent.
 No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.
 - [ ] Unit tests coverage has been increased and a new threshold is set.
 - [ ] All `sasjs-cli` unit tests are passing (`npm test`).
 - (CI Runs this) All `sasjs-tests` are passing. If you want to run it manually (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
 - [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

View File

@@ -5,3 +5,7 @@ groups:
 - YuryShkoda
 - medjedovicm
 - sabhas
+ - name: SASjs QA
+ reviewers: 1
+ usernames:
+ - VladislavParhomchik

View File

@@ -10,4 +10,4 @@ jobs:
 - uses: actions/checkout@v2
 - uses: uesteibar/reviewer-lottery@v1
 with:
- repo-token: ${{ secrets.GITHUB_TOKEN }}
+ repo-token: ${{ secrets.GH_TOKEN }}

View File

@@ -12,7 +12,7 @@ jobs:
 strategy:
 matrix:
- node-version: [lts/hydrogen]
+ node-version: [lts/fermium]
 steps:
 - uses: actions/checkout@v2
@@ -22,17 +22,17 @@ jobs:
 node-version: ${{ matrix.node-version }}
 cache: npm
- - name: Check npm audit
- run: npm audit --production --audit-level=low
+ # - name: Check npm audit
+ # run: npm audit --production --audit-level=low
 - name: Install Dependencies
 run: npm ci
- - name: Check code style
- run: npm run lint
- - name: Run unit tests
- run: npm test
+ # - name: Check code style
+ # run: npm run lint
+ # - name: Run unit tests
+ # run: npm test
 - name: Build Package
 run: npm run package:lib
@@ -72,23 +72,27 @@ jobs:
 npm install -g replace-in-files-cli
 cd sasjs-tests
 replace-in-files --regex='"@sasjs/adapter".*' --replacement='"@sasjs/adapter":"latest",' ./package.json
- npm i
+ npm i --legacy-peer-deps
 replace-in-files --regex='"serverUrl".*' --replacement='"serverUrl":"${{ secrets.SASJS_SERVER_URL }}",' ./public/config.json
- replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME }}",' ./public/config.json
- replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./public/config.json
+ replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME_DEV }}",' ./public/config.json
+ replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./public/config.json
 replace-in-files --regex='"serverType".*' --replacement='"serverType":"SASJS",' ./public/config.json
- npm run update:adapter
+ # npm run update:adapter
 pm2 start --name sasjs-test npm -- start
+ cat ./public/config.json
+ cat ../cypress.json
 - name: Sleep for 10 seconds
- run: sleep 10s
- shell: bash
+ uses: jakejarvis/wait-action@master
+ with:
+ time: '10s'
 - name: Run cypress on sasjs
 run: |
+ ss -lntu
 replace-in-files --regex='"sasjsTestsUrl".*' --replacement='"sasjsTestsUrl":"http://localhost:3000",' ./cypress.json
- replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME }}",' ./cypress.json
- replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./cypress.json
+ replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME_DEV }}",' ./cypress.json
+ replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./cypress.json
 sh ./sasjs-tests/sasjs-cypress-run.sh ${{ secrets.MATRIX_TOKEN }} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
 # For some reason if coverage report action is run before other commands, those commands can't access the directories and files on which they depend on

View File

@@ -11,7 +11,7 @@ jobs:
 strategy:
 matrix:
- node-version: [lts/hydrogen]
+ node-version: [lts/fermium]
 steps:
 - name: Checkout
@@ -37,8 +37,8 @@ jobs:
 - name: Push generated docs
 uses: peaceiris/actions-gh-pages@v3
 with:
- github_token: ${{ secrets.GITHUB_TOKEN }}
+ github_token: ${{ secrets.GH_TOKEN }}
 publish_branch: gh-pages
 publish_dir: ./docs
 cname: adapter.sasjs.io

View File

@@ -14,7 +14,7 @@ jobs:
 strategy:
 matrix:
- node-version: [lts/hydrogen]
+ node-version: [lts/fermium]
 steps:
 - uses: actions/checkout@v2
@@ -36,7 +36,7 @@ jobs:
 - name: Semantic Release
 uses: cycjimmy/semantic-release-action@v3
 env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
 NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
 - name: Send Matrix message

View File

@@ -151,11 +151,7 @@ The `request()` method also has optional parameters such as a config object and
 The response object will contain returned tables and columns. Table names are always lowercase, and column names uppercase.
 The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.
-### Verbose Mode
-Set `verbose` to `true` to enable verbose mode that logs a summary of every HTTP response. Verbose mode can be disabled by calling `disableVerboseMode` method or enabled by `enableVerboseMode` method. Verbose mode also supports `bleached` mode that disables extra colors in req/res summary. To enable `bleached` verbose mode, pass `verbose` equal to `bleached` while instantiating an instance of `RequestClient` or to `setVerboseMode` method. Verbose mode can also be enabled/disabled by `startComputeJob` method.
 ### Session Manager
@@ -277,7 +273,6 @@ Configuration on the client side involves passing an object on startup, which ca
 * `serverType` - either `SAS9`, `SASVIYA` or `SASJS`. The `SASJS` server type is for use with [sasjs/server](https://github.com/sasjs/server).
 * `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
 * `debug` - if `true` then SAS Logs and extra debug information is returned.
-* `verbose` - optional, if `true` then a summary of every HTTP response is logged.
 * `loginMechanism` - either `Default` or `Redirected`. See [SAS Logon](#sas-logon) section.
 * `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
 * `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
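
To make the options above concrete, here is a minimal, illustrative sketch of instantiating the adapter with this client-side configuration. The values, job path and table name are placeholders (not taken from this diff), and the string literals for serverType stand in for the package's ServerType enum seen elsewhere in this changeset.

// Minimal sketch, assuming the standard @sasjs/adapter npm package.
import SASjs from '@sasjs/adapter'

const sasjs = new SASjs({
  appLoc: '/Public/app/readme',      // root folder under which the SAS services live
  serverType: 'SASJS',               // 'SAS9', 'SASVIYA' or 'SASJS'
  serverUrl: 'https://my-server.io', // optional when served from the SAS Web Server or in streaming mode
  debug: false                       // keep off for performance; true also returns the SAS log
})

// Table names in the response come back lowercase, column names uppercase.
sasjs.request('common/sendArr', { sometable: [{ COL1: 'value' }] }).then((response) => {
  console.log(response)
})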

View File

@@ -5,6 +5,9 @@ const testingFinishTimeout = Cypress.env('testingFinishTimeout')
context('sasjs-tests', function () { context('sasjs-tests', function () {
this.beforeAll(() => { this.beforeAll(() => {
cy.task('log', 'beforeAll')
cy.task('log', `sasjsTestsUrl: ${sasjsTestsUrl}`)
cy.visit(sasjsTestsUrl) cy.visit(sasjsTestsUrl)
}) })
@@ -13,35 +16,66 @@ context('sasjs-tests', function () {
}) })
it('Should have all tests successfull', (done) => { it('Should have all tests successfull', (done) => {
cy.task('log', `Should have all tests successfull`)
cy.get('body').then(($body) => { cy.get('body').then(($body) => {
cy.task('log', `22`)
cy.wait(1000).then(() => { cy.wait(1000).then(() => {
const startButton = $body.find( const startButton = $body.find(
'.ui.massive.icon.primary.left.labeled.button' '.ui.massive.icon.primary.left.labeled.button'
)[0] )[0]
// ui massive icon primary left labeled button
cy.task('log', `startButton: ${startButton}`)
if ( if (
!startButton || !startButton ||
(startButton && !Cypress.dom.isVisible(startButton)) (startButton && !Cypress.dom.isVisible(startButton))
) { ) {
cy.task('log', `34`)
cy.task('log', `username: ${username}`)
cy.task('log', `password: ${password}`)
const userNameInput = cy.get('input[placeholder="User Name"]')
const passwordInput = cy.get('input[placeholder="Password"]')
cy.task('log', `userNameInput: ${userNameInput}`)
cy.task('log', `passwordInput: ${passwordInput}`)
cy.get('input[placeholder="User Name"]').type(username) cy.get('input[placeholder="User Name"]').type(username)
cy.get('input[placeholder="Password"]').type(password) cy.get('input[placeholder="Password"]').type(password)
const submitBtn = cy.get('.submit-button')
cy.task('log', `submitBtn: ${submitBtn}`)
cy.get('.submit-button').click() cy.get('.submit-button').click()
} }
cy.get('input[placeholder="User Name"]', { timeout: 40000 }) cy.get('input[placeholder="User Name"]', { timeout: 40000 })
.should('not.exist') .should('not.exist')
.then(() => { .then(() => {
cy.task('log', `46`)
cy.get('.ui.massive.icon.primary.left.labeled.button') cy.get('.ui.massive.icon.primary.left.labeled.button')
.click() .click()
.then(() => { .then(() => {
cy.task('log', `50`)
const loadingButton = $body.find(
'.ui.massive.loading.primary.button'
)[0]
cy.task('log', `loadingButton: ${loadingButton}`)
cy.get('.ui.massive.loading.primary.button', { cy.get('.ui.massive.loading.primary.button', {
timeout: testingFinishTimeout timeout: testingFinishTimeout
}) })
.should('not.exist') .should('not.exist')
.then(() => { .then(() => {
cy.task('log', `56`)
cy.get('span.icon.failed') cy.get('span.icon.failed')
.should('not.exist') .should('not.exist')
.then(() => { .then(() => {
cy.task('log', `60`)
done() done()
}) })
}) })
@@ -51,46 +85,46 @@ context('sasjs-tests', function () {
}) })
}) })
it('Should have all tests successfull with debug on', (done) => { // it('Should have all tests successfull with debug on', (done) => {
cy.get('body').then(($body) => { // cy.get('body').then(($body) => {
cy.wait(1000).then(() => { // cy.wait(1000).then(() => {
const startButton = $body.find( // const startButton = $body.find(
'.ui.massive.icon.primary.left.labeled.button' // '.ui.massive.icon.primary.left.labeled.button'
)[0] // )[0]
if ( // if (
!startButton || // !startButton ||
(startButton && !Cypress.dom.isVisible(startButton)) // (startButton && !Cypress.dom.isVisible(startButton))
) { // ) {
cy.get('input[placeholder="User Name"]').type(username) // cy.get('input[placeholder="User Name"]').type(username)
cy.get('input[placeholder="Password"]').type(password) // cy.get('input[placeholder="Password"]').type(password)
cy.get('.submit-button').click() // cy.get('.submit-button').click()
} // }
cy.get('.ui.fitted.toggle.checkbox label') // cy.get('.ui.fitted.toggle.checkbox label')
.click() // .click()
.then(() => { // .then(() => {
cy.get('input[placeholder="User Name"]', { timeout: 40000 }) // cy.get('input[placeholder="User Name"]', { timeout: 40000 })
.should('not.exist') // .should('not.exist')
.then(() => { // .then(() => {
cy.get('.ui.massive.icon.primary.left.labeled.button') // cy.get('.ui.massive.icon.primary.left.labeled.button')
.click() // .click()
.then(() => { // .then(() => {
cy.get('.ui.massive.loading.primary.button', { // cy.get('.ui.massive.loading.primary.button', {
timeout: testingFinishTimeout // timeout: testingFinishTimeout
}) // })
.should('not.exist') // .should('not.exist')
.then(() => { // .then(() => {
cy.get('span.icon.failed') // cy.get('span.icon.failed')
.should('not.exist') // .should('not.exist')
.then(() => { // .then(() => {
done() // done()
}) // })
}) // })
}) // })
}) // })
}) // })
}) // })
}) // })
}) // })
}) })

View File

@@ -39,4 +39,11 @@ module.exports = (on, config) => {
 return launchOptions
 }
 })
+ on('task', {
+ log(message) {
+ console.log(message)
+ return null
+ }
+ })
 }
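
The task added above is the plumbing behind the cy.task('log', ...) calls introduced in the spec earlier in this diff: Cypress tasks run in the Node process, so this is how browser-side test code gets messages into the CI runner's console. A compressed sketch of the pairing follows; the file locations assume the standard Cypress 7 layout.

// cypress/plugins/index.js — register the task (mirrors the hunk above)
module.exports = (on, config) => {
  on('task', {
    log(message) {
      console.log(message) // printed by the Node process running Cypress, i.e. visible in CI logs
      return null          // a task handler must return a value (or null)
    }
  })
}

// In a spec file, the task is then invoked as:
// cy.task('log', `sasjsTestsUrl: ${sasjsTestsUrl}`)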

View File

@@ -1,7 +1,7 @@
 <!DOCTYPE html>
 <html>
 <head>
-<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@4"></script>
+<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@1"></script>
 <script>
 var sasJs = new SASjs.default({
 appLoc: "/Public/app/readme"

View File

@@ -41,14 +41,7 @@ module.exports = {
 // ],
 // An object that configures minimum threshold enforcement for coverage results
- coverageThreshold: {
- global: {
- statements: 63.61,
- branches: 44.72,
- functions: 53.94,
- lines: 64.07
- }
- },
+ // coverageThreshold: undefined,
 // A path to a custom dependency extractor
 // dependencyExtractor: undefined,

package-lock.json generated
View File

@@ -13,7 +13,7 @@
"axios-cookiejar-support": "1.0.1", "axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0", "form-data": "4.0.0",
"https": "1.0.0", "https": "1.0.0",
"tough-cookie": "4.1.3" "tough-cookie": "4.0.0"
}, },
"devDependencies": { "devDependencies": {
"@cypress/webpack-preprocessor": "5.9.1", "@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
"@types/jest": "27.4.0", "@types/jest": "27.4.0",
"@types/mime": "2.0.3", "@types/mime": "2.0.3",
"@types/pem": "1.9.6", "@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.2", "@types/tough-cookie": "4.0.1",
"copyfiles": "2.4.1", "copyfiles": "2.4.1",
"cp": "0.2.0", "cp": "0.2.0",
"cypress": "7.7.0", "cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
"dev": true "dev": true
}, },
"node_modules/@types/tough-cookie": { "node_modules/@types/tough-cookie": {
"version": "4.0.2", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==" "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
}, },
"node_modules/@types/yargs": { "node_modules/@types/yargs": {
"version": "16.0.5", "version": "16.0.5",
@@ -14110,11 +14110,6 @@
"node": ">=0.4.x" "node": ">=0.4.x"
} }
}, },
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"node_modules/queue-microtask": { "node_modules/queue-microtask": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14462,11 +14457,6 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"node_modules/resolve": { "node_modules/resolve": {
"version": "1.22.1", "version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15712,23 +15702,22 @@
} }
}, },
"node_modules/tough-cookie": { "node_modules/tough-cookie": {
"version": "4.1.3", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"dependencies": { "dependencies": {
"psl": "^1.1.33", "psl": "^1.1.33",
"punycode": "^2.1.1", "punycode": "^2.1.1",
"universalify": "^0.2.0", "universalify": "^0.1.2"
"url-parse": "^1.5.3"
}, },
"engines": { "engines": {
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/tough-cookie/node_modules/universalify": { "node_modules/tough-cookie/node_modules/universalify": {
"version": "0.2.0", "version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==", "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"engines": { "engines": {
"node": ">= 4.0.0" "node": ">= 4.0.0"
} }
@@ -16362,15 +16351,6 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true "dev": true
}, },
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/url/node_modules/punycode": { "node_modules/url/node_modules/punycode": {
"version": "1.3.2", "version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -19556,9 +19536,9 @@
"dev": true "dev": true
}, },
"@types/tough-cookie": { "@types/tough-cookie": {
"version": "4.0.2", "version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz", "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw==" "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
}, },
"@types/yargs": { "@types/yargs": {
"version": "16.0.5", "version": "16.0.5",
@@ -27572,11 +27552,6 @@
"integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==", "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
"dev": true "dev": true
}, },
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"queue-microtask": { "queue-microtask": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27858,11 +27833,6 @@
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true "dev": true
}, },
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"resolve": { "resolve": {
"version": "1.22.1", "version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28829,20 +28799,19 @@
"dev": true "dev": true
}, },
"tough-cookie": { "tough-cookie": {
"version": "4.1.3", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==", "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"requires": { "requires": {
"psl": "^1.1.33", "psl": "^1.1.33",
"punycode": "^2.1.1", "punycode": "^2.1.1",
"universalify": "^0.2.0", "universalify": "^0.1.2"
"url-parse": "^1.5.3"
}, },
"dependencies": { "dependencies": {
"universalify": { "universalify": {
"version": "0.2.0", "version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==" "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
} }
} }
}, },
@@ -29300,15 +29269,6 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true "dev": true
}, },
"url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"util": { "util": {
"version": "0.12.5", "version": "0.12.5",
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",

View File

@@ -49,7 +49,7 @@
 "@types/jest": "27.4.0",
 "@types/mime": "2.0.3",
 "@types/pem": "1.9.6",
- "@types/tough-cookie": "4.0.2",
+ "@types/tough-cookie": "4.0.1",
 "copyfiles": "2.4.1",
 "cp": "0.2.0",
 "cypress": "7.7.0",
@@ -82,6 +82,6 @@
 "axios-cookiejar-support": "1.0.1",
 "form-data": "4.0.0",
 "https": "1.0.0",
- "tough-cookie": "4.1.3"
+ "tough-cookie": "4.0.0"
 }
 }

File diff suppressed because it is too large.

View File

@@ -4,14 +4,15 @@
 "homepage": ".",
 "private": true,
 "dependencies": {
+ "@sasjs/adapter": "4.3.5",
 "@sasjs/test-framework": "1.5.7",
 "@types/jest": "^26.0.20",
 "@types/node": "^14.14.41",
- "@types/react": "^16.0.1",
- "@types/react-dom": "^16.0.0",
+ "@types/react": "^17.0.1",
+ "@types/react-dom": "^17.0.0",
 "@types/react-router-dom": "^5.1.7",
- "react": "^16.0.1",
- "react-dom": "^16.0.1",
+ "react": "^17.0.1",
+ "react-dom": "^17.0.1",
 "react-router-dom": "^5.2.0",
 "react-scripts": "^5.0.1",
 "typescript": "^4.1.3"
@@ -21,7 +22,7 @@
 "build": "react-scripts build",
 "test": "react-scripts test",
 "eject": "react-scripts eject",
- "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
+ "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz --legacy-peer-deps",
 "deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
 "deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
 "deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
@@ -42,6 +43,6 @@
 ]
 },
 "devDependencies": {
- "node-sass": "9.0.0"
+ "node-sass": "7.0.3"
 }
 }

View File

@@ -2,7 +2,7 @@
 "userName": "",
 "password": "",
 "sasJsConfig": {
- "serverUrl": "",
+ "serverUrl": "https://sas9.4gl.io",
 "appLoc": "/Public/app/adapter-tests/services",
 "serverType": "SASJS",
 "debug": false,

View File

@@ -1,7 +1,9 @@
 {
 "$schema": "https://cli.sasjs.io/sasjsconfig-schema.json",
 "serviceConfig": {
- "serviceFolders": ["sasjs/common"]
+ "serviceFolders": [
+ "sasjs/common"
+ ]
 },
 "defaultTarget": "4gl",
 "targets": [
@@ -26,4 +28,4 @@
 }
 }
 ]
 }

View File

@@ -11,7 +11,7 @@ const Login = (): ReactElement<{}> => {
 const handleSubmit = useCallback(
 (e: any) => {
 e.preventDefault()
- appContext.adapter.logIn(username, password).then((res) => {
+ appContext.adapter.logIn(username, password).then((res: any) => {
 appContext.setIsLoggedIn(res.isLoggedIn)
 })
 },
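
For reference, the logIn call being retyped above resolves with an object whose isLoggedIn flag the test app stores in context. A minimal illustrative sketch outside React, with placeholder configuration and credentials:

import SASjs from '@sasjs/adapter'

// Placeholder config and credentials, for illustration only.
const adapter = new SASjs({ appLoc: '/Public/app/adapter-tests/services' })

adapter.logIn('someUser', 'somePassword').then((res: any) => {
  // The Login component above only needs this flag to flip the app state.
  console.log('isLoggedIn:', res.isLoggedIn)
})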

View File

@@ -25,16 +25,10 @@ import { prefixMessage } from '@sasjs/utils/error'
import { pollJobState } from './api/viya/pollJobState' import { pollJobState } from './api/viya/pollJobState'
import { getTokens } from './auth/getTokens' import { getTokens } from './auth/getTokens'
import { uploadTables } from './api/viya/uploadTables' import { uploadTables } from './api/viya/uploadTables'
import { executeOnComputeApi } from './api/viya/executeOnComputeApi' import { executeScript } from './api/viya/executeScript'
import { getAccessTokenForViya } from './auth/getAccessTokenForViya' import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
import { refreshTokensForViya } from './auth/refreshTokensForViya' import { refreshTokensForViya } from './auth/refreshTokensForViya'
interface JobExecutionResult {
result?: { result: object }
log?: string
error?: object
}
/** /**
* A client for interfacing with the SAS Viya REST API. * A client for interfacing with the SAS Viya REST API.
* *
@@ -276,7 +270,7 @@ export class SASViyaApiClient {
* @param debug - when set to true, the log will be returned. * @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code). * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session * @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts. * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job. * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables. * @param variables - an object that represents macro variables.
*/ */
@@ -293,7 +287,7 @@ export class SASViyaApiClient {
printPid = false, printPid = false,
variables?: MacroVar variables?: MacroVar
): Promise<any> { ): Promise<any> {
return executeOnComputeApi( return executeScript(
this.requestClient, this.requestClient,
this.sessionManager, this.sessionManager,
this.rootFolderName, this.rootFolderName,
@@ -627,7 +621,7 @@ export class SASViyaApiClient {
* @param accessToken - an optional access token for an authorized user. * @param accessToken - an optional access token for an authorized user.
* @param waitForResult - a boolean indicating if the function should wait for a result. * @param waitForResult - a boolean indicating if the function should wait for a result.
* @param expectWebout - a boolean indicating whether to expect a _webout response. * @param expectWebout - a boolean indicating whether to expect a _webout response.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts. * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job. * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables. * @param variables - an object that represents macro variables.
*/ */
@@ -738,13 +732,11 @@ export class SASViyaApiClient {
debug: boolean, debug: boolean,
data?: any, data?: any,
authConfig?: AuthConfig authConfig?: AuthConfig
): Promise<JobExecutionResult> { ) {
let access_token = (authConfig || {}).access_token let access_token = (authConfig || {}).access_token
if (authConfig) { if (authConfig) {
;({ access_token } = await getTokens(this.requestClient, authConfig)) ;({ access_token } = await getTokens(this.requestClient, authConfig))
} }
if (isRelativePath(sasJob) && !this.rootFolderName) { if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error( throw new Error(
'Relative paths cannot be used without specifying a root folder name.' 'Relative paths cannot be used without specifying a root folder name.'
@@ -757,7 +749,6 @@ export class SASViyaApiClient {
const fullFolderPath = isRelativePath(sasJob) const fullFolderPath = isRelativePath(sasJob)
? `${this.rootFolderName}/${folderPath}` ? `${this.rootFolderName}/${folderPath}`
: folderPath : folderPath
await this.populateFolderMap(fullFolderPath, access_token) await this.populateFolderMap(fullFolderPath, access_token)
const jobFolder = this.folderMap.get(fullFolderPath) const jobFolder = this.folderMap.get(fullFolderPath)
@@ -774,8 +765,9 @@ export class SASViyaApiClient {
files = await this.uploadTables(data, access_token) files = await this.uploadTables(data, access_token)
} }
if (!jobToExecute) throw new Error(`Job was not found.`) if (!jobToExecute) {
throw new Error(`Job was not found.`)
}
const jobDefinitionLink = jobToExecute?.links.find( const jobDefinitionLink = jobToExecute?.links.find(
(l) => l.rel === 'getResource' (l) => l.rel === 'getResource'
)?.href )?.href
@@ -815,19 +807,16 @@ export class SASViyaApiClient {
jobDefinition, jobDefinition,
arguments: jobArguments arguments: jobArguments
} }
const { result: postedJob } = await this.requestClient.post<Job>( const { result: postedJob } = await this.requestClient.post<Job>(
`${this.serverUrl}/jobExecution/jobs?_action=wait`, `${this.serverUrl}/jobExecution/jobs?_action=wait`,
postJobRequestBody, postJobRequestBody,
access_token access_token
) )
const jobStatus = await this.pollJobState(postedJob, authConfig).catch( const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
(err) => { (err) => {
throw prefixMessage(err, 'Error while polling job status. ') throw prefixMessage(err, 'Error while polling job status. ')
} }
) )
const { result: currentJob } = await this.requestClient.get<Job>( const { result: currentJob } = await this.requestClient.get<Job>(
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`, `${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
access_token access_token
@@ -838,7 +827,6 @@ export class SASViyaApiClient {
const resultLink = currentJob.results['_webout.json'] const resultLink = currentJob.results['_webout.json']
const logLink = currentJob.links.find((l) => l.rel === 'log') const logLink = currentJob.links.find((l) => l.rel === 'log')
if (resultLink) { if (resultLink) {
jobResult = await this.requestClient.get<any>( jobResult = await this.requestClient.get<any>(
`${this.serverUrl}${resultLink}/content`, `${this.serverUrl}${resultLink}/content`,
@@ -846,13 +834,11 @@ export class SASViyaApiClient {
'text/plain' 'text/plain'
) )
} }
if (debug && logLink) { if (debug && logLink) {
log = await this.requestClient log = await this.requestClient
.get<any>(`${this.serverUrl}${logLink.href}/content`, access_token) .get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
.then((res: any) => res.result.items.map((i: any) => i.line).join('\n')) .then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
} }
if (jobStatus === 'failed') { if (jobStatus === 'failed') {
throw new JobExecutionError( throw new JobExecutionError(
currentJob.error?.errorCode, currentJob.error?.errorCode,
@@ -860,16 +846,7 @@ export class SASViyaApiClient {
log log
) )
} }
return { result: jobResult?.result, log }
const executionResult: JobExecutionResult = {
result: jobResult?.result,
log
}
const { error } = currentJob
if (error) executionResult.error = error
return executionResult
} }
private async populateFolderMap(folderPath: string, accessToken?: string) { private async populateFolderMap(folderPath: string, accessToken?: string) {

View File

@@ -4,12 +4,7 @@ import {
UploadFile, UploadFile,
EditContextInput, EditContextInput,
PollOptions, PollOptions,
LoginMechanism, LoginMechanism
VerboseMode,
ErrorResponse,
LoginOptions,
LoginResult,
ExecutionQuery
} from './types' } from './types'
import { SASViyaApiClient } from './SASViyaApiClient' import { SASViyaApiClient } from './SASViyaApiClient'
import { SAS9ApiClient } from './SAS9ApiClient' import { SAS9ApiClient } from './SAS9ApiClient'
@@ -34,7 +29,8 @@ import {
Sas9JobExecutor, Sas9JobExecutor,
FileUploader FileUploader
} from './job-execution' } from './job-execution'
import { AxiosResponse, AxiosError } from 'axios' import { ErrorResponse } from './types/errors'
import { LoginOptions, LoginResult } from './types/Login'
interface ExecuteScriptParams { interface ExecuteScriptParams {
linesOfCode: string[] linesOfCode: string[]
@@ -161,23 +157,6 @@ export default class SASjs {
} }
} }
/**
* Executes job on SASJS server.
* @param query - an object containing job path and debug level.
* @param appLoc - an application path.
* @param authConfig - an object for authentication.
* @returns a promise that resolves into job execution result and log.
*/
public async executeJob(
query: ExecutionQuery,
appLoc: string,
authConfig?: AuthConfig
) {
this.isMethodSupported('executeScript', [ServerType.Sasjs])
return await this.sasJSApiClient?.executeJob(query, appLoc, authConfig)
}
/** /**
* Gets compute contexts. * Gets compute contexts.
* @param accessToken - an access token for an authorised user. * @param accessToken - an access token for an authorised user.
@@ -872,10 +851,9 @@ export default class SASjs {
* @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs. * @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
* The access token is not required when the user is authenticated via the browser. * The access token is not required when the user is authenticated via the browser.
* @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete. * @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts. * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job. * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables. * @param variables - an object that represents macro variables.
* @param verboseMode - boolean or a string equal to 'bleached' to enable verbose mode (log every HTTP response).
*/ */
public async startComputeJob( public async startComputeJob(
sasJob: string, sasJob: string,
@@ -885,8 +863,7 @@ export default class SASjs {
waitForResult?: boolean, waitForResult?: boolean,
pollOptions?: PollOptions, pollOptions?: PollOptions,
printPid = false, printPid = false,
variables?: MacroVar, variables?: MacroVar
verboseMode?: VerboseMode
) { ) {
config = { config = {
...this.sasjsConfig, ...this.sasjsConfig,
@@ -900,11 +877,6 @@ export default class SASjs {
) )
} }
if (verboseMode) {
this.requestClient?.setVerboseMode(verboseMode)
this.requestClient?.enableVerboseMode()
} else if (verboseMode === false) this.requestClient?.disableVerboseMode()
return this.sasViyaApiClient?.executeComputeJob( return this.sasViyaApiClient?.executeComputeJob(
sasJob, sasJob,
config.contextName, config.contextName,
@@ -998,8 +970,7 @@ export default class SASjs {
this.requestClient = new RequestClientClass( this.requestClient = new RequestClientClass(
this.sasjsConfig.serverUrl, this.sasjsConfig.serverUrl,
this.sasjsConfig.httpsAgentOptions, this.sasjsConfig.httpsAgentOptions,
this.sasjsConfig.requestHistoryLimit, this.sasjsConfig.requestHistoryLimit
this.sasjsConfig.verbose
) )
} else { } else {
this.requestClient.setConfig( this.requestClient.setConfig(
@@ -1163,31 +1134,4 @@ export default class SASjs {
) )
} }
} }
/**
* Enables verbose mode that will log a summary of every HTTP response.
* @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
* @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
*/
public enableVerboseMode(
successCallBack?: (response: AxiosResponse | AxiosError) => AxiosResponse,
errorCallBack?: (response: AxiosResponse | AxiosError) => AxiosResponse
) {
this.requestClient?.enableVerboseMode(successCallBack, errorCallBack)
}
/**
* Turns off verbose mode to log every HTTP response.
*/
public disableVerboseMode() {
this.requestClient?.disableVerboseMode()
}
/**
* Sets verbose mode.
* @param verboseMode - value of the verbose mode, can be true, false or bleached(without extra colors).
*/
public setVerboseMode = (verboseMode: VerboseMode) => {
this.requestClient?.setVerboseMode(verboseMode)
}
} }
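
The pollOptions object referenced in the JSDoc above controls how long and how often the adapter polls a submitted Viya job. A small illustrative sketch using the values from the doc comment follows; whether a PollOptions type is re-exported from the package root, and the exact startComputeJob argument order, are assumptions here.

// Values mirror the JSDoc example above; the names and job path are illustrative.
const pollOptions = {
  maxPollCount: 24 * 60 * 60, // give up after this many poll attempts
  pollInterval: 1000,         // milliseconds between state checks
  streamLog: false            // optionally stream the job log to disk (Node only)
}

// Hypothetical call shape — passed through startComputeJob so long-running jobs keep being polled:
// sasjs.startComputeJob('/Public/app/myjob', tables, config, authConfig, true, pollOptions)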

View File

@@ -12,15 +12,11 @@ import { RequestClient } from '../../request/RequestClient'
import { SessionManager } from '../../SessionManager' import { SessionManager } from '../../SessionManager'
import { isRelativePath, fetchLogByChunks } from '../../utils' import { isRelativePath, fetchLogByChunks } from '../../utils'
import { formatDataForRequest } from '../../utils/formatDataForRequest' import { formatDataForRequest } from '../../utils/formatDataForRequest'
import { pollJobState, JobState } from './pollJobState' import { pollJobState } from './pollJobState'
import { uploadTables } from './uploadTables' import { uploadTables } from './uploadTables'
interface JobRequestBody {
[key: string]: number | string | string[]
}
/** /**
* Executes SAS program on the current SAS Viya server using Compute API. * Executes code on the current SAS Viya server.
* @param jobPath - the path to the file being submitted for execution. * @param jobPath - the path to the file being submitted for execution.
* @param linesOfCode - an array of code lines to execute. * @param linesOfCode - an array of code lines to execute.
* @param contextName - the context to execute the code in. * @param contextName - the context to execute the code in.
@@ -29,11 +25,11 @@ interface JobRequestBody {
* @param debug - when set to true, the log will be returned. * @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code). * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session * @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts. * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job. * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables. * @param variables - an object that represents macro variables.
*/ */
export async function executeOnComputeApi( export async function executeScript(
requestClient: RequestClient, requestClient: RequestClient,
sessionManager: SessionManager, sessionManager: SessionManager,
rootFolderName: string, rootFolderName: string,
@@ -50,7 +46,6 @@ export async function executeOnComputeApi(
variables?: MacroVar variables?: MacroVar
): Promise<any> { ): Promise<any> {
let access_token = (authConfig || {}).access_token let access_token = (authConfig || {}).access_token
if (authConfig) { if (authConfig) {
;({ access_token } = await getTokens(requestClient, authConfig)) ;({ access_token } = await getTokens(requestClient, authConfig))
} }
@@ -83,13 +78,27 @@ export async function executeOnComputeApi(
const logger = process.logger || console const logger = process.logger || console
logger.info( logger.info(
`Triggering '${relativeJobPath}' with PID ${ `Triggered '${relativeJobPath}' with PID ${
jobIdVariable.value jobIdVariable.value
} at ${timestampToYYYYMMDDHHMMSS()}` } at ${timestampToYYYYMMDDHHMMSS()}`
) )
} }
} }
const jobArguments: { [key: string]: any } = {
_contextName: contextName,
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
if (debug) {
jobArguments['_OMITTEXTLOG'] = false
jobArguments['_OMITSESSIONRESULTS'] = false
}
let fileName let fileName
if (isRelativePath(jobPath)) { if (isRelativePath(jobPath)) {
@@ -98,7 +107,6 @@ export async function executeOnComputeApi(
}` }`
} else { } else {
const jobPathParts = jobPath.split('/') const jobPathParts = jobPath.split('/')
fileName = jobPathParts.pop() fileName = jobPathParts.pop()
} }
@@ -110,6 +118,7 @@ export async function executeOnComputeApi(
} }
if (variables) jobVariables = { ...jobVariables, ...variables } if (variables) jobVariables = { ...jobVariables, ...variables }
if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 } if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
let files: any[] = [] let files: any[] = []
@@ -136,12 +145,12 @@ export async function executeOnComputeApi(
} }
// Execute job in session // Execute job in session
const jobRequestBody: JobRequestBody = { const jobRequestBody = {
name: fileName || 'Default Job Name', name: fileName,
description: 'Powered by SASjs', description: 'Powered by SASjs',
code: linesOfCode, code: linesOfCode,
variables: jobVariables, variables: jobVariables,
version: 2 arguments: jobArguments
} }
const { result: postedJob, etag } = await requestClient const { result: postedJob, etag } = await requestClient
@@ -219,7 +228,7 @@ export async function executeOnComputeApi(
) )
} }
if (jobStatus === JobState.Failed || jobStatus === JobState.Error) { if (jobStatus === 'failed' || jobStatus === 'error') {
throw new ComputeJobExecutionError(currentJob, log) throw new ComputeJobExecutionError(currentJob, log)
} }
@@ -270,7 +279,7 @@ export async function executeOnComputeApi(
const error = e as HttpError const error = e as HttpError
if (error.status === 404) { if (error.status === 404) {
return executeOnComputeApi( return executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
rootFolderName, rootFolderName,

View File

@@ -1,88 +1,29 @@
import { AuthConfig } from '@sasjs/utils/types' import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions, PollStrategy } from '../..' import { Job, PollOptions } from '../..'
import { getTokens } from '../../auth/getTokens' import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors' import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types' import { Link, WriteStream } from '../../types'
import { delay, isNode } from '../../utils' import { delay, isNode } from '../../utils'
export enum JobState {
Completed = 'completed',
Running = 'running',
Pending = 'pending',
Unavailable = 'unavailable',
NoState = '',
Failed = 'failed',
Error = 'error'
}
/**
* Polls job status using default or provided poll options.
* @param requestClient - the pre-configured HTTP request client.
* @param postedJob - the relative or absolute path to the job.
* @param debug - sets the _debug flag in the job arguments.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
* @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
* Example pollOptions:
* {
* maxPollCount: 200,
* pollInterval: 300,
* streamLog: true, // optional, equals to false by default.
* pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
* }
* Example pollStrategy (values used from default poll strategy):
* [
* { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
* { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
* { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
* { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
* ]
* @returns - a promise which resolves with a job state
*/
export async function pollJobState( export async function pollJobState(
requestClient: RequestClient, requestClient: RequestClient,
postedJob: Job, postedJob: Job,
debug: boolean, debug: boolean,
authConfig?: AuthConfig, authConfig?: AuthConfig,
pollOptions?: PollOptions pollOptions?: PollOptions
): Promise<JobState> { ) {
const logger = process.logger || console const logger = process.logger || console
const streamLog = pollOptions?.streamLog || false let pollInterval = 300
let maxPollCount = 1000
const defaultPollStrategy: PollStrategy = [ const defaultPollOptions: PollOptions = {
{ maxPollCount: 200, pollInterval: 300 }, maxPollCount,
{ maxPollCount: 300, pollInterval: 3000 }, pollInterval,
{ maxPollCount: 500, pollInterval: 30000 }, streamLog: false
{ maxPollCount: 3400, pollInterval: 60000 }
]
let pollStrategy: PollStrategy
if (pollOptions !== undefined) {
pollStrategy = [pollOptions]
let { pollStrategy: providedPollStrategy } = pollOptions
if (providedPollStrategy !== undefined) {
validatePollStrategies(providedPollStrategy)
// INFO: sort by 'maxPollCount'
providedPollStrategy = providedPollStrategy.sort(
(strategyA: PollOptions, strategyB: PollOptions) =>
strategyA.maxPollCount - strategyB.maxPollCount
)
pollStrategy = [...pollStrategy, ...providedPollStrategy]
} else {
pollStrategy = [...pollStrategy, ...defaultPollStrategy]
}
} else {
pollStrategy = defaultPollStrategy
} }
let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) } pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
const stateLink = postedJob.links.find((l: any) => l.rel === 'state') const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -90,10 +31,10 @@ export async function pollJobState(
throw new Error(`Job state link was not found.`) throw new Error(`Job state link was not found.`)
} }
let currentState: JobState = await getJobState( let currentState = await getJobState(
requestClient, requestClient,
postedJob, postedJob,
JobState.NoState, '',
debug, debug,
authConfig authConfig
).catch((err) => { ).catch((err) => {
@@ -101,71 +42,73 @@ export async function pollJobState(
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`, `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
err err
) )
return 'unavailable'
return JobState.Unavailable
}) })
let pollCount = 0 let pollCount = 0
if (currentState === JobState.Completed) { if (currentState === 'completed') {
return Promise.resolve(currentState) return Promise.resolve(currentState)
} }
let logFileStream let logFileStream
if (streamLog && isNode()) { if (pollOptions.streamLog && isNode()) {
const { getFileStream } = require('./getFileStream') const { getFileStream } = require('./getFileStream')
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath) logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
} }
// Poll up to the first 100 times with the specified poll interval
let result = await doPoll( let result = await doPoll(
requestClient, requestClient,
postedJob, postedJob,
currentState, currentState,
debug, debug,
pollCount, pollCount,
pollOptions,
authConfig, authConfig,
streamLog, {
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream logFileStream
) )
currentState = result.state currentState = result.state
pollCount = result.pollCount pollCount = result.pollCount
if ( if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
!needsRetry(currentState) ||
(pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
) {
return currentState return currentState
} }
// INFO: If we get to this point, this is a long-running job that needs longer polling. // If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval according to the next polling strategy // We will resume polling with a bigger interval of 1 minute
while (pollStrategy.length && needsRetry(currentState)) { let longJobPollOptions: PollOptions = {
defaultPollOptions = pollStrategy.splice(0, 1)[0] maxPollCount: 24 * 60,
pollInterval: 60000,
if (pollOptions) { streamLog: false
defaultPollOptions.logFolderPath = pollOptions.logFolderPath }
} if (pollOptions) {
longJobPollOptions.streamLog = pollOptions.streamLog
result = await doPoll( longJobPollOptions.logFolderPath = pollOptions.logFolderPath
requestClient,
postedJob,
currentState,
debug,
pollCount,
defaultPollOptions,
authConfig,
streamLog,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
} }
if (logFileStream) logFileStream.end() result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
longJobPollOptions,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (logFileStream) {
logFileStream.end()
}
return currentState return currentState
} }
@@ -176,13 +119,17 @@ const getJobState = async (
currentState: string, currentState: string,
debug: boolean, debug: boolean,
authConfig?: AuthConfig authConfig?: AuthConfig
): Promise<JobState> => { ) => {
const stateLink = job.links.find((l: any) => l.rel === 'state')! const stateLink = job.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
if (needsRetry(currentState)) { if (needsRetry(currentState)) {
let tokens let tokens
if (authConfig) {
if (authConfig) tokens = await getTokens(requestClient, authConfig) tokens = await getTokens(requestClient, authConfig)
}
const { result: jobState } = await requestClient const { result: jobState } = await requestClient
.get<string>( .get<string>(
@@ -196,38 +143,48 @@ const getJobState = async (
throw new JobStatePollError(job.id, err) throw new JobStatePollError(job.id, err)
}) })
return jobState.trim() as JobState return jobState.trim()
} else { } else {
return currentState as JobState return currentState
} }
} }
const needsRetry = (state: string) => const needsRetry = (state: string) =>
state === JobState.Running || state === 'running' ||
state === JobState.NoState || state === '' ||
state === JobState.Pending || state === 'pending' ||
state === JobState.Unavailable state === 'unavailable'
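One side of this hunk compares job states against bare strings ('running', 'pending', 'unavailable', ''), the other against a JobState enum whose declaration sits outside the lines shown here. A minimal sketch of such an enum, assuming each member maps onto the literal string used by the string-based variant:

export enum JobState {
  NoState = '',
  Completed = 'completed',
  Running = 'running',
  Pending = 'pending',
  Unavailable = 'unavailable',
  Failed = 'failed',
  Error = 'error'
}

Under that assumption, needsRetry(JobState.Pending) and needsRetry('pending') behave identically; the enum mainly adds compile-time checking of the state names.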
const doPoll = async ( const doPoll = async (
requestClient: RequestClient, requestClient: RequestClient,
postedJob: Job, postedJob: Job,
currentState: JobState, currentState: string,
debug: boolean, debug: boolean,
pollCount: number, pollCount: number,
pollOptions: PollOptions,
authConfig?: AuthConfig, authConfig?: AuthConfig,
streamLog?: boolean, pollOptions?: PollOptions,
logStream?: WriteStream logStream?: WriteStream
): Promise<{ state: JobState; pollCount: number }> => { ): Promise<{ state: string; pollCount: number }> => {
const { maxPollCount, pollInterval } = pollOptions let pollInterval = 300
const logger = process.logger || console let maxPollCount = 1000
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
let maxErrorCount = 5 let maxErrorCount = 5
let errorCount = 0 let errorCount = 0
let state = currentState let state = currentState
let printedState = JobState.NoState let printedState = ''
let startLogLine = 0 let startLogLine = 0
const logger = process.logger || console
if (pollOptions) {
pollInterval = pollOptions.pollInterval || pollInterval
maxPollCount = pollOptions.maxPollCount || maxPollCount
}
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
while (needsRetry(state) && pollCount <= maxPollCount) { while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState( state = await getJobState(
requestClient, requestClient,
@@ -237,24 +194,21 @@ const doPoll = async (
authConfig authConfig
).catch((err) => { ).catch((err) => {
errorCount++ errorCount++
if (pollCount >= maxPollCount || errorCount >= maxErrorCount) { if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
throw err throw err
} }
logger.error( logger.error(
`Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`, `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
err err
) )
return 'unavailable'
return JobState.Unavailable
}) })
pollCount++ pollCount++
const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href
if (streamLog) { if (pollOptions?.streamLog) {
const { result: job } = await requestClient.get<Job>( const { result: job } = await requestClient.get<Job>(
jobHref, jobHref,
authConfig?.access_token authConfig?.access_token
@@ -284,45 +238,12 @@ const doPoll = async (
printedState = state printedState = state
} }
if (state !== JobState.Unavailable && errorCount > 0) { if (state != 'unavailable' && errorCount > 0) {
errorCount = 0 errorCount = 0
} }
if (state !== JobState.Completed) { await delay(pollInterval)
await delay(pollInterval)
}
} }
return { state, pollCount } return { state, pollCount }
} }
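Both variants of doPoll pause between status checks via delay(pollInterval). The helper lives in the adapter's utils folder and is not part of this diff; presumably it is little more than a promise-wrapped setTimeout along these lines (a sketch, not the adapter's actual code):

// Resolve after the given number of milliseconds.
export const delay = (ms: number): Promise<void> =>
  new Promise((resolve) => setTimeout(resolve, ms))

Keeping the wait behind a small importable function is also what lets the spec file further down replace it with jest.spyOn and record the intervals without real waiting.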
const validatePollStrategies = (strategy: PollStrategy) => {
const throwError = (message?: string, pollOptions?: PollOptions) => {
throw new Error(
`Poll strategies are not valid.${message ? ` ${message}` : ''}${
pollOptions
? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
: ''
}`
)
}
strategy.forEach((pollOptions: PollOptions, i: number) => {
const { maxPollCount, pollInterval } = pollOptions
if (maxPollCount < 1) {
throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
} else if (i !== 0) {
const previousPollOptions = strategy[i - 1]
if (maxPollCount <= previousPollOptions.maxPollCount) {
throwError(
`'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
pollOptions
)
}
} else if (pollInterval < 1) {
throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
}
})
}
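Read together, the strategy-based variant lets a caller describe escalating polling: the top-level options cover the first phase, and an optional pollStrategy array lists later phases, which validatePollStrategies above checks (every maxPollCount must be positive and strictly greater than the one before it, and the first entry needs a positive pollInterval). A hedged usage sketch, with the option shape reconstructed from how it is used in this file and placeholder values throughout:

// Approximate shape of the options accepted by the strategy-based variant.
interface PollOptions {
  maxPollCount: number
  pollInterval: number
  streamLog?: boolean
  logFolderPath?: string
  pollStrategy?: PollOptions[]
}

// Poll every 2 seconds for the first 100 checks, then back off to 10s, then 60s.
const pollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 2000,
  streamLog: false,
  pollStrategy: [
    { maxPollCount: 300, pollInterval: 10000 },
    { maxPollCount: 1000, pollInterval: 60000 }
  ]
}

// const state = await pollJobState(requestClient, postedJob, false, authConfig, pollOptions)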

View File

@@ -1,6 +1,6 @@
import { RequestClient } from '../../../request/RequestClient' import { RequestClient } from '../../../request/RequestClient'
import { SessionManager } from '../../../SessionManager' import { SessionManager } from '../../../SessionManager'
import { executeOnComputeApi } from '../executeOnComputeApi' import { executeScript } from '../executeScript'
import { mockSession, mockAuthConfig, mockJob } from './mockResponses' import { mockSession, mockAuthConfig, mockJob } from './mockResponses'
import * as pollJobStateModule from '../pollJobState' import * as pollJobStateModule from '../pollJobState'
import * as uploadTablesModule from '../uploadTables' import * as uploadTablesModule from '../uploadTables'
@@ -9,13 +9,14 @@ import * as formatDataModule from '../../../utils/formatDataForRequest'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks' import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import { PollOptions } from '../../../types' import { PollOptions } from '../../../types'
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors' import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
import { Logger, LogLevel } from '@sasjs/utils/logger' import { Logger, LogLevel } from '@sasjs/utils'
const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)() const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)() const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = { const defaultPollOptions: PollOptions = {
maxPollCount: 100, maxPollCount: 100,
pollInterval: 500 pollInterval: 500,
streamLog: false
} }
describe('executeScript', () => { describe('executeScript', () => {
@@ -25,7 +26,7 @@ describe('executeScript', () => {
}) })
it('should not try to get fresh tokens if an authConfig is not provided', async () => { it('should not try to get fresh tokens if an authConfig is not provided', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -38,7 +39,7 @@ describe('executeScript', () => {
}) })
it('should try to get fresh tokens if an authConfig is provided', async () => { it('should try to get fresh tokens if an authConfig is provided', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -55,7 +56,7 @@ describe('executeScript', () => {
}) })
it('should get a session from the session manager before executing', async () => { it('should get a session from the session manager before executing', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -72,7 +73,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'getSession') .spyOn(sessionManager, 'getSession')
.mockImplementation(() => Promise.reject('Test Error')) .mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -85,7 +86,7 @@ describe('executeScript', () => {
}) })
it('should fetch the PID when printPid is true', async () => { it('should fetch the PID when printPid is true', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -113,7 +114,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'getVariable') .spyOn(sessionManager, 'getVariable')
.mockImplementation(() => Promise.reject('Test Error')) .mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -139,7 +140,7 @@ describe('executeScript', () => {
Promise.resolve([{ tableName: 'test', file: { id: 1 } }]) Promise.resolve([{ tableName: 'test', file: { id: 1 } }])
) )
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -163,7 +164,7 @@ describe('executeScript', () => {
}) })
it('should format data as CSV when it does not contain semicolons', async () => { it('should format data as CSV when it does not contain semicolons', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -189,7 +190,7 @@ describe('executeScript', () => {
.spyOn(formatDataModule, 'formatDataForRequest') .spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' })) .mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -217,7 +218,14 @@ describe('executeScript', () => {
sasjs_tables: 'foo', sasjs_tables: 'foo',
sasjs0data: 'bar' sasjs0data: 'bar'
}, },
version: 2 arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
}, },
mockAuthConfig.access_token mockAuthConfig.access_token
) )
@@ -228,7 +236,7 @@ describe('executeScript', () => {
.spyOn(formatDataModule, 'formatDataForRequest') .spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' })) .mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -257,7 +265,14 @@ describe('executeScript', () => {
sasjs0data: 'bar', sasjs0data: 'bar',
_DEBUG: 131 _DEBUG: 131
}, },
version: 2 arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: false,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: false
}
}, },
mockAuthConfig.access_token mockAuthConfig.access_token
) )
@@ -268,7 +283,7 @@ describe('executeScript', () => {
.spyOn(requestClient, 'post') .spyOn(requestClient, 'post')
.mockImplementation(() => Promise.reject('Test Error')) .mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -288,7 +303,7 @@ describe('executeScript', () => {
}) })
it('should immediately return the session when waitForResult is false', async () => { it('should immediately return the session when waitForResult is false', async () => {
const result = await executeOnComputeApi( const result = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -308,7 +323,7 @@ describe('executeScript', () => {
}) })
it('should poll for job completion when waitForResult is true', async () => { it('should poll for job completion when waitForResult is true', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -338,7 +353,7 @@ describe('executeScript', () => {
.spyOn(pollJobStateModule, 'pollJobState') .spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.reject('Poll Error')) .mockImplementation(() => Promise.reject('Poll Error'))
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -364,7 +379,7 @@ describe('executeScript', () => {
Promise.reject({ response: { data: 'err=5113,' } }) Promise.reject({ response: { data: 'err=5113,' } })
) )
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -390,7 +405,7 @@ describe('executeScript', () => {
}) })
it('should fetch the logs for the job if debug is true and a log URL is available', async () => { it('should fetch the logs for the job if debug is true and a log URL is available', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -415,7 +430,7 @@ describe('executeScript', () => {
}) })
it('should not fetch the logs for the job if debug is false', async () => { it('should not fetch the logs for the job if debug is false', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -437,11 +452,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has failed', async () => { it('should throw a ComputeJobExecutionError if the job has failed', async () => {
jest jest
.spyOn(pollJobStateModule, 'pollJobState') .spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => .mockImplementation(() => Promise.resolve('failed'))
Promise.resolve(pollJobStateModule.JobState.Failed)
)
const error: ComputeJobExecutionError = await executeOnComputeApi( const error: ComputeJobExecutionError = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -472,11 +485,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has errored out', async () => { it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
jest jest
.spyOn(pollJobStateModule, 'pollJobState') .spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => .mockImplementation(() => Promise.resolve('error'))
Promise.resolve(pollJobStateModule.JobState.Error)
)
const error: ComputeJobExecutionError = await executeOnComputeApi( const error: ComputeJobExecutionError = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -505,7 +516,7 @@ describe('executeScript', () => {
}) })
it('should fetch the result if expectWebout is true', async () => { it('should fetch the result if expectWebout is true', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -536,7 +547,7 @@ describe('executeScript', () => {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}) })
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -570,7 +581,7 @@ describe('executeScript', () => {
}) })
it('should clear the session after execution is complete', async () => { it('should clear the session after execution is complete', async () => {
await executeOnComputeApi( await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -597,7 +608,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'clearSession') .spyOn(sessionManager, 'clearSession')
.mockImplementation(() => Promise.reject('Clear Session Error')) .mockImplementation(() => Promise.reject('Clear Session Error'))
const error = await executeOnComputeApi( const error = await executeScript(
requestClient, requestClient,
sessionManager, sessionManager,
'test', 'test',
@@ -643,9 +654,7 @@ const setupMocks = () => {
.mockImplementation(() => Promise.resolve(mockAuthConfig)) .mockImplementation(() => Promise.resolve(mockAuthConfig))
jest jest
.spyOn(pollJobStateModule, 'pollJobState') .spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => .mockImplementation(() => Promise.resolve('completed'))
Promise.resolve(pollJobStateModule.JobState.Completed)
)
jest jest
.spyOn(sessionManager, 'getVariable') .spyOn(sessionManager, 'getVariable')
.mockImplementation(() => .mockImplementation(() =>

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger' import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient' import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses' import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState' import { pollJobState } from '../pollJobState'
@@ -6,18 +6,17 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog' import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream' import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode' import * as isNodeModule from '../../../utils/isNode'
import * as delayModule from '../../../utils/delay' import { PollOptions } from '../../../types'
import { PollOptions, PollStrategy } from '../../../types'
import { WriteStream } from 'fs' import { WriteStream } from 'fs'
const baseUrl = 'http://localhost' const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)() const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl requestClient['httpClient'].defaults.baseURL = baseUrl
const defaultStreamLog = false const defaultPollOptions: PollOptions = {
const defaultPollStrategy: PollOptions = {
maxPollCount: 100, maxPollCount: 100,
pollInterval: 500 pollInterval: 500,
streamLog: false
} }
describe('pollJobState', () => { describe('pollJobState', () => {
@@ -27,10 +26,13 @@ describe('pollJobState', () => {
}) })
it('should get valid tokens if the authConfig has been provided', async () => { it('should get valid tokens if the authConfig has been provided', async () => {
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(
...defaultPollStrategy, requestClient,
streamLog: defaultStreamLog mockJob,
}) false,
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith( expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient, requestClient,
@@ -44,7 +46,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollStrategy defaultPollOptions
) )
expect(getTokensModule.getTokens).not.toHaveBeenCalled() expect(getTokensModule.getTokens).not.toHaveBeenCalled()
@@ -56,7 +58,7 @@ describe('pollJobState', () => {
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') }, { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
false, false,
undefined, undefined,
defaultPollStrategy defaultPollOptions
).catch((e: any) => e) ).catch((e: any) => e)
expect((error as Error).message).toContain('Job state link was not found.') expect((error as Error).message).toContain('Job state link was not found.')
@@ -70,7 +72,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollStrategy defaultPollOptions
) )
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3) expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
@@ -81,7 +83,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy, ...defaultPollOptions,
streamLog: true streamLog: true
}) })
@@ -94,7 +96,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog') const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy, ...defaultPollOptions,
streamLog: true streamLog: true
}) })
@@ -109,7 +111,7 @@ describe('pollJobState', () => {
const { getFileStream } = require('../getFileStream') const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, { await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy, ...defaultPollOptions,
streamLog: true streamLog: true
}) })
@@ -125,7 +127,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
defaultPollStrategy defaultPollOptions
) )
expect(saveLogModule.saveLog).not.toHaveBeenCalled() expect(saveLogModule.saveLog).not.toHaveBeenCalled()
@@ -134,18 +136,15 @@ describe('pollJobState', () => {
it('should return the current status when the max poll count is reached', async () => { it('should return the current status when the max poll count is reached', async () => {
mockRunningPoll() mockRunningPoll()
const pollOptions: PollOptions = {
...defaultPollStrategy,
maxPollCount: 1,
pollStrategy: []
}
const state = await pollJobState( const state = await pollJobState(
requestClient, requestClient,
mockJob, mockJob,
false, false,
mockAuthConfig, mockAuthConfig,
pollOptions {
...defaultPollOptions,
maxPollCount: 1
}
) )
expect(state).toEqual('running') expect(state).toEqual('running')
@@ -160,7 +159,7 @@ describe('pollJobState', () => {
false, false,
mockAuthConfig, mockAuthConfig,
{ {
...defaultPollStrategy, ...defaultPollOptions,
maxPollCount: 200, maxPollCount: 200,
pollInterval: 10 pollInterval: 10
} }
@@ -177,7 +176,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollStrategy defaultPollOptions
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -193,7 +192,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
true, true,
undefined, undefined,
defaultPollStrategy defaultPollOptions
) )
expect((process as any).logger.info).toHaveBeenCalledTimes(4) expect((process as any).logger.info).toHaveBeenCalledTimes(4)
@@ -223,7 +222,7 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollStrategy defaultPollOptions
) )
expect(requestClient.get).toHaveBeenCalledTimes(2) expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -238,189 +237,13 @@ describe('pollJobState', () => {
mockJob, mockJob,
false, false,
undefined, undefined,
defaultPollStrategy defaultPollOptions
).catch((e: any) => e) ).catch((e: any) => e)
expect(error.message).toEqual( expect(error.message).toEqual(
'Error while polling job state for job j0b: Status Error' 'Error while polling job state for job j0b: Status Error'
) )
}) })
it('should change poll strategies', async () => {
mockSimplePoll(6)
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const pollIntervals = [3, 4, 5, 6]
const pollStrategy = [
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
]
const pollOptions: PollOptions = {
maxPollCount: 1,
pollInterval: pollIntervals[0],
pollStrategy: pollStrategy
}
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
})
it('should change default poll strategies after completing provided poll options', async () => {
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const customPollOptions: PollOptions = {
maxPollCount: 0,
pollInterval: 0
}
const requests = [
{ maxPollCount: 202, pollInterval: 300 },
{ maxPollCount: 300, pollInterval: 3000 },
{ maxPollCount: 500, pollInterval: 30000 },
{ maxPollCount: 3400, pollInterval: 60000 }
]
// ~200 requests with delay 300ms
let request = requests.splice(0, 1)[0]
let { maxPollCount, pollInterval } = request
// should be only one interval because maxPollCount is equal to 0
const pollIntervals = [customPollOptions.pollInterval]
pollIntervals.push(...Array(maxPollCount - 2).fill(pollInterval))
// ~300 requests with delay 3000
request = requests.splice(0, 1)[0]
let newAmount = request.maxPollCount
pollInterval = request.pollInterval
pollIntervals.push(...Array(newAmount - maxPollCount).fill(pollInterval))
pollIntervals.push(...Array(2).fill(pollInterval))
// ~500 requests with delay 30000
request = requests.splice(0, 1)[0]
let oldAmount = newAmount
newAmount = request.maxPollCount
pollInterval = request.pollInterval
pollIntervals.push(...Array(newAmount - oldAmount - 2).fill(pollInterval))
pollIntervals.push(...Array(2).fill(pollInterval))
// ~3400 requests with delay 60000
request = requests.splice(0, 1)[0]
oldAmount = newAmount
newAmount = request.maxPollCount
pollInterval = request.pollInterval
mockSimplePoll(newAmount)
pollIntervals.push(...Array(newAmount - oldAmount - 2).fill(pollInterval))
await pollJobState(
requestClient,
mockJob,
false,
undefined,
customPollOptions
)
expect(delays).toEqual(pollIntervals)
})
it('should throw an error if not valid poll strategies provided', async () => {
// INFO: 'maxPollCount' has to be > 0
let invalidPollStrategy = {
maxPollCount: 0,
pollInterval: 3
}
let pollStrategy: PollStrategy = [invalidPollStrategy]
let expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
const validPollStrategy = {
maxPollCount: 5,
pollInterval: 2
}
invalidPollStrategy = {
maxPollCount: validPollStrategy.maxPollCount,
pollInterval: 3
}
pollStrategy = [validPollStrategy, invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: invalid 'pollInterval'
invalidPollStrategy = {
maxPollCount: 1,
pollInterval: 0
}
pollStrategy = [invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
})
}) })
const setupMocks = () => { const setupMocks = () => {
@@ -450,14 +273,11 @@ const setupMocks = () => {
const mockSimplePoll = (runningCount = 2) => { const mockSimplePoll = (runningCount = 2) => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: result:
count === 0 count === 0
@@ -473,14 +293,11 @@ const mockSimplePoll = (runningCount = 2) => {
const mockRunningPoll = () => { const mockRunningPoll = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: count === 0 ? 'pending' : 'running', result: count === 0 ? 'pending' : 'running',
etag: '', etag: '',
@@ -491,14 +308,11 @@ const mockRunningPoll = () => {
const mockLongPoll = () => { const mockLongPoll = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.resolve({ return Promise.resolve({
result: count <= 102 ? 'running' : 'completed', result: count <= 102 ? 'running' : 'completed',
etag: '', etag: '',
@@ -509,18 +323,14 @@ const mockLongPoll = () => {
const mockPollWithSingleError = () => { const mockPollWithSingleError = () => {
let count = 0 let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => { jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++ count++
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
if (count === 1) { if (count === 1) {
return Promise.reject('Status Error') return Promise.reject('Status Error')
} }
return Promise.resolve({ return Promise.resolve({
result: count === 0 ? 'pending' : 'completed', result: count === 0 ? 'pending' : 'completed',
etag: '', etag: '',
@@ -534,7 +344,6 @@ const mockErroredPoll = () => {
if (url.includes('job')) { if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 }) return Promise.resolve({ result: mockJob, etag: '', status: 200 })
} }
return Promise.reject('Status Error') return Promise.reject('Status Error')
}) })
} }
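All of these helpers follow the same pattern: requestClient.get is stubbed so the poll loop sees a scripted sequence of state responses, keyed off a call counter. The strategy tests additionally spy on the delay util so intervals can be asserted without real waiting, roughly:

const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
  delays.push(ms)
  return Promise.resolve()
})
// ...run pollJobState, then assert on the recorded delays array.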

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger' import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient' import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks' import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream' import * as writeStreamModule from '../writeStream'

View File

@@ -5,7 +5,7 @@ import {
fileExists, fileExists,
readFile, readFile,
deleteFile deleteFile
} from '@sasjs/utils/file' } from '@sasjs/utils'
describe('writeStream', () => { describe('writeStream', () => {
const filename = 'test.txt' const filename = 'test.txt'

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error' import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix' import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types' import { ServerType } from '@sasjs/utils'
/** /**
* Exchanges the auth code for an access token for the given client. * Exchanges the auth code for an access token for the given client.

View File

@@ -4,6 +4,7 @@ import { RequestClient } from '../request/RequestClient'
import { CertificateError } from '../types/errors' import { CertificateError } from '../types/errors'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix' import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
// TODO: update func docs
/** /**
* Exchange the auth code for access / refresh tokens for the given client / secret pair. * Exchange the auth code for access / refresh tokens for the given client / secret pair.
* @param requestClient - the pre-configured HTTP request client. * @param requestClient - the pre-configured HTTP request client.
@@ -30,11 +31,10 @@ export async function getAccessTokenForViya(
Authorization: 'Basic ' + token, Authorization: 'Basic ' + token,
Accept: 'application/json' Accept: 'application/json'
} }
const dataJson = {
const dataJson = new URLSearchParams({
grant_type: 'authorization_code', grant_type: 'authorization_code',
code: authCode code: authCode
}) }
const data = new URLSearchParams(dataJson) const data = new URLSearchParams(dataJson)
const authResponse = await requestClient const authResponse = await requestClient
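Both sides of this hunk post a URL-encoded body to the Viya token endpoint and differ only in whether the object literal is wrapped in URLSearchParams once or twice (URLSearchParams accepts either a plain record or another URLSearchParams instance). A standalone sketch of the request pieces assembled here; the client id, secret and auth code are placeholders, and the Basic token derivation is assumed rather than shown in this hunk:

const clientId = 'myClientId' // placeholder
const clientSecret = 'myClientSecret' // placeholder
const authCode = 'pasted0auth0code' // placeholder

// Assumed: the Basic credential is the base64-encoded client:secret pair.
const token = Buffer.from(`${clientId}:${clientSecret}`).toString('base64')

const headers = {
  Authorization: 'Basic ' + token,
  Accept: 'application/json'
}

// Serialises to grant_type=authorization_code&code=...
const data = new URLSearchParams({
  grant_type: 'authorization_code',
  code: authCode
})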

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error' import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix' import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types' import { ServerType } from '@sasjs/utils'
/** /**
* Exchanges the refresh token for an access token for the given client. * Exchanges the refresh token for an access token for the given client.

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types' import { AuthConfig } from '@sasjs/utils'
import { generateToken, mockSasjsAuthResponse } from './mockResponses' import { generateToken, mockSasjsAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs' import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types' import { AuthConfig } from '@sasjs/utils'
import * as NodeFormData from 'form-data' import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses' import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types' import { AuthConfig } from '@sasjs/utils'
import * as refreshTokensModule from '../refreshTokensForViya' import * as refreshTokensModule from '../refreshTokensForViya'
import { generateToken, mockAuthResponse } from './mockResponses' import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens' import { getTokens } from '../getTokens'

View File

@@ -1,4 +1,4 @@
import { ServerType } from '@sasjs/utils/types' import { ServerType } from '@sasjs/utils'
import { generateToken, mockAuthResponse } from './mockResponses' import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForSasjs } from '../refreshTokensForSasjs' import { refreshTokensForSasjs } from '../refreshTokensForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig, ServerType } from '@sasjs/utils/types' import { AuthConfig, ServerType } from '@sasjs/utils'
import * as NodeFormData from 'form-data' import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses' import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,6 +1,5 @@
import * as NodeFormData from 'form-data' import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv' import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'
/** /**
* One of the approaches SASjs takes to send tables-formatted JSON (see README) * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -27,15 +26,12 @@ export const generateFileUploadForm = (
) )
} }
// INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
if (isNode()) { formData.append(name, csv, {
// INFO: environment is Node and formData is instance of NodeFormData
;(formData as NodeFormData).append(name, csv, {
filename: `${name}.csv`, filename: `${name}.csv`,
contentType: 'application/csv' contentType: 'application/csv'
}) })
} else { } else {
// INFO: environment is Browser and formData is instance of FormData
const file = new Blob([csv], { const file = new Blob([csv], {
type: 'application/csv' type: 'application/csv'
}) })
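The change above is about how the environment is detected before appending the CSV: one variant asks isNode(), the other checks for a global FormData constructor before treating the object as form-data's NodeFormData. Condensed, the two append paths look roughly like this (table name and rows are made up; convertToCSV and the option names are as used in this file):

import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'

const name = 'sometable' // placeholder
const data = { [name]: [{ var1: 'string', var2: 232 }] } // placeholder
const csv = convertToCSV(data, name)

if (typeof FormData === 'undefined') {
  // Node: form-data takes the raw CSV string plus filename / content type options.
  const formData = new NodeFormData()
  formData.append(name, csv, {
    filename: `${name}.csv`,
    contentType: 'application/csv'
  })
} else {
  // Browser: wrap the CSV in a Blob so it is sent as a named file part.
  const formData = new FormData()
  const file = new Blob([csv], { type: 'application/csv' })
  formData.append(name, file, `${name}.csv`)
}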

View File

@@ -1,7 +1,4 @@
import { generateFileUploadForm } from '../generateFileUploadForm' import { generateFileUploadForm } from '../generateFileUploadForm'
import { convertToCSV } from '../../utils/convertToCsv'
import * as NodeFormData from 'form-data'
import * as isNodeModule from '../../utils/isNode'
describe('generateFileUploadForm', () => { describe('generateFileUploadForm', () => {
beforeAll(() => { beforeAll(() => {
@@ -14,94 +11,44 @@ describe('generateFileUploadForm', () => {
;(global as any).Blob = BlobMock ;(global as any).Blob = BlobMock
}) })
describe('browser', () => { it('should generate file upload form from data', () => {
afterAll(() => { const formData = new FormData()
jest.restoreAllMocks() const testTable = 'sometable'
}) const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
Array.isArray(testTableWithNullVars[key])
)[0]
it('should generate file upload form from data', () => { jest.spyOn(formData, 'append').mockImplementation(() => {})
const formData = new FormData()
const testTable = 'sometable'
const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter(
(key: string) => Array.isArray(testTableWithNullVars[key])
)[0]
jest.spyOn(formData, 'append').mockImplementation(() => {}) generateFileUploadForm(formData, testTableWithNullVars)
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
generateFileUploadForm(formData, testTableWithNullVars) expect(formData.append).toHaveBeenCalledOnce()
expect(formData.append).toHaveBeenCalledWith(
expect(formData.append).toHaveBeenCalledOnce() tableName,
expect(formData.append).toHaveBeenCalledWith( {},
tableName, `${tableName}.csv`
{}, )
`${tableName}.csv`
)
})
it('should throw an error if too large string was provided', () => {
const formData = new FormData()
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
)
})
}) })
describe('node', () => { it('should throw an error if too large string was provided', () => {
it('should generate file upload form from data', () => { const formData = new FormData()
const formData = new NodeFormData() const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
const testTable = 'sometable'
const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter(
(key: string) => Array.isArray(testTableWithNullVars[key])
)[0]
const csv = convertToCSV(testTableWithNullVars, tableName)
jest.spyOn(formData, 'append').mockImplementation(() => {}) expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
generateFileUploadForm(formData, testTableWithNullVars) 'The max length of a string value in SASjs is 32765 characters.'
expect(formData.append).toHaveBeenCalledOnce()
expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
contentType: 'application/csv',
filename: `${tableName}.csv`
})
})
it('should throw an error if too large string was provided', () => {
const formData = new NodeFormData()
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
) )
}) )
}) })
}) })

View File

@@ -1,7 +1,8 @@
import { import {
getValidJson, getValidJson,
parseSasViyaDebugResponse, parseSasViyaDebugResponse,
parseWeboutResponse parseWeboutResponse,
SASJS_LOGS_SEPARATOR
} from '../utils' } from '../utils'
import { UploadFile } from '../types/UploadFile' import { UploadFile } from '../types/UploadFile'
import { import {
@@ -92,24 +93,15 @@ export class FileUploader extends BaseJobExecutor {
this.requestClient, this.requestClient,
config.serverUrl config.serverUrl
) )
break break
case ServerType.Sas9: case ServerType.Sas9:
jsonResponse = jsonResponse =
typeof res.result === 'string' typeof res.result === 'string'
? parseWeboutResponse(res.result, uploadUrl) ? parseWeboutResponse(res.result, uploadUrl)
: res.result : res.result
break
case ServerType.Sasjs:
jsonResponse =
typeof res.result === 'string'
? getValidJson(res.result)
: res.result
break break
} }
} else { } else if (this.serverType !== ServerType.Sasjs) {
jsonResponse = jsonResponse =
typeof res.result === 'string' typeof res.result === 'string'
? getValidJson(res.result) ? getValidJson(res.result)

View File

@@ -10,8 +10,8 @@ import {
LoginRequiredError LoginRequiredError
} from '../types/errors' } from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm' import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'
import { import {
isRelativePath, isRelativePath,
@@ -53,7 +53,8 @@ export class SasjsJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in * Use the available form data object (FormData in Browser, NodeFormData in
* Node) * Node)
*/ */
let formData = getFormData() let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
if (data) { if (data) {
// file upload approach // file upload approach
@@ -92,10 +93,8 @@ export class SasjsJobExecutor extends BaseJobExecutor {
) )
} }
const { result } = res const { result } = res.result
if (result && result.trim()) res.result = getValidJson(result)
if (result && typeof result === 'string' && result.trim())
res.result = getValidJson(result)
this.requestClient!.appendRequest(res, sasJob, config.debug) this.requestClient!.appendRequest(res, sasJob, config.debug)
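The variant on one side obtains its form object from a getFormData() helper instead of spelling out the ternary inline. That helper is not included in this diff; presumably it is just the same environment check packaged for reuse, e.g.:

import * as NodeFormData from 'form-data'

// Assumed shape of the getFormData helper: browsers expose a global FormData,
// Node falls back to the form-data package.
export const getFormData = () =>
  typeof FormData === 'undefined' ? new NodeFormData() : new FormData()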

View File

@@ -16,11 +16,10 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import { import {
isRelativePath, isRelativePath,
parseSasViyaDebugResponse, parseSasViyaDebugResponse,
appendExtraResponseAttributes, appendExtraResponseAttributes
parseWeboutResponse,
getFormData
} from '../utils' } from '../utils'
import { BaseJobExecutor } from './JobExecutor' import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
export interface WaitingRequstPromise { export interface WaitingRequstPromise {
promise: Promise<any> | null promise: Promise<any> | null
@@ -113,7 +112,8 @@ export class WebJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in * Use the available form data object (FormData in Browser, NodeFormData in
* Node) * Node)
*/ */
let formData = getFormData() let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
if (data) { if (data) {
const stringifiedData = JSON.stringify(data) const stringifiedData = JSON.stringify(data)

View File

@@ -233,8 +233,7 @@ export default class SASjs {
this.requestClient = new RequestClient( this.requestClient = new RequestClient(
this.sasjsConfig.serverUrl, this.sasjsConfig.serverUrl,
this.sasjsConfig.httpsAgentOptions, this.sasjsConfig.httpsAgentOptions,
this.sasjsConfig.requestHistoryLimit, this.sasjsConfig.requestHistoryLimit
this.sasjsConfig.verbose
) )
} else { } else {
this.requestClient.setConfig( this.requestClient.setConfig(

View File

@@ -11,6 +11,7 @@ import {
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { import {
isRelativePath, isRelativePath,
parseSasViyaDebugResponse,
appendExtraResponseAttributes, appendExtraResponseAttributes,
convertToCSV convertToCSV
} from '../../utils' } from '../../utils'

View File

@@ -1,10 +1,4 @@
import { import { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'
AxiosError,
AxiosInstance,
AxiosRequestConfig,
AxiosResponse
} from 'axios'
import axios from 'axios'
import * as https from 'https' import * as https from 'https'
import { CsrfToken } from '..' import { CsrfToken } from '..'
import { isAuthorizeFormRequired, isLogInRequired } from '../auth' import { isAuthorizeFormRequired, isLogInRequired } from '../auth'
@@ -16,7 +10,7 @@ import {
JobExecutionError, JobExecutionError,
CertificateError CertificateError
} from '../types/errors' } from '../types/errors'
import { SASjsRequest, HttpClient, VerboseMode } from '../types' import { SASjsRequest } from '../types'
import { parseWeboutResponse } from '../utils/parseWeboutResponse' import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error' import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError' import { SAS9AuthError } from '../types/errors/SAS9AuthError'
@@ -26,13 +20,45 @@ import {
createAxiosInstance createAxiosInstance
} from '../utils' } from '../utils'
import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError' import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError'
import { inspect } from 'util'
export interface HttpClient {
get<T>(
url: string,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
post<T>(
url: string,
data: any,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
put<T>(
url: string,
data: any,
accessToken: string | undefined,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
delete<T>(
url: string,
accessToken: string | undefined
): Promise<{ result: T; etag: string }>
getCsrfToken(type: 'general' | 'file'): CsrfToken | undefined
saveLocalStorageToken(accessToken: string, refreshToken: string): void
clearCsrfTokens(): void
clearLocalStorageTokens(): void
getBaseUrl(): string
}
export class RequestClient implements HttpClient { export class RequestClient implements HttpClient {
private requests: SASjsRequest[] = [] private requests: SASjsRequest[] = []
private requestsLimit: number = 10 private requestsLimit: number = 10
private httpInterceptor?: number
private verboseMode: VerboseMode = false
protected csrfToken: CsrfToken = { headerName: '', value: '' } protected csrfToken: CsrfToken = { headerName: '', value: '' }
protected fileUploadCsrfToken: CsrfToken | undefined protected fileUploadCsrfToken: CsrfToken | undefined
@@ -41,17 +67,10 @@ export class RequestClient implements HttpClient {
constructor( constructor(
protected baseUrl: string, protected baseUrl: string,
httpsAgentOptions?: https.AgentOptions, httpsAgentOptions?: https.AgentOptions,
requestsLimit?: number, requestsLimit?: number
verboseMode?: VerboseMode
) { ) {
this.createHttpClient(baseUrl, httpsAgentOptions) this.createHttpClient(baseUrl, httpsAgentOptions)
if (requestsLimit) this.requestsLimit = requestsLimit if (requestsLimit) this.requestsLimit = requestsLimit
if (verboseMode) {
this.setVerboseMode(verboseMode)
this.enableVerboseMode()
}
} }
public setConfig(baseUrl: string, httpsAgentOptions?: https.AgentOptions) { public setConfig(baseUrl: string, httpsAgentOptions?: https.AgentOptions) {
@@ -71,7 +90,6 @@ export class RequestClient implements HttpClient {
this.csrfToken = { headerName: '', value: '' } this.csrfToken = { headerName: '', value: '' }
this.fileUploadCsrfToken = { headerName: '', value: '' } this.fileUploadCsrfToken = { headerName: '', value: '' }
} }
public clearLocalStorageTokens() { public clearLocalStorageTokens() {
localStorage.setItem('accessToken', '') localStorage.setItem('accessToken', '')
localStorage.setItem('refreshToken', '') localStorage.setItem('refreshToken', '')
@@ -162,7 +180,6 @@ export class RequestClient implements HttpClient {
responseType: contentType === 'text/plain' ? 'text' : 'json', responseType: contentType === 'text/plain' ? 'text' : 'json',
withCredentials: true withCredentials: true
} }
if (contentType === 'text/plain') { if (contentType === 'text/plain') {
requestConfig.transformResponse = undefined requestConfig.transformResponse = undefined
} }
@@ -372,181 +389,6 @@ export class RequestClient implements HttpClient {
}) })
} }
/**
* Adds colors to the string.
* If verboseMode is set to 'bleached', colors should be disabled
* @param str - string to be prettified.
* @returns - prettified string
*/
private prettifyString = (str: any) =>
inspect(str, { colors: this.verboseMode !== 'bleached' })
/**
* Formats HTTP request/response body.
* @param body - HTTP request/response body.
* @returns - formatted string.
*/
private parseInterceptedBody = (body: any) => {
if (!body) return ''
let parsedBody
// Tries to parse body into JSON object.
if (typeof body === 'string') {
try {
parsedBody = JSON.parse(body)
} catch (error) {
parsedBody = body
}
} else {
parsedBody = body
}
const bodyLines = this.prettifyString(parsedBody).split('\n')
// Leaves first 50 lines
if (bodyLines.length > 51) {
bodyLines.splice(50)
bodyLines.push('...')
}
return bodyLines.join('\n')
}
private defaultInterceptionCallBack = (
axiosResponse: AxiosResponse | AxiosError
) => {
// Message indicating absent value.
const noValueMessage = 'Not provided'
// Fallback request object that can be safely used to form request summary.
type FallbackRequest = { _header?: string; res: { rawHeaders: string[] } }
// _header is not present in responses with status 1**
// rawHeaders are not present in responses with status 1**
let fallbackRequest: FallbackRequest = {
_header: `${noValueMessage}\n`,
res: { rawHeaders: [noValueMessage] }
}
// Fallback response object that can be safely used to form response summary.
type FallbackResponse = {
status?: number | string
request?: FallbackRequest
config: { data?: string }
data?: unknown
}
let fallbackResponse: FallbackResponse = axiosResponse
if (axios.isAxiosError(axiosResponse)) {
const { response, request, config } = axiosResponse
// Try to use axiosResponse.response to form response summary.
if (response) {
fallbackResponse = response
} else {
// Try to use axiosResponse.request to form request summary.
if (request) {
const { _header, _currentRequest } = request
// Try to use axiosResponse.request._header to form request summary.
if (_header) {
fallbackRequest._header = _header
}
// Try to use axiosResponse.request._currentRequest._header to form request summary.
else if (_currentRequest && _currentRequest._header) {
fallbackRequest._header = _currentRequest._header
}
const { res } = request
// Try to use axiosResponse.request.res.rawHeaders to form request summary.
if (res && res.rawHeaders) {
fallbackRequest.res.rawHeaders = res.rawHeaders
}
}
// Fallback config that can be safely used to form response summary.
const fallbackConfig = { data: noValueMessage }
fallbackResponse = {
status: noValueMessage,
request: fallbackRequest,
config: config || fallbackConfig,
data: noValueMessage
}
}
}
const { status, config, request, data: resData } = fallbackResponse
const { data: reqData } = config
const { _header: reqHeaders, res } = request || fallbackRequest
const { rawHeaders } = res
// Converts an array of strings into a single string with the following format:
// <headerName>: <headerValue>
const resHeaders = rawHeaders.reduce(
(acc: string, value: string, i: number) => {
if (i % 2 === 0) {
acc += `${i === 0 ? '' : '\n'}${value}`
} else {
acc += `: ${value}`
}
return acc
},
''
)
const parsedResBody = this.parseInterceptedBody(resData)
// HTTP response summary.
process.logger?.info(`HTTP Request (first 50 lines):
${reqHeaders}${this.parseInterceptedBody(reqData)}
HTTP Response Code: ${this.prettifyString(status)}
HTTP Response (first 50 lines):
${resHeaders}${parsedResBody ? `\n\n${parsedResBody}` : ''}
`)
return axiosResponse
}
/**
* Sets verbose mode.
* @param verboseMode - value of the verbose mode, can be true, false or bleached(without extra colors).
*/
public setVerboseMode = (verboseMode: VerboseMode) => {
this.verboseMode = verboseMode
if (this.verboseMode) this.enableVerboseMode()
else this.disableVerboseMode()
}
/**
* Turns on verbose mode to log every HTTP response.
* @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
* @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
*/
public enableVerboseMode = (
successCallBack = this.defaultInterceptionCallBack,
errorCallBack = this.defaultInterceptionCallBack
) => {
this.httpInterceptor = this.httpClient.interceptors.response.use(
successCallBack,
errorCallBack
)
}
/**
* Turns off verbose mode to log every HTTP response.
*/
public disableVerboseMode = () => {
if (this.httpInterceptor) {
this.httpClient.interceptors.response.eject(this.httpInterceptor)
}
}
protected getHeaders = ( protected getHeaders = (
accessToken: string | undefined, accessToken: string | undefined,
contentType: string contentType: string
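The verbose-mode block above hangs off axios's interceptor API: interceptors.response.use registers a pair of callbacks and returns a numeric id, and interceptors.response.eject removes them again, which is what enableVerboseMode and disableVerboseMode rely on. Outside the class the same mechanism reduces to something like this (the logging callbacks are deliberately simplified stand-ins for defaultInterceptionCallBack, and the base URL is a placeholder):

import axios, { AxiosError, AxiosResponse } from 'axios'

const httpClient = axios.create({ baseURL: 'https://sas.example.com' })

const onResponse = (res: AxiosResponse) => {
  console.log(`HTTP ${res.status} for ${res.config.url}`)
  return res
}
const onError = (err: AxiosError) => {
  console.log(`HTTP error for ${err.config?.url}: ${err.message}`)
  return Promise.reject(err)
}

// Keep the id so the interceptor can be ejected when verbose mode is switched off.
const interceptorId = httpClient.interceptors.response.use(onResponse, onError)
httpClient.interceptors.response.eject(interceptorId)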

View File

@@ -1,11 +1,20 @@
import { RequestClient } from './RequestClient' import { RequestClient } from './RequestClient'
import { AxiosResponse } from 'axios' import { AxiosResponse } from 'axios'
import { SasjsParsedResponse } from '../types' import { SASJS_LOGS_SEPARATOR } from '../utils'
interface SasjsParsedResponse<T> {
result: T
log: string
etag: string
status: number
printOutput?: string
}
/** /**
* Specific request client for SASJS. * Specific request client for SASJS.
* Append tokens in headers. * Append tokens in headers.
*/ */
export class SasjsRequestClient extends RequestClient { export class SasjsRequestClient extends RequestClient {
getHeaders = (accessToken: string | undefined, contentType: string) => { getHeaders = (accessToken: string | undefined, contentType: string) => {
const headers: any = {} const headers: any = {}
@@ -36,30 +45,13 @@ export class SasjsRequestClient extends RequestClient {
} }
} catch { } catch {
if (response.data.includes(SASJS_LOGS_SEPARATOR)) { if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
const { data } = response const splittedResponse = response.data.split(SASJS_LOGS_SEPARATOR)
const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)
webout = splittedResponse.splice(0, 1)[0] webout = splittedResponse[0]
if (webout !== undefined) parsedResponse = webout if (webout !== undefined) parsedResponse = webout
// log can contain nested logs log = splittedResponse[1]
const logs = splittedResponse.splice(0, splittedResponse.length - 1) printOutput = splittedResponse[2]
// tests if string ends with SASJS_LOGS_SEPARATOR
const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)
// at this point splittedResponse can contain only one item
const lastChunk = splittedResponse[0]
if (lastChunk) {
// if the full response doesn't end with SASJS_LOGS_SEPARATOR, the last chunk is printOutput
// otherwise the last chunk is part of the log and has to be joined back
if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
else if (logs.length > 1) logs.push(lastChunk)
}
// join logs into single log with SASJS_LOGS_SEPARATOR
log = logs.join(SASJS_LOGS_SEPARATOR)
} else { } else {
parsedResponse = response.data parsedResponse = response.data
} }
@@ -67,7 +59,7 @@ export class SasjsRequestClient extends RequestClient {
const returnResult: SasjsParsedResponse<T> = { const returnResult: SasjsParsedResponse<T> = {
result: parsedResponse as T, result: parsedResponse as T,
log: log || '', log,
etag, etag,
status: response.status status: response.status
} }
@@ -77,6 +69,3 @@ export class SasjsRequestClient extends RequestClient {
return returnResult return returnResult
} }
} }
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
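For reference, a minimal sketch of the wire format this client splits apart; the webout, log and printOutput values below are illustrative, only the separator value comes from the source:
const SEP = 'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
// webout, then the SAS log, then (optionally) print output, delimited by the separator
const rawBody = ['{"data":"hello"}', SEP, '1  The SAS System ...', SEP, 'PROC PRINT output'].join('\n')
const [webout, log, printOutput] = rawBody.split(SEP)
// webout becomes `result`, log becomes `log`, and the trailing chunk (when present) becomes `printOutput`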


@@ -1,169 +0,0 @@
import { SASJS_LOGS_SEPARATOR, SasjsRequestClient } from '../SasjsRequestClient'
import { SasjsParsedResponse } from '../../types'
import { AxiosResponse } from 'axios'
describe('SasjsRequestClient', () => {
const requestClient = new SasjsRequestClient('')
const etag = 'etag'
const status = 200
const webout = `hello`
const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00
PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html
NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds
`
const printOutput = 'printOutPut'
describe('parseResponse', () => {})
it('should parse response with 1 log', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with 1 log and printOutput', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs and printOutput', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
})
describe('SASJS_LOGS_SEPARATOR', () => {
it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
expect(SASJS_LOGS_SEPARATOR).toEqual(
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
)
})
})


@@ -2,22 +2,17 @@ import * as pem from 'pem'
import * as http from 'http' import * as http from 'http'
import * as https from 'https' import * as https from 'https'
import { app, mockedAuthResponse } from './SAS_server_app' import { app, mockedAuthResponse } from './SAS_server_app'
import { ServerType } from '@sasjs/utils/types' import { ServerType } from '@sasjs/utils'
import SASjs from '../SASjs' import SASjs from '../SASjs'
import * as axiosModules from '../utils/createAxiosInstance' import * as axiosModules from '../utils/createAxiosInstance'
import axios from 'axios'
import { import {
LoginRequiredError, LoginRequiredError,
AuthorizeError, AuthorizeError,
NotFoundError, NotFoundError,
InternalServerError, InternalServerError
VerboseMode } from '../types/errors'
} from '../types'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix' import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
import { AxiosResponse, AxiosError } from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as UtilsModule from 'util'
const axiosActual = jest.requireActual('axios') const axiosActual = jest.requireActual('axios')
@@ -30,6 +25,16 @@ jest
const PORT = 8000 const PORT = 8000
const SERVER_URL = `https://localhost:${PORT}/` const SERVER_URL = `https://localhost:${PORT}/`
const ERROR_MESSAGES = {
selfSigned: 'self signed certificate',
CCA: 'unable to verify the first certificate'
}
const incorrectAuthCodeErr = {
error: 'unauthorized',
error_description: 'Bad credentials'
}
describe('RequestClient', () => { describe('RequestClient', () => {
let server: http.Server let server: http.Server
@@ -75,411 +80,6 @@ describe('RequestClient', () => {
expect(rejectionErrorMessage).toEqual(expectedError.message) expect(rejectionErrorMessage).toEqual(expectedError.message)
}) })
describe('defaultInterceptionCallBack', () => {
const reqHeaders = `POST https://sas.server.com/compute/sessions/session_id/jobs HTTP/1.1
Accept: application/json
Content-Type: application/json
User-Agent: axios/0.27.2
Content-Length: 334
host: sas.server.io
Connection: close
`
const reqData = `{
name: 'test_job',
description: 'Powered by SASjs',
code: ['test_code'],
variables: {
SYS_JES_JOB_URI: '',
_program: '/Public/sasjs/jobs/jobs/test_job'
},
arguments: {
_contextName: 'SAS Job Execution compute context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
}`
const resHeaders = ['content-type', 'application/json']
const resData = {
id: 'id_string',
name: 'name_string',
uri: 'uri_string',
createdBy: 'createdBy_string',
code: 'TEST CODE',
links: [
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'self',
href: 'self_href_string',
uri: 'uri_string',
type: 'type_string'
}
],
results: { '_webout.json': '_webout.json_string' },
logStatistics: {
lineCount: 1,
modifiedTimeStamp: 'modifiedTimeStamp_string'
}
}
beforeAll(() => {
;(process as any).logger = new Logger(LogLevel.Off)
jest.spyOn((process as any).logger, 'info')
})
it('should log parsed response with status 1**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
}
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const noValueMessage = 'Not provided'
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](noValueMessage)}
HTTP Response (first 50 lines):
${noValueMessage}
\n${requestClient['parseInterceptedBody'](noValueMessage)}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
it('should log parsed response with status 2**', () => {
const status = getRandomStatus([
200, 201, 202, 203, 204, 205, 206, 207, 208, 226
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedResponse)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
})
it('should log parsed response with status 3**', () => {
const status = getRandomStatus([300, 301, 302, 303, 304, 307, 308])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedResponse)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
})
it('should log parsed response with status 4**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const status = getRandomStatus([
400, 401, 402, 403, 404, 407, 408, 409, 410, 411, 412, 413, 414, 415,
416, 417, 418, 421, 422, 423, 424, 425, 426, 428, 429, 431, 451
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
},
response: mockedResponse
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
it('should log parsed response with status 5**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const status = getRandomStatus([
500, 501, 502, 503, 504, 505, 506, 507, 508, 510, 511
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
},
response: mockedResponse
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
})
describe('enableVerboseMode', () => {
it('should add defaultInterceptionCallBack functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
requestClient.enableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(
requestClient['defaultInterceptionCallBack'],
requestClient['defaultInterceptionCallBack']
)
})
it('should add callback functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
const successCallback = (response: AxiosResponse | AxiosError) => {
console.log('success')
return response
}
const failureCallback = (response: AxiosResponse | AxiosError) => {
console.log('failure')
return response
}
requestClient.enableVerboseMode(successCallback, failureCallback)
expect(interceptorSpy).toHaveBeenCalledWith(
successCallback,
failureCallback
)
})
})
describe('setVerboseMode', () => {
it(`should set verbose mode`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = false
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
verbose = true
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
verbose = 'bleached'
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
})
})
describe('prettifyString', () => {
it(`should call inspect without colors when verbose mode is set to 'bleached'`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = 'bleached'
requestClient.setVerboseMode(verbose)
jest.spyOn(UtilsModule, 'inspect')
const testStr = JSON.stringify({ test: 'test' })
requestClient['prettifyString'](testStr)
expect(UtilsModule.inspect).toHaveBeenCalledWith(testStr, {
colors: false
})
})
it(`should call inspect with colors when verbose mode is set to 'true'`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = true
requestClient.setVerboseMode(verbose)
jest.spyOn(UtilsModule, 'inspect')
const testStr = JSON.stringify({ test: 'test' })
requestClient['prettifyString'](testStr)
expect(UtilsModule.inspect).toHaveBeenCalledWith(testStr, {
colors: true
})
})
})
describe('disableVerboseMode', () => {
it('should eject interceptor', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'eject'
)
const interceptorId = 100
requestClient['httpInterceptor'] = interceptorId
requestClient.disableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(interceptorId)
})
})
describe('handleError', () => { describe('handleError', () => {
const requestClient = new RequestClient('https://localhost:8009') const requestClient = new RequestClient('https://localhost:8009')
const randomError = 'some error' const randomError = 'some error'
@@ -613,7 +213,7 @@ describe('RequestClient - Self Signed Server', () => {
serverType: ServerType.SasViya serverType: ServerType.SasViya
}) })
const expectedError = 'self-signed certificate' const expectedError = 'self signed certificate'
const rejectionErrorMessage = await adapterWithoutCertificate const rejectionErrorMessage = await adapterWithoutCertificate
.getAccessToken('clientId', 'clientSecret', 'authCode') .getAccessToken('clientId', 'clientSecret', 'authCode')
@@ -693,11 +293,3 @@ const createCertificate = async (): Promise<pem.CertificateCreationResult> => {
) )
}) })
} }
/**
* Returns a random status code.
* @param statuses - an array of available statuses.
* @returns - random item from an array of statuses.
*/
const getRandomStatus = (statuses: number[]) =>
statuses[Math.floor(Math.random() * statuses.length)]


@@ -2,7 +2,7 @@ import { SessionManager } from '../SessionManager'
import { RequestClient } from '../request/RequestClient' import { RequestClient } from '../request/RequestClient'
import * as dotenv from 'dotenv' import * as dotenv from 'dotenv'
import axios from 'axios' import axios from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger' import { Logger, LogLevel } from '@sasjs/utils'
import { Session, Context } from '../types' import { Session, Context } from '../types'
jest.mock('axios') jest.mock('axios')


@@ -1,9 +1,6 @@
export interface PollOptions { export interface PollOptions {
maxPollCount: number maxPollCount: number
pollInterval: number // milliseconds pollInterval: number
pollStrategy?: PollStrategy streamLog: boolean
streamLog?: boolean
logFolderPath?: string logFolderPath?: string
} }
export type PollStrategy = PollOptions[]
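Assuming the variant of PollOptions above that includes pollStrategy (with optional streamLog), an escalating polling configuration might look like this sketch; the numbers are illustrative and the staging semantics are assumed:
const pollOptions: PollOptions = {
  maxPollCount: 3600,
  pollInterval: 1000, // milliseconds
  streamLog: false,
  // further PollOptions, presumably applied once the initial options are exhausted
  pollStrategy: [
    { maxPollCount: 7200, pollInterval: 10000 },
    { maxPollCount: 14400, pollInterval: 60000 }
  ]
}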


@@ -1,55 +0,0 @@
import { CsrfToken } from '..'
export interface HttpClient {
get<T>(
url: string,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
post<T>(
url: string,
data: any,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
put<T>(
url: string,
data: any,
accessToken: string | undefined,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
delete<T>(
url: string,
accessToken: string | undefined
): Promise<{ result: T; etag: string }>
getCsrfToken(type: 'general' | 'file'): CsrfToken | undefined
saveLocalStorageToken(accessToken: string, refreshToken: string): void
clearCsrfTokens(): void
clearLocalStorageTokens(): void
getBaseUrl(): string
}
export interface SASjsRequest {
serviceLink: string
timestamp: Date
sourceCode: string
generatedCode: string
logFile: string
SASWORK: any
}
export interface SasjsParsedResponse<T> {
result: T
log: string
etag: string
status: number
printOutput?: string
}
export type VerboseMode = boolean | 'bleached'


@@ -1,6 +1,5 @@
import * as https from 'https' import * as https from 'https'
import { ServerType } from '@sasjs/utils/types' import { ServerType } from '@sasjs/utils/types'
import { VerboseMode } from '../types'
/** /**
* Specifies the configuration for the SASjs instance - eg where and how to * Specifies the configuration for the SASjs instance - eg where and how to
@@ -46,10 +45,6 @@ export class SASjsConfig {
* Set to `true` to enable additional debugging. * Set to `true` to enable additional debugging.
*/ */
debug: boolean = true debug: boolean = true
/**
* Set to `true` to enable verbose mode that will log a summary of every HTTP response.
*/
verbose?: VerboseMode = true
/** /**
* The name of the compute context to use when calling the Viya services directly. * The name of the compute context to use when calling the Viya services directly.
* Example value: 'SAS Job Execution compute context' * Example value: 'SAS Job Execution compute context'
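A configuration enabling the verbose summaries described above might be passed as in the sketch below; values are placeholders, and contextName is assumed to be the field the adjacent doc comment describes:
const config: Partial<SASjsConfig> = {
  debug: true,
  verbose: 'bleached', // or true / false
  contextName: 'SAS Job Execution compute context'
}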

src/types/SASjsRequest.ts Normal file

@@ -0,0 +1,12 @@
/**
* Represents a SASjs request, its response and logs.
*
*/
export interface SASjsRequest {
serviceLink: string
timestamp: Date
sourceCode: string
generatedCode: string
logFile: string
SASWORK: any
}
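An entry conforming to this interface might look like the following sketch; all values are illustrative:
const exampleRequest: SASjsRequest = {
  serviceLink: '/Public/app/services/common/appinit',
  timestamp: new Date(),
  sourceCode: '%put source code;',
  generatedCode: '%put generated code;',
  logFile: '1  The SAS System ...',
  SASWORK: null
}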


@@ -7,7 +7,7 @@ describe('RootFolderNotFoundError', () => {
const error = new RootFolderNotFoundError( const error = new RootFolderNotFoundError(
'/myProject', '/myProject',
'https://sas.4gl.io', 'https://analytium.co.uk',
token token
) )
@@ -19,7 +19,7 @@ describe('RootFolderNotFoundError', () => {
it('when access token is not provided, error message should not contain scopes', () => { it('when access token is not provided, error message should not contain scopes', () => {
const error = new RootFolderNotFoundError( const error = new RootFolderNotFoundError(
'/myProject', '/myProject',
'https://sas.4gl.io' 'https://analytium.co.uk'
) )
expect(error).toBeInstanceOf(RootFolderNotFoundError) expect(error).toBeInstanceOf(RootFolderNotFoundError)
@@ -30,7 +30,7 @@ describe('RootFolderNotFoundError', () => {
it('should include the folder path and SASDrive URL in the message', () => { it('should include the folder path and SASDrive URL in the message', () => {
const folderPath = '/myProject' const folderPath = '/myProject'
const serverUrl = 'https://sas.4gl.io' const serverUrl = 'https://analytium.co.uk'
const error = new RootFolderNotFoundError(folderPath, serverUrl) const error = new RootFolderNotFoundError(folderPath, serverUrl)
expect(error).toBeInstanceOf(RootFolderNotFoundError) expect(error).toBeInstanceOf(RootFolderNotFoundError)


@@ -6,12 +6,10 @@ export * from './Job'
export * from './JobDefinition' export * from './JobDefinition'
export * from './JobResult' export * from './JobResult'
export * from './Link' export * from './Link'
export * from './Login'
export * from './SASjsConfig' export * from './SASjsConfig'
export * from './RequestClient' export * from './SASjsRequest'
export * from './Session' export * from './Session'
export * from './UploadFile' export * from './UploadFile'
export * from './PollOptions' export * from './PollOptions'
export * from './WriteStream' export * from './WriteStream'
export * from './ExecuteScript' export * from './ExecuteScript'
export * from './errors'


@@ -1,4 +1,4 @@
import { SASjsRequest } from '../types' import { SASjsRequest } from '../types/SASjsRequest'
/** /**
* Comparator for SASjs request timestamps. * Comparator for SASjs request timestamps.

src/utils/constants.ts Normal file

@@ -0,0 +1,2 @@
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'


@@ -1,5 +0,0 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'
export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()


@@ -2,6 +2,7 @@ export * from './appendExtraResponseAttributes'
export * from './asyncForEach' export * from './asyncForEach'
export * from './compareTimestamps' export * from './compareTimestamps'
export * from './convertToCsv' export * from './convertToCsv'
export * from './constants'
export * from './createAxiosInstance' export * from './createAxiosInstance'
export * from './delay' export * from './delay'
export * from './fetchLogByChunks' export * from './fetchLogByChunks'
@@ -19,4 +20,3 @@ export * from './parseWeboutResponse'
export * from './serialize' export * from './serialize'
export * from './splitChunks' export * from './splitChunks'
export * from './validateInput' export * from './validateInput'
export * from './getFormData'


@@ -1,20 +0,0 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'
describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
expect(getFormData() instanceof NodeFormData).toEqual(true)
})
it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
expect(getFormData() instanceof FormData).toEqual(true)
})
})