mirror of https://github.com/sasjs/adapter.git synced 2026-01-03 02:30:06 +00:00

Compare commits

..

26 Commits

Author SHA1 Message Date
Yury Shkoda 76bbb8acf2 chore(sasjsTests): debugging 2023-05-29 18:11:25 +03:00
Yury Shkoda 1ead483921 chore(sasjsTests): debugging 2023-05-29 18:00:57 +03:00
Yury Shkoda e2cb787f89 chore(sasjs-tests): trying to run only sendArrTests 2023-05-29 17:40:40 +03:00
Yury Shkoda 828aef1873 chore(sasjs-tests): debugging 2023-05-29 17:33:01 +03:00
Yury Shkoda f6ee1111c5 chore(cypress): debugging 2023-05-29 17:19:08 +03:00
Yury Shkoda 5f8750a8b6 chore(cypress): debugging 2023-05-29 16:54:21 +03:00
Yury Shkoda d027acacb6 chore(cypress): debugging 2023-05-29 16:45:32 +03:00
Yury Shkoda 3660b9127a chore(sasjsTests): debugging 2023-05-29 16:39:31 +03:00
Yury Shkoda 5ac0f12435 chore(sasjsTests): debugging 2023-05-29 16:28:59 +03:00
Yury Shkoda ee0c4b007b chore(sasjsTests): decreased defaultCommandTimeout for cypress 2023-05-29 15:41:15 +03:00
Yury Shkoda db4a4e6d57 chore(sasjsTests): trying @sasjs/adapter@4.3.5 2023-05-29 15:23:38 +03:00
Yury Shkoda eba30432dd chore(sasjsTests): debugging 2023-05-29 15:07:35 +03:00
Yury Shkoda 6b9cb3af5f chore(sasjsTests): added sleep step 2023-05-29 14:52:06 +03:00
Yury Shkoda afe612925e chore(sasjsTests): debugging 2023-05-29 14:40:26 +03:00
Yury Shkoda 1f9bed0625 chore(sasjsTest): debugging 2023-05-29 14:27:49 +03:00
Yury Shkoda 51fdea46fc chore(sasjs-tests): debugging 2023-05-29 14:07:28 +03:00
Yury Shkoda 007b00565c chore(sasjsTests): removed pm2 log 2023-05-29 13:54:28 +03:00
Yury Shkoda 38eef00216 chore(sasjsTests): using different user 2023-05-29 13:46:53 +03:00
Yury Shkoda f1c67432bf chore(sasjs-tests): debugging 2023-05-26 12:21:27 +03:00
Yury Shkoda 3041a0f4b1 chore(sasjs-tests): debugging 2023-05-26 12:07:42 +03:00
Yury Shkoda 6a5529f3f0 chore: debugging 2023-05-26 11:07:31 +03:00
Yury Shkoda 7758b78a88 chore: debugging sasjs 2023-05-26 10:11:02 +03:00
Yury Shkoda 09c1038cbd chore: debugging sasjs 2023-05-26 10:02:07 +03:00
Yury Shkoda 87e2449b6f chore: debugging sasjs 2023-05-26 09:52:37 +03:00
Yury Shkoda c6b927c525 test: updated unit tests related to tokens operations 2023-05-25 10:35:10 +03:00
Yury Shkoda 4b6445d524 feat: improved error message for requests related to tokens operations 2023-05-25 10:27:54 +03:00
57 changed files with 4847 additions and 5049 deletions

View File

@@ -14,7 +14,7 @@ What code changes have been made to achieve the intent.
No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.
- [ ] Unit tests coverage has been increased and a new threshold is set.
- [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- (CI Runs this) All `sasjs-tests` are passing. If you want to run them manually, instructions are available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md).
- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

View File

@@ -5,3 +5,7 @@ groups:
- YuryShkoda
- medjedovicm
- sabhas
- name: SASjs QA
reviewers: 1
usernames:
- VladislavParhomchik

View File

@@ -10,4 +10,4 @@ jobs:
- uses: actions/checkout@v2
- uses: uesteibar/reviewer-lottery@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
repo-token: ${{ secrets.GH_TOKEN }}

View File

@@ -12,7 +12,7 @@ jobs:
strategy:
matrix:
node-version: [lts/hydrogen]
node-version: [lts/fermium]
steps:
- uses: actions/checkout@v2
@@ -22,17 +22,17 @@ jobs:
node-version: ${{ matrix.node-version }}
cache: npm
- name: Check npm audit
run: npm audit --production --audit-level=low
# - name: Check npm audit
# run: npm audit --production --audit-level=low
- name: Install Dependencies
run: npm ci
- name: Check code style
run: npm run lint
# - name: Check code style
# run: npm run lint
- name: Run unit tests
run: npm test
# - name: Run unit tests
# run: npm test
- name: Build Package
run: npm run package:lib
@@ -72,23 +72,27 @@ jobs:
npm install -g replace-in-files-cli
cd sasjs-tests
replace-in-files --regex='"@sasjs/adapter".*' --replacement='"@sasjs/adapter":"latest",' ./package.json
npm i
npm i --legacy-peer-deps
replace-in-files --regex='"serverUrl".*' --replacement='"serverUrl":"${{ secrets.SASJS_SERVER_URL }}",' ./public/config.json
replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME }}",' ./public/config.json
replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./public/config.json
replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME_DEV }}",' ./public/config.json
replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./public/config.json
replace-in-files --regex='"serverType".*' --replacement='"serverType":"SASJS",' ./public/config.json
npm run update:adapter
# npm run update:adapter
pm2 start --name sasjs-test npm -- start
cat ./public/config.json
cat ../cypress.json
- name: Sleep for 10 seconds
run: sleep 10s
shell: bash
uses: jakejarvis/wait-action@master
with:
time: '10s'
- name: Run cypress on sasjs
run: |
ss -lntu
replace-in-files --regex='"sasjsTestsUrl".*' --replacement='"sasjsTestsUrl":"http://localhost:3000",' ./cypress.json
replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME }}",' ./cypress.json
replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./cypress.json
replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME_DEV }}",' ./cypress.json
replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./cypress.json
sh ./sasjs-tests/sasjs-cypress-run.sh ${{ secrets.MATRIX_TOKEN }} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
# For some reason, if the coverage report action is run before other commands, those commands can't access the directories and files on which they depend

View File

@@ -11,7 +11,7 @@ jobs:
strategy:
matrix:
node-version: [lts/hydrogen]
node-version: [lts/fermium]
steps:
- name: Checkout
@@ -37,8 +37,8 @@ jobs:
- name: Push generated docs
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
github_token: ${{ secrets.GH_TOKEN }}
publish_branch: gh-pages
publish_dir: ./docs
publish_dir: ./docs
cname: adapter.sasjs.io

View File

@@ -14,7 +14,7 @@ jobs:
strategy:
matrix:
node-version: [lts/hydrogen]
node-version: [lts/fermium]
steps:
- uses: actions/checkout@v2
@@ -36,7 +36,7 @@ jobs:
- name: Semantic Release
uses: cycjimmy/semantic-release-action@v3
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
- name: Send Matrix message

View File

@@ -151,11 +151,7 @@ The `request()` method also has optional parameters such as a config object and
The response object will contain returned tables and columns. Table names are always lowercase, and column names uppercase.
The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.
### Verbose Mode
Set `verbose` to `true` to enable verbose mode, which logs a summary of every HTTP response. Verbose mode can be disabled by calling the `disableVerboseMode` method or enabled with the `enableVerboseMode` method. Verbose mode also supports a `bleached` mode that disables extra colors in the req/res summary. To enable `bleached` verbose mode, pass `verbose` equal to `bleached` when instantiating a `RequestClient`, or pass it to the `setVerboseMode` method. Verbose mode can also be enabled or disabled via the `startComputeJob` method.
The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.
### Session Manager
@@ -277,7 +273,6 @@ Configuration on the client side involves passing an object on startup, which ca
* `serverType` - either `SAS9`, `SASVIYA` or `SASJS`. The `SASJS` server type is for use with [sasjs/server](https://github.com/sasjs/server).
* `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
* `debug` - if `true` then SAS logs and extra debug information are returned.
* `verbose` - optional, if `true` then a summary of every HTTP response is logged.
* `loginMechanism` - either `Default` or `Redirected`. See [SAS Logon](#sas-logon) section.
* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
* `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
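For orientation alongside this README diff, here is a minimal client-side sketch tying the options above together. The constructor, config keys and `request()` call follow the README excerpts in this compare view; the service path, the input-table shape and the `as any` cast are illustrative assumptions rather than documented values.

```ts
import SASjs from '@sasjs/adapter'

// Placeholder configuration; adjust serverUrl/appLoc for a real deployment.
const sasjs = new SASjs({
  appLoc: '/Public/app/readme',     // root folder of the deployed services
  serverType: 'SASJS',              // or 'SAS9' / 'SASVIYA'
  serverUrl: 'https://sas.example', // may be omitted when served from the SAS web server
  debug: false,                     // true also returns the SAS log
  verbose: true                     // logs a summary of every HTTP response (only on adapter versions with verbose mode)
} as any)

async function runExample() {
  // Assumed request shape: keys of the data object are input table names.
  const response: any = await sasjs.request('common/sendArr', {
    fromjs: [{ COL1: 'some value' }]
  })

  // Per the README above: table names come back lowercase, column names uppercase.
  console.log(response?.somedata?.[0]?.SOMECOL)
}

runExample().catch(console.error)
```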

View File

@@ -5,6 +5,9 @@ const testingFinishTimeout = Cypress.env('testingFinishTimeout')
context('sasjs-tests', function () {
this.beforeAll(() => {
cy.task('log', 'beforeAll')
cy.task('log', `sasjsTestsUrl: ${sasjsTestsUrl}`)
cy.visit(sasjsTestsUrl)
})
@@ -13,35 +16,66 @@ context('sasjs-tests', function () {
})
it('Should have all tests successfull', (done) => {
cy.task('log', `Should have all tests successfull`)
cy.get('body').then(($body) => {
cy.task('log', `22`)
cy.wait(1000).then(() => {
const startButton = $body.find(
'.ui.massive.icon.primary.left.labeled.button'
)[0]
// ui massive icon primary left labeled button
cy.task('log', `startButton: ${startButton}`)
if (
!startButton ||
(startButton && !Cypress.dom.isVisible(startButton))
) {
cy.task('log', `34`)
cy.task('log', `username: ${username}`)
cy.task('log', `password: ${password}`)
const userNameInput = cy.get('input[placeholder="User Name"]')
const passwordInput = cy.get('input[placeholder="Password"]')
cy.task('log', `userNameInput: ${userNameInput}`)
cy.task('log', `passwordInput: ${passwordInput}`)
cy.get('input[placeholder="User Name"]').type(username)
cy.get('input[placeholder="Password"]').type(password)
const submitBtn = cy.get('.submit-button')
cy.task('log', `submitBtn: ${submitBtn}`)
cy.get('.submit-button').click()
}
cy.get('input[placeholder="User Name"]', { timeout: 40000 })
.should('not.exist')
.then(() => {
cy.task('log', `46`)
cy.get('.ui.massive.icon.primary.left.labeled.button')
.click()
.then(() => {
cy.task('log', `50`)
const loadingButton = $body.find(
'.ui.massive.loading.primary.button'
)[0]
cy.task('log', `loadingButton: ${loadingButton}`)
cy.get('.ui.massive.loading.primary.button', {
timeout: testingFinishTimeout
})
.should('not.exist')
.then(() => {
cy.task('log', `56`)
cy.get('span.icon.failed')
.should('not.exist')
.then(() => {
cy.task('log', `60`)
done()
})
})
@@ -51,46 +85,46 @@ context('sasjs-tests', function () {
})
})
it('Should have all tests successfull with debug on', (done) => {
cy.get('body').then(($body) => {
cy.wait(1000).then(() => {
const startButton = $body.find(
'.ui.massive.icon.primary.left.labeled.button'
)[0]
// it('Should have all tests successfull with debug on', (done) => {
// cy.get('body').then(($body) => {
// cy.wait(1000).then(() => {
// const startButton = $body.find(
// '.ui.massive.icon.primary.left.labeled.button'
// )[0]
if (
!startButton ||
(startButton && !Cypress.dom.isVisible(startButton))
) {
cy.get('input[placeholder="User Name"]').type(username)
cy.get('input[placeholder="Password"]').type(password)
cy.get('.submit-button').click()
}
// if (
// !startButton ||
// (startButton && !Cypress.dom.isVisible(startButton))
// ) {
// cy.get('input[placeholder="User Name"]').type(username)
// cy.get('input[placeholder="Password"]').type(password)
// cy.get('.submit-button').click()
// }
cy.get('.ui.fitted.toggle.checkbox label')
.click()
.then(() => {
cy.get('input[placeholder="User Name"]', { timeout: 40000 })
.should('not.exist')
.then(() => {
cy.get('.ui.massive.icon.primary.left.labeled.button')
.click()
.then(() => {
cy.get('.ui.massive.loading.primary.button', {
timeout: testingFinishTimeout
})
.should('not.exist')
.then(() => {
cy.get('span.icon.failed')
.should('not.exist')
.then(() => {
done()
})
})
})
})
})
})
})
})
// cy.get('.ui.fitted.toggle.checkbox label')
// .click()
// .then(() => {
// cy.get('input[placeholder="User Name"]', { timeout: 40000 })
// .should('not.exist')
// .then(() => {
// cy.get('.ui.massive.icon.primary.left.labeled.button')
// .click()
// .then(() => {
// cy.get('.ui.massive.loading.primary.button', {
// timeout: testingFinishTimeout
// })
// .should('not.exist')
// .then(() => {
// cy.get('span.icon.failed')
// .should('not.exist')
// .then(() => {
// done()
// })
// })
// })
// })
// })
// })
// })
// })
})

View File

@@ -39,4 +39,11 @@ module.exports = (on, config) => {
return launchOptions
}
})
on('task', {
log(message) {
console.log(message)
return null
}
})
}

View File

@@ -1,7 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@4"></script>
<script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@1"></script>
<script>
var sasJs = new SASjs.default({
appLoc: "/Public/app/readme"

View File

@@ -41,14 +41,7 @@ module.exports = {
// ],
// An object that configures minimum threshold enforcement for coverage results
coverageThreshold: {
global: {
statements: 63.61,
branches: 44.72,
functions: 53.94,
lines: 64.07
}
},
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,

package-lock.json (generated)
View File

@@ -13,7 +13,7 @@
"axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0",
"https": "1.0.0",
"tough-cookie": "4.1.3"
"tough-cookie": "4.0.0"
},
"devDependencies": {
"@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
"@types/jest": "27.4.0",
"@types/mime": "2.0.3",
"@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.2",
"@types/tough-cookie": "4.0.1",
"copyfiles": "2.4.1",
"cp": "0.2.0",
"cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
"dev": true
},
"node_modules/@types/tough-cookie": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
},
"node_modules/@types/yargs": {
"version": "16.0.5",
@@ -14110,11 +14110,6 @@
"node": ">=0.4.x"
}
},
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14462,11 +14457,6 @@
"node": ">=0.10.0"
}
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"node_modules/resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15712,23 +15702,22 @@
}
},
"node_modules/tough-cookie": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"dependencies": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
"universalify": "^0.1.2"
},
"engines": {
"node": ">=6"
}
},
"node_modules/tough-cookie/node_modules/universalify": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"engines": {
"node": ">= 4.0.0"
}
@@ -16362,15 +16351,6 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true
},
"node_modules/url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/url/node_modules/punycode": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -19556,9 +19536,9 @@
"dev": true
},
"@types/tough-cookie": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
"integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
"integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
},
"@types/yargs": {
"version": "16.0.5",
@@ -27572,11 +27552,6 @@
"integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
"dev": true
},
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27858,11 +27833,6 @@
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"dev": true
},
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
},
"resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28829,20 +28799,19 @@
"dev": true
},
"tough-cookie": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"requires": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
"universalify": "^0.1.2"
},
"dependencies": {
"universalify": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
}
}
},
@@ -29300,15 +29269,6 @@
"integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
"dev": true
},
"url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"util": {
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",

View File

@@ -49,7 +49,7 @@
"@types/jest": "27.4.0",
"@types/mime": "2.0.3",
"@types/pem": "1.9.6",
"@types/tough-cookie": "4.0.2",
"@types/tough-cookie": "4.0.1",
"copyfiles": "2.4.1",
"cp": "0.2.0",
"cypress": "7.7.0",
@@ -82,6 +82,6 @@
"axios-cookiejar-support": "1.0.1",
"form-data": "4.0.0",
"https": "1.0.0",
"tough-cookie": "4.1.3"
"tough-cookie": "4.0.0"
}
}

File diff suppressed because it is too large.

View File

@@ -4,14 +4,15 @@
"homepage": ".",
"private": true,
"dependencies": {
"@sasjs/adapter": "4.3.5",
"@sasjs/test-framework": "1.5.7",
"@types/jest": "^26.0.20",
"@types/node": "^14.14.41",
"@types/react": "^16.0.1",
"@types/react-dom": "^16.0.0",
"@types/react": "^17.0.1",
"@types/react-dom": "^17.0.0",
"@types/react-router-dom": "^5.1.7",
"react": "^16.0.1",
"react-dom": "^16.0.1",
"react": "^17.0.1",
"react-dom": "^17.0.1",
"react-router-dom": "^5.2.0",
"react-scripts": "^5.0.1",
"typescript": "^4.1.3"
@@ -21,7 +22,7 @@
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject",
"update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
"update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz --legacy-peer-deps",
"deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
"deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
"deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
@@ -42,6 +43,6 @@
]
},
"devDependencies": {
"node-sass": "9.0.0"
"node-sass": "7.0.3"
}
}

View File

@@ -2,7 +2,7 @@
"userName": "",
"password": "",
"sasJsConfig": {
"serverUrl": "",
"serverUrl": "https://sas9.4gl.io",
"appLoc": "/Public/app/adapter-tests/services",
"serverType": "SASJS",
"debug": false,

View File

@@ -1,7 +1,9 @@
{
"$schema": "https://cli.sasjs.io/sasjsconfig-schema.json",
"serviceConfig": {
"serviceFolders": ["sasjs/common"]
"serviceFolders": [
"sasjs/common"
]
},
"defaultTarget": "4gl",
"targets": [
@@ -26,4 +28,4 @@
}
}
]
}
}

View File

@@ -11,7 +11,7 @@ const Login = (): ReactElement<{}> => {
const handleSubmit = useCallback(
(e: any) => {
e.preventDefault()
appContext.adapter.logIn(username, password).then((res) => {
appContext.adapter.logIn(username, password).then((res: any) => {
appContext.setIsLoggedIn(res.isLoggedIn)
})
},

View File

@@ -25,16 +25,10 @@ import { prefixMessage } from '@sasjs/utils/error'
import { pollJobState } from './api/viya/pollJobState'
import { getTokens } from './auth/getTokens'
import { uploadTables } from './api/viya/uploadTables'
import { executeOnComputeApi } from './api/viya/executeOnComputeApi'
import { executeScript } from './api/viya/executeScript'
import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
import { refreshTokensForViya } from './auth/refreshTokensForViya'
interface JobExecutionResult {
result?: { result: object }
log?: string
error?: object
}
/**
* A client for interfacing with the SAS Viya REST API.
*
@@ -276,7 +270,7 @@ export class SASViyaApiClient {
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -293,7 +287,7 @@ export class SASViyaApiClient {
printPid = false,
variables?: MacroVar
): Promise<any> {
return executeOnComputeApi(
return executeScript(
this.requestClient,
this.sessionManager,
this.rootFolderName,
@@ -627,7 +621,7 @@ export class SASViyaApiClient {
* @param accessToken - an optional access token for an authorized user.
* @param waitForResult - a boolean indicating if the function should wait for a result.
* @param expectWebout - a boolean indicating whether to expect a _webout response.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
@@ -738,13 +732,11 @@ export class SASViyaApiClient {
debug: boolean,
data?: any,
authConfig?: AuthConfig
): Promise<JobExecutionResult> {
) {
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await getTokens(this.requestClient, authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
'Relative paths cannot be used without specifying a root folder name.'
@@ -757,7 +749,6 @@ export class SASViyaApiClient {
const fullFolderPath = isRelativePath(sasJob)
? `${this.rootFolderName}/${folderPath}`
: folderPath
await this.populateFolderMap(fullFolderPath, access_token)
const jobFolder = this.folderMap.get(fullFolderPath)
@@ -774,8 +765,9 @@ export class SASViyaApiClient {
files = await this.uploadTables(data, access_token)
}
if (!jobToExecute) throw new Error(`Job was not found.`)
if (!jobToExecute) {
throw new Error(`Job was not found.`)
}
const jobDefinitionLink = jobToExecute?.links.find(
(l) => l.rel === 'getResource'
)?.href
@@ -815,19 +807,16 @@ export class SASViyaApiClient {
jobDefinition,
arguments: jobArguments
}
const { result: postedJob } = await this.requestClient.post<Job>(
`${this.serverUrl}/jobExecution/jobs?_action=wait`,
postJobRequestBody,
access_token
)
const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
(err) => {
throw prefixMessage(err, 'Error while polling job status. ')
}
)
const { result: currentJob } = await this.requestClient.get<Job>(
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
access_token
@@ -838,7 +827,6 @@ export class SASViyaApiClient {
const resultLink = currentJob.results['_webout.json']
const logLink = currentJob.links.find((l) => l.rel === 'log')
if (resultLink) {
jobResult = await this.requestClient.get<any>(
`${this.serverUrl}${resultLink}/content`,
@@ -846,13 +834,11 @@ export class SASViyaApiClient {
'text/plain'
)
}
if (debug && logLink) {
log = await this.requestClient
.get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
.then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
}
if (jobStatus === 'failed') {
throw new JobExecutionError(
currentJob.error?.errorCode,
@@ -860,16 +846,7 @@ export class SASViyaApiClient {
log
)
}
const executionResult: JobExecutionResult = {
result: jobResult?.result,
log
}
const { error } = currentJob
if (error) executionResult.error = error
return executionResult
return { result: jobResult?.result, log }
}
private async populateFolderMap(folderPath: string, accessToken?: string) {

View File

@@ -4,12 +4,7 @@ import {
UploadFile,
EditContextInput,
PollOptions,
LoginMechanism,
VerboseMode,
ErrorResponse,
LoginOptions,
LoginResult,
ExecutionQuery
LoginMechanism
} from './types'
import { SASViyaApiClient } from './SASViyaApiClient'
import { SAS9ApiClient } from './SAS9ApiClient'
@@ -34,7 +29,8 @@ import {
Sas9JobExecutor,
FileUploader
} from './job-execution'
import { AxiosResponse, AxiosError } from 'axios'
import { ErrorResponse } from './types/errors'
import { LoginOptions, LoginResult } from './types/Login'
interface ExecuteScriptParams {
linesOfCode: string[]
@@ -161,23 +157,6 @@ export default class SASjs {
}
}
/**
* Executes job on SASJS server.
* @param query - an object containing job path and debug level.
* @param appLoc - an application path.
* @param authConfig - an object for authentication.
* @returns a promise that resolves into job execution result and log.
*/
public async executeJob(
query: ExecutionQuery,
appLoc: string,
authConfig?: AuthConfig
) {
this.isMethodSupported('executeScript', [ServerType.Sasjs])
return await this.sasJSApiClient?.executeJob(query, appLoc, authConfig)
}
/**
* Gets compute contexts.
* @param accessToken - an access token for an authorised user.
@@ -872,10 +851,9 @@ export default class SASjs {
* @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
* The access token is not required when the user is authenticated via the browser.
* @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
* @param verboseMode - boolean or a string equal to 'bleached' to enable verbose mode (log every HTTP response).
*/
public async startComputeJob(
sasJob: string,
@@ -885,8 +863,7 @@ export default class SASjs {
waitForResult?: boolean,
pollOptions?: PollOptions,
printPid = false,
variables?: MacroVar,
verboseMode?: VerboseMode
variables?: MacroVar
) {
config = {
...this.sasjsConfig,
@@ -900,11 +877,6 @@ export default class SASjs {
)
}
if (verboseMode) {
this.requestClient?.setVerboseMode(verboseMode)
this.requestClient?.enableVerboseMode()
} else if (verboseMode === false) this.requestClient?.disableVerboseMode()
return this.sasViyaApiClient?.executeComputeJob(
sasJob,
config.contextName,
@@ -998,8 +970,7 @@ export default class SASjs {
this.requestClient = new RequestClientClass(
this.sasjsConfig.serverUrl,
this.sasjsConfig.httpsAgentOptions,
this.sasjsConfig.requestHistoryLimit,
this.sasjsConfig.verbose
this.sasjsConfig.requestHistoryLimit
)
} else {
this.requestClient.setConfig(
@@ -1163,31 +1134,4 @@ export default class SASjs {
)
}
}
/**
* Enables verbose mode that will log a summary of every HTTP response.
* @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
* @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
*/
public enableVerboseMode(
successCallBack?: (response: AxiosResponse | AxiosError) => AxiosResponse,
errorCallBack?: (response: AxiosResponse | AxiosError) => AxiosResponse
) {
this.requestClient?.enableVerboseMode(successCallBack, errorCallBack)
}
/**
* Turns off verbose mode to log every HTTP response.
*/
public disableVerboseMode() {
this.requestClient?.disableVerboseMode()
}
/**
* Sets verbose mode.
* @param verboseMode - value of the verbose mode, can be true, false or bleached(without extra colors).
*/
public setVerboseMode = (verboseMode: VerboseMode) => {
this.requestClient?.setVerboseMode(verboseMode)
}
}
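For reference, a small usage sketch of the verbose-mode helpers that this diff removes from `SASjs`. The method names and the `bleached` value come from the JSDoc above; the callbacks and the placeholder config are assumptions for illustration, and the calls only compile against adapter versions that still include these methods.

```ts
import { AxiosError, AxiosResponse } from 'axios'
import SASjs from '@sasjs/adapter'

const sasjs = new SASjs({ serverType: 'SASJS' } as any) // placeholder config

// Log a summary of every HTTP response, with optional success/error callbacks
// (signatures as documented in the removed JSDoc above).
sasjs.enableVerboseMode(
  (response: AxiosResponse | AxiosError) => {
    console.log('2xx response received')
    return response as AxiosResponse
  },
  (response: AxiosResponse | AxiosError) => {
    console.error('non-2xx response received')
    return response as AxiosResponse
  }
)

// Switch to the colour-free ('bleached') summary, then turn verbose mode off again.
sasjs.setVerboseMode('bleached')
sasjs.disableVerboseMode()
```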

View File

@@ -12,15 +12,11 @@ import { RequestClient } from '../../request/RequestClient'
import { SessionManager } from '../../SessionManager'
import { isRelativePath, fetchLogByChunks } from '../../utils'
import { formatDataForRequest } from '../../utils/formatDataForRequest'
import { pollJobState, JobState } from './pollJobState'
import { pollJobState } from './pollJobState'
import { uploadTables } from './uploadTables'
interface JobRequestBody {
[key: string]: number | string | string[]
}
/**
* Executes SAS program on the current SAS Viya server using Compute API.
* Executes code on the current SAS Viya server.
* @param jobPath - the path to the file being submitted for execution.
* @param linesOfCode - an array of code lines to execute.
* @param contextName - the context to execute the code in.
@@ -29,11 +25,11 @@ interface JobRequestBody {
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, function will return the session
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
* @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
* @param variables - an object that represents macro variables.
*/
export async function executeOnComputeApi(
export async function executeScript(
requestClient: RequestClient,
sessionManager: SessionManager,
rootFolderName: string,
@@ -50,7 +46,6 @@ export async function executeOnComputeApi(
variables?: MacroVar
): Promise<any> {
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await getTokens(requestClient, authConfig))
}
@@ -83,13 +78,27 @@ export async function executeOnComputeApi(
const logger = process.logger || console
logger.info(
`Triggering '${relativeJobPath}' with PID ${
`Triggered '${relativeJobPath}' with PID ${
jobIdVariable.value
} at ${timestampToYYYYMMDDHHMMSS()}`
)
}
}
const jobArguments: { [key: string]: any } = {
_contextName: contextName,
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
if (debug) {
jobArguments['_OMITTEXTLOG'] = false
jobArguments['_OMITSESSIONRESULTS'] = false
}
let fileName
if (isRelativePath(jobPath)) {
@@ -98,7 +107,6 @@ export async function executeOnComputeApi(
}`
} else {
const jobPathParts = jobPath.split('/')
fileName = jobPathParts.pop()
}
@@ -110,6 +118,7 @@ export async function executeOnComputeApi(
}
if (variables) jobVariables = { ...jobVariables, ...variables }
if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
let files: any[] = []
@@ -136,12 +145,12 @@ export async function executeOnComputeApi(
}
// Execute job in session
const jobRequestBody: JobRequestBody = {
name: fileName || 'Default Job Name',
const jobRequestBody = {
name: fileName,
description: 'Powered by SASjs',
code: linesOfCode,
variables: jobVariables,
version: 2
arguments: jobArguments
}
const { result: postedJob, etag } = await requestClient
@@ -219,7 +228,7 @@ export async function executeOnComputeApi(
)
}
if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
if (jobStatus === 'failed' || jobStatus === 'error') {
throw new ComputeJobExecutionError(currentJob, log)
}
@@ -270,7 +279,7 @@ export async function executeOnComputeApi(
const error = e as HttpError
if (error.status === 404) {
return executeOnComputeApi(
return executeScript(
requestClient,
sessionManager,
rootFolderName,

View File

@@ -1,88 +1,29 @@
import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions, PollStrategy } from '../..'
import { Job, PollOptions } from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types'
import { delay, isNode } from '../../utils'
export enum JobState {
Completed = 'completed',
Running = 'running',
Pending = 'pending',
Unavailable = 'unavailable',
NoState = '',
Failed = 'failed',
Error = 'error'
}
/**
* Polls job status using default or provided poll options.
* @param requestClient - the pre-configured HTTP request client.
* @param postedJob - the relative or absolute path to the job.
* @param debug - sets the _debug flag in the job arguments.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
* @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
* Example pollOptions:
* {
* maxPollCount: 200,
* pollInterval: 300,
* streamLog: true, // optional, equals to false by default.
* pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
* }
* Example pollStrategy (values used from default poll strategy):
* [
* { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
* { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
* { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
* { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
* ]
* @returns - a promise which resolves with a job state
*/
export async function pollJobState(
requestClient: RequestClient,
postedJob: Job,
debug: boolean,
authConfig?: AuthConfig,
pollOptions?: PollOptions
): Promise<JobState> {
) {
const logger = process.logger || console
const streamLog = pollOptions?.streamLog || false
let pollInterval = 300
let maxPollCount = 1000
const defaultPollStrategy: PollStrategy = [
{ maxPollCount: 200, pollInterval: 300 },
{ maxPollCount: 300, pollInterval: 3000 },
{ maxPollCount: 500, pollInterval: 30000 },
{ maxPollCount: 3400, pollInterval: 60000 }
]
let pollStrategy: PollStrategy
if (pollOptions !== undefined) {
pollStrategy = [pollOptions]
let { pollStrategy: providedPollStrategy } = pollOptions
if (providedPollStrategy !== undefined) {
validatePollStrategies(providedPollStrategy)
// INFO: sort by 'maxPollCount'
providedPollStrategy = providedPollStrategy.sort(
(strategyA: PollOptions, strategyB: PollOptions) =>
strategyA.maxPollCount - strategyB.maxPollCount
)
pollStrategy = [...pollStrategy, ...providedPollStrategy]
} else {
pollStrategy = [...pollStrategy, ...defaultPollStrategy]
}
} else {
pollStrategy = defaultPollStrategy
const defaultPollOptions: PollOptions = {
maxPollCount,
pollInterval,
streamLog: false
}
let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -90,10 +31,10 @@ export async function pollJobState(
throw new Error(`Job state link was not found.`)
}
let currentState: JobState = await getJobState(
let currentState = await getJobState(
requestClient,
postedJob,
JobState.NoState,
'',
debug,
authConfig
).catch((err) => {
@@ -101,71 +42,73 @@ export async function pollJobState(
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
err
)
return JobState.Unavailable
return 'unavailable'
})
let pollCount = 0
if (currentState === JobState.Completed) {
if (currentState === 'completed') {
return Promise.resolve(currentState)
}
let logFileStream
if (streamLog && isNode()) {
if (pollOptions.streamLog && isNode()) {
const { getFileStream } = require('./getFileStream')
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
}
// Poll up to the first 100 times with the specified poll interval
let result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
pollOptions,
authConfig,
streamLog,
{
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (
!needsRetry(currentState) ||
(pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
) {
if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
return currentState
}
// INFO: If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval according to the next polling strategy
while (pollStrategy.length && needsRetry(currentState)) {
defaultPollOptions = pollStrategy.splice(0, 1)[0]
if (pollOptions) {
defaultPollOptions.logFolderPath = pollOptions.logFolderPath
}
result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
defaultPollOptions,
authConfig,
streamLog,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
// If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval of 1 minute
let longJobPollOptions: PollOptions = {
maxPollCount: 24 * 60,
pollInterval: 60000,
streamLog: false
}
if (pollOptions) {
longJobPollOptions.streamLog = pollOptions.streamLog
longJobPollOptions.logFolderPath = pollOptions.logFolderPath
}
if (logFileStream) logFileStream.end()
result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
longJobPollOptions,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (logFileStream) {
logFileStream.end()
}
return currentState
}
@@ -176,13 +119,17 @@ const getJobState = async (
currentState: string,
debug: boolean,
authConfig?: AuthConfig
): Promise<JobState> => {
const stateLink = job.links.find((l: any) => l.rel === 'state')!
) => {
const stateLink = job.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
if (needsRetry(currentState)) {
let tokens
if (authConfig) tokens = await getTokens(requestClient, authConfig)
if (authConfig) {
tokens = await getTokens(requestClient, authConfig)
}
const { result: jobState } = await requestClient
.get<string>(
@@ -196,38 +143,48 @@ const getJobState = async (
throw new JobStatePollError(job.id, err)
})
return jobState.trim() as JobState
return jobState.trim()
} else {
return currentState as JobState
return currentState
}
}
const needsRetry = (state: string) =>
state === JobState.Running ||
state === JobState.NoState ||
state === JobState.Pending ||
state === JobState.Unavailable
state === 'running' ||
state === '' ||
state === 'pending' ||
state === 'unavailable'
const doPoll = async (
requestClient: RequestClient,
postedJob: Job,
currentState: JobState,
currentState: string,
debug: boolean,
pollCount: number,
pollOptions: PollOptions,
authConfig?: AuthConfig,
streamLog?: boolean,
pollOptions?: PollOptions,
logStream?: WriteStream
): Promise<{ state: JobState; pollCount: number }> => {
const { maxPollCount, pollInterval } = pollOptions
const logger = process.logger || console
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!
): Promise<{ state: string; pollCount: number }> => {
let pollInterval = 300
let maxPollCount = 1000
let maxErrorCount = 5
let errorCount = 0
let state = currentState
let printedState = JobState.NoState
let printedState = ''
let startLogLine = 0
const logger = process.logger || console
if (pollOptions) {
pollInterval = pollOptions.pollInterval || pollInterval
maxPollCount = pollOptions.maxPollCount || maxPollCount
}
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState(
requestClient,
@@ -237,24 +194,21 @@ const doPoll = async (
authConfig
).catch((err) => {
errorCount++
if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
throw err
}
logger.error(
`Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
err
)
return JobState.Unavailable
return 'unavailable'
})
pollCount++
const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href
if (streamLog) {
if (pollOptions?.streamLog) {
const { result: job } = await requestClient.get<Job>(
jobHref,
authConfig?.access_token
@@ -284,45 +238,12 @@ const doPoll = async (
printedState = state
}
if (state !== JobState.Unavailable && errorCount > 0) {
if (state != 'unavailable' && errorCount > 0) {
errorCount = 0
}
if (state !== JobState.Completed) {
await delay(pollInterval)
}
await delay(pollInterval)
}
return { state, pollCount }
}
const validatePollStrategies = (strategy: PollStrategy) => {
const throwError = (message?: string, pollOptions?: PollOptions) => {
throw new Error(
`Poll strategies are not valid.${message ? ` ${message}` : ''}${
pollOptions
? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
: ''
}`
)
}
strategy.forEach((pollOptions: PollOptions, i: number) => {
const { maxPollCount, pollInterval } = pollOptions
if (maxPollCount < 1) {
throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
} else if (i !== 0) {
const previousPollOptions = strategy[i - 1]
if (maxPollCount <= previousPollOptions.maxPollCount) {
throwError(
`'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
pollOptions
)
}
} else if (pollInterval < 1) {
throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
}
})
}
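To make the polling interface in this file easier to follow, here is a small sketch of the options shape discussed above. The field names mirror `PollOptions`/`PollStrategy` as they appear in the diff; the local interface and the concrete numbers are illustrative only.

```ts
// Local sketch of the documented shape; not imported from the package.
interface PollOptionsSketch {
  maxPollCount: number               // maximum number of state checks for this stage
  pollInterval: number               // milliseconds to wait between state checks
  streamLog?: boolean                // stream the job log to disk while polling (Node only)
  logFolderPath?: string             // where streamed logs are written
  pollStrategy?: PollOptionsSketch[] // later stages for long-running jobs
}

// Poll quickly at first, then fall back to progressively slower stages,
// mirroring the default strategy listed in the JSDoc removed in this diff.
const pollOptions: PollOptionsSketch = {
  maxPollCount: 200,
  pollInterval: 300,
  streamLog: false,
  pollStrategy: [
    { maxPollCount: 300, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 },
    { maxPollCount: 3400, pollInterval: 60000 }
  ]
}

// An object like this is passed via startComputeJob / executeComputeJob down to
// pollJobState, which keeps checking the job's 'state' link until the job leaves
// the running/pending/unavailable states or the stage's maxPollCount is exhausted.
console.log(pollOptions)
```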

View File

@@ -1,6 +1,6 @@
import { RequestClient } from '../../../request/RequestClient'
import { SessionManager } from '../../../SessionManager'
import { executeOnComputeApi } from '../executeOnComputeApi'
import { executeScript } from '../executeScript'
import { mockSession, mockAuthConfig, mockJob } from './mockResponses'
import * as pollJobStateModule from '../pollJobState'
import * as uploadTablesModule from '../uploadTables'
@@ -9,13 +9,14 @@ import * as formatDataModule from '../../../utils/formatDataForRequest'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import { PollOptions } from '../../../types'
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Logger, LogLevel } from '@sasjs/utils'
const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
maxPollCount: 100,
pollInterval: 500
pollInterval: 500,
streamLog: false
}
describe('executeScript', () => {
@@ -25,7 +26,7 @@ describe('executeScript', () => {
})
it('should not try to get fresh tokens if an authConfig is not provided', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -38,7 +39,7 @@ describe('executeScript', () => {
})
it('should try to get fresh tokens if an authConfig is provided', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -55,7 +56,7 @@ describe('executeScript', () => {
})
it('should get a session from the session manager before executing', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -72,7 +73,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'getSession')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -85,7 +86,7 @@ describe('executeScript', () => {
})
it('should fetch the PID when printPid is true', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -113,7 +114,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'getVariable')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -139,7 +140,7 @@ describe('executeScript', () => {
Promise.resolve([{ tableName: 'test', file: { id: 1 } }])
)
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -163,7 +164,7 @@ describe('executeScript', () => {
})
it('should format data as CSV when it does not contain semicolons', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -189,7 +190,7 @@ describe('executeScript', () => {
.spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -217,7 +218,14 @@ describe('executeScript', () => {
sasjs_tables: 'foo',
sasjs0data: 'bar'
},
version: 2
arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
},
mockAuthConfig.access_token
)
@@ -228,7 +236,7 @@ describe('executeScript', () => {
.spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -257,7 +265,14 @@ describe('executeScript', () => {
sasjs0data: 'bar',
_DEBUG: 131
},
version: 2
arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: false,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: false
}
},
mockAuthConfig.access_token
)
@@ -268,7 +283,7 @@ describe('executeScript', () => {
.spyOn(requestClient, 'post')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -288,7 +303,7 @@ describe('executeScript', () => {
})
it('should immediately return the session when waitForResult is false', async () => {
const result = await executeOnComputeApi(
const result = await executeScript(
requestClient,
sessionManager,
'test',
@@ -308,7 +323,7 @@ describe('executeScript', () => {
})
it('should poll for job completion when waitForResult is true', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -338,7 +353,7 @@ describe('executeScript', () => {
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.reject('Poll Error'))
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -364,7 +379,7 @@ describe('executeScript', () => {
Promise.reject({ response: { data: 'err=5113,' } })
)
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -390,7 +405,7 @@ describe('executeScript', () => {
})
it('should fetch the logs for the job if debug is true and a log URL is available', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -415,7 +430,7 @@ describe('executeScript', () => {
})
it('should not fetch the logs for the job if debug is false', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -437,11 +452,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has failed', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Failed)
)
.mockImplementation(() => Promise.resolve('failed'))
const error: ComputeJobExecutionError = await executeOnComputeApi(
const error: ComputeJobExecutionError = await executeScript(
requestClient,
sessionManager,
'test',
@@ -472,11 +485,9 @@ describe('executeScript', () => {
it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Error)
)
.mockImplementation(() => Promise.resolve('error'))
const error: ComputeJobExecutionError = await executeOnComputeApi(
const error: ComputeJobExecutionError = await executeScript(
requestClient,
sessionManager,
'test',
@@ -505,7 +516,7 @@ describe('executeScript', () => {
})
it('should fetch the result if expectWebout is true', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -536,7 +547,7 @@ describe('executeScript', () => {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
})
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -570,7 +581,7 @@ describe('executeScript', () => {
})
it('should clear the session after execution is complete', async () => {
await executeOnComputeApi(
await executeScript(
requestClient,
sessionManager,
'test',
@@ -597,7 +608,7 @@ describe('executeScript', () => {
.spyOn(sessionManager, 'clearSession')
.mockImplementation(() => Promise.reject('Clear Session Error'))
const error = await executeOnComputeApi(
const error = await executeScript(
requestClient,
sessionManager,
'test',
@@ -643,9 +654,7 @@ const setupMocks = () => {
.mockImplementation(() => Promise.resolve(mockAuthConfig))
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() =>
Promise.resolve(pollJobStateModule.JobState.Completed)
)
.mockImplementation(() => Promise.resolve('completed'))
jest
.spyOn(sessionManager, 'getVariable')
.mockImplementation(() =>

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState'
@@ -6,18 +6,17 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import * as delayModule from '../../../utils/delay'
import { PollOptions, PollStrategy } from '../../../types'
import { PollOptions } from '../../../types'
import { WriteStream } from 'fs'
const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl
const defaultStreamLog = false
const defaultPollStrategy: PollOptions = {
const defaultPollOptions: PollOptions = {
maxPollCount: 100,
pollInterval: 500
pollInterval: 500,
streamLog: false
}
describe('pollJobState', () => {
@@ -27,10 +26,13 @@ describe('pollJobState', () => {
})
it('should get valid tokens if the authConfig has been provided', async () => {
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy,
streamLog: defaultStreamLog
})
await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient,
@@ -44,7 +46,7 @@ describe('pollJobState', () => {
mockJob,
false,
undefined,
defaultPollStrategy
defaultPollOptions
)
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
@@ -56,7 +58,7 @@ describe('pollJobState', () => {
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
false,
undefined,
defaultPollStrategy
defaultPollOptions
).catch((e: any) => e)
expect((error as Error).message).toContain('Job state link was not found.')
@@ -70,7 +72,7 @@ describe('pollJobState', () => {
mockJob,
false,
mockAuthConfig,
defaultPollStrategy
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
@@ -81,7 +83,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy,
...defaultPollOptions,
streamLog: true
})
@@ -94,7 +96,7 @@ describe('pollJobState', () => {
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy,
...defaultPollOptions,
streamLog: true
})
@@ -109,7 +111,7 @@ describe('pollJobState', () => {
const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollStrategy,
...defaultPollOptions,
streamLog: true
})
@@ -125,7 +127,7 @@ describe('pollJobState', () => {
mockJob,
false,
mockAuthConfig,
defaultPollStrategy
defaultPollOptions
)
expect(saveLogModule.saveLog).not.toHaveBeenCalled()
@@ -134,18 +136,15 @@ describe('pollJobState', () => {
it('should return the current status when the max poll count is reached', async () => {
mockRunningPoll()
const pollOptions: PollOptions = {
...defaultPollStrategy,
maxPollCount: 1,
pollStrategy: []
}
const state = await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
pollOptions
{
...defaultPollOptions,
maxPollCount: 1
}
)
expect(state).toEqual('running')
@@ -160,7 +159,7 @@ describe('pollJobState', () => {
false,
mockAuthConfig,
{
...defaultPollStrategy,
...defaultPollOptions,
maxPollCount: 200,
pollInterval: 10
}
@@ -177,7 +176,7 @@ describe('pollJobState', () => {
mockJob,
false,
undefined,
defaultPollStrategy
defaultPollOptions
)
expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -193,7 +192,7 @@ describe('pollJobState', () => {
mockJob,
true,
undefined,
defaultPollStrategy
defaultPollOptions
)
expect((process as any).logger.info).toHaveBeenCalledTimes(4)
@@ -223,7 +222,7 @@ describe('pollJobState', () => {
mockJob,
false,
undefined,
defaultPollStrategy
defaultPollOptions
)
expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -238,189 +237,13 @@ describe('pollJobState', () => {
mockJob,
false,
undefined,
defaultPollStrategy
defaultPollOptions
).catch((e: any) => e)
expect(error.message).toEqual(
'Error while polling job state for job j0b: Status Error'
)
})
it('should change poll strategies', async () => {
mockSimplePoll(6)
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const pollIntervals = [3, 4, 5, 6]
const pollStrategy = [
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
]
const pollOptions: PollOptions = {
maxPollCount: 1,
pollInterval: pollIntervals[0],
pollStrategy: pollStrategy
}
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
})
it('should change default poll strategies after completing provided poll options', async () => {
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const customPollOptions: PollOptions = {
maxPollCount: 0,
pollInterval: 0
}
const requests = [
{ maxPollCount: 202, pollInterval: 300 },
{ maxPollCount: 300, pollInterval: 3000 },
{ maxPollCount: 500, pollInterval: 30000 },
{ maxPollCount: 3400, pollInterval: 60000 }
]
// ~200 requests with delay 300ms
let request = requests.splice(0, 1)[0]
let { maxPollCount, pollInterval } = request
// there should be only one interval because maxPollCount is equal to 0
const pollIntervals = [customPollOptions.pollInterval]
pollIntervals.push(...Array(maxPollCount - 2).fill(pollInterval))
// ~300 requests with delay 3000
request = requests.splice(0, 1)[0]
let newAmount = request.maxPollCount
pollInterval = request.pollInterval
pollIntervals.push(...Array(newAmount - maxPollCount).fill(pollInterval))
pollIntervals.push(...Array(2).fill(pollInterval))
// ~500 requests with delay 30000
request = requests.splice(0, 1)[0]
let oldAmount = newAmount
newAmount = request.maxPollCount
pollInterval = request.pollInterval
pollIntervals.push(...Array(newAmount - oldAmount - 2).fill(pollInterval))
pollIntervals.push(...Array(2).fill(pollInterval))
// ~3400 requests with delay 60000
request = requests.splice(0, 1)[0]
oldAmount = newAmount
newAmount = request.maxPollCount
pollInterval = request.pollInterval
mockSimplePoll(newAmount)
pollIntervals.push(...Array(newAmount - oldAmount - 2).fill(pollInterval))
await pollJobState(
requestClient,
mockJob,
false,
undefined,
customPollOptions
)
expect(delays).toEqual(pollIntervals)
})
it('should throw an error if not valid poll strategies provided', async () => {
// INFO: 'maxPollCount' has to be > 0
let invalidPollStrategy = {
maxPollCount: 0,
pollInterval: 3
}
let pollStrategy: PollStrategy = [invalidPollStrategy]
let expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: 'maxPollCount' has to be greater than 'maxPollCount' of the previous strategy
const validPollStrategy = {
maxPollCount: 5,
pollInterval: 2
}
invalidPollStrategy = {
maxPollCount: validPollStrategy.maxPollCount,
pollInterval: 3
}
pollStrategy = [validPollStrategy, invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: invalid 'pollInterval'
invalidPollStrategy = {
maxPollCount: 1,
pollInterval: 0
}
pollStrategy = [invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
})
})
const setupMocks = () => {
@@ -450,14 +273,11 @@ const setupMocks = () => {
const mockSimplePoll = (runningCount = 2) => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result:
count === 0
@@ -473,14 +293,11 @@ const mockSimplePoll = (runningCount = 2) => {
const mockRunningPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'running',
etag: '',
@@ -491,14 +308,11 @@ const mockRunningPoll = () => {
const mockLongPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count <= 102 ? 'running' : 'completed',
etag: '',
@@ -509,18 +323,14 @@ const mockLongPoll = () => {
const mockPollWithSingleError = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
if (count === 1) {
return Promise.reject('Status Error')
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'completed',
etag: '',
@@ -534,7 +344,6 @@ const mockErroredPoll = () => {
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.reject('Status Error')
})
}

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'

View File

@@ -5,7 +5,7 @@ import {
fileExists,
readFile,
deleteFile
} from '@sasjs/utils/file'
} from '@sasjs/utils'
describe('writeStream', () => {
const filename = 'test.txt'

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'
import { ServerType } from '@sasjs/utils'
/**
* Exchanges the auth code for an access token for the given client.

View File

@@ -4,6 +4,7 @@ import { RequestClient } from '../request/RequestClient'
import { CertificateError } from '../types/errors'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
// TODO: update func docs
/**
* Exchange the auth code for access / refresh tokens for the given client / secret pair.
* @param requestClient - the pre-configured HTTP request client.
@@ -30,11 +31,10 @@ export async function getAccessTokenForViya(
Authorization: 'Basic ' + token,
Accept: 'application/json'
}
const dataJson = new URLSearchParams({
const dataJson = {
grant_type: 'authorization_code',
code: authCode
})
}
const data = new URLSearchParams(dataJson)
const authResponse = await requestClient
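A quick usage sketch of this exchange at the adapter level, matching the getAccessToken call exercised in RequestClient.spec later in this compare (server URL and credentials below are placeholders, not part of this diff):

// Sketch only; values are placeholders
const adapter = new SASjs({
  serverUrl: 'https://sas.example.com',
  serverType: ServerType.SasViya
})
// Exchanges the one-time authorization code for access / refresh tokens
const auth = await adapter.getAccessToken('clientId', 'clientSecret', 'authCode')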

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'
import { ServerType } from '@sasjs/utils'
/**
* Exchanges the refresh token for an access token for the given client.

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import { AuthConfig } from '@sasjs/utils'
import { generateToken, mockSasjsAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import { AuthConfig } from '@sasjs/utils'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import { AuthConfig } from '@sasjs/utils'
import * as refreshTokensModule from '../refreshTokensForViya'
import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens'

View File

@@ -1,4 +1,4 @@
import { ServerType } from '@sasjs/utils/types'
import { ServerType } from '@sasjs/utils'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForSasjs } from '../refreshTokensForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import { AuthConfig, ServerType } from '@sasjs/utils'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,6 +1,5 @@
import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'
/**
* One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -27,15 +26,12 @@ export const generateFileUploadForm = (
)
}
// INFO: unfortunately it is not possible to check whether formData is an instance of NodeFormData or FormData, because the check returns true for both
if (isNode()) {
// INFO: environment is Node and formData is instance of NodeFormData
;(formData as NodeFormData).append(name, csv, {
if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
formData.append(name, csv, {
filename: `${name}.csv`,
contentType: 'application/csv'
})
} else {
// INFO: environment is Browser and formData is instance of FormData
const file = new Blob([csv], {
type: 'application/csv'
})

View File

@@ -1,7 +1,4 @@
import { generateFileUploadForm } from '../generateFileUploadForm'
import { convertToCSV } from '../../utils/convertToCsv'
import * as NodeFormData from 'form-data'
import * as isNodeModule from '../../utils/isNode'
describe('generateFileUploadForm', () => {
beforeAll(() => {
@@ -14,94 +11,44 @@ describe('generateFileUploadForm', () => {
;(global as any).Blob = BlobMock
})
describe('browser', () => {
afterAll(() => {
jest.restoreAllMocks()
})
it('should generate file upload form from data', () => {
const formData = new FormData()
const testTable = 'sometable'
const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
Array.isArray(testTableWithNullVars[key])
)[0]
it('should generate file upload form from data', () => {
const formData = new FormData()
const testTable = 'sometable'
const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter(
(key: string) => Array.isArray(testTableWithNullVars[key])
)[0]
jest.spyOn(formData, 'append').mockImplementation(() => {})
jest.spyOn(formData, 'append').mockImplementation(() => {})
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
generateFileUploadForm(formData, testTableWithNullVars)
generateFileUploadForm(formData, testTableWithNullVars)
expect(formData.append).toHaveBeenCalledOnce()
expect(formData.append).toHaveBeenCalledWith(
tableName,
{},
`${tableName}.csv`
)
})
it('should throw an error if too large string was provided', () => {
const formData = new FormData()
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
)
})
expect(formData.append).toHaveBeenCalledOnce()
expect(formData.append).toHaveBeenCalledWith(
tableName,
{},
`${tableName}.csv`
)
})
describe('node', () => {
it('should generate file upload form from data', () => {
const formData = new NodeFormData()
const testTable = 'sometable'
const testTableWithNullVars: { [key: string]: any } = {
[testTable]: [
{ var1: 'string', var2: 232, nullvar: 'A' },
{ var1: 'string', var2: 232, nullvar: 'B' },
{ var1: 'string', var2: 232, nullvar: '_' },
{ var1: 'string', var2: 232, nullvar: 0 },
{ var1: 'string', var2: 232, nullvar: 'z' },
{ var1: 'string', var2: 232, nullvar: null }
],
[`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
}
const tableName = Object.keys(testTableWithNullVars).filter(
(key: string) => Array.isArray(testTableWithNullVars[key])
)[0]
const csv = convertToCSV(testTableWithNullVars, tableName)
it('should throw an error if too large string was provided', () => {
const formData = new FormData()
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
jest.spyOn(formData, 'append').mockImplementation(() => {})
generateFileUploadForm(formData, testTableWithNullVars)
expect(formData.append).toHaveBeenCalledOnce()
expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
contentType: 'application/csv',
filename: `${tableName}.csv`
})
})
it('should throw an error if too large string was provided', () => {
const formData = new NodeFormData()
const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
expect(() => generateFileUploadForm(formData, data)).toThrow(
new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
})
)
})
})

View File

@@ -1,7 +1,8 @@
import {
getValidJson,
parseSasViyaDebugResponse,
parseWeboutResponse
parseWeboutResponse,
SASJS_LOGS_SEPARATOR
} from '../utils'
import { UploadFile } from '../types/UploadFile'
import {
@@ -92,24 +93,15 @@ export class FileUploader extends BaseJobExecutor {
this.requestClient,
config.serverUrl
)
break
case ServerType.Sas9:
jsonResponse =
typeof res.result === 'string'
? parseWeboutResponse(res.result, uploadUrl)
: res.result
break
case ServerType.Sasjs:
jsonResponse =
typeof res.result === 'string'
? getValidJson(res.result)
: res.result
break
}
} else {
} else if (this.serverType !== ServerType.Sasjs) {
jsonResponse =
typeof res.result === 'string'
? getValidJson(res.result)

View File

@@ -10,8 +10,8 @@ import {
LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'
import {
isRelativePath,
@@ -53,7 +53,8 @@ export class SasjsJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in
* Node)
*/
let formData = getFormData()
let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
if (data) {
// file upload approach
@@ -92,10 +93,8 @@ export class SasjsJobExecutor extends BaseJobExecutor {
)
}
const { result } = res
if (result && typeof result === 'string' && result.trim())
res.result = getValidJson(result)
const { result } = res.result
if (result && result.trim()) res.result = getValidJson(result)
this.requestClient!.appendRequest(res, sasJob, config.debug)
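Both executors pick the form data implementation from the environment; on the other side of this compare the same ternary lives in a shared helper, src/utils/getFormData.ts (shown near the end of this diff), essentially:

// Equivalent helper, as defined in src/utils/getFormData.ts
const getFormData = () => (isNode() ? new NodeFormData() : new FormData())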

View File

@@ -16,11 +16,10 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
isRelativePath,
parseSasViyaDebugResponse,
appendExtraResponseAttributes,
parseWeboutResponse,
getFormData
appendExtraResponseAttributes
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
export interface WaitingRequstPromise {
promise: Promise<any> | null
@@ -113,7 +112,8 @@ export class WebJobExecutor extends BaseJobExecutor {
* Use the available form data object (FormData in Browser, NodeFormData in
* Node)
*/
let formData = getFormData()
let formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
if (data) {
const stringifiedData = JSON.stringify(data)

View File

@@ -233,8 +233,7 @@ export default class SASjs {
this.requestClient = new RequestClient(
this.sasjsConfig.serverUrl,
this.sasjsConfig.httpsAgentOptions,
this.sasjsConfig.requestHistoryLimit,
this.sasjsConfig.verbose
this.sasjsConfig.requestHistoryLimit
)
} else {
this.requestClient.setConfig(

View File

@@ -11,6 +11,7 @@ import {
import { RequestClient } from '../../request/RequestClient'
import {
isRelativePath,
parseSasViyaDebugResponse,
appendExtraResponseAttributes,
convertToCSV
} from '../../utils'

View File

@@ -1,10 +1,4 @@
import {
AxiosError,
AxiosInstance,
AxiosRequestConfig,
AxiosResponse
} from 'axios'
import axios from 'axios'
import { AxiosInstance, AxiosRequestConfig, AxiosResponse } from 'axios'
import * as https from 'https'
import { CsrfToken } from '..'
import { isAuthorizeFormRequired, isLogInRequired } from '../auth'
@@ -16,7 +10,7 @@ import {
JobExecutionError,
CertificateError
} from '../types/errors'
import { SASjsRequest, HttpClient, VerboseMode } from '../types'
import { SASjsRequest } from '../types'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError'
@@ -26,13 +20,45 @@ import {
createAxiosInstance
} from '../utils'
import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError'
import { inspect } from 'util'
export interface HttpClient {
get<T>(
url: string,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
post<T>(
url: string,
data: any,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
put<T>(
url: string,
data: any,
accessToken: string | undefined,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
delete<T>(
url: string,
accessToken: string | undefined
): Promise<{ result: T; etag: string }>
getCsrfToken(type: 'general' | 'file'): CsrfToken | undefined
saveLocalStorageToken(accessToken: string, refreshToken: string): void
clearCsrfTokens(): void
clearLocalStorageTokens(): void
getBaseUrl(): string
}
export class RequestClient implements HttpClient {
private requests: SASjsRequest[] = []
private requestsLimit: number = 10
private httpInterceptor?: number
private verboseMode: VerboseMode = false
protected csrfToken: CsrfToken = { headerName: '', value: '' }
protected fileUploadCsrfToken: CsrfToken | undefined
@@ -41,17 +67,10 @@ export class RequestClient implements HttpClient {
constructor(
protected baseUrl: string,
httpsAgentOptions?: https.AgentOptions,
requestsLimit?: number,
verboseMode?: VerboseMode
requestsLimit?: number
) {
this.createHttpClient(baseUrl, httpsAgentOptions)
if (requestsLimit) this.requestsLimit = requestsLimit
if (verboseMode) {
this.setVerboseMode(verboseMode)
this.enableVerboseMode()
}
}
public setConfig(baseUrl: string, httpsAgentOptions?: https.AgentOptions) {
@@ -71,7 +90,6 @@ export class RequestClient implements HttpClient {
this.csrfToken = { headerName: '', value: '' }
this.fileUploadCsrfToken = { headerName: '', value: '' }
}
public clearLocalStorageTokens() {
localStorage.setItem('accessToken', '')
localStorage.setItem('refreshToken', '')
@@ -162,7 +180,6 @@ export class RequestClient implements HttpClient {
responseType: contentType === 'text/plain' ? 'text' : 'json',
withCredentials: true
}
if (contentType === 'text/plain') {
requestConfig.transformResponse = undefined
}
@@ -372,181 +389,6 @@ export class RequestClient implements HttpClient {
})
}
/**
* Adds colors to the string.
* If verboseMode is set to 'bleached', colors should be disabled
* @param str - string to be prettified.
* @returns - prettified string
*/
private prettifyString = (str: any) =>
inspect(str, { colors: this.verboseMode !== 'bleached' })
/**
* Formats HTTP request/response body.
* @param body - HTTP request/response body.
* @returns - formatted string.
*/
private parseInterceptedBody = (body: any) => {
if (!body) return ''
let parsedBody
// Tries to parse body into JSON object.
if (typeof body === 'string') {
try {
parsedBody = JSON.parse(body)
} catch (error) {
parsedBody = body
}
} else {
parsedBody = body
}
const bodyLines = this.prettifyString(parsedBody).split('\n')
// Keep only the first 50 lines (append '...' when truncated)
if (bodyLines.length > 51) {
bodyLines.splice(50)
bodyLines.push('...')
}
return bodyLines.join('\n')
}
private defaultInterceptionCallBack = (
axiosResponse: AxiosResponse | AxiosError
) => {
// Message indicating absent value.
const noValueMessage = 'Not provided'
// Fallback request object that can be safely used to form request summary.
type FallbackRequest = { _header?: string; res: { rawHeaders: string[] } }
// _header is not present in responses with status 1**
// rawHeaders are not present in responses with status 1**
let fallbackRequest: FallbackRequest = {
_header: `${noValueMessage}\n`,
res: { rawHeaders: [noValueMessage] }
}
// Fallback response object that can be safely used to form response summary.
type FallbackResponse = {
status?: number | string
request?: FallbackRequest
config: { data?: string }
data?: unknown
}
let fallbackResponse: FallbackResponse = axiosResponse
if (axios.isAxiosError(axiosResponse)) {
const { response, request, config } = axiosResponse
// Try to use axiosResponse.response to form response summary.
if (response) {
fallbackResponse = response
} else {
// Try to use axiosResponse.request to form request summary.
if (request) {
const { _header, _currentRequest } = request
// Try to use axiosResponse.request._header to form request summary.
if (_header) {
fallbackRequest._header = _header
}
// Try to use axiosResponse.request._currentRequest._header to form request summary.
else if (_currentRequest && _currentRequest._header) {
fallbackRequest._header = _currentRequest._header
}
const { res } = request
// Try to use axiosResponse.request.res.rawHeaders to form request summary.
if (res && res.rawHeaders) {
fallbackRequest.res.rawHeaders = res.rawHeaders
}
}
// Fallback config that can be safely used to form response summary.
const fallbackConfig = { data: noValueMessage }
fallbackResponse = {
status: noValueMessage,
request: fallbackRequest,
config: config || fallbackConfig,
data: noValueMessage
}
}
}
const { status, config, request, data: resData } = fallbackResponse
const { data: reqData } = config
const { _header: reqHeaders, res } = request || fallbackRequest
const { rawHeaders } = res
// Converts an array of strings into a single string with the following format:
// <headerName>: <headerValue>
const resHeaders = rawHeaders.reduce(
(acc: string, value: string, i: number) => {
if (i % 2 === 0) {
acc += `${i === 0 ? '' : '\n'}${value}`
} else {
acc += `: ${value}`
}
return acc
},
''
)
const parsedResBody = this.parseInterceptedBody(resData)
// HTTP response summary.
process.logger?.info(`HTTP Request (first 50 lines):
${reqHeaders}${this.parseInterceptedBody(reqData)}
HTTP Response Code: ${this.prettifyString(status)}
HTTP Response (first 50 lines):
${resHeaders}${parsedResBody ? `\n\n${parsedResBody}` : ''}
`)
return axiosResponse
}
/**
* Sets verbose mode.
* @param verboseMode - value of the verbose mode, can be true, false or bleached(without extra colors).
*/
public setVerboseMode = (verboseMode: VerboseMode) => {
this.verboseMode = verboseMode
if (this.verboseMode) this.enableVerboseMode()
else this.disableVerboseMode()
}
/**
* Turns on verbose mode to log every HTTP response.
* @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
* @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
*/
public enableVerboseMode = (
successCallBack = this.defaultInterceptionCallBack,
errorCallBack = this.defaultInterceptionCallBack
) => {
this.httpInterceptor = this.httpClient.interceptors.response.use(
successCallBack,
errorCallBack
)
}
/**
* Turns off verbose mode to log every HTTP response.
*/
public disableVerboseMode = () => {
if (this.httpInterceptor) {
this.httpClient.interceptors.response.eject(this.httpInterceptor)
}
}
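// Taken together, the members above make up the verbose-mode surface of RequestClient.
// A usage sketch of that surface, on the side of this compare that includes it
// (the URL is a placeholder):
//   const client = new RequestClient('https://sas.example.com', undefined, 10, 'bleached')
//   client.setVerboseMode(true)        // log a summary of every HTTP response
//   client.setVerboseMode('bleached')  // same, but without ANSI colors
//   client.disableVerboseMode()        // eject the response interceptor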
protected getHeaders = (
accessToken: string | undefined,
contentType: string

View File

@@ -1,11 +1,20 @@
import { RequestClient } from './RequestClient'
import { AxiosResponse } from 'axios'
import { SasjsParsedResponse } from '../types'
import { SASJS_LOGS_SEPARATOR } from '../utils'
interface SasjsParsedResponse<T> {
result: T
log: string
etag: string
status: number
printOutput?: string
}
/**
* Specific request client for SASJS.
* Appends tokens to request headers.
*/
export class SasjsRequestClient extends RequestClient {
getHeaders = (accessToken: string | undefined, contentType: string) => {
const headers: any = {}
@@ -36,30 +45,13 @@ export class SasjsRequestClient extends RequestClient {
}
} catch {
if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
const { data } = response
const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)
const splittedResponse = response.data.split(SASJS_LOGS_SEPARATOR)
webout = splittedResponse.splice(0, 1)[0]
webout = splittedResponse[0]
if (webout !== undefined) parsedResponse = webout
// log can contain nested logs
const logs = splittedResponse.splice(0, splittedResponse.length - 1)
// tests if string ends with SASJS_LOGS_SEPARATOR
const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)
// at this point splittedResponse can contain only one item
const lastChunk = splittedResponse[0]
if (lastChunk) {
// if the last chunk doesn't end with SASJS_LOGS_SEPARATOR, then it is a printOutput
// else the last chunk is part of the log and has to be joined
if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
else if (logs.length > 1) logs.push(lastChunk)
}
// join logs into single log with SASJS_LOGS_SEPARATOR
log = logs.join(SASJS_LOGS_SEPARATOR)
log = splittedResponse[1]
printOutput = splittedResponse[2]
} else {
parsedResponse = response.data
}
@@ -67,7 +59,7 @@ export class SasjsRequestClient extends RequestClient {
const returnResult: SasjsParsedResponse<T> = {
result: parsedResponse as T,
log: log || '',
log,
etag,
status: response.status
}
@@ -77,6 +69,3 @@ export class SasjsRequestClient extends RequestClient {
return returnResult
}
}
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
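The parsing above assumes a fixed layout for SASjs server response bodies; schematically:

// Wire layout split by parseResponse (see the spec below):
//   <webout>
//   SASJS_LOGS_SEPARATOR
//   <log>          (may itself contain nested SASJS_LOGS_SEPARATOR pairs)
//   SASJS_LOGS_SEPARATOR
//   <printOutput>  (optional; present only when the body does not end with the separator)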

View File

@@ -1,169 +0,0 @@
import { SASJS_LOGS_SEPARATOR, SasjsRequestClient } from '../SasjsRequestClient'
import { SasjsParsedResponse } from '../../types'
import { AxiosResponse } from 'axios'
describe('SasjsRequestClient', () => {
const requestClient = new SasjsRequestClient('')
const etag = 'etag'
const status = 200
const webout = `hello`
const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00
PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html
NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds
`
const printOutput = 'printOutPut'
describe('parseResponse', () => {})
it('should parse response with 1 log', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with 1 log and printOutput', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs and printOutput', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
})
describe('SASJS_LOGS_SEPARATOR', () => {
it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
expect(SASJS_LOGS_SEPARATOR).toEqual(
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
)
})
})

View File

@@ -2,22 +2,17 @@ import * as pem from 'pem'
import * as http from 'http'
import * as https from 'https'
import { app, mockedAuthResponse } from './SAS_server_app'
import { ServerType } from '@sasjs/utils/types'
import { ServerType } from '@sasjs/utils'
import SASjs from '../SASjs'
import * as axiosModules from '../utils/createAxiosInstance'
import axios from 'axios'
import {
LoginRequiredError,
AuthorizeError,
NotFoundError,
InternalServerError,
VerboseMode
} from '../types'
InternalServerError
} from '../types/errors'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
import { AxiosResponse, AxiosError } from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as UtilsModule from 'util'
const axiosActual = jest.requireActual('axios')
@@ -30,6 +25,16 @@ jest
const PORT = 8000
const SERVER_URL = `https://localhost:${PORT}/`
const ERROR_MESSAGES = {
selfSigned: 'self signed certificate',
CCA: 'unable to verify the first certificate'
}
const incorrectAuthCodeErr = {
error: 'unauthorized',
error_description: 'Bad credentials'
}
describe('RequestClient', () => {
let server: http.Server
@@ -75,411 +80,6 @@ describe('RequestClient', () => {
expect(rejectionErrorMessage).toEqual(expectedError.message)
})
describe('defaultInterceptionCallBack', () => {
const reqHeaders = `POST https://sas.server.com/compute/sessions/session_id/jobs HTTP/1.1
Accept: application/json
Content-Type: application/json
User-Agent: axios/0.27.2
Content-Length: 334
host: sas.server.io
Connection: close
`
const reqData = `{
name: 'test_job',
description: 'Powered by SASjs',
code: ['test_code'],
variables: {
SYS_JES_JOB_URI: '',
_program: '/Public/sasjs/jobs/jobs/test_job'
},
arguments: {
_contextName: 'SAS Job Execution compute context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
}`
const resHeaders = ['content-type', 'application/json']
const resData = {
id: 'id_string',
name: 'name_string',
uri: 'uri_string',
createdBy: 'createdBy_string',
code: 'TEST CODE',
links: [
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'self',
href: 'self_href_string',
uri: 'uri_string',
type: 'type_string'
}
],
results: { '_webout.json': '_webout.json_string' },
logStatistics: {
lineCount: 1,
modifiedTimeStamp: 'modifiedTimeStamp_string'
}
}
beforeAll(() => {
;(process as any).logger = new Logger(LogLevel.Off)
jest.spyOn((process as any).logger, 'info')
})
it('should log parsed response with status 1**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
}
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const noValueMessage = 'Not provided'
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](noValueMessage)}
HTTP Response (first 50 lines):
${noValueMessage}
\n${requestClient['parseInterceptedBody'](noValueMessage)}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
it('should log parsed response with status 2**', () => {
const status = getRandomStatus([
200, 201, 202, 203, 204, 205, 206, 207, 208, 226
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedResponse)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
})
it('should log parsed response with status 3**', () => {
const status = getRandomStatus([300, 301, 302, 303, 304, 307, 308])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedResponse)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
})
it('should log parsed response with status 4**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const status = getRandomStatus([
400, 401, 402, 403, 404, 407, 408, 409, 410, 411, 412, 413, 414, 415,
416, 417, 418, 421, 422, 423, 424, 425, 426, 428, 429, 431, 451
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
},
response: mockedResponse
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
it('should log parsed response with status 5**', () => {
const spyIsAxiosError = jest
.spyOn(axios, 'isAxiosError')
.mockImplementation(() => true)
const status = getRandomStatus([
500, 501, 502, 503, 504, 505, 506, 507, 508, 510, 511
])
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const mockedAxiosError = {
config: {
data: reqData
},
request: {
_currentRequest: {
_header: reqHeaders
}
},
response: mockedResponse
} as AxiosError
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedAxiosError)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
spyIsAxiosError.mockReset()
})
})
describe('enableVerboseMode', () => {
it('should add defaultInterceptionCallBack functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
requestClient.enableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(
requestClient['defaultInterceptionCallBack'],
requestClient['defaultInterceptionCallBack']
)
})
it('should add callback functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
const successCallback = (response: AxiosResponse | AxiosError) => {
console.log('success')
return response
}
const failureCallback = (response: AxiosResponse | AxiosError) => {
console.log('failure')
return response
}
requestClient.enableVerboseMode(successCallback, failureCallback)
expect(interceptorSpy).toHaveBeenCalledWith(
successCallback,
failureCallback
)
})
})
describe('setVerboseMode', () => {
it(`should set verbose mode`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = false
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
verbose = true
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
verbose = 'bleached'
requestClient.setVerboseMode(verbose)
expect(requestClient['verboseMode']).toEqual(verbose)
})
})
describe('prettifyString', () => {
it(`should call inspect without colors when verbose mode is set to 'bleached'`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = 'bleached'
requestClient.setVerboseMode(verbose)
jest.spyOn(UtilsModule, 'inspect')
const testStr = JSON.stringify({ test: 'test' })
requestClient['prettifyString'](testStr)
expect(UtilsModule.inspect).toHaveBeenCalledWith(testStr, {
colors: false
})
})
it(`should call inspect with colors when verbose mode is set to 'true'`, () => {
const requestClient = new RequestClient('')
let verbose: VerboseMode = true
requestClient.setVerboseMode(verbose)
jest.spyOn(UtilsModule, 'inspect')
const testStr = JSON.stringify({ test: 'test' })
requestClient['prettifyString'](testStr)
expect(UtilsModule.inspect).toHaveBeenCalledWith(testStr, {
colors: true
})
})
})
describe('disableVerboseMode', () => {
it('should eject interceptor', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'eject'
)
const interceptorId = 100
requestClient['httpInterceptor'] = interceptorId
requestClient.disableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(interceptorId)
})
})
describe('handleError', () => {
const requestClient = new RequestClient('https://localhost:8009')
const randomError = 'some error'
@@ -613,7 +213,7 @@ describe('RequestClient - Self Signed Server', () => {
serverType: ServerType.SasViya
})
const expectedError = 'self-signed certificate'
const expectedError = 'self signed certificate'
const rejectionErrorMessage = await adapterWithoutCertificate
.getAccessToken('clientId', 'clientSecret', 'authCode')
@@ -693,11 +293,3 @@ const createCertificate = async (): Promise<pem.CertificateCreationResult> => {
)
})
}
/**
* Returns a random status code.
* @param statuses - an array of available statuses.
* @returns - random item from an array of statuses.
*/
const getRandomStatus = (statuses: number[]) =>
statuses[Math.floor(Math.random() * statuses.length)]

View File

@@ -2,7 +2,7 @@ import { SessionManager } from '../SessionManager'
import { RequestClient } from '../request/RequestClient'
import * as dotenv from 'dotenv'
import axios from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Logger, LogLevel } from '@sasjs/utils'
import { Session, Context } from '../types'
jest.mock('axios')

View File

@@ -1,9 +1,6 @@
export interface PollOptions {
maxPollCount: number
pollInterval: number // milliseconds
pollStrategy?: PollStrategy
streamLog?: boolean
pollInterval: number
streamLog: boolean
logFolderPath?: string
}
export type PollStrategy = PollOptions[]
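On the side of this compare that still carries PollStrategy, a configuration that escalates through successive strategies might look like this (values echo the defaults and the removed pollJobState tests):

// Sketch: each strategy must raise maxPollCount and use a pollInterval > 0
const pollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500, // milliseconds
  streamLog: false,
  pollStrategy: [
    { maxPollCount: 200, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 }
  ]
}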

View File

@@ -1,55 +0,0 @@
import { CsrfToken } from '..'
export interface HttpClient {
get<T>(
url: string,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
post<T>(
url: string,
data: any,
accessToken: string | undefined,
contentType: string,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
put<T>(
url: string,
data: any,
accessToken: string | undefined,
overrideHeaders: { [key: string]: string | number }
): Promise<{ result: T; etag: string }>
delete<T>(
url: string,
accessToken: string | undefined
): Promise<{ result: T; etag: string }>
getCsrfToken(type: 'general' | 'file'): CsrfToken | undefined
saveLocalStorageToken(accessToken: string, refreshToken: string): void
clearCsrfTokens(): void
clearLocalStorageTokens(): void
getBaseUrl(): string
}
export interface SASjsRequest {
serviceLink: string
timestamp: Date
sourceCode: string
generatedCode: string
logFile: string
SASWORK: any
}
export interface SasjsParsedResponse<T> {
result: T
log: string
etag: string
status: number
printOutput?: string
}
export type VerboseMode = boolean | 'bleached'

View File

@@ -1,6 +1,5 @@
import * as https from 'https'
import { ServerType } from '@sasjs/utils/types'
import { VerboseMode } from '../types'
/**
* Specifies the configuration for the SASjs instance - eg where and how to
@@ -46,10 +45,6 @@ export class SASjsConfig {
* Set to `true` to enable additional debugging.
*/
debug: boolean = true
/**
* Set to `true` to enable verbose mode that will log a summary of every HTTP response.
*/
verbose?: VerboseMode = true
/**
* The name of the compute context to use when calling the Viya services directly.
* Example value: 'SAS Job Execution compute context'

src/types/SASjsRequest.ts (new file, 12 lines)
View File

@@ -0,0 +1,12 @@
/**
* Represents a SASjs request, its response and logs.
*
*/
export interface SASjsRequest {
serviceLink: string
timestamp: Date
sourceCode: string
generatedCode: string
logFile: string
SASWORK: any
}

View File

@@ -7,7 +7,7 @@ describe('RootFolderNotFoundError', () => {
const error = new RootFolderNotFoundError(
'/myProject',
'https://sas.4gl.io',
'https://analytium.co.uk',
token
)
@@ -19,7 +19,7 @@ describe('RootFolderNotFoundError', () => {
it('when access token is not provided, error message should not contain scopes', () => {
const error = new RootFolderNotFoundError(
'/myProject',
'https://sas.4gl.io'
'https://analytium.co.uk'
)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
@@ -30,7 +30,7 @@ describe('RootFolderNotFoundError', () => {
it('should include the folder path and SASDrive URL in the message', () => {
const folderPath = '/myProject'
const serverUrl = 'https://sas.4gl.io'
const serverUrl = 'https://analytium.co.uk'
const error = new RootFolderNotFoundError(folderPath, serverUrl)
expect(error).toBeInstanceOf(RootFolderNotFoundError)

View File

@@ -6,12 +6,10 @@ export * from './Job'
export * from './JobDefinition'
export * from './JobResult'
export * from './Link'
export * from './Login'
export * from './SASjsConfig'
export * from './RequestClient'
export * from './SASjsRequest'
export * from './Session'
export * from './UploadFile'
export * from './PollOptions'
export * from './WriteStream'
export * from './ExecuteScript'
export * from './errors'

View File

@@ -1,4 +1,4 @@
import { SASjsRequest } from '../types'
import { SASjsRequest } from '../types/SASjsRequest'
/**
* Comparator for SASjs request timestamps.

src/utils/constants.ts (new file, 2 lines)
View File

@@ -0,0 +1,2 @@
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'

View File

@@ -1,5 +0,0 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'
export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()

View File

@@ -2,6 +2,7 @@ export * from './appendExtraResponseAttributes'
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './constants'
export * from './createAxiosInstance'
export * from './delay'
export * from './fetchLogByChunks'
@@ -19,4 +20,3 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
export * from './getFormData'

View File

@@ -1,20 +0,0 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'
describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
expect(getFormData() instanceof NodeFormData).toEqual(true)
})
it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
expect(getFormData() instanceof FormData).toEqual(true)
})
})