mirror of https://github.com/sasjs/adapter.git synced 2026-01-04 03:00:05 +00:00

Compare commits


61 Commits

Author SHA1 Message Date
Yury Shkoda
5756638dc2 Merge pull request #825 from sasjs/verboseMode
feat(request-client): made verbose mode easier to configure
2023-08-01 09:52:59 +03:00
Yury Shkoda
e511cd613c docs(verbose): fixed docs 2023-07-31 17:41:04 +03:00
Yury Shkoda
2119c81ebb feat(sasjs-config): added verbose option 2023-07-31 17:37:39 +03:00
Yury Shkoda
ea4b30d6ef feat(request-client): made verbose mode easier to configure 2023-07-31 16:34:09 +03:00
Yury Shkoda
f1e1b33571 Merge pull request #824 from sasjs/startComputeJob-issue
feat(request-client): implemented verbose mode
2023-07-31 10:51:02 +03:00
Yury Shkoda
ccb8599f00 docs(request-client): added comments 2023-07-28 11:55:52 +03:00
Yury Shkoda
5bcd17096b feat(request-client): implemented verbose mode 2023-07-27 19:29:51 +03:00
Allan Bowe
d744ee12a3 Merge pull request #823 from sasjs/@sasjs/server-response-fix
feat(sasjs-request-client): improved parseResponse method
2023-07-26 11:45:04 +01:00
Yury Shkoda
5f15226cd9 test(sasjs-request-client): removed unnecessary part of the log 2023-07-25 17:31:39 +03:00
Yury Shkoda
f31ea28b9c refactor(sasjs-request-client): used SASJS_LOGS_SEPARATOR const 2023-07-25 16:08:16 +03:00
Yury Shkoda
e315e4a619 feat(sasjs-request-client): improved parseResponse method 2023-07-25 16:01:35 +03:00
Yury Shkoda
76bf5b88e9 Merge pull request #818 from sasjs/deps-bump
Dependencies bump
2023-07-12 09:50:50 +03:00
Yury Shkoda
a97ac4eaa6 chore: commiting changes 2023-07-11 15:36:13 +03:00
Yury Shkoda
37cfea6ca7 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/loader-utils-2.0.4' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:53:19 +03:00
Yury Shkoda
f74c8aca57 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/json5-1.0.2' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:52:28 +03:00
Yury Shkoda
77baaabfcd chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/http-cache-semantics-4.1.1' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:51:32 +03:00
Yury Shkoda
510ba771f0 chore(deps): Merge branch 'dependabot/npm_and_yarn/sasjs-tests/webpack-5.76.3' of github.com:sasjs/adapter into deps-bump 2023-07-11 14:50:38 +03:00
Allan Bowe
6fce65f4c8 Merge pull request #817 from sasjs/request-sasjs-fix
fix(file-upload-form): fixed form data for node env
2023-07-11 09:58:57 +01:00
Yury Shkoda
fe03faa59f chore(file-upload-form): left comments 2023-07-11 09:26:36 +03:00
Yury Shkoda
6272eeda23 fix(form-data): fixed formData type check 2023-07-10 19:14:47 +03:00
Yury Shkoda
104d1b88b3 chore(deps): bimped tough-cookie and @types/tough-cookie 2023-07-10 17:07:39 +03:00
Yury Shkoda
0d9ba36de8 fix(file-upload-form): fixed form data for node env 2023-07-06 15:49:24 +03:00
Yury Shkoda
4e7a845d99 Merge pull request #816 from sasjs/ci/cd-workwlows-node-version
chore(ci-cd): used Node lts/hydrogen version
2023-07-06 12:42:12 +03:00
Yury Shkoda
716cc513ff chore(ci-cd): used Node lts/hydrogen version 2023-07-05 16:10:45 +03:00
Yury Shkoda
22edcb0a8e Merge pull request #810 from sasjs/pollJobState-improvements
Poll job state improvements
2023-07-05 11:15:42 +03:00
Yury Shkoda
aedf5c1734 chore: Merge branch 'master' of github.com:sasjs/adapter into pollJobState-improvements 2023-07-05 10:49:12 +03:00
Yury Shkoda
784bd20ee0 Merge pull request #814 from sasjs/issue-811-fixed
Issue 811 fixed
2023-07-05 10:27:04 +03:00
Yury Shkoda
61db1e0609 test: fixed unit tests 2023-06-23 18:04:48 +03:00
Yury Shkoda
5c589a6af3 chore: reverted dev changes to build.yml 2023-06-23 17:52:46 +03:00
Yury Shkoda
275cd6dbd3 chore: debugging 2023-06-23 17:20:16 +03:00
Yury Shkoda
d874e07889 fix(file-uploader): fixed parsing response for SASJS 2023-06-23 16:37:25 +03:00
Yury Shkoda
1648cf28d5 chore: Merge branch 'master' of github.com:sasjs/adapter into issue-811-fixed 2023-06-23 15:26:01 +03:00
a4aaeba31c fix: file upload with sasjs server type, json not parsed 2023-06-22 12:48:40 +02:00
Yury Shkoda
6bf68a315c fix(sasjs-utils): fixed imports 2023-06-22 13:37:07 +03:00
Allan Bowe
c0f78d0c1e fix: updating example.html to use v4 of the adapter 2023-06-22 08:56:32 +01:00
Yury Shkoda
e0aebc169f chore: debugging 2023-06-21 18:28:39 +03:00
Yury Shkoda
9a50e5cb63 chore: debugging 2023-06-21 18:16:20 +03:00
Yury Shkoda
a51923dad7 chore: debugging 2023-06-21 18:01:21 +03:00
Yury Shkoda
9aee77f0e3 chore: debugging 2023-06-21 17:44:24 +03:00
Yury Shkoda
c32d037063 chore: debugging 2023-06-21 17:34:16 +03:00
Yury Shkoda
94f7492c31 chore: debugging 2023-06-21 17:19:13 +03:00
Yury Shkoda
d29e0a0f57 chore(sasjs-tests): bumped node-sass 2023-06-21 16:36:22 +03:00
Yury Shkoda
8d7cc11db5 chore(sasjs-tests): bumped node-sass 2023-06-21 16:35:33 +03:00
Yury Shkoda
28e9d1cc6b test(get-token): covered getTokenRequestErrorPrefix 2023-06-21 16:34:26 +03:00
Yury Shkoda
375cec48ca feat(get-token): improved error prefix 2023-06-21 16:33:45 +03:00
7d826685f7 Merge pull request #813 from sasjs/sasjs-job-execute
fix: issue with parsing json in sasjs job executor
2023-06-15 16:08:08 +02:00
f42f6bca00 fix: issue with parsing json in sasjs job executor 2023-06-14 15:08:11 +02:00
Yury Shkoda
4440e5d1f9 fix(types): fixed PollOptions exports 2023-05-17 14:10:17 +03:00
Yury Shkoda
f484a5a6a1 refactor(poll-job-state): updated types and func attributes 2023-05-17 11:16:35 +03:00
Yury Shkoda
5c74186bab feat(poll-strategy): added subsequentStrategies to PollStrategy 2023-05-16 17:48:04 +03:00
Yury Shkoda
ea68c3dff3 docs(poll-job-state): updated docs 2023-05-16 17:42:27 +03:00
Yury Shkoda
153b285670 chore(poll-job-status): renamed PollOptions to PollStrategy and added docs 2023-05-15 16:32:07 +03:00
Yury Shkoda
f9f4aa5aa6 chore(reviewer-lottery): removed QA group 2023-05-15 14:53:55 +03:00
Yury Shkoda
bd02656b3c docs(poll-job-state): added comments 2023-05-15 14:36:18 +03:00
Yury Shkoda
991519a13d fix(execute-job): added error object if it present 2023-05-15 14:26:24 +03:00
Yury Shkoda
615c9d012e feat(poll-job-state): implemented polling strategies 2023-05-15 14:24:11 +03:00
dependabot[bot]
d166231c12 chore(deps): bump webpack from 5.73.0 to 5.76.3 in /sasjs-tests
Bumps [webpack](https://github.com/webpack/webpack) from 5.73.0 to 5.76.3.
- [Release notes](https://github.com/webpack/webpack/releases)
- [Commits](https://github.com/webpack/webpack/compare/v5.73.0...v5.76.3)

---
updated-dependencies:
- dependency-name: webpack
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-03-27 12:12:06 +00:00
dependabot[bot]
4cb150e951 chore(deps): bump http-cache-semantics in /sasjs-tests
Bumps [http-cache-semantics](https://github.com/kornelski/http-cache-semantics) from 4.1.0 to 4.1.1.
- [Release notes](https://github.com/kornelski/http-cache-semantics/releases)
- [Commits](https://github.com/kornelski/http-cache-semantics/compare/v4.1.0...v4.1.1)

---
updated-dependencies:
- dependency-name: http-cache-semantics
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-02-04 13:53:04 +00:00
dependabot[bot]
fc8598473f chore(deps): bump json5 from 1.0.1 to 1.0.2 in /sasjs-tests
Bumps [json5](https://github.com/json5/json5) from 1.0.1 to 1.0.2.
- [Release notes](https://github.com/json5/json5/releases)
- [Changelog](https://github.com/json5/json5/blob/main/CHANGELOG.md)
- [Commits](https://github.com/json5/json5/compare/v1.0.1...v1.0.2)

---
updated-dependencies:
- dependency-name: json5
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-01-08 11:36:38 +00:00
dependabot[bot]
367e0ae25a chore(deps): bump loader-utils from 2.0.2 to 2.0.4 in /sasjs-tests
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.2 to 2.0.4.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.2...v2.0.4)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-16 06:56:29 +00:00
dependabot[bot]
85dde61baf chore(deps): bump semver-regex from 3.1.3 to 3.1.4
Bumps [semver-regex](https://github.com/sindresorhus/semver-regex) from 3.1.3 to 3.1.4.
- [Release notes](https://github.com/sindresorhus/semver-regex/releases)
- [Commits](https://github.com/sindresorhus/semver-regex/commits/v3.1.4)

---
updated-dependencies:
- dependency-name: semver-regex
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-06-03 23:43:27 +00:00
50 changed files with 4515 additions and 4723 deletions

View File

@@ -5,7 +5,3 @@ groups:
       - YuryShkoda
       - medjedovicm
       - sabhas
-  - name: SASjs QA
-    reviewers: 1
-    usernames:
-      - VladislavParhomchik

View File

@@ -12,7 +12,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
     steps:
       - uses: actions/checkout@v2
@@ -22,17 +22,17 @@ jobs:
          node-version: ${{ matrix.node-version }}
          cache: npm
-      # - name: Check npm audit
-      #   run: npm audit --production --audit-level=low
+      - name: Check npm audit
+        run: npm audit --production --audit-level=low
      - name: Install Dependencies
        run: npm ci
-      # - name: Check code style
-      #   run: npm run lint
-      # - name: Run unit tests
-      #   run: npm test
+      - name: Check code style
+        run: npm run lint
+      - name: Run unit tests
+        run: npm test
      - name: Build Package
        run: npm run package:lib
@@ -72,27 +72,19 @@ jobs:
          npm install -g replace-in-files-cli
          cd sasjs-tests
          replace-in-files --regex='"@sasjs/adapter".*' --replacement='"@sasjs/adapter":"latest",' ./package.json
-          npm i --legacy-peer-deps
+          npm i
          replace-in-files --regex='"serverUrl".*' --replacement='"serverUrl":"${{ secrets.SASJS_SERVER_URL }}",' ./public/config.json
-          replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME_DEV }}",' ./public/config.json
-          replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./public/config.json
+          replace-in-files --regex='"userName".*' --replacement='"userName":"${{ secrets.SASJS_USERNAME }}",' ./public/config.json
+          replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./public/config.json
          replace-in-files --regex='"serverType".*' --replacement='"serverType":"SASJS",' ./public/config.json
-          # npm run update:adapter
+          npm run update:adapter
          pm2 start --name sasjs-test npm -- start
-          cat ./public/config.json
-          cat ../cypress.json
-      - name: Sleep for 10 seconds
-        uses: jakejarvis/wait-action@master
-        with:
-          time: '10s'
      - name: Run cypress on sasjs
        run: |
-          ss -lntu
          replace-in-files --regex='"sasjsTestsUrl".*' --replacement='"sasjsTestsUrl":"http://localhost:3000",' ./cypress.json
-          replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME_DEV }}",' ./cypress.json
-          replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD_DEV }}",' ./cypress.json
+          replace-in-files --regex='"username".*' --replacement='"username":"${{ secrets.SASJS_USERNAME }}",' ./cypress.json
+          replace-in-files --regex='"password".*' --replacement='"password":"${{ secrets.SASJS_PASSWORD }}",' ./cypress.json
          sh ./sasjs-tests/sasjs-cypress-run.sh ${{ secrets.MATRIX_TOKEN }} https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
      # For some reason if coverage report action is run before other commands, those commands can't access the directories and files on which they depend on

View File

@@ -11,7 +11,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
     steps:
       - name: Checkout

View File

@@ -14,7 +14,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]
     steps:
       - uses: actions/checkout@v2

View File

@@ -151,7 +151,11 @@ The `request()` method also has optional parameters such as a config object and
 The response object will contain returned tables and columns. Table names are always lowercase, and column names uppercase.
 The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.
+
+### Verbose Mode
+
+Set `verbose` to `true` to enable verbose mode that logs a summary of every HTTP response. Verbose mode can be disabled by calling the `disableVerboseMode` method or enabled by the `enableVerboseMode` method. Verbose mode can also be enabled/disabled by the `startComputeJob` method.
 ### Session Manager
@@ -273,6 +277,7 @@ Configuration on the client side involves passing an object on startup, which ca
 * `serverType` - either `SAS9`, `SASVIYA` or `SASJS`. The `SASJS` server type is for use with [sasjs/server](https://github.com/sasjs/server).
 * `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
 * `debug` - if `true` then SAS Logs and extra debug information is returned.
+* `verbose` - optional, if `true` then a summary of every HTTP response is logged.
 * `loginMechanism` - either `Default` or `Redirected`. See [SAS Logon](#sas-logon) section.
 * `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
 * `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
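A minimal sketch of how the new `verbose` option described above might be used. This is illustrative only and not part of the diff: the `appLoc` value is a placeholder and the config shape is simplified (see the adapter README for the full `SASjsConfig`).

```ts
import SASjs from '@sasjs/adapter'

// 'verbose' logs a summary of every HTTP response made by the adapter.
const sasjs = new SASjs({
  appLoc: '/Public/app/readme', // placeholder application location
  verbose: true
})

// Verbose logging can also be toggled at runtime:
sasjs.disableVerboseMode()
sasjs.enableVerboseMode()
```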

View File

@@ -5,9 +5,6 @@ const testingFinishTimeout = Cypress.env('testingFinishTimeout')
 context('sasjs-tests', function () {
   this.beforeAll(() => {
-    cy.task('log', 'beforeAll')
-    cy.task('log', `sasjsTestsUrl: ${sasjsTestsUrl}`)
     cy.visit(sasjsTestsUrl)
   })
@@ -16,66 +13,35 @@ context('sasjs-tests', function () {
   })

   it('Should have all tests successfull', (done) => {
-    cy.task('log', `Should have all tests successfull`)
     cy.get('body').then(($body) => {
-      cy.task('log', `22`)
       cy.wait(1000).then(() => {
         const startButton = $body.find(
           '.ui.massive.icon.primary.left.labeled.button'
         )[0]
-        // ui massive icon primary left labeled button
-        cy.task('log', `startButton: ${startButton}`)
         if (
           !startButton ||
           (startButton && !Cypress.dom.isVisible(startButton))
         ) {
-          cy.task('log', `34`)
-          cy.task('log', `username: ${username}`)
-          cy.task('log', `password: ${password}`)
-          const userNameInput = cy.get('input[placeholder="User Name"]')
-          const passwordInput = cy.get('input[placeholder="Password"]')
-          cy.task('log', `userNameInput: ${userNameInput}`)
-          cy.task('log', `passwordInput: ${passwordInput}`)
           cy.get('input[placeholder="User Name"]').type(username)
           cy.get('input[placeholder="Password"]').type(password)
-          const submitBtn = cy.get('.submit-button')
-          cy.task('log', `submitBtn: ${submitBtn}`)
           cy.get('.submit-button').click()
         }
         cy.get('input[placeholder="User Name"]', { timeout: 40000 })
           .should('not.exist')
           .then(() => {
-            cy.task('log', `46`)
             cy.get('.ui.massive.icon.primary.left.labeled.button')
               .click()
               .then(() => {
-                cy.task('log', `50`)
-                const loadingButton = $body.find(
-                  '.ui.massive.loading.primary.button'
-                )[0]
-                cy.task('log', `loadingButton: ${loadingButton}`)
                 cy.get('.ui.massive.loading.primary.button', {
                   timeout: testingFinishTimeout
                 })
                   .should('not.exist')
                   .then(() => {
-                    cy.task('log', `56`)
                     cy.get('span.icon.failed')
                       .should('not.exist')
                       .then(() => {
-                        cy.task('log', `60`)
                         done()
                       })
                   })
@@ -85,46 +51,46 @@ context('sasjs-tests', function () {
       })
     })
   })

-  // it('Should have all tests successfull with debug on', (done) => {
-  //   cy.get('body').then(($body) => {
-  //     cy.wait(1000).then(() => {
-  //       const startButton = $body.find(
-  //         '.ui.massive.icon.primary.left.labeled.button'
-  //       )[0]
-  //       if (
-  //         !startButton ||
-  //         (startButton && !Cypress.dom.isVisible(startButton))
-  //       ) {
-  //         cy.get('input[placeholder="User Name"]').type(username)
-  //         cy.get('input[placeholder="Password"]').type(password)
-  //         cy.get('.submit-button').click()
-  //       }
-  //       cy.get('.ui.fitted.toggle.checkbox label')
-  //         .click()
-  //         .then(() => {
-  //           cy.get('input[placeholder="User Name"]', { timeout: 40000 })
-  //             .should('not.exist')
-  //             .then(() => {
-  //               cy.get('.ui.massive.icon.primary.left.labeled.button')
-  //                 .click()
-  //                 .then(() => {
-  //                   cy.get('.ui.massive.loading.primary.button', {
-  //                     timeout: testingFinishTimeout
-  //                   })
-  //                     .should('not.exist')
-  //                     .then(() => {
-  //                       cy.get('span.icon.failed')
-  //                         .should('not.exist')
-  //                         .then(() => {
-  //                           done()
-  //                         })
-  //                     })
-  //                 })
-  //             })
-  //         })
-  //     })
-  //   })
-  // })
+  it('Should have all tests successfull with debug on', (done) => {
+    cy.get('body').then(($body) => {
+      cy.wait(1000).then(() => {
+        const startButton = $body.find(
+          '.ui.massive.icon.primary.left.labeled.button'
+        )[0]
+        if (
+          !startButton ||
+          (startButton && !Cypress.dom.isVisible(startButton))
+        ) {
+          cy.get('input[placeholder="User Name"]').type(username)
+          cy.get('input[placeholder="Password"]').type(password)
+          cy.get('.submit-button').click()
+        }
+        cy.get('.ui.fitted.toggle.checkbox label')
+          .click()
+          .then(() => {
+            cy.get('input[placeholder="User Name"]', { timeout: 40000 })
+              .should('not.exist')
+              .then(() => {
+                cy.get('.ui.massive.icon.primary.left.labeled.button')
+                  .click()
+                  .then(() => {
+                    cy.get('.ui.massive.loading.primary.button', {
+                      timeout: testingFinishTimeout
+                    })
+                      .should('not.exist')
+                      .then(() => {
+                        cy.get('span.icon.failed')
+                          .should('not.exist')
+                          .then(() => {
+                            done()
+                          })
+                      })
+                  })
+              })
+          })
+      })
+    })
+  })
 })

View File

@@ -39,11 +39,4 @@ module.exports = (on, config) => {
       return launchOptions
     }
   })
-
-  on('task', {
-    log(message) {
-      console.log(message)
-      return null
-    }
-  })
 }

View File

@@ -1,7 +1,7 @@
 <!DOCTYPE html>
 <html>
 <head>
-  <script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@1"></script>
+  <script src="https://cdn.jsdelivr.net/combine/npm/chart.js@2.9.3,npm/jquery@3.5.1,npm/@sasjs/adapter@4"></script>
  <script>
    var sasJs = new SASjs.default({
      appLoc: "/Public/app/readme"

package-lock.json (generated)
View File

@@ -13,7 +13,7 @@
         "axios-cookiejar-support": "1.0.1",
         "form-data": "4.0.0",
         "https": "1.0.0",
-        "tough-cookie": "4.0.0"
+        "tough-cookie": "4.1.3"
       },
       "devDependencies": {
         "@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
         "@types/jest": "27.4.0",
         "@types/mime": "2.0.3",
         "@types/pem": "1.9.6",
-        "@types/tough-cookie": "4.0.1",
+        "@types/tough-cookie": "4.0.2",
         "copyfiles": "2.4.1",
         "cp": "0.2.0",
         "cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
       "dev": true
     },
     "node_modules/@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "node_modules/@types/yargs": {
       "version": "16.0.5",
@@ -14110,6 +14110,11 @@
         "node": ">=0.4.x"
       }
     },
+    "node_modules/querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14457,6 +14462,11 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "node_modules/resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15702,22 +15712,23 @@
       }
     },
     "node_modules/tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "dependencies": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "engines": {
         "node": ">=6"
       }
     },
     "node_modules/tough-cookie/node_modules/universalify": {
-      "version": "0.1.2",
-      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-      "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "engines": {
         "node": ">= 4.0.0"
       }
@@ -16351,6 +16362,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "node_modules/url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "dependencies": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "node_modules/url/node_modules/punycode": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -19536,9 +19556,9 @@
       "dev": true
     },
     "@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "@types/yargs": {
       "version": "16.0.5",
@@ -27552,6 +27572,11 @@
       "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
       "dev": true
     },
+    "querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27833,6 +27858,11 @@
       "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true
     },
+    "requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28799,19 +28829,20 @@
       "dev": true
     },
     "tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "requires": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "dependencies": {
         "universalify": {
-          "version": "0.1.2",
-          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-          "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
+          "version": "0.2.0",
+          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+          "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
         }
       }
     },
@@ -29269,6 +29300,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "requires": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "util": {
       "version": "0.12.5",
       "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",

View File

@@ -49,7 +49,7 @@
     "@types/jest": "27.4.0",
     "@types/mime": "2.0.3",
     "@types/pem": "1.9.6",
-    "@types/tough-cookie": "4.0.1",
+    "@types/tough-cookie": "4.0.2",
     "copyfiles": "2.4.1",
     "cp": "0.2.0",
     "cypress": "7.7.0",
@@ -82,6 +82,6 @@
     "axios-cookiejar-support": "1.0.1",
     "form-data": "4.0.0",
     "https": "1.0.0",
-    "tough-cookie": "4.0.0"
+    "tough-cookie": "4.1.3"
   }
 }

File diff suppressed because it is too large.

View File

@@ -4,15 +4,14 @@
   "homepage": ".",
   "private": true,
   "dependencies": {
-    "@sasjs/adapter": "4.3.5",
     "@sasjs/test-framework": "1.5.7",
     "@types/jest": "^26.0.20",
     "@types/node": "^14.14.41",
-    "@types/react": "^17.0.1",
-    "@types/react-dom": "^17.0.0",
+    "@types/react": "^16.0.1",
+    "@types/react-dom": "^16.0.0",
     "@types/react-router-dom": "^5.1.7",
-    "react": "^17.0.1",
-    "react-dom": "^17.0.1",
+    "react": "^16.0.1",
+    "react-dom": "^16.0.1",
     "react-router-dom": "^5.2.0",
     "react-scripts": "^5.0.1",
     "typescript": "^4.1.3"
@@ -22,7 +21,7 @@
     "build": "react-scripts build",
     "test": "react-scripts test",
     "eject": "react-scripts eject",
-    "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz --legacy-peer-deps",
+    "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
     "deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
     "deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
     "deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
@@ -43,6 +42,6 @@
     ]
   },
   "devDependencies": {
-    "node-sass": "7.0.3"
+    "node-sass": "9.0.0"
   }
 }

View File

@@ -2,7 +2,7 @@
   "userName": "",
   "password": "",
   "sasJsConfig": {
-    "serverUrl": "https://sas9.4gl.io",
+    "serverUrl": "",
     "appLoc": "/Public/app/adapter-tests/services",
     "serverType": "SASJS",
     "debug": false,

View File

@@ -1,9 +1,7 @@
 {
   "$schema": "https://cli.sasjs.io/sasjsconfig-schema.json",
   "serviceConfig": {
-    "serviceFolders": [
-      "sasjs/common"
-    ]
+    "serviceFolders": ["sasjs/common"]
   },
   "defaultTarget": "4gl",
   "targets": [
@@ -28,4 +26,4 @@
       }
     }
   ]
 }

View File

@@ -11,7 +11,7 @@ const Login = (): ReactElement<{}> => {
   const handleSubmit = useCallback(
     (e: any) => {
       e.preventDefault()
-      appContext.adapter.logIn(username, password).then((res: any) => {
+      appContext.adapter.logIn(username, password).then((res) => {
        appContext.setIsLoggedIn(res.isLoggedIn)
      })
    },

View File

@@ -29,6 +29,12 @@ import { executeScript } from './api/viya/executeScript'
 import { getAccessTokenForViya } from './auth/getAccessTokenForViya'
 import { refreshTokensForViya } from './auth/refreshTokensForViya'

+interface JobExecutionResult {
+  result?: { result: object }
+  log?: string
+  error?: object
+}
+
 /**
@@ -270,7 +276,7 @@ export class SASViyaApiClient {
    * @param debug - when set to true, the log will be returned.
    * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
    * @param waitForResult - when set to true, function will return the session
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -621,7 +627,7 @@ export class SASViyaApiClient {
    * @param accessToken - an optional access token for an authorized user.
    * @param waitForResult - a boolean indicating if the function should wait for a result.
    * @param expectWebout - a boolean indicating whether to expect a _webout response.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
    */
@@ -732,11 +738,13 @@ export class SASViyaApiClient {
     debug: boolean,
     data?: any,
     authConfig?: AuthConfig
-  ) {
+  ): Promise<JobExecutionResult> {
     let access_token = (authConfig || {}).access_token
+
     if (authConfig) {
       ;({ access_token } = await getTokens(this.requestClient, authConfig))
     }
+
     if (isRelativePath(sasJob) && !this.rootFolderName) {
       throw new Error(
         'Relative paths cannot be used without specifying a root folder name.'
@@ -749,6 +757,7 @@ export class SASViyaApiClient {
     const fullFolderPath = isRelativePath(sasJob)
       ? `${this.rootFolderName}/${folderPath}`
       : folderPath
+
     await this.populateFolderMap(fullFolderPath, access_token)

     const jobFolder = this.folderMap.get(fullFolderPath)
@@ -765,9 +774,8 @@ export class SASViyaApiClient {
       files = await this.uploadTables(data, access_token)
     }

-    if (!jobToExecute) {
-      throw new Error(`Job was not found.`)
-    }
+    if (!jobToExecute) throw new Error(`Job was not found.`)
+
     const jobDefinitionLink = jobToExecute?.links.find(
       (l) => l.rel === 'getResource'
     )?.href
@@ -807,16 +815,19 @@ export class SASViyaApiClient {
       jobDefinition,
       arguments: jobArguments
     }
+
     const { result: postedJob } = await this.requestClient.post<Job>(
       `${this.serverUrl}/jobExecution/jobs?_action=wait`,
       postJobRequestBody,
       access_token
     )
+
     const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
       (err) => {
         throw prefixMessage(err, 'Error while polling job status. ')
       }
     )
+
     const { result: currentJob } = await this.requestClient.get<Job>(
       `${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
       access_token
@@ -827,6 +838,7 @@ export class SASViyaApiClient {
     const resultLink = currentJob.results['_webout.json']
     const logLink = currentJob.links.find((l) => l.rel === 'log')
+
     if (resultLink) {
       jobResult = await this.requestClient.get<any>(
         `${this.serverUrl}${resultLink}/content`,
@@ -834,11 +846,13 @@ export class SASViyaApiClient {
         'text/plain'
       )
     }
+
     if (debug && logLink) {
       log = await this.requestClient
         .get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
         .then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
     }
+
     if (jobStatus === 'failed') {
       throw new JobExecutionError(
         currentJob.error?.errorCode,
@@ -846,7 +860,16 @@ export class SASViyaApiClient {
         log
       )
     }
-    return { result: jobResult?.result, log }
+
+    const executionResult: JobExecutionResult = {
+      result: jobResult?.result,
+      log
+    }
+
+    const { error } = currentJob
+    if (error) executionResult.error = error
+
+    return executionResult
   }

   private async populateFolderMap(folderPath: string, accessToken?: string) {
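A short sketch of consuming the `JobExecutionResult` shape introduced in the diff above. The interface is copied from the diff; `executionResult` is assumed to come from a compute-job call such as `startComputeJob` (the exact call is omitted here).

```ts
// Interface copied from the diff above.
interface JobExecutionResult {
  result?: { result: object }
  log?: string
  error?: object
}

function reportJobOutcome(executionResult: JobExecutionResult) {
  if (executionResult.error) {
    // The job's error object is now surfaced alongside the webout and log.
    console.error('Job reported an error:', executionResult.error)
  }

  if (executionResult.log) console.log(executionResult.log)

  // The parsed _webout content, when the job produced one.
  return executionResult.result?.result
}
```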

View File

@@ -31,6 +31,7 @@ import {
 } from './job-execution'
 import { ErrorResponse } from './types/errors'
 import { LoginOptions, LoginResult } from './types/Login'
+import { AxiosResponse } from 'axios'

 interface ExecuteScriptParams {
   linesOfCode: string[]
@@ -851,9 +852,10 @@ export default class SASjs {
    * @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
    * The access token is not required when the user is authenticated via the browser.
    * @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
-   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+   * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
+   * @param verboseMode - boolean to enable verbose mode (log every HTTP response).
    */
   public async startComputeJob(
     sasJob: string,
@@ -863,7 +865,8 @@
     waitForResult?: boolean,
     pollOptions?: PollOptions,
     printPid = false,
-    variables?: MacroVar
+    variables?: MacroVar,
+    verboseMode?: boolean
   ) {
     config = {
       ...this.sasjsConfig,
@@ -877,6 +880,9 @@
       )
     }

+    if (verboseMode) this.requestClient?.enableVerboseMode()
+    else if (verboseMode === false) this.requestClient?.disableVerboseMode()
+
     return this.sasViyaApiClient?.executeComputeJob(
       sasJob,
       config.contextName,
@@ -970,7 +976,8 @@
       this.requestClient = new RequestClientClass(
         this.sasjsConfig.serverUrl,
         this.sasjsConfig.httpsAgentOptions,
-        this.sasjsConfig.requestHistoryLimit
+        this.sasjsConfig.requestHistoryLimit,
+        this.sasjsConfig.verbose
       )
     } else {
       this.requestClient.setConfig(
@@ -1134,4 +1141,23 @@
       )
     }
   }
+
+  /**
+   * Enables verbose mode that will log a summary of every HTTP response.
+   * @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
+   * @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
+   */
+  public enableVerboseMode(
+    successCallBack?: (response: AxiosResponse) => AxiosResponse,
+    errorCallBack?: (response: AxiosResponse) => AxiosResponse
+  ) {
+    this.requestClient?.enableVerboseMode(successCallBack, errorCallBack)
+  }
+
+  /**
+   * Turns off verbose mode to log every HTTP response.
+   */
+  public disableVerboseMode() {
+    this.requestClient?.disableVerboseMode()
+  }
 }
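A sketch of the new verbose-mode controls shown above. The method names and `AxiosResponse` callback signatures follow the diff; the callback bodies are illustrative only, and `sasjs` is assumed to be an existing `SASjs` instance.

```ts
import { AxiosResponse } from 'axios'

sasjs.enableVerboseMode(
  (response: AxiosResponse) => {
    // Called for every 2** response.
    console.log(`OK ${response.status} ${response.config.url}`)
    return response
  },
  (response: AxiosResponse) => {
    // Called for every non-2** response.
    console.warn(`Failed ${response.status} ${response.config.url}`)
    return response
  }
)

// Switch the extra logging off again when it is no longer needed.
sasjs.disableVerboseMode()
```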

View File

@@ -12,7 +12,7 @@ import { RequestClient } from '../../request/RequestClient'
 import { SessionManager } from '../../SessionManager'
 import { isRelativePath, fetchLogByChunks } from '../../utils'
 import { formatDataForRequest } from '../../utils/formatDataForRequest'
-import { pollJobState } from './pollJobState'
+import { pollJobState, JobState } from './pollJobState'
 import { uploadTables } from './uploadTables'

 /**
@@ -25,7 +25,7 @@ import { uploadTables } from './uploadTables'
  * @param debug - when set to true, the log will be returned.
  * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
  * @param waitForResult - when set to true, function will return the session
- * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
+ * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
  * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
  * @param variables - an object that represents macro variables.
  */
@@ -228,7 +228,7 @@ export async function executeScript(
     )
   }

-  if (jobStatus === 'failed' || jobStatus === 'error') {
+  if (jobStatus === JobState.Failed || jobStatus === JobState.Error) {
     throw new ComputeJobExecutionError(currentJob, log)
   }

View File

@@ -1,29 +1,88 @@
import { AuthConfig } from '@sasjs/utils/types' import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions } from '../..' import { Job, PollOptions, PollStrategy } from '../..'
import { getTokens } from '../../auth/getTokens' import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient' import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors' import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types' import { Link, WriteStream } from '../../types'
import { delay, isNode } from '../../utils' import { delay, isNode } from '../../utils'
export enum JobState {
Completed = 'completed',
Running = 'running',
Pending = 'pending',
Unavailable = 'unavailable',
NoState = '',
Failed = 'failed',
Error = 'error'
}
/**
* Polls job status using default or provided poll options.
* @param requestClient - the pre-configured HTTP request client.
* @param postedJob - the relative or absolute path to the job.
* @param debug - sets the _debug flag in the job arguments.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
* @param pollOptions - an object containing maxPollCount, pollInterval, streamLog and logFolderPath. It will override the first default poll options in poll strategy if provided.
* Example pollOptions:
* {
* maxPollCount: 200,
* pollInterval: 300,
* streamLog: true, // optional, equals to false by default.
* pollStrategy?: // optional array of poll options that should be applied after 'maxPollCount' of the provided poll options is reached. If not provided the default (see example below) poll strategy will be used.
* }
* Example pollStrategy (values used from default poll strategy):
* [
* { maxPollCount: 200, pollInterval: 300 }, // approximately ~2 mins (including time to get response (~300ms))
* { maxPollCount: 300, pollInterval: 3000 }, // approximately ~5.5 mins (including time to get response (~300ms))
* { maxPollCount: 500, pollInterval: 30000 }, // approximately ~50.5 mins (including time to get response (~300ms))
* { maxPollCount: 3400, pollInterval: 60000 } // approximately ~3015 mins (~125 hours) (including time to get response (~300ms))
* ]
* @returns - a promise which resolves with a job state
*/
export async function pollJobState( export async function pollJobState(
requestClient: RequestClient, requestClient: RequestClient,
postedJob: Job, postedJob: Job,
debug: boolean, debug: boolean,
authConfig?: AuthConfig, authConfig?: AuthConfig,
pollOptions?: PollOptions pollOptions?: PollOptions
) { ): Promise<JobState> {
const logger = process.logger || console const logger = process.logger || console
let pollInterval = 300 const streamLog = pollOptions?.streamLog || false
let maxPollCount = 1000
const defaultPollOptions: PollOptions = { const defaultPollStrategy: PollStrategy = [
maxPollCount, { maxPollCount: 200, pollInterval: 300 },
pollInterval, { maxPollCount: 300, pollInterval: 3000 },
streamLog: false { maxPollCount: 500, pollInterval: 30000 },
{ maxPollCount: 3400, pollInterval: 60000 }
]
let pollStrategy: PollStrategy
if (pollOptions !== undefined) {
pollStrategy = [pollOptions]
let { pollStrategy: providedPollStrategy } = pollOptions
if (providedPollStrategy !== undefined) {
validatePollStrategies(providedPollStrategy)
// INFO: sort by 'maxPollCount'
providedPollStrategy = providedPollStrategy.sort(
(strategyA: PollOptions, strategyB: PollOptions) =>
strategyA.maxPollCount - strategyB.maxPollCount
)
pollStrategy = [...pollStrategy, ...providedPollStrategy]
} else {
pollStrategy = [...pollStrategy, ...defaultPollStrategy]
}
} else {
pollStrategy = defaultPollStrategy
} }
let defaultPollOptions: PollOptions = pollStrategy.splice(0, 1)[0]
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) } pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
const stateLink = postedJob.links.find((l: any) => l.rel === 'state') const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
@@ -31,10 +90,10 @@ export async function pollJobState(
throw new Error(`Job state link was not found.`) throw new Error(`Job state link was not found.`)
} }
let currentState = await getJobState( let currentState: JobState = await getJobState(
requestClient, requestClient,
postedJob, postedJob,
'', JobState.NoState,
debug, debug,
authConfig authConfig
).catch((err) => { ).catch((err) => {
@@ -42,73 +101,71 @@ export async function pollJobState(
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`, `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
err err
) )
return 'unavailable'
return JobState.Unavailable
}) })
let pollCount = 0 let pollCount = 0
if (currentState === 'completed') { if (currentState === JobState.Completed) {
return Promise.resolve(currentState) return Promise.resolve(currentState)
} }
let logFileStream let logFileStream
if (pollOptions.streamLog && isNode()) { if (streamLog && isNode()) {
const { getFileStream } = require('./getFileStream') const { getFileStream } = require('./getFileStream')
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath) logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
} }
// Poll up to the first 100 times with the specified poll interval
let result = await doPoll( let result = await doPoll(
requestClient, requestClient,
postedJob, postedJob,
currentState, currentState,
debug, debug,
pollCount, pollCount,
pollOptions,
authConfig, authConfig,
{ streamLog,
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream logFileStream
) )
currentState = result.state currentState = result.state
pollCount = result.pollCount pollCount = result.pollCount
if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) { if (
!needsRetry(currentState) ||
(pollCount >= pollOptions.maxPollCount && !pollStrategy.length)
) {
return currentState return currentState
} }
// If we get to this point, this is a long-running job that needs longer polling. // INFO: If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval of 1 minute // We will resume polling with a bigger interval according to the next polling strategy
let longJobPollOptions: PollOptions = { while (pollStrategy.length && needsRetry(currentState)) {
maxPollCount: 24 * 60, defaultPollOptions = pollStrategy.splice(0, 1)[0]
pollInterval: 60000,
streamLog: false if (pollOptions) {
} defaultPollOptions.logFolderPath = pollOptions.logFolderPath
if (pollOptions) { }
longJobPollOptions.streamLog = pollOptions.streamLog
longJobPollOptions.logFolderPath = pollOptions.logFolderPath result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
defaultPollOptions,
authConfig,
streamLog,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
} }
result = await doPoll( if (logFileStream) logFileStream.end()
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
longJobPollOptions,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (logFileStream) {
logFileStream.end()
}
return currentState return currentState
} }
@@ -119,17 +176,13 @@ const getJobState = async (
currentState: string, currentState: string,
debug: boolean, debug: boolean,
authConfig?: AuthConfig authConfig?: AuthConfig
) => { ): Promise<JobState> => {
const stateLink = job.links.find((l: any) => l.rel === 'state') const stateLink = job.links.find((l: any) => l.rel === 'state')!
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
if (needsRetry(currentState)) { if (needsRetry(currentState)) {
let tokens let tokens
if (authConfig) {
tokens = await getTokens(requestClient, authConfig) if (authConfig) tokens = await getTokens(requestClient, authConfig)
}
const { result: jobState } = await requestClient const { result: jobState } = await requestClient
.get<string>( .get<string>(
@@ -143,48 +196,38 @@ const getJobState = async (
throw new JobStatePollError(job.id, err) throw new JobStatePollError(job.id, err)
}) })
return jobState.trim() return jobState.trim() as JobState
} else { } else {
return currentState return currentState as JobState
} }
} }
const needsRetry = (state: string) => const needsRetry = (state: string) =>
state === 'running' || state === JobState.Running ||
state === '' || state === JobState.NoState ||
state === 'pending' || state === JobState.Pending ||
state === 'unavailable' state === JobState.Unavailable
const doPoll = async (
  requestClient: RequestClient,
  postedJob: Job,
  currentState: JobState,
  debug: boolean,
  pollCount: number,
  pollOptions: PollOptions,
  authConfig?: AuthConfig,
  streamLog?: boolean,
  logStream?: WriteStream
): Promise<{ state: JobState; pollCount: number }> => {
  const { maxPollCount, pollInterval } = pollOptions
  const logger = process.logger || console
  const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')!

  let maxErrorCount = 5
  let errorCount = 0
  let state = currentState
  let printedState = JobState.NoState
  let startLogLine = 0

  while (needsRetry(state) && pollCount <= maxPollCount) {
    state = await getJobState(
      requestClient,
@@ -194,21 +237,24 @@ const doPoll = async (
      authConfig
    ).catch((err) => {
      errorCount++

      if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
        throw err
      }

      logger.error(
        `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
        err
      )

      return JobState.Unavailable
    })

    pollCount++

    const jobHref = postedJob.links.find((l: Link) => l.rel === 'self')!.href

    if (streamLog) {
      const { result: job } = await requestClient.get<Job>(
        jobHref,
        authConfig?.access_token
@@ -238,12 +284,45 @@ const doPoll = async (
      printedState = state
    }

    if (state !== JobState.Unavailable && errorCount > 0) {
      errorCount = 0
    }

    if (state !== JobState.Completed) {
      await delay(pollInterval)
    }
  }

  return { state, pollCount }
}
const validatePollStrategies = (strategy: PollStrategy) => {
  const throwError = (message?: string, pollOptions?: PollOptions) => {
    throw new Error(
      `Poll strategies are not valid.${message ? ` ${message}` : ''}${
        pollOptions
          ? ` Invalid poll strategy: \n${JSON.stringify(pollOptions, null, 2)}`
          : ''
      }`
    )
  }

  strategy.forEach((pollOptions: PollOptions, i: number) => {
    const { maxPollCount, pollInterval } = pollOptions

    if (maxPollCount < 1) {
      throwError(`'maxPollCount' has to be greater than 0.`, pollOptions)
    } else if (i !== 0) {
      const previousPollOptions = strategy[i - 1]

      if (maxPollCount <= previousPollOptions.maxPollCount) {
        throwError(
          `'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy.`,
          pollOptions
        )
      }
    } else if (pollInterval < 1) {
      throwError(`'pollInterval' has to be greater than 0.`, pollOptions)
    }
  })
}
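
For instance (values invented, the direct call shown only for illustration since the function is module-private), the following strategy would be rejected by the checks above because the second stage does not increase maxPollCount:

// Throws: "'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy."
validatePollStrategies([
  { maxPollCount: 100, pollInterval: 300 },
  { maxPollCount: 100, pollInterval: 3000 } // not strictly greater than the previous stage
])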

View File

@@ -9,14 +9,13 @@ import * as formatDataModule from '../../../utils/formatDataForRequest'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import { PollOptions } from '../../../types'
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
import { Logger, LogLevel } from '@sasjs/utils/logger'

const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500
}

describe('executeScript', () => {
@@ -452,7 +451,9 @@ describe('executeScript', () => {
  it('should throw a ComputeJobExecutionError if the job has failed', async () => {
    jest
      .spyOn(pollJobStateModule, 'pollJobState')
      .mockImplementation(() =>
        Promise.resolve(pollJobStateModule.JobState.Failed)
      )

    const error: ComputeJobExecutionError = await executeScript(
      requestClient,
@@ -485,7 +486,9 @@ describe('executeScript', () => {
  it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
    jest
      .spyOn(pollJobStateModule, 'pollJobState')
      .mockImplementation(() =>
        Promise.resolve(pollJobStateModule.JobState.Error)
      )

    const error: ComputeJobExecutionError = await executeScript(
      requestClient,
@@ -654,7 +657,9 @@ const setupMocks = () => {
    .mockImplementation(() => Promise.resolve(mockAuthConfig))
  jest
    .spyOn(pollJobStateModule, 'pollJobState')
    .mockImplementation(() =>
      Promise.resolve(pollJobStateModule.JobState.Completed)
    )
  jest
    .spyOn(sessionManager, 'getVariable')
    .mockImplementation(() =>

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState'
@@ -6,17 +6,18 @@ import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import * as delayModule from '../../../utils/delay'
import { PollOptions, PollStrategy } from '../../../types'
import { WriteStream } from 'fs'

const baseUrl = 'http://localhost'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
requestClient['httpClient'].defaults.baseURL = baseUrl

const defaultStreamLog = false
const defaultPollStrategy: PollOptions = {
  maxPollCount: 100,
  pollInterval: 500
}

describe('pollJobState', () => {
@@ -26,13 +27,10 @@ describe('pollJobState', () => {
  })

  it('should get valid tokens if the authConfig has been provided', async () => {
    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollStrategy,
      streamLog: defaultStreamLog
    })

    expect(getTokensModule.getTokens).toHaveBeenCalledWith(
      requestClient,
@@ -46,7 +44,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollStrategy
    )

    expect(getTokensModule.getTokens).not.toHaveBeenCalled()
@@ -58,7 +56,7 @@ describe('pollJobState', () => {
      { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
      false,
      undefined,
      defaultPollStrategy
    ).catch((e: any) => e)

    expect((error as Error).message).toContain('Job state link was not found.')
@@ -72,7 +70,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      mockAuthConfig,
      defaultPollStrategy
    )

    expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
@@ -83,7 +81,7 @@ describe('pollJobState', () => {
    const { saveLog } = require('../saveLog')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollStrategy,
      streamLog: true
    })
@@ -96,7 +94,7 @@ describe('pollJobState', () => {
    const { saveLog } = require('../saveLog')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollStrategy,
      streamLog: true
    })
@@ -111,7 +109,7 @@ describe('pollJobState', () => {
    const { getFileStream } = require('../getFileStream')

    await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
      ...defaultPollStrategy,
      streamLog: true
    })
@@ -127,7 +125,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      mockAuthConfig,
      defaultPollStrategy
    )

    expect(saveLogModule.saveLog).not.toHaveBeenCalled()
@@ -136,15 +134,18 @@ describe('pollJobState', () => {
  it('should return the current status when the max poll count is reached', async () => {
    mockRunningPoll()

    const pollOptions: PollOptions = {
      ...defaultPollStrategy,
      maxPollCount: 1,
      pollStrategy: []
    }

    const state = await pollJobState(
      requestClient,
      mockJob,
      false,
      mockAuthConfig,
      pollOptions
    )

    expect(state).toEqual('running')
@@ -159,7 +160,7 @@ describe('pollJobState', () => {
      false,
      mockAuthConfig,
      {
        ...defaultPollStrategy,
        maxPollCount: 200,
        pollInterval: 10
      }
@@ -176,7 +177,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollStrategy
    )

    expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -192,7 +193,7 @@ describe('pollJobState', () => {
      mockJob,
      true,
      undefined,
      defaultPollStrategy
    )

    expect((process as any).logger.info).toHaveBeenCalledTimes(4)
@@ -222,7 +223,7 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollStrategy
    )

    expect(requestClient.get).toHaveBeenCalledTimes(2)
@@ -237,13 +238,119 @@ describe('pollJobState', () => {
      mockJob,
      false,
      undefined,
      defaultPollStrategy
    ).catch((e: any) => e)

    expect(error.message).toEqual(
      'Error while polling job state for job j0b: Status Error'
    )
  })
it('should change poll strategies', async () => {
mockSimplePoll(6)
const delays: number[] = []
jest.spyOn(delayModule, 'delay').mockImplementation((ms: number) => {
delays.push(ms)
return Promise.resolve()
})
const pollIntervals = [3, 4, 5, 6]
const pollStrategy = [
{ maxPollCount: 2, pollInterval: pollIntervals[1] },
{ maxPollCount: 3, pollInterval: pollIntervals[2] },
{ maxPollCount: 4, pollInterval: pollIntervals[3] }
]
const pollOptions: PollOptions = {
maxPollCount: 1,
pollInterval: pollIntervals[0],
pollStrategy: pollStrategy
}
await pollJobState(requestClient, mockJob, false, undefined, pollOptions)
expect(delays).toEqual([pollIntervals[0], ...pollIntervals])
})
it('should throw an error if not valid poll strategies provided', async () => {
// INFO: 'maxPollCount' has to be > 0
let invalidPollStrategy = {
maxPollCount: 0,
pollInterval: 3
}
let pollStrategy: PollStrategy = [invalidPollStrategy]
let expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: 'maxPollCount' has to be > than 'maxPollCount' of the previous strategy
const validPollStrategy = {
maxPollCount: 5,
pollInterval: 2
}
invalidPollStrategy = {
maxPollCount: validPollStrategy.maxPollCount,
pollInterval: 3
}
pollStrategy = [validPollStrategy, invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'maxPollCount' has to be greater than 'maxPollCount' in previous poll strategy. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
// INFO: invalid 'pollInterval'
invalidPollStrategy = {
maxPollCount: 1,
pollInterval: 0
}
pollStrategy = [invalidPollStrategy]
expectedError = new Error(
`Poll strategies are not valid. 'pollInterval' has to be greater than 0. Invalid poll strategy: \n${JSON.stringify(
invalidPollStrategy,
null,
2
)}`
)
await expect(
pollJobState(requestClient, mockJob, false, undefined, {
...defaultPollStrategy,
pollStrategy: pollStrategy
})
).rejects.toThrow(expectedError)
})
})

const setupMocks = () => {
@@ -273,11 +380,14 @@ const setupMocks = () => {
const mockSimplePoll = (runningCount = 2) => {
  let count = 0
  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result:
        count === 0
@@ -293,11 +403,14 @@ const mockSimplePoll = (runningCount = 2) => {
const mockRunningPoll = () => {
  let count = 0
  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result: count === 0 ? 'pending' : 'running',
      etag: '',
@@ -308,11 +421,14 @@ const mockRunningPoll = () => {
const mockLongPoll = () => {
  let count = 0
  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.resolve({
      result: count <= 102 ? 'running' : 'completed',
      etag: '',
@@ -323,14 +439,18 @@ const mockLongPoll = () => {
const mockPollWithSingleError = () => {
  let count = 0
  jest.spyOn(requestClient, 'get').mockImplementation((url) => {
    count++

    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    if (count === 1) {
      return Promise.reject('Status Error')
    }

    return Promise.resolve({
      result: count === 0 ? 'pending' : 'completed',
      etag: '',
@@ -344,6 +464,7 @@ const mockErroredPoll = () => {
    if (url.includes('job')) {
      return Promise.resolve({ result: mockJob, etag: '', status: 200 })
    }

    return Promise.reject('Status Error')
  })
}

View File

@@ -1,4 +1,4 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'

View File

@@ -5,7 +5,7 @@ import {
  fileExists,
  readFile,
  deleteFile
} from '@sasjs/utils/file'

describe('writeStream', () => {
  const filename = 'test.txt'

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'

/**
 * Exchanges the auth code for an access token for the given client.

View File

@@ -4,7 +4,6 @@ import { RequestClient } from '../request/RequestClient'
import { CertificateError } from '../types/errors'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'

/**
 * Exchange the auth code for access / refresh tokens for the given client / secret pair.
 * @param requestClient - the pre-configured HTTP request client.
@@ -31,10 +30,11 @@ export async function getAccessTokenForViya(
    Authorization: 'Basic ' + token,
    Accept: 'application/json'
  }

  const dataJson = new URLSearchParams({
    grant_type: 'authorization_code',
    code: authCode
  })

  const data = new URLSearchParams(dataJson)

  const authResponse = await requestClient

View File

@@ -1,7 +1,7 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefix } from './getTokenRequestErrorPrefix'
import { ServerType } from '@sasjs/utils/types'

/**
 * Exchanges the refresh token for an access token for the given client.

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import { generateToken, mockSasjsAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { getAccessTokenForSasjs } from '../getAccessTokenForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,4 +1,4 @@
import { AuthConfig } from '@sasjs/utils/types'
import * as refreshTokensModule from '../refreshTokensForViya'
import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens'

View File

@@ -1,4 +1,4 @@
import { ServerType } from '@sasjs/utils/types'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokensForSasjs } from '../refreshTokensForSasjs'

View File

@@ -1,4 +1,4 @@
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'

View File

@@ -1,5 +1,6 @@
import * as NodeFormData from 'form-data'
import { convertToCSV } from '../utils/convertToCsv'
import { isNode } from '../utils'

/**
 * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@ export const generateFileUploadForm = (
    )
  }

  // INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
  if (isNode()) {
    // INFO: environment is Node and formData is instance of NodeFormData
    ;(formData as NodeFormData).append(name, csv, {
      filename: `${name}.csv`,
      contentType: 'application/csv'
    })
  } else {
    // INFO: environment is Browser and formData is instance of FormData
    const file = new Blob([csv], {
      type: 'application/csv'
    })
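
A rough usage sketch of the environment-aware branch above (the table data and import paths are illustrative assumptions; getFormData is the helper added later in this diff):

import { getFormData } from '../utils'
import { generateFileUploadForm } from '../file/generateFileUploadForm'

// In Node this is a NodeFormData instance, in the browser a native FormData.
const form = getFormData()
const tables = { areas: [{ area: 'Adak' }, { area: 'Eagle' }] }

// Appends `areas.csv` to the form; the append signature differs per environment,
// which is why generateFileUploadForm branches on isNode().
generateFileUploadForm(form, tables)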

View File

@@ -1,4 +1,7 @@
import { generateFileUploadForm } from '../generateFileUploadForm'
import { convertToCSV } from '../../utils/convertToCsv'
import * as NodeFormData from 'form-data'
import * as isNodeModule from '../../utils/isNode'

describe('generateFileUploadForm', () => {
  beforeAll(() => {
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
    ;(global as any).Blob = BlobMock
  })

  describe('browser', () => {
    afterAll(() => {
      jest.restoreAllMocks()
    })

    it('should generate file upload form from data', () => {
      const formData = new FormData()
      const testTable = 'sometable'
      const testTableWithNullVars: { [key: string]: any } = {
        [testTable]: [
          { var1: 'string', var2: 232, nullvar: 'A' },
          { var1: 'string', var2: 232, nullvar: 'B' },
          { var1: 'string', var2: 232, nullvar: '_' },
          { var1: 'string', var2: 232, nullvar: 0 },
          { var1: 'string', var2: 232, nullvar: 'z' },
          { var1: 'string', var2: 232, nullvar: null }
        ],
        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
      }
      const tableName = Object.keys(testTableWithNullVars).filter(
        (key: string) => Array.isArray(testTableWithNullVars[key])
      )[0]

      jest.spyOn(formData, 'append').mockImplementation(() => {})
      jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)

      generateFileUploadForm(formData, testTableWithNullVars)

      expect(formData.append).toHaveBeenCalledOnce()
      expect(formData.append).toHaveBeenCalledWith(
        tableName,
        {},
        `${tableName}.csv`
      )
    })

    it('should throw an error if too large string was provided', () => {
      const formData = new FormData()
      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }

      expect(() => generateFileUploadForm(formData, data)).toThrow(
        new Error(
          'The max length of a string value in SASjs is 32765 characters.'
        )
      )
    })
  })

  describe('node', () => {
    it('should generate file upload form from data', () => {
      const formData = new NodeFormData()
      const testTable = 'sometable'
      const testTableWithNullVars: { [key: string]: any } = {
        [testTable]: [
          { var1: 'string', var2: 232, nullvar: 'A' },
          { var1: 'string', var2: 232, nullvar: 'B' },
          { var1: 'string', var2: 232, nullvar: '_' },
          { var1: 'string', var2: 232, nullvar: 0 },
          { var1: 'string', var2: 232, nullvar: 'z' },
          { var1: 'string', var2: 232, nullvar: null }
        ],
        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
      }
      const tableName = Object.keys(testTableWithNullVars).filter(
        (key: string) => Array.isArray(testTableWithNullVars[key])
      )[0]
      const csv = convertToCSV(testTableWithNullVars, tableName)

      jest.spyOn(formData, 'append').mockImplementation(() => {})

      generateFileUploadForm(formData, testTableWithNullVars)

      expect(formData.append).toHaveBeenCalledOnce()
      expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
        contentType: 'application/csv',
        filename: `${tableName}.csv`
      })
    })

    it('should throw an error if too large string was provided', () => {
      const formData = new NodeFormData()
      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }

      expect(() => generateFileUploadForm(formData, data)).toThrow(
        new Error(
          'The max length of a string value in SASjs is 32765 characters.'
        )
      )
    })
  })
})

View File

@@ -1,8 +1,7 @@
import {
  getValidJson,
  parseSasViyaDebugResponse,
  parseWeboutResponse
} from '../utils'
import { UploadFile } from '../types/UploadFile'
import {
@@ -93,15 +92,24 @@ export class FileUploader extends BaseJobExecutor {
            this.requestClient,
            config.serverUrl
          )
          break
        case ServerType.Sas9:
          jsonResponse =
            typeof res.result === 'string'
              ? parseWeboutResponse(res.result, uploadUrl)
              : res.result
          break
        case ServerType.Sasjs:
          jsonResponse =
            typeof res.result === 'string'
              ? getValidJson(res.result)
              : res.result
          break
      }
    } else {
      jsonResponse =
        typeof res.result === 'string'
          ? getValidJson(res.result)

View File

@@ -10,8 +10,8 @@ import {
  LoginRequiredError
} from '../types/errors'
import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { RequestClient } from '../request/RequestClient'
import { getFormData } from '../utils'
import {
  isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
     * Use the available form data object (FormData in Browser, NodeFormData in
     * Node)
     */
    let formData = getFormData()

    if (data) {
      // file upload approach
@@ -93,8 +92,10 @@ export class SasjsJobExecutor extends BaseJobExecutor {
      )
    }

    const { result } = res

    if (result && typeof result === 'string' && result.trim())
      res.result = getValidJson(result)

    this.requestClient!.appendRequest(res, sasJob, config.debug)

View File

@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
import {
  isRelativePath,
  parseSasViyaDebugResponse,
  appendExtraResponseAttributes,
  parseWeboutResponse,
  getFormData
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'

export interface WaitingRequstPromise {
  promise: Promise<any> | null
@@ -112,8 +113,7 @@
     * Use the available form data object (FormData in Browser, NodeFormData in
     * Node)
     */
    let formData = getFormData()

    if (data) {
      const stringifiedData = JSON.stringify(data)

View File

@@ -233,7 +233,8 @@ export default class SASjs {
      this.requestClient = new RequestClient(
        this.sasjsConfig.serverUrl,
        this.sasjsConfig.httpsAgentOptions,
        this.sasjsConfig.requestHistoryLimit,
        this.sasjsConfig.verbose
      )
    } else {
      this.requestClient.setConfig(

View File

@@ -11,7 +11,6 @@ import {
import { RequestClient } from '../../request/RequestClient'
import {
  isRelativePath,
  appendExtraResponseAttributes,
  convertToCSV
} from '../../utils'

View File

@@ -20,6 +20,7 @@ import {
  createAxiosInstance
} from '../utils'
import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError'
import { inspect } from 'util'

export interface HttpClient {
  get<T>(
@@ -59,6 +60,7 @@ export interface HttpClient {
export class RequestClient implements HttpClient {
  private requests: SASjsRequest[] = []
  private requestsLimit: number = 10
  private httpInterceptor?: number

  protected csrfToken: CsrfToken = { headerName: '', value: '' }
  protected fileUploadCsrfToken: CsrfToken | undefined
@@ -67,10 +69,14 @@ export class RequestClient implements HttpClient {
  constructor(
    protected baseUrl: string,
    httpsAgentOptions?: https.AgentOptions,
    requestsLimit?: number,
    verboseMode?: boolean
  ) {
    this.createHttpClient(baseUrl, httpsAgentOptions)
    if (requestsLimit) this.requestsLimit = requestsLimit
    if (verboseMode) this.enableVerboseMode()
  }

  public setConfig(baseUrl: string, httpsAgentOptions?: https.AgentOptions) {
@@ -180,6 +186,7 @@ export class RequestClient implements HttpClient {
      responseType: contentType === 'text/plain' ? 'text' : 'json',
      withCredentials: true
    }

    if (contentType === 'text/plain') {
      requestConfig.transformResponse = undefined
    }
@@ -389,6 +396,105 @@ export class RequestClient implements HttpClient {
    })
  }
/**
* Adds colors to the string.
* @param str - string to be prettified.
* @returns - prettified string
*/
private prettifyString = (str: any) => inspect(str, { colors: true })
/**
* Formats HTTP request/response body.
* @param body - HTTP request/response body.
* @returns - formatted string.
*/
private parseInterceptedBody = (body: any) => {
if (!body) return ''
let parsedBody
// Tries to parse body into JSON object.
if (typeof body === 'string') {
try {
parsedBody = JSON.parse(body)
} catch (error) {
parsedBody = body
}
} else {
parsedBody = body
}
const bodyLines = this.prettifyString(parsedBody).split('\n')
// Leaves first 50 lines
if (bodyLines.length > 51) {
bodyLines.splice(50)
bodyLines.push('...')
}
return bodyLines.join('\n')
}
private defaultInterceptionCallBack = (response: AxiosResponse) => {
const { status, config, request, data: resData } = response
const { data: reqData } = config
const { _header: reqHeaders, res } = request
const { rawHeaders } = res
// Converts an array of strings into a single string with the following format:
// <headerName>: <headerValue>
const resHeaders = rawHeaders.reduce(
(acc: string, value: string, i: number) => {
if (i % 2 === 0) {
acc += `${i === 0 ? '' : '\n'}${value}`
} else {
acc += `: ${value}`
}
return acc
},
''
)
const parsedResBody = this.parseInterceptedBody(resData)
// HTTP response summary.
process.logger?.info(`HTTP Request (first 50 lines):
${reqHeaders}${this.parseInterceptedBody(reqData)}
HTTP Response Code: ${this.prettifyString(status)}
HTTP Response (first 50 lines):
${resHeaders}${parsedResBody ? `\n\n${parsedResBody}` : ''}
`)
return response
}
/**
* Turns on verbose mode to log every HTTP response.
* @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
* @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
*/
public enableVerboseMode = (
successCallBack = this.defaultInterceptionCallBack,
errorCallBack = this.defaultInterceptionCallBack
) => {
this.httpInterceptor = this.httpClient.interceptors.response.use(
successCallBack,
errorCallBack
)
}
/**
* Turns off verbose mode so that HTTP responses are no longer logged.
*/
public disableVerboseMode = () => {
if (this.httpInterceptor) {
this.httpClient.interceptors.response.eject(this.httpInterceptor)
}
}
  protected getHeaders = (
    accessToken: string | undefined,
    contentType: string
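
Taken together, the constructor flag and the interceptor methods above could be used as in the following sketch (server URL, request limit and callback are illustrative, not part of the diff):

import { RequestClient } from './RequestClient' // path assumed
import { AxiosResponse } from 'axios'

// Verbose from the start: the fourth constructor argument wires up the default interceptor.
const client = new RequestClient('https://sas.server.com', undefined, 10, true)

// Or swap in a custom summary later.
client.disableVerboseMode()
client.enableVerboseMode((response: AxiosResponse) => {
  console.log(`HTTP ${response.status} for ${response.config.url}`) // custom one-line summary
  return response
})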

View File

@@ -1,8 +1,7 @@
import { RequestClient } from './RequestClient'
import { AxiosResponse } from 'axios'

export interface SasjsParsedResponse<T> {
  result: T
  log: string
  etag: string
@@ -45,13 +44,30 @@ export class SasjsRequestClient extends RequestClient {
      }
    } catch {
      if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
        const { data } = response
        const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)

        webout = splittedResponse.splice(0, 1)[0]
        if (webout !== undefined) parsedResponse = webout

        // log can contain nested logs
        const logs = splittedResponse.splice(0, splittedResponse.length - 1)

        // tests if string ends with SASJS_LOGS_SEPARATOR
        const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)

        // at this point splittedResponse can contain only one item
        const lastChunk = splittedResponse[0]

        if (lastChunk) {
          // if the last chunk doesn't end with SASJS_LOGS_SEPARATOR, then it is a printOutput
          // else the last chunk is part of the log and has to be joined
          if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
          else if (logs.length > 1) logs.push(lastChunk)
        }

        // join logs into single log with SASJS_LOGS_SEPARATOR
        log = logs.join(SASJS_LOGS_SEPARATOR)
      } else {
        parsedResponse = response.data
      }
@@ -59,7 +75,7 @@
    const returnResult: SasjsParsedResponse<T> = {
      result: parsedResponse as T,
      log: log || '',
      etag,
      status: response.status
    }
@@ -69,3 +85,6 @@
    return returnResult
  }
}

export const SASJS_LOGS_SEPARATOR =
  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
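
As a rough standalone illustration of the splitting rules above (the payload is invented): the first chunk is the webout result, separator-delimited chunks in the middle are joined back into a single log, and a trailing chunk that is not followed by a separator becomes printOutput.

const SEP = 'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
const data = `{"status":"success"}${SEP}NOTE: SAS log lines...${SEP}PROC PRINT output`

const chunks = data.split(SEP)
const webout = chunks.shift() // '{"status":"success"}'
const printOutput = data.endsWith(SEP) ? undefined : chunks.pop() // 'PROC PRINT output'
const log = chunks.join(SEP) // 'NOTE: SAS log lines...'
console.log({ webout, log, printOutput })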

View File

@@ -0,0 +1,172 @@
import {
SASJS_LOGS_SEPARATOR,
SasjsRequestClient,
SasjsParsedResponse
} from '../SasjsRequestClient'
import { AxiosResponse } from 'axios'
describe('SasjsRequestClient', () => {
const requestClient = new SasjsRequestClient('')
const etag = 'etag'
const status = 200
const webout = `hello`
const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00
PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html
NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds
`
const printOutput = 'printOutPut'
describe('parseResponse', () => {})
it('should parse response with 1 log', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with 1 log and printOutput', () => {
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${log}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
it('should parse response with nested logs and printOutput', () => {
const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`
const response: AxiosResponse<any> = {
data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
status,
statusText: 'ok',
headers: { etag },
config: {}
}
const expectedParsedResponse: SasjsParsedResponse<string> = {
result: `${webout}
`,
log: `
${logWithNestedLog}
`,
etag,
status,
printOutput: `
${printOutput}`
}
expect(requestClient['parseResponse'](response)).toEqual(
expectedParsedResponse
)
})
})
describe('SASJS_LOGS_SEPARATOR', () => {
it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
expect(SASJS_LOGS_SEPARATOR).toEqual(
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
)
})
})

View File

@@ -2,7 +2,7 @@ import * as pem from 'pem'
import * as http from 'http'
import * as https from 'https'
import { app, mockedAuthResponse } from './SAS_server_app'
import { ServerType } from '@sasjs/utils/types'
import SASjs from '../SASjs'
import * as axiosModules from '../utils/createAxiosInstance'
import {
@@ -13,6 +13,8 @@ import {
} from '../types/errors'
import { RequestClient } from '../request/RequestClient'
import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
import { AxiosResponse } from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'

const axiosActual = jest.requireActual('axios')
@@ -25,16 +27,6 @@ jest
const PORT = 8000
const SERVER_URL = `https://localhost:${PORT}/`

describe('RequestClient', () => {
  let server: http.Server
@@ -80,6 +72,187 @@ describe('RequestClient', () => {
    expect(rejectionErrorMessage).toEqual(expectedError.message)
  })
}) })
describe('defaultInterceptionCallBack', () => {
beforeAll(() => {
;(process as any).logger = new Logger(LogLevel.Off)
})
it('should log parsed response', () => {
jest.spyOn((process as any).logger, 'info')
const status = 200
const reqData = `{
name: 'test_job',
description: 'Powered by SASjs',
code: ['test_code'],
variables: {
SYS_JES_JOB_URI: '',
_program: '/Public/sasjs/jobs/jobs/test_job'
},
arguments: {
_contextName: 'SAS Job Execution compute context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
}`
const resData = {
id: 'id_string',
name: 'name_string',
uri: 'uri_string',
createdBy: 'createdBy_string',
code: 'TEST CODE',
links: [
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'state',
href: 'state_href_string',
uri: 'uri_string',
type: 'type_string'
},
{
method: 'method_string',
rel: 'self',
href: 'self_href_string',
uri: 'uri_string',
type: 'type_string'
}
],
results: { '_webout.json': '_webout.json_string' },
logStatistics: {
lineCount: 1,
modifiedTimeStamp: 'modifiedTimeStamp_string'
}
}
const reqHeaders = `POST https://sas.server.com/compute/sessions/session_id/jobs HTTP/1.1
Accept: application/json
Content-Type: application/json
User-Agent: axios/0.27.2
Content-Length: 334
host: sas.server.io
Connection: close
`
const resHeaders = ['content-type', 'application/json']
const mockedResponse: AxiosResponse = {
data: resData,
status,
statusText: '',
headers: {},
config: { data: reqData },
request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
}
const requestClient = new RequestClient('')
requestClient['defaultInterceptionCallBack'](mockedResponse)
const expectedLog = `HTTP Request (first 50 lines):
${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
HTTP Response Code: ${requestClient['prettifyString'](status)}
HTTP Response (first 50 lines):
${resHeaders[0]}: ${resHeaders[1]}${
requestClient['parseInterceptedBody'](resData)
? `\n\n${requestClient['parseInterceptedBody'](resData)}`
: ''
}
`
expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
})
})
describe('enableVerboseMode', () => {
it('should add defaultInterceptionCallBack functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
requestClient.enableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(
requestClient['defaultInterceptionCallBack'],
requestClient['defaultInterceptionCallBack']
)
})
it('should add callback functions to response interceptors', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'use'
)
const successCallback = (response: AxiosResponse) => {
console.log('success')
return response
}
const failureCallback = (response: AxiosResponse) => {
console.log('failure')
return response
}
requestClient.enableVerboseMode(successCallback, failureCallback)
expect(interceptorSpy).toHaveBeenCalledWith(
successCallback,
failureCallback
)
})
})
describe('disableVerboseMode', () => {
it('should eject interceptor', () => {
const requestClient = new RequestClient('')
const interceptorSpy = jest.spyOn(
requestClient['httpClient'].interceptors.response,
'eject'
)
const interceptorId = 100
requestClient['httpInterceptor'] = interceptorId
requestClient.disableVerboseMode()
expect(interceptorSpy).toHaveBeenCalledWith(interceptorId)
})
})
  describe('handleError', () => {
    const requestClient = new RequestClient('https://localhost:8009')
    const randomError = 'some error'
@@ -213,7 +386,7 @@ describe('RequestClient - Self Signed Server', () => {
      serverType: ServerType.SasViya
    })

    const expectedError = 'self-signed certificate'

    const rejectionErrorMessage = await adapterWithoutCertificate
      .getAccessToken('clientId', 'clientSecret', 'authCode')

View File

@@ -2,7 +2,7 @@ import { SessionManager } from '../SessionManager'
import { RequestClient } from '../request/RequestClient'
import * as dotenv from 'dotenv'
import axios from 'axios'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { Session, Context } from '../types'

jest.mock('axios')

View File

@@ -1,6 +1,9 @@
export interface PollOptions {
  maxPollCount: number
  pollInterval: number // milliseconds
  pollStrategy?: PollStrategy
  streamLog?: boolean
  logFolderPath?: string
}

export type PollStrategy = PollOptions[]
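
For example (values invented), the top-level options act as the first polling stage and pollStrategy holds the follow-up stages; validatePollStrategies in pollJobState.ts additionally requires each stage's maxPollCount to be strictly greater than the previous one:

const pollOptions: PollOptions = {
  maxPollCount: 100,
  pollInterval: 300, // milliseconds
  pollStrategy: [
    { maxPollCount: 200, pollInterval: 3000 },
    { maxPollCount: 500, pollInterval: 30000 },
    { maxPollCount: 3400, pollInterval: 60000 }
  ]
}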

View File

@@ -45,6 +45,10 @@ export class SASjsConfig {
   * Set to `true` to enable additional debugging.
   */
  debug: boolean = true
  /**
   * Set to `true` to enable verbose mode that will log a summary of every HTTP response.
   */
  verbose?: boolean = true
  /**
   * The name of the compute context to use when calling the Viya services directly.
   * Example value: 'SAS Job Execution compute context'
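
A hypothetical adapter configuration that switches this on (the serverUrl is illustrative, and the sketch assumes the usual constructor that accepts a partial SASjsConfig):

import SASjs from '@sasjs/adapter'
import { ServerType } from '@sasjs/utils/types'

const adapter = new SASjs({
  serverUrl: 'https://sas.server.com',
  serverType: ServerType.SasViya,
  verbose: true // RequestClient will log a summary of every HTTP response
})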

View File

@@ -7,7 +7,7 @@ describe('RootFolderNotFoundError', () => {
    const error = new RootFolderNotFoundError(
      '/myProject',
      'https://sas.4gl.io',
      token
    )
@@ -19,7 +19,7 @@ describe('RootFolderNotFoundError', () => {
  it('when access token is not provided, error message should not contain scopes', () => {
    const error = new RootFolderNotFoundError(
      '/myProject',
      'https://sas.4gl.io'
    )

    expect(error).toBeInstanceOf(RootFolderNotFoundError)
@@ -30,7 +30,7 @@ describe('RootFolderNotFoundError', () => {
  it('should include the folder path and SASDrive URL in the message', () => {
    const folderPath = '/myProject'
    const serverUrl = 'https://sas.4gl.io'
    const error = new RootFolderNotFoundError(folderPath, serverUrl)

    expect(error).toBeInstanceOf(RootFolderNotFoundError)

View File

@@ -1,2 +0,0 @@
export const SASJS_LOGS_SEPARATOR =
'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'

5
src/utils/getFormData.ts Normal file
View File

@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'
export const getFormData = () =>
isNode() ? new NodeFormData() : new FormData()
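
A small usage sketch (the appended field is illustrative): the helper hides the Node/browser difference so callers can treat the returned form uniformly.

import { getFormData } from './getFormData'

const form = getFormData() // NodeFormData in Node, native FormData in the browser
form.append('_program', '/Public/app/services/someService')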

View File

@@ -2,7 +2,6 @@ export * from './appendExtraResponseAttributes'
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './createAxiosInstance'
export * from './delay'
export * from './fetchLogByChunks'
@@ -20,3 +19,4 @@ export * from './parseWeboutResponse'
export * from './serialize'
export * from './splitChunks'
export * from './validateInput'
export * from './getFormData'

View File

@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'
describe('getFormData', () => {
it('should return NodeFormData if environment is Node', () => {
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
expect(getFormData() instanceof NodeFormData).toEqual(true)
})
it('should return FormData if environment is not Node', () => {
const formDataMock = () => {}
;(global as any).FormData = formDataMock
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
expect(getFormData() instanceof FormData).toEqual(true)
})
})