diff --git a/.all-contributorsrc b/.all-contributorsrc
new file mode 100644
index 0000000..c055968
--- /dev/null
+++ b/.all-contributorsrc
@@ -0,0 +1,103 @@
+{
+ "projectName": "adapter",
+ "projectOwner": "sasjs",
+ "repoType": "github",
+ "repoHost": "https://github.com",
+ "files": [
+ "README.md"
+ ],
+ "imageSize": 100,
+ "commit": false,
+ "commitConvention": "angular",
+ "contributors": [
+ {
+ "login": "krishna-acondy",
+ "name": "Krishna Acondy",
+ "avatar_url": "https://avatars.githubusercontent.com/u/2980428?v=4",
+ "profile": "https://krishna-acondy.io/",
+ "contributions": [
+ "code",
+ "infra",
+ "blog",
+ "content",
+ "ideas",
+ "video"
+ ]
+ },
+ {
+ "login": "YuryShkoda",
+ "name": "Yury Shkoda",
+ "avatar_url": "https://avatars.githubusercontent.com/u/25773492?v=4",
+ "profile": "https://www.erudicat.com/",
+ "contributions": [
+ "code",
+ "infra",
+ "ideas",
+ "test",
+ "video"
+ ]
+ },
+ {
+ "login": "medjedovicm",
+ "name": "Mihajlo Medjedovic",
+ "avatar_url": "https://avatars.githubusercontent.com/u/18329105?v=4",
+ "profile": "https://github.com/medjedovicm",
+ "contributions": [
+ "code",
+ "infra",
+ "test",
+ "review"
+ ]
+ },
+ {
+ "login": "allanbowe",
+ "name": "Allan Bowe",
+ "avatar_url": "https://avatars.githubusercontent.com/u/4420615?v=4",
+ "profile": "https://github.com/allanbowe",
+ "contributions": [
+ "code",
+ "review",
+ "test",
+ "mentoring",
+ "maintenance"
+ ]
+ },
+ {
+ "login": "saadjutt01",
+ "name": "Muhammad Saad ",
+ "avatar_url": "https://avatars.githubusercontent.com/u/8914650?v=4",
+ "profile": "https://github.com/saadjutt01",
+ "contributions": [
+ "code",
+ "review",
+ "test",
+ "mentoring",
+ "infra"
+ ]
+ },
+ {
+ "login": "sabhas",
+ "name": "Sabir Hassan",
+ "avatar_url": "https://avatars.githubusercontent.com/u/82647447?v=4",
+ "profile": "https://github.com/sabhas",
+ "contributions": [
+ "code",
+ "review",
+ "test",
+ "ideas"
+ ]
+ },
+ {
+ "login": "VladislavParhomchik",
+ "name": "VladislavParhomchik",
+ "avatar_url": "https://avatars.githubusercontent.com/u/83717836?v=4",
+ "profile": "https://github.com/VladislavParhomchik",
+ "contributions": [
+ "test",
+ "review"
+ ]
+ }
+ ],
+ "contributorsPerLine": 7,
+ "skipCi": true
+}
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 188d999..2aee220 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
- node-version: [12.x]
+ node-version: [15.x]
steps:
- uses: actions/checkout@v2
@@ -27,6 +27,10 @@ jobs:
run: npm run lint
- name: Run unit tests
run: npm test
+ - name: Generate coverage report
+ uses: artiomtr/jest-coverage-report-action@v2.0-rc.2
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Package
run: npm run package:lib
env:
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
index 744ed15..03f5c8a 100644
--- a/PULL_REQUEST_TEMPLATE.md
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -12,9 +12,9 @@ What code changes have been made to achieve the intent.
## Checks
-No PR (that involves a non-trivial code change) should be merged, unless all four of the items below are confirmed! If an urgent fix is needed - use a tar file.
+No PR (that involves a non-trivial code change) should be merged unless all items below are confirmed! If an urgent fix is needed, use a tar file.
+
-- [ ] Code is formatted correctly (`npm run lint:fix`).
-- [ ] All unit tests are passing (`npm test`).
- [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
+- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya
diff --git a/README.md b/README.md
index dfbb8d6..a7f7905 100644
--- a/README.md
+++ b/README.md
@@ -172,7 +172,7 @@ Configuration on the client side involves passing an object on startup, which ca
* `serverType` - either `SAS9` or `SASVIYA`.
* `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, e.g. if serving directly from the SAS Web Server, or in streaming mode.
* `debug` - if `true` then SAS logs and extra debug information are returned.
-* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
+* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
* `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
The adapter supports a number of approaches for interfacing with Viya (`serverType` is `SASVIYA`). For maximum performance, be sure to [configure your compute context](https://sasjs.io/guide-viya/#shared-account-and-server-re-use) with `reuseServerProcesses` as `true` and a system account in `runServerAs`. This functionality is available since Viya 3.5. This configuration is supported when [creating contexts using the CLI](https://sasjs.io/sasjs-cli-context/#sasjs-context-create).
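
For reference, a minimal sketch of the startup configuration described above. All values below are placeholders (server URL, `appLoc` and context name are not part of this change); `ServerType` comes from `@sasjs/utils/types`, as used elsewhere in this PR:

```typescript
import SASjs from '@sasjs/adapter'
import { ServerType } from '@sasjs/utils/types'

// Placeholder values for illustration only
const sasjs = new SASjs({
  appLoc: '/Public/app/myapp',
  serverType: ServerType.SasViya,
  serverUrl: 'https://my-viya-server.example.com',
  useComputeApi: true, // true = Compute API, false = JES API, undefined/null = Web approach
  contextName: 'SAS Job Execution compute context',
  debug: true // return SAS logs and extra debug information
})
```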
@@ -234,3 +234,32 @@ If you are a SAS 9 or SAS Viya customer you can also request a copy of [Data Con
If you find this library useful, help us grow our star graph!

+
+## Contributors ✨
+
+[](#contributors-)
+
+
+Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
+
+
+This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
diff --git a/package-lock.json b/package-lock.json
index d7f1fb2..7a0628e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1109,10 +1109,11 @@
}
},
"@sasjs/utils": {
- "version": "2.23.2",
- "resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.23.2.tgz",
- "integrity": "sha512-PGHrqLgi/QajseksqD/2CSL+b45tLLQQUZrBW/PpHzvx2qcwXxfrWvnSo6v3UwUigxgyu+xkPK5AIlEJ81Tndw==",
+ "version": "2.27.1",
+ "resolved": "https://registry.npmjs.org/@sasjs/utils/-/utils-2.27.1.tgz",
+ "integrity": "sha512-CYTQwEj89cc7H3tGiQQcyDkZYaWRc1HZJpOF8o2RHYS37fIAOy0SyyJdq6mcQ74Nb1u5AmFXPFIvnRCMEcTYeQ==",
"requires": {
+ "@types/fs-extra": "^9.0.11",
"@types/prompts": "^2.0.13",
"chalk": "^4.1.1",
"cli-table": "^0.3.6",
@@ -1120,6 +1121,7 @@
"fs-extra": "^10.0.0",
"jwt-decode": "^3.1.2",
"prompts": "^2.4.1",
+ "rimraf": "^3.0.2",
"valid-url": "^1.0.9"
}
},
@@ -1253,6 +1255,15 @@
"integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==",
"dev": true
},
+ "@types/axios": {
+ "version": "0.14.0",
+ "resolved": "https://registry.npmjs.org/@types/axios/-/axios-0.14.0.tgz",
+ "integrity": "sha1-7CMA++fX3d1+udOr+HmZlkyvzkY=",
+ "dev": true,
+ "requires": {
+ "axios": "*"
+ }
+ },
"@types/babel__core": {
"version": "7.1.14",
"resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.14.tgz",
@@ -1315,11 +1326,28 @@
}
},
"@types/estree": {
- "version": "0.0.48",
- "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.48.tgz",
- "integrity": "sha512-LfZwXoGUDo0C3me81HXgkBg5CTQYb6xzEl+fNmbO4JdRiSKQ8A0GD1OBBvKAIsbCUgoyAty7m99GqqMQe784ew==",
+ "version": "0.0.50",
+ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz",
+ "integrity": "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==",
"dev": true
},
+ "@types/form-data": {
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/@types/form-data/-/form-data-2.5.0.tgz",
+ "integrity": "sha512-23/wYiuckYYtFpL+4RPWiWmRQH2BjFuqCUi2+N3amB1a1Drv+i/byTrGvlLwRVLFNAZbwpbQ7JvTK+VCAPMbcg==",
+ "dev": true,
+ "requires": {
+ "form-data": "*"
+ }
+ },
+ "@types/fs-extra": {
+ "version": "9.0.12",
+ "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.12.tgz",
+ "integrity": "sha512-I+bsBr67CurCGnSenZZ7v94gd3tc3+Aj2taxMT4yu4ABLuOgOjeFxX3dokG24ztSRg5tnT00sL8BszO7gSMoIw==",
+ "requires": {
+ "@types/node": "*"
+ }
+ },
"@types/graceful-fs": {
"version": "4.1.5",
"resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.5.tgz",
@@ -1354,9 +1382,9 @@
}
},
"@types/jest": {
- "version": "26.0.23",
- "resolved": "https://registry.npmjs.org/@types/jest/-/jest-26.0.23.tgz",
- "integrity": "sha512-ZHLmWMJ9jJ9PTiT58juykZpL7KjwJywFN3Rr2pTSkyQfydf/rk22yS7W8p5DaVUMQ2BQC7oYiU3FjbTM/mYrOA==",
+ "version": "26.0.24",
+ "resolved": "https://registry.npmjs.org/@types/jest/-/jest-26.0.24.tgz",
+ "integrity": "sha512-E/X5Vib8BWqZNRlDxj9vYXhsDwPYbPINqKF9BsnSoon4RQ0D9moEuLD8txgyypFLH7J4+Lho9Nr/c8H0Fi+17w==",
"dev": true,
"requires": {
"jest-diff": "^26.0.0",
@@ -1437,9 +1465,9 @@
"dev": true
},
"@types/tough-cookie": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.0.tgz",
- "integrity": "sha512-I99sngh224D0M7XgW1s120zxCt3VYQ3IQsuw3P3jbq5GG4yc79+ZjyKznyOGIQrflfylLgcfekeZW/vk0yng6A==",
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
+ "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg==",
"dev": true
},
"@types/yargs": {
@@ -1468,148 +1496,148 @@
}
},
"@webassemblyjs/ast": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.0.tgz",
- "integrity": "sha512-kX2W49LWsbthrmIRMbQZuQDhGtjyqXfEmmHyEi4XWnSZtPmxY0+3anPIzsnRb45VH/J55zlOfWvZuY47aJZTJg==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz",
+ "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==",
"dev": true,
"requires": {
- "@webassemblyjs/helper-numbers": "1.11.0",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.0"
+ "@webassemblyjs/helper-numbers": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1"
}
},
"@webassemblyjs/floating-point-hex-parser": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.0.tgz",
- "integrity": "sha512-Q/aVYs/VnPDVYvsCBL/gSgwmfjeCb4LW8+TMrO3cSzJImgv8lxxEPM2JA5jMrivE7LSz3V+PFqtMbls3m1exDA==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz",
+ "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==",
"dev": true
},
"@webassemblyjs/helper-api-error": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.0.tgz",
- "integrity": "sha512-baT/va95eXiXb2QflSx95QGT5ClzWpGaa8L7JnJbgzoYeaA27FCvuBXU758l+KXWRndEmUXjP0Q5fibhavIn8w==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz",
+ "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==",
"dev": true
},
"@webassemblyjs/helper-buffer": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.0.tgz",
- "integrity": "sha512-u9HPBEl4DS+vA8qLQdEQ6N/eJQ7gT7aNvMIo8AAWvAl/xMrcOSiI2M0MAnMCy3jIFke7bEee/JwdX1nUpCtdyA==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz",
+ "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==",
"dev": true
},
"@webassemblyjs/helper-numbers": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.0.tgz",
- "integrity": "sha512-DhRQKelIj01s5IgdsOJMKLppI+4zpmcMQ3XboFPLwCpSNH6Hqo1ritgHgD0nqHeSYqofA6aBN/NmXuGjM1jEfQ==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz",
+ "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==",
"dev": true,
"requires": {
- "@webassemblyjs/floating-point-hex-parser": "1.11.0",
- "@webassemblyjs/helper-api-error": "1.11.0",
+ "@webassemblyjs/floating-point-hex-parser": "1.11.1",
+ "@webassemblyjs/helper-api-error": "1.11.1",
"@xtuc/long": "4.2.2"
}
},
"@webassemblyjs/helper-wasm-bytecode": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.0.tgz",
- "integrity": "sha512-MbmhvxXExm542tWREgSFnOVo07fDpsBJg3sIl6fSp9xuu75eGz5lz31q7wTLffwL3Za7XNRCMZy210+tnsUSEA==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz",
+ "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==",
"dev": true
},
"@webassemblyjs/helper-wasm-section": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.0.tgz",
- "integrity": "sha512-3Eb88hcbfY/FCukrg6i3EH8H2UsD7x8Vy47iVJrP967A9JGqgBVL9aH71SETPx1JrGsOUVLo0c7vMCN22ytJew==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz",
+ "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/helper-buffer": "1.11.0",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.0",
- "@webassemblyjs/wasm-gen": "1.11.0"
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1"
}
},
"@webassemblyjs/ieee754": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.0.tgz",
- "integrity": "sha512-KXzOqpcYQwAfeQ6WbF6HXo+0udBNmw0iXDmEK5sFlmQdmND+tr773Ti8/5T/M6Tl/413ArSJErATd8In3B+WBA==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz",
+ "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==",
"dev": true,
"requires": {
"@xtuc/ieee754": "^1.2.0"
}
},
"@webassemblyjs/leb128": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.0.tgz",
- "integrity": "sha512-aqbsHa1mSQAbeeNcl38un6qVY++hh8OpCOzxhixSYgbRfNWcxJNJQwe2rezK9XEcssJbbWIkblaJRwGMS9zp+g==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz",
+ "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==",
"dev": true,
"requires": {
"@xtuc/long": "4.2.2"
}
},
"@webassemblyjs/utf8": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.0.tgz",
- "integrity": "sha512-A/lclGxH6SpSLSyFowMzO/+aDEPU4hvEiooCMXQPcQFPPJaYcPQNKGOCLUySJsYJ4trbpr+Fs08n4jelkVTGVw==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz",
+ "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==",
"dev": true
},
"@webassemblyjs/wasm-edit": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.0.tgz",
- "integrity": "sha512-JHQ0damXy0G6J9ucyKVXO2j08JVJ2ntkdJlq1UTiUrIgfGMmA7Ik5VdC/L8hBK46kVJgujkBIoMtT8yVr+yVOQ==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz",
+ "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/helper-buffer": "1.11.0",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.0",
- "@webassemblyjs/helper-wasm-section": "1.11.0",
- "@webassemblyjs/wasm-gen": "1.11.0",
- "@webassemblyjs/wasm-opt": "1.11.0",
- "@webassemblyjs/wasm-parser": "1.11.0",
- "@webassemblyjs/wast-printer": "1.11.0"
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/helper-wasm-section": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1",
+ "@webassemblyjs/wasm-opt": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1",
+ "@webassemblyjs/wast-printer": "1.11.1"
}
},
"@webassemblyjs/wasm-gen": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.0.tgz",
- "integrity": "sha512-BEUv1aj0WptCZ9kIS30th5ILASUnAPEvE3tVMTrItnZRT9tXCLW2LEXT8ezLw59rqPP9klh9LPmpU+WmRQmCPQ==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz",
+ "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.0",
- "@webassemblyjs/ieee754": "1.11.0",
- "@webassemblyjs/leb128": "1.11.0",
- "@webassemblyjs/utf8": "1.11.0"
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/ieee754": "1.11.1",
+ "@webassemblyjs/leb128": "1.11.1",
+ "@webassemblyjs/utf8": "1.11.1"
}
},
"@webassemblyjs/wasm-opt": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.0.tgz",
- "integrity": "sha512-tHUSP5F4ywyh3hZ0+fDQuWxKx3mJiPeFufg+9gwTpYp324mPCQgnuVKwzLTZVqj0duRDovnPaZqDwoyhIO8kYg==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz",
+ "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/helper-buffer": "1.11.0",
- "@webassemblyjs/wasm-gen": "1.11.0",
- "@webassemblyjs/wasm-parser": "1.11.0"
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-buffer": "1.11.1",
+ "@webassemblyjs/wasm-gen": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1"
}
},
"@webassemblyjs/wasm-parser": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.0.tgz",
- "integrity": "sha512-6L285Sgu9gphrcpDXINvm0M9BskznnzJTE7gYkjDbxET28shDqp27wpruyx3C2S/dvEwiigBwLA1cz7lNUi0kw==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz",
+ "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/helper-api-error": "1.11.0",
- "@webassemblyjs/helper-wasm-bytecode": "1.11.0",
- "@webassemblyjs/ieee754": "1.11.0",
- "@webassemblyjs/leb128": "1.11.0",
- "@webassemblyjs/utf8": "1.11.0"
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/helper-api-error": "1.11.1",
+ "@webassemblyjs/helper-wasm-bytecode": "1.11.1",
+ "@webassemblyjs/ieee754": "1.11.1",
+ "@webassemblyjs/leb128": "1.11.1",
+ "@webassemblyjs/utf8": "1.11.1"
}
},
"@webassemblyjs/wast-printer": {
- "version": "1.11.0",
- "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.0.tgz",
- "integrity": "sha512-Fg5OX46pRdTgB7rKIUojkh9vXaVN6sGYCnEiJN1GYkb0RPwShZXp6KTDqmoMdQPKhcroOXh3fEzmkWmCYaKYhQ==",
+ "version": "1.11.1",
+ "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz",
+ "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==",
"dev": true,
"requires": {
- "@webassemblyjs/ast": "1.11.0",
+ "@webassemblyjs/ast": "1.11.1",
"@xtuc/long": "4.2.2"
}
},
@@ -1663,9 +1691,9 @@
"dev": true
},
"acorn": {
- "version": "8.3.0",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.3.0.tgz",
- "integrity": "sha512-tqPKHZ5CaBJw0Xmy0ZZvLs1qTV+BNFSyvn77ASXkpBNfIRk8ev26fKrD9iLGwGA9zedPao52GSHzq8lyZG0NUw==",
+ "version": "8.4.1",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.4.1.tgz",
+ "integrity": "sha512-asabaBSkEKosYKMITunzX177CXxQ4Q8BSSzMTKD+FefUhipQC70gfW5SiUDhYQ3vk8G+81HqQk7Fv9OXwwn9KA==",
"dev": true
},
"acorn-globals": {
@@ -2076,8 +2104,7 @@
"balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "dev": true
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"base": {
"version": "0.11.2",
@@ -2183,7 +2210,6 @@
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
- "dev": true,
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
@@ -2672,8 +2698,7 @@
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
- "dev": true
+ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"consola": {
"version": "2.15.3",
@@ -3534,9 +3559,9 @@
}
},
"es-module-lexer": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.6.0.tgz",
- "integrity": "sha512-f8kcHX1ArhllUtb/wVSyvygoKCznIjnxhLxy7TCvIiMdT7fL4ZDTIKaadMe6eLvOXg6Wk02UeoFgUoZ2EKZZUA==",
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.7.1.tgz",
+ "integrity": "sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw==",
"dev": true
},
"es-to-primitive": {
@@ -4116,8 +4141,7 @@
"fs.realpath": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
- "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=",
- "dev": true
+ "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"fsevents": {
"version": "2.3.2",
@@ -4254,7 +4278,6 @@
"version": "7.1.7",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz",
"integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==",
- "dev": true,
"requires": {
"fs.realpath": "^1.0.0",
"inflight": "^1.0.4",
@@ -4624,7 +4647,6 @@
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
"integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=",
- "dev": true,
"requires": {
"once": "^1.3.0",
"wrappy": "1"
@@ -4633,8 +4655,7 @@
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
- "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
- "dev": true
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"ini": {
"version": "1.3.8",
@@ -7292,7 +7313,6 @@
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
- "dev": true,
"requires": {
"brace-expansion": "^1.1.7"
}
@@ -9697,7 +9717,6 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
- "dev": true,
"requires": {
"wrappy": "1"
}
@@ -9907,8 +9926,7 @@
"path-is-absolute": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
- "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=",
- "dev": true
+ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"path-key": {
"version": "3.1.1",
@@ -10157,7 +10175,8 @@
"querystring": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz",
- "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA="
+ "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=",
+ "dev": true
},
"querystring-es3": {
"version": "0.2.1",
@@ -10399,7 +10418,6 @@
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
"integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
- "dev": true,
"requires": {
"glob": "^7.1.3"
}
@@ -11673,14 +11691,13 @@
}
},
"typedoc": {
- "version": "0.21.2",
- "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.21.2.tgz",
- "integrity": "sha512-SR1ByJB3USg+jxoxwzMRP07g/0f/cQUE5t7gOh1iTUyjTPyJohu9YSKRlK+MSXXqlhIq+m0jkEHEG5HoY7/Adg==",
+ "version": "0.21.4",
+ "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.21.4.tgz",
+ "integrity": "sha512-slZQhvD9U0d9KacktYAyuNMMOXJRFNHy+Gd8xY2Qrqq3eTTTv3frv3N4au/cFnab9t3T5WA0Orb6QUjMc+1bDA==",
"dev": true,
"requires": {
"glob": "^7.1.7",
"handlebars": "^4.7.7",
- "lodash": "^4.17.21",
"lunr": "^2.3.9",
"marked": "^2.1.1",
"minimatch": "^3.0.0",
@@ -11777,9 +11794,9 @@
}
},
"typescript": {
- "version": "4.3.4",
- "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.4.tgz",
- "integrity": "sha512-uauPG7XZn9F/mo+7MrsRjyvbxFpzemRjKEZXS4AK83oP2KKOJPvb+9cO/gmnv8arWZvhnjVOXz7B49m1l0e9Ew==",
+ "version": "4.3.5",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.3.5.tgz",
+ "integrity": "sha512-DqQgihaQ9cUrskJo9kIyW/+g0Vxsk8cDtZ52a3NGh0YNTfpUSArXSohyUGnvbPazEPLu398C0UxmKSOrPumUzA==",
"dev": true
},
"uglify-js": {
@@ -11898,6 +11915,7 @@
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz",
"integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=",
+ "dev": true,
"requires": {
"punycode": "1.3.2",
"querystring": "0.2.0"
@@ -11906,7 +11924,8 @@
"punycode": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
- "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0="
+ "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=",
+ "dev": true
}
}
},
@@ -12058,21 +12077,21 @@
"dev": true
},
"webpack": {
- "version": "5.41.1",
- "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.41.1.tgz",
- "integrity": "sha512-AJZIIsqJ/MVTmegEq9Tlw5mk5EHdGiJbDdz9qP15vmUH+oxI1FdWcL0E9EO8K/zKaRPWqEs7G/OPxq1P61u5Ug==",
+ "version": "5.44.0",
+ "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.44.0.tgz",
+ "integrity": "sha512-I1S1w4QLoKmH19pX6YhYN0NiSXaWY8Ou00oA+aMcr9IUGeF5azns+IKBkfoAAG9Bu5zOIzZt/mN35OffBya8AQ==",
"dev": true,
"requires": {
"@types/eslint-scope": "^3.7.0",
- "@types/estree": "^0.0.48",
- "@webassemblyjs/ast": "1.11.0",
- "@webassemblyjs/wasm-edit": "1.11.0",
- "@webassemblyjs/wasm-parser": "1.11.0",
- "acorn": "^8.2.1",
+ "@types/estree": "^0.0.50",
+ "@webassemblyjs/ast": "1.11.1",
+ "@webassemblyjs/wasm-edit": "1.11.1",
+ "@webassemblyjs/wasm-parser": "1.11.1",
+ "acorn": "^8.4.1",
"browserslist": "^4.14.5",
"chrome-trace-event": "^1.0.2",
"enhanced-resolve": "^5.8.0",
- "es-module-lexer": "^0.6.0",
+ "es-module-lexer": "^0.7.1",
"eslint-scope": "5.1.1",
"events": "^3.2.0",
"glob-to-regexp": "^0.4.1",
@@ -12249,8 +12268,7 @@
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
- "dev": true
+ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
},
"write-file-atomic": {
"version": "3.0.3",
diff --git a/package.json b/package.json
index 9f85d08..30514e4 100644
--- a/package.json
+++ b/package.json
@@ -41,16 +41,17 @@
"license": "ISC",
"devDependencies": {
"@cypress/webpack-preprocessor": "^5.9.1",
- "@types/jest": "^26.0.23",
+ "@types/axios": "^0.14.0",
+ "@types/form-data": "^2.5.0",
+ "@types/jest": "^26.0.24",
"@types/mime": "^2.0.3",
- "@types/tough-cookie": "^4.0.0",
+ "@types/tough-cookie": "^4.0.1",
"copyfiles": "^2.4.1",
"cp": "^0.2.0",
"cypress": "^7.7.0",
"dotenv": "^10.0.0",
"jest": "^27.0.6",
"jest-extended": "^0.11.5",
- "mime": "^2.5.2",
"node-polyfill-webpack-plugin": "^1.1.4",
"path": "^0.12.7",
"process": "^0.11.10",
@@ -61,21 +62,23 @@
"ts-loader": "^9.2.2",
"tslint": "^6.1.3",
"tslint-config-prettier": "^1.18.0",
- "typedoc": "^0.21.2",
+ "typedoc": "^0.21.4",
"typedoc-neo-theme": "^1.1.1",
"typedoc-plugin-external-module-name": "^4.0.6",
- "typescript": "^4.3.4",
- "webpack": "^5.41.1",
+ "typescript": "^4.3.5",
+ "webpack": "^5.44.0",
"webpack-cli": "^4.7.2"
},
"main": "index.js",
"dependencies": {
- "@sasjs/utils": "^2.23.2",
+ "@sasjs/utils": "^2.27.1",
"axios": "^0.21.1",
"axios-cookiejar-support": "^1.0.1",
"form-data": "^4.0.0",
"https": "^1.0.0",
- "tough-cookie": "^4.0.0",
- "url": "^0.11.0"
+ "tough-cookie": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=15"
}
}
diff --git a/src/FileUploader.ts b/src/FileUploader.ts
index 148f534..7001bcb 100644
--- a/src/FileUploader.ts
+++ b/src/FileUploader.ts
@@ -1,4 +1,4 @@
-import { isUrl } from './utils'
+import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
import { UploadFile } from './types/UploadFile'
import { ErrorResponse, LoginRequiredError } from './types/errors'
import { RequestClient } from './request/RequestClient'
@@ -63,13 +63,28 @@ export class FileUploader {
return this.requestClient
.post(uploadUrl, formData, undefined, 'application/json', headers)
- .then((res) => {
- let result
+ .then(async (res) => {
+ // for web approach on Viya
+ if (
+ this.sasjsConfig.debug &&
+ (this.sasjsConfig.useComputeApi === null ||
+ this.sasjsConfig.useComputeApi === undefined) &&
+ this.sasjsConfig.serverType === ServerType.SasViya
+ ) {
+ const jsonResponse = await parseSasViyaDebugResponse(
+ res.result as string,
+ this.requestClient,
+ this.sasjsConfig.serverUrl
+ )
+ return typeof jsonResponse === 'string'
+ ? getValidJson(jsonResponse)
+ : jsonResponse
+ }
- result =
- typeof res.result === 'string' ? JSON.parse(res.result) : res.result
+ return typeof res.result === 'string'
+ ? getValidJson(res.result)
+ : res.result
- return result
//TODO: append to SASjs requests
})
.catch((err: Error) => {
diff --git a/src/SASViyaApiClient.ts b/src/SASViyaApiClient.ts
index bfcdb81..e68082c 100644
--- a/src/SASViyaApiClient.ts
+++ b/src/SASViyaApiClient.ts
@@ -1,10 +1,4 @@
-import {
- convertToCSV,
- isRelativePath,
- isUri,
- isUrl,
- fetchLogByChunks
-} from './utils'
+import { isRelativePath, isUri, isUrl } from './utils'
import * as NodeFormData from 'form-data'
import {
Job,
@@ -17,25 +11,19 @@ import {
JobDefinition,
PollOptions
} from './types'
-import {
- ComputeJobExecutionError,
- JobExecutionError,
- NotFoundError
-} from './types/errors'
-import { formatDataForRequest } from './utils/formatDataForRequest'
+import { JobExecutionError } from './types/errors'
import { SessionManager } from './SessionManager'
import { ContextManager } from './ContextManager'
-import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
-import {
- isAccessTokenExpiring,
- isRefreshTokenExpiring
-} from '@sasjs/utils/auth'
-import { Logger, LogLevel } from '@sasjs/utils/logger'
import { SasAuthResponse, MacroVar, AuthConfig } from '@sasjs/utils/types'
import { isAuthorizeFormRequired } from './auth/isAuthorizeFormRequired'
import { RequestClient } from './request/RequestClient'
import { prefixMessage } from '@sasjs/utils/error'
-import * as mime from 'mime'
+import { pollJobState } from './api/viya/pollJobState'
+import { getTokens } from './auth/getTokens'
+import { uploadTables } from './api/viya/uploadTables'
+import { executeScript } from './api/viya/executeScript'
+import { getAccessToken } from './auth/getAccessToken'
+import { refreshTokens } from './auth/refreshTokens'
/**
* A client for interfacing with the SAS Viya REST API.
@@ -171,13 +159,6 @@ export class SASViyaApiClient {
throw new Error(`Execution context ${contextName} not found.`)
}
- const createSessionRequest = {
- method: 'POST',
- headers: {
- Authorization: `Bearer ${accessToken}`,
- 'Content-Type': 'application/json'
- }
- }
const { result: createdSession } = await this.requestClient.post(
`/compute/contexts/${executionContext.id}/sessions`,
{},
@@ -292,249 +273,22 @@ export class SASViyaApiClient {
printPid = false,
variables?: MacroVar
): Promise<any> {
- let access_token = (authConfig || {}).access_token
- if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
- }
-
- const logger = process.logger || console
-
- try {
- let executionSessionId: string
-
- const session = await this.sessionManager
- .getSession(access_token)
- .catch((err) => {
- throw prefixMessage(err, 'Error while getting session. ')
- })
-
- executionSessionId = session!.id
-
- if (printPid) {
- const { result: jobIdVariable } = await this.sessionManager
- .getVariable(executionSessionId, 'SYSJOBID', access_token)
- .catch((err) => {
- throw prefixMessage(err, 'Error while getting session variable. ')
- })
-
- if (jobIdVariable && jobIdVariable.value) {
- const relativeJobPath = this.rootFolderName
- ? jobPath.split(this.rootFolderName).join('').replace(/^\//, '')
- : jobPath
-
- const logger = new Logger(debug ? LogLevel.Debug : LogLevel.Info)
-
- logger.info(
- `Triggered '${relativeJobPath}' with PID ${
- jobIdVariable.value
- } at ${timestampToYYYYMMDDHHMMSS()}`
- )
- }
- }
-
- const jobArguments: { [key: string]: any } = {
- _contextName: contextName,
- _OMITJSONLISTING: true,
- _OMITJSONLOG: true,
- _OMITSESSIONRESULTS: true,
- _OMITTEXTLISTING: true,
- _OMITTEXTLOG: true
- }
-
- if (debug) {
- jobArguments['_OMITTEXTLOG'] = false
- jobArguments['_OMITSESSIONRESULTS'] = false
- }
-
- let fileName
-
- if (isRelativePath(jobPath)) {
- fileName = `exec-${
- jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
- }`
- } else {
- const jobPathParts = jobPath.split('/')
- fileName = jobPathParts.pop()
- }
-
- let jobVariables: any = {
- SYS_JES_JOB_URI: '',
- _program: isRelativePath(jobPath)
- ? this.rootFolderName + '/' + jobPath
- : jobPath
- }
-
- if (variables) jobVariables = { ...jobVariables, ...variables }
-
- if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
-
- let files: any[] = []
-
- if (data) {
- if (JSON.stringify(data).includes(';')) {
- files = await this.uploadTables(data, access_token).catch((err) => {
- throw prefixMessage(err, 'Error while uploading tables. ')
- })
-
- jobVariables['_webin_file_count'] = files.length
-
- files.forEach((fileInfo, index) => {
- jobVariables[
- `_webin_fileuri${index + 1}`
- ] = `/files/files/${fileInfo.file.id}`
- jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
- })
- } else {
- jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
- }
- }
-
- // Execute job in session
- const jobRequestBody = {
- name: fileName,
- description: 'Powered by SASjs',
- code: linesOfCode,
- variables: jobVariables,
- arguments: jobArguments
- }
-
- const { result: postedJob, etag } = await this.requestClient
- .post(
- `/compute/sessions/${executionSessionId}/jobs`,
- jobRequestBody,
- access_token
- )
- .catch((err) => {
- throw prefixMessage(err, 'Error while posting job. ')
- })
-
- if (!waitForResult) return session
-
- if (debug) {
- logger.info(`Job has been submitted for '${fileName}'.`)
- logger.info(
- `You can monitor the job progress at '${this.serverUrl}${
- postedJob.links.find((l: any) => l.rel === 'state')!.href
- }'.`
- )
- }
-
- const jobStatus = await this.pollJobState(
- postedJob,
- etag,
- authConfig,
- pollOptions
- ).catch(async (err) => {
- const error = err?.response?.data
- const result = /err=[0-9]*,/.exec(error)
-
- const errorCode = '5113'
- if (result?.[0]?.slice(4, -1) === errorCode) {
- const sessionLogUrl =
- postedJob.links.find((l: any) => l.rel === 'up')!.href + '/log'
- const logCount = 1000000
- err.log = await fetchLogByChunks(
- this.requestClient,
- access_token!,
- sessionLogUrl,
- logCount
- )
- }
- throw prefixMessage(err, 'Error while polling job status. ')
- })
-
- if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
- }
-
- const { result: currentJob } = await this.requestClient
- .get(
- `/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
- access_token
- )
- .catch((err) => {
- throw prefixMessage(err, 'Error while getting job. ')
- })
-
- let jobResult
- let log = ''
-
- const logLink = currentJob.links.find((l) => l.rel === 'log')
-
- if (debug && logLink) {
- const logUrl = `${logLink.href}/content`
- const logCount = currentJob.logStatistics?.lineCount ?? 1000000
- log = await fetchLogByChunks(
- this.requestClient,
- access_token!,
- logUrl,
- logCount
- )
- }
-
- if (jobStatus === 'failed' || jobStatus === 'error') {
- return Promise.reject(new ComputeJobExecutionError(currentJob, log))
- }
-
- let resultLink
-
- if (expectWebout) {
- resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
- } else {
- return { job: currentJob, log }
- }
-
- if (resultLink) {
- jobResult = await this.requestClient
- .get(resultLink, access_token, 'text/plain')
- .catch(async (e) => {
- if (e instanceof NotFoundError) {
- if (logLink) {
- const logUrl = `${logLink.href}/content`
- const logCount = currentJob.logStatistics?.lineCount ?? 1000000
- log = await fetchLogByChunks(
- this.requestClient,
- access_token!,
- logUrl,
- logCount
- )
-
- return Promise.reject({
- status: 500,
- log
- })
- }
- }
-
- return {
- result: JSON.stringify(e)
- }
- })
- }
-
- await this.sessionManager
- .clearSession(executionSessionId, access_token)
- .catch((err) => {
- throw prefixMessage(err, 'Error while clearing session. ')
- })
-
- return { result: jobResult?.result, log }
- } catch (e) {
- if (e && e.status === 404) {
- return this.executeScript(
- jobPath,
- linesOfCode,
- contextName,
- authConfig,
- data,
- debug,
- false,
- true
- )
- } else {
- throw prefixMessage(e, 'Error while executing script. ')
- }
- }
+ return executeScript(
+ this.requestClient,
+ this.sessionManager,
+ this.rootFolderName,
+ jobPath,
+ linesOfCode,
+ contextName,
+ authConfig,
+ data,
+ debug,
+ expectWebout,
+ waitForResult,
+ pollOptions,
+ printPid,
+ variables
+ )
}
/**
@@ -581,9 +335,6 @@ export class SASViyaApiClient {
const formData = new NodeFormData()
formData.append('file', contentBuffer, fileName)
- const mimeType =
- mime.getType(fileName.match(/\.[0-9a-z]+$/i)?.[0] || '') ?? 'text/plain'
-
return (
await this.requestClient.post(
`/files/files?parentFolderUri=${parentFolderUri}&typeDefName=file#rawUpload`,
@@ -769,37 +520,7 @@ export class SASViyaApiClient {
clientSecret: string,
authCode: string
): Promise<SasAuthResponse> {
- const url = this.serverUrl + '/SASLogon/oauth/token'
- let token
- if (typeof Buffer === 'undefined') {
- token = btoa(clientId + ':' + clientSecret)
- } else {
- token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
- }
- const headers = {
- Authorization: 'Basic ' + token
- }
-
- let formData
- if (typeof FormData === 'undefined') {
- formData = new NodeFormData()
- } else {
- formData = new FormData()
- }
- formData.append('grant_type', 'authorization_code')
- formData.append('code', authCode)
-
- const authResponse = await this.requestClient
- .post(
- url,
- formData,
- undefined,
- 'multipart/form-data; boundary=' + (formData as any)._boundary,
- headers
- )
- .then((res) => res.result as SasAuthResponse)
-
- return authResponse
+ return getAccessToken(this.requestClient, clientId, clientSecret, authCode)
}
/**
@@ -813,39 +534,12 @@ export class SASViyaApiClient {
clientSecret: string,
refreshToken: string
) {
- const url = this.serverUrl + '/SASLogon/oauth/token'
- let token
- if (typeof Buffer === 'undefined') {
- token = btoa(clientId + ':' + clientSecret)
- } else {
- token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
- }
- const headers = {
- Authorization: 'Basic ' + token
- }
-
- let formData
- if (typeof FormData === 'undefined') {
- formData = new NodeFormData()
- formData.append('grant_type', 'refresh_token')
- formData.append('refresh_token', refreshToken)
- } else {
- formData = new FormData()
- formData.append('grant_type', 'refresh_token')
- formData.append('refresh_token', refreshToken)
- }
-
- const authResponse = await this.requestClient
- .post(
- url,
- formData,
- undefined,
- 'multipart/form-data; boundary=' + (formData as any)._boundary,
- headers
- )
- .then((res) => res.result)
-
- return authResponse
+ return refreshTokens(
+ this.requestClient,
+ clientId,
+ clientSecret,
+ refreshToken
+ )
}
/**
@@ -892,7 +586,7 @@ export class SASViyaApiClient {
) {
let access_token = (authConfig || {}).access_token
if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
+ ;({ access_token } = await getTokens(this.requestClient, authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
@@ -988,7 +682,7 @@ export class SASViyaApiClient {
) {
let access_token = (authConfig || {}).access_token
if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
+ ;({ access_token } = await getTokens(this.requestClient, authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
@@ -1060,18 +754,16 @@ export class SASViyaApiClient {
jobDefinition,
arguments: jobArguments
}
- const { result: postedJob, etag } = await this.requestClient.post(
+ const { result: postedJob } = await this.requestClient.post(
`${this.serverUrl}/jobExecution/jobs?_action=wait`,
postJobRequestBody,
access_token
)
- const jobStatus = await this.pollJobState(
- postedJob,
- etag,
- authConfig
- ).catch((err) => {
- throw prefixMessage(err, 'Error while polling job status. ')
- })
+ const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
+ (err) => {
+ throw prefixMessage(err, 'Error while polling job status. ')
+ }
+ )
const { result: currentJob } = await this.requestClient.get(
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
access_token
@@ -1137,157 +829,22 @@ export class SASViyaApiClient {
this.folderMap.set(path, itemsAtRoot)
}
- // REFACTOR: set default value for 'pollOptions' attribute
private async pollJobState(
- postedJob: any,
- etag: string | null,
+ postedJob: Job,
authConfig?: AuthConfig,
pollOptions?: PollOptions
) {
- const logger = process.logger || console
-
- let POLL_INTERVAL = 300
- let MAX_POLL_COUNT = 1000
- let MAX_ERROR_COUNT = 5
- let access_token = (authConfig || {}).access_token
- if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
- }
-
- if (pollOptions) {
- POLL_INTERVAL = pollOptions.POLL_INTERVAL || POLL_INTERVAL
- MAX_POLL_COUNT = pollOptions.MAX_POLL_COUNT || MAX_POLL_COUNT
- }
-
- let postedJobState = ''
- let pollCount = 0
- let errorCount = 0
- const headers: any = {
- 'Content-Type': 'application/json',
- 'If-None-Match': etag
- }
- if (access_token) {
- headers.Authorization = `Bearer ${access_token}`
- }
- const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
- if (!stateLink) {
- Promise.reject(`Job state link was not found.`)
- }
-
- const { result: state } = await this.requestClient
- .get(
- `${this.serverUrl}${stateLink.href}?_action=wait&wait=300`,
- access_token,
- 'text/plain',
- {},
- this.debug
- )
- .catch((err) => {
- console.error(
- `Error fetching job state from ${this.serverUrl}${stateLink.href}. Starting poll, assuming job to be running.`,
- err
- )
- return { result: 'unavailable' }
- })
-
- const currentState = state.trim()
- if (currentState === 'completed') {
- return Promise.resolve(currentState)
- }
-
- return new Promise(async (resolve, _) => {
- let printedState = ''
-
- const interval = setInterval(async () => {
- if (
- postedJobState === 'running' ||
- postedJobState === '' ||
- postedJobState === 'pending' ||
- postedJobState === 'unavailable'
- ) {
- if (authConfig) {
- ;({ access_token } = await this.getTokens(authConfig))
- }
-
- if (stateLink) {
- const { result: jobState } = await this.requestClient
- .get(
- `${this.serverUrl}${stateLink.href}?_action=wait&wait=300`,
- access_token,
- 'text/plain',
- {},
- this.debug
- )
- .catch((err) => {
- errorCount++
- if (
- pollCount >= MAX_POLL_COUNT ||
- errorCount >= MAX_ERROR_COUNT
- ) {
- throw prefixMessage(
- err,
- 'Error while getting job state after interval. '
- )
- }
- console.error(
- `Error fetching job state from ${this.serverUrl}${stateLink.href}. Resuming poll, assuming job to be running.`,
- err
- )
- return { result: 'unavailable' }
- })
-
- postedJobState = jobState.trim()
- if (postedJobState != 'unavailable' && errorCount > 0) {
- errorCount = 0
- }
-
- if (this.debug && printedState !== postedJobState) {
- logger.info('Polling job status...')
- logger.info(`Current job state: ${postedJobState}`)
-
- printedState = postedJobState
- }
-
- pollCount++
-
- if (pollCount >= MAX_POLL_COUNT) {
- resolve(postedJobState)
- }
- }
- } else {
- clearInterval(interval)
- resolve(postedJobState)
- }
- }, POLL_INTERVAL)
- })
+ return pollJobState(
+ this.requestClient,
+ postedJob,
+ this.debug,
+ authConfig,
+ pollOptions
+ )
}
private async uploadTables(data: any, accessToken?: string) {
- const uploadedFiles = []
- const headers: any = {
- 'Content-Type': 'application/json'
- }
- if (accessToken) {
- headers.Authorization = `Bearer ${accessToken}`
- }
-
- for (const tableName in data) {
- const csv = convertToCSV(data[tableName])
- if (csv === 'ERROR: LARGE STRING LENGTH') {
- throw new Error(
- 'The max length of a string value in SASjs is 32765 characters.'
- )
- }
-
- const uploadResponse = await this.requestClient
- .uploadFile(`${this.serverUrl}/files/files#rawUpload`, csv, accessToken)
- .catch((err) => {
- throw prefixMessage(err, 'Error while uploading file. ')
- })
-
- uploadedFiles.push({ tableName, file: uploadResponse.result })
- }
- return uploadedFiles
+ return uploadTables(this.requestClient, data, accessToken)
}
private async getFolderDetails(
@@ -1376,14 +933,6 @@ export class SASViyaApiClient {
? sourceFolder
: await this.getFolderUri(sourceFolder, accessToken)
- const requestInfo = {
- method: 'GET',
- headers: {
- 'Content-Type': 'application/json',
- Authorization: 'Bearer ' + accessToken
- }
- }
-
const { result: members } = await this.requestClient.get<{ items: any[] }>(
`${this.serverUrl}${sourceFolderUri}/members?limit=${limit}`,
accessToken
@@ -1490,21 +1039,4 @@ export class SASViyaApiClient {
return movedFolder
}
-
- private async getTokens(authConfig: AuthConfig): Promise {
- const logger = process.logger || console
- let { access_token, refresh_token, client, secret } = authConfig
- if (
- isAccessTokenExpiring(access_token) ||
- isRefreshTokenExpiring(refresh_token)
- ) {
- logger.info('Refreshing access and refresh tokens.')
- ;({ access_token, refresh_token } = await this.refreshTokens(
- client,
- secret,
- refresh_token
- ))
- }
- return { access_token, refresh_token, client, secret }
- }
}
diff --git a/src/SASjs.ts b/src/SASjs.ts
index d60ad27..705386d 100644
--- a/src/SASjs.ts
+++ b/src/SASjs.ts
@@ -4,7 +4,12 @@ import { SASViyaApiClient } from './SASViyaApiClient'
import { SAS9ApiClient } from './SAS9ApiClient'
import { FileUploader } from './FileUploader'
import { AuthManager } from './auth'
-import { ServerType, MacroVar, AuthConfig } from '@sasjs/utils/types'
+import {
+ ServerType,
+ MacroVar,
+ AuthConfig,
+ ExtraResponseAttributes
+} from '@sasjs/utils/types'
import { RequestClient } from './request/RequestClient'
import {
JobExecutor,
@@ -14,7 +19,6 @@ import {
Sas9JobExecutor
} from './job-execution'
import { ErrorResponse } from './types/errors'
-import { ExtraResponseAttributes } from '@sasjs/utils/types'
const defaultConfig: SASjsConfig = {
serverUrl: '',
@@ -540,11 +544,22 @@ export default class SASjs {
* Process). Is prepended at runtime with the value of `appLoc`.
* @param files - array of files to be uploaded, including File object and file name.
* @param params - request URL parameters.
+ * @param overrideSasjsConfig - object to override existing config (optional)
*/
- public uploadFile(sasJob: string, files: UploadFile[], params: any) {
- const fileUploader =
- this.fileUploader ||
- new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
+ public uploadFile(
+ sasJob: string,
+ files: UploadFile[],
+ params: any,
+ overrideSasjsConfig?: any
+ ) {
+ const fileUploader = overrideSasjsConfig
+ ? new FileUploader(
+ { ...this.sasjsConfig, ...overrideSasjsConfig },
+ this.jobsPath,
+ this.requestClient!
+ )
+ : this.fileUploader ||
+ new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
return fileUploader.uploadFile(sasJob, files, params)
}
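
A short usage sketch of the extended `uploadFile` signature, based on the JSDoc above (the job path, params and files are hypothetical):

```typescript
// `sasjs` is a configured SASjs instance; `files` is an UploadFile[] prepared elsewhere
await sasjs.uploadFile(
  'common/sendArr', // sasJob, prepended at runtime with the value of appLoc
  files, // UploadFile[]
  { table: 'areas' }, // request URL parameters
  { debug: true } // overrideSasjsConfig: merged over the existing config for this call only
)
```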
diff --git a/src/api/viya/executeScript.ts b/src/api/viya/executeScript.ts
new file mode 100644
index 0000000..e54143f
--- /dev/null
+++ b/src/api/viya/executeScript.ts
@@ -0,0 +1,293 @@
+import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
+import { AuthConfig, MacroVar } from '@sasjs/utils/types'
+import { prefixMessage } from '@sasjs/utils/error'
+import {
+ PollOptions,
+ Job,
+ ComputeJobExecutionError,
+ NotFoundError
+} from '../..'
+import { getTokens } from '../../auth/getTokens'
+import { RequestClient } from '../../request/RequestClient'
+import { SessionManager } from '../../SessionManager'
+import { isRelativePath, fetchLogByChunks } from '../../utils'
+import { formatDataForRequest } from '../../utils/formatDataForRequest'
+import { pollJobState } from './pollJobState'
+import { uploadTables } from './uploadTables'
+
+/**
+ * Executes code on the current SAS Viya server.
+ * @param jobPath - the path to the file being submitted for execution.
+ * @param linesOfCode - an array of code lines to execute.
+ * @param contextName - the context to execute the code in.
+ * @param authConfig - an object containing an access token, refresh token, client ID and secret.
+ * @param data - execution data.
+ * @param debug - when set to true, the log will be returned.
+ * @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
+ * @param waitForResult - when set to true (the default), the function waits for the job and returns its result; when set to false, it returns the session immediately after the job is submitted.
+ * @param pollOptions - an object that represents the poll interval (milliseconds) and the maximum number of poll attempts, e.g. { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }.
+ * @param printPid - a boolean that indicates whether the function should print the PID of the started job.
+ * @param variables - an object that represents macro variables.
+ */
+export async function executeScript(
+ requestClient: RequestClient,
+ sessionManager: SessionManager,
+ rootFolderName: string,
+ jobPath: string,
+ linesOfCode: string[],
+ contextName: string,
+ authConfig?: AuthConfig,
+ data: any = null,
+ debug: boolean = false,
+ expectWebout = false,
+ waitForResult = true,
+ pollOptions?: PollOptions,
+ printPid = false,
+ variables?: MacroVar
+): Promise<any> {
+ let access_token = (authConfig || {}).access_token
+ if (authConfig) {
+ ;({ access_token } = await getTokens(requestClient, authConfig))
+ }
+
+ const logger = process.logger || console
+
+ try {
+ let executionSessionId: string
+
+ const session = await sessionManager
+ .getSession(access_token)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while getting session. ')
+ })
+
+ executionSessionId = session!.id
+
+ if (printPid) {
+ const { result: jobIdVariable } = await sessionManager
+ .getVariable(executionSessionId, 'SYSJOBID', access_token)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while getting session variable. ')
+ })
+
+ if (jobIdVariable && jobIdVariable.value) {
+ const relativeJobPath = rootFolderName
+ ? jobPath.split(rootFolderName).join('').replace(/^\//, '')
+ : jobPath
+
+ const logger = process.logger || console
+
+ logger.info(
+ `Triggered '${relativeJobPath}' with PID ${
+ jobIdVariable.value
+ } at ${timestampToYYYYMMDDHHMMSS()}`
+ )
+ }
+ }
+
+ const jobArguments: { [key: string]: any } = {
+ _contextName: contextName,
+ _OMITJSONLISTING: true,
+ _OMITJSONLOG: true,
+ _OMITSESSIONRESULTS: true,
+ _OMITTEXTLISTING: true,
+ _OMITTEXTLOG: true
+ }
+
+ if (debug) {
+ jobArguments['_OMITTEXTLOG'] = false
+ jobArguments['_OMITSESSIONRESULTS'] = false
+ }
+
+ let fileName
+
+ if (isRelativePath(jobPath)) {
+ fileName = `exec-${
+ jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
+ }`
+ } else {
+ const jobPathParts = jobPath.split('/')
+ fileName = jobPathParts.pop()
+ }
+
+ let jobVariables: any = {
+ SYS_JES_JOB_URI: '',
+ _program: isRelativePath(jobPath)
+ ? rootFolderName + '/' + jobPath
+ : jobPath
+ }
+
+ if (variables) jobVariables = { ...jobVariables, ...variables }
+
+ if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
+
+ let files: any[] = []
+
+ if (data) {
+ if (JSON.stringify(data).includes(';')) {
+ files = await uploadTables(requestClient, data, access_token).catch(
+ (err) => {
+ throw prefixMessage(err, 'Error while uploading tables. ')
+ }
+ )
+
+ jobVariables['_webin_file_count'] = files.length
+
+ files.forEach((fileInfo, index) => {
+ jobVariables[
+ `_webin_fileuri${index + 1}`
+ ] = `/files/files/${fileInfo.file.id}`
+ jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
+ })
+ } else {
+ jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
+ }
+ }
+
+ // Execute job in session
+ const jobRequestBody = {
+ name: fileName,
+ description: 'Powered by SASjs',
+ code: linesOfCode,
+ variables: jobVariables,
+ arguments: jobArguments
+ }
+
+ const { result: postedJob, etag } = await requestClient
+ .post(
+ `/compute/sessions/${executionSessionId}/jobs`,
+ jobRequestBody,
+ access_token
+ )
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while posting job. ')
+ })
+
+ if (!waitForResult) return session
+
+ if (debug) {
+ logger.info(`Job has been submitted for '${fileName}'.`)
+ logger.info(
+ `You can monitor the job progress at '${requestClient.getBaseUrl()}${
+ postedJob.links.find((l: any) => l.rel === 'state')!.href
+ }'.`
+ )
+ }
+
+ const jobStatus = await pollJobState(
+ requestClient,
+ postedJob,
+ debug,
+ authConfig,
+ pollOptions
+ ).catch(async (err) => {
+ const error = err?.response?.data
+ const result = /err=[0-9]*,/.exec(error)
+
+ const errorCode = '5113'
+ if (result?.[0]?.slice(4, -1) === errorCode) {
+ const sessionLogUrl =
+ postedJob.links.find((l: any) => l.rel === 'up')!.href + '/log'
+ const logCount = 1000000
+ err.log = await fetchLogByChunks(
+ requestClient,
+ access_token!,
+ sessionLogUrl,
+ logCount
+ )
+ }
+ throw prefixMessage(err, 'Error while polling job status. ')
+ })
+
+ if (authConfig) {
+ ;({ access_token } = await getTokens(requestClient, authConfig))
+ }
+
+ const { result: currentJob } = await requestClient
+ .get(
+ `/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
+ access_token
+ )
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while getting job. ')
+ })
+
+ let jobResult
+ let log = ''
+
+ const logLink = currentJob.links.find((l) => l.rel === 'log')
+
+ if (debug && logLink) {
+ const logUrl = `${logLink.href}/content`
+ const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+ log = await fetchLogByChunks(
+ requestClient,
+ access_token!,
+ logUrl,
+ logCount
+ )
+ }
+
+ if (jobStatus === 'failed' || jobStatus === 'error') {
+ throw new ComputeJobExecutionError(currentJob, log)
+ }
+
+ if (!expectWebout) {
+ return { job: currentJob, log }
+ }
+
+ const resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
+
+ jobResult = await requestClient
+ .get(resultLink, access_token, 'text/plain')
+ .catch(async (e) => {
+ if (e instanceof NotFoundError) {
+ if (logLink) {
+ const logUrl = `${logLink.href}/content`
+ const logCount = currentJob.logStatistics?.lineCount ?? 1000000
+ log = await fetchLogByChunks(
+ requestClient,
+ access_token!,
+ logUrl,
+ logCount
+ )
+
+ return Promise.reject({
+ status: 500,
+ log
+ })
+ }
+ }
+
+ return {
+ result: JSON.stringify(e)
+ }
+ })
+
+ await sessionManager
+ .clearSession(executionSessionId, access_token)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while clearing session. ')
+ })
+
+ return { result: jobResult?.result, log }
+ } catch (e) {
+ if (e && e.status === 404) {
+ return executeScript(
+ requestClient,
+ sessionManager,
+ rootFolderName,
+ jobPath,
+ linesOfCode,
+ contextName,
+ authConfig,
+ data,
+ debug,
+ false,
+ true
+ )
+ } else {
+ throw prefixMessage(e, 'Error while executing script. ')
+ }
+ }
+}
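
For context, a hedged sketch of calling the extracted `executeScript` helper directly. The `requestClient`, `sessionManager` and `authConfig` objects are assumed to already exist, and the folder, job path and context name are placeholders:

```typescript
import { executeScript } from './src/api/viya/executeScript'

// Sketch only: requestClient, sessionManager and authConfig are set up elsewhere
const { result, log } = await executeScript(
  requestClient,
  sessionManager,
  '/Public/app/myapp', // rootFolderName (placeholder)
  'services/common/appinit', // jobPath (placeholder)
  ['%put Hello from SASjs;'], // linesOfCode
  'SAS Job Execution compute context', // contextName (placeholder)
  authConfig,
  null, // data
  true // debug: also fetch the job log
)
```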
diff --git a/src/api/viya/getFileStream.ts b/src/api/viya/getFileStream.ts
new file mode 100644
index 0000000..c647f3e
--- /dev/null
+++ b/src/api/viya/getFileStream.ts
@@ -0,0 +1,17 @@
+import { isFolder } from '@sasjs/utils/file'
+import { generateTimestamp } from '@sasjs/utils/time'
+import { Job } from '../../types'
+
+export const getFileStream = async (job: Job, filePath?: string) => {
+ const { createWriteStream } = require('@sasjs/utils/file')
+ const logPath = filePath || process.cwd()
+ const isFolderPath = await isFolder(logPath)
+ if (isFolderPath) {
+ const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
+ const path = require('path')
+ const logFilePath = path.join(filePath || process.cwd(), logFileName)
+ return await createWriteStream(logFilePath)
+ } else {
+ return await createWriteStream(logPath)
+ }
+}
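+
+// Usage sketch (comment only, not part of the module's exports; `job` below is a
+// hypothetical Job object):
+//
+//   const folderStream = await getFileStream(job, './logs')      // folder -> <job name>-<timestamp>.log inside it
+//   const fileStream = await getFileStream(job, './my-job.log')  // file path is used as-is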
diff --git a/src/api/viya/pollJobState.ts b/src/api/viya/pollJobState.ts
new file mode 100644
index 0000000..c4b05d0
--- /dev/null
+++ b/src/api/viya/pollJobState.ts
@@ -0,0 +1,250 @@
+import { AuthConfig } from '@sasjs/utils/types'
+import { Job, PollOptions } from '../..'
+import { getTokens } from '../../auth/getTokens'
+import { RequestClient } from '../../request/RequestClient'
+import { JobStatePollError } from '../../types/errors'
+import { Link, WriteStream } from '../../types'
+import { isNode } from '../../utils'
+
+export async function pollJobState(
+ requestClient: RequestClient,
+ postedJob: Job,
+ debug: boolean,
+ authConfig?: AuthConfig,
+ pollOptions?: PollOptions
+) {
+ const logger = process.logger || console
+
+ let pollInterval = 300
+ let maxPollCount = 1000
+
+ const defaultPollOptions: PollOptions = {
+ maxPollCount,
+ pollInterval,
+ streamLog: false
+ }
+
+ pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
+
+ const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
+ if (!stateLink) {
+ throw new Error(`Job state link was not found.`)
+ }
+
+ let currentState = await getJobState(
+ requestClient,
+ postedJob,
+ '',
+ debug,
+ authConfig
+ ).catch((err) => {
+ logger.error(
+ `Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
+ err
+ )
+ return 'unavailable'
+ })
+
+ let pollCount = 0
+
+ if (currentState === 'completed') {
+ return Promise.resolve(currentState)
+ }
+
+ let logFileStream
+ if (pollOptions.streamLog && isNode()) {
+ const { getFileStream } = require('./getFileStream')
+ logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
+ }
+
+ // Poll up to the first 100 times with the specified poll interval
+ let result = await doPoll(
+ requestClient,
+ postedJob,
+ currentState,
+ debug,
+ pollCount,
+ authConfig,
+ {
+ ...pollOptions,
+ maxPollCount:
+ pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
+ },
+ logFileStream
+ )
+
+ currentState = result.state
+ pollCount = result.pollCount
+
+ if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
+ return currentState
+ }
+
+ // If we get to this point, this is a long-running job that needs longer polling.
+ // We will resume polling with a bigger interval of 1 minute
+ let longJobPollOptions: PollOptions = {
+ maxPollCount: 24 * 60,
+ pollInterval: 60000,
+ streamLog: false
+ }
+ if (pollOptions) {
+ longJobPollOptions.streamLog = pollOptions.streamLog
+ longJobPollOptions.logFolderPath = pollOptions.logFolderPath
+ }
+
+ result = await doPoll(
+ requestClient,
+ postedJob,
+ currentState,
+ debug,
+ pollCount,
+ authConfig,
+ longJobPollOptions,
+ logFileStream
+ )
+
+ currentState = result.state
+ pollCount = result.pollCount
+
+ if (logFileStream) {
+ logFileStream.end()
+ }
+
+ return currentState
+}
+
+const getJobState = async (
+ requestClient: RequestClient,
+ job: Job,
+ currentState: string,
+ debug: boolean,
+ authConfig?: AuthConfig
+) => {
+ const stateLink = job.links.find((l: any) => l.rel === 'state')
+ if (!stateLink) {
+ throw new Error(`Job state link was not found.`)
+ }
+
+ if (needsRetry(currentState)) {
+ let tokens
+ if (authConfig) {
+ tokens = await getTokens(requestClient, authConfig)
+ }
+
+ const { result: jobState } = await requestClient
+ .get(
+ `${stateLink.href}?_action=wait&wait=300`,
+ tokens?.access_token,
+ 'text/plain',
+ {},
+ debug
+ )
+ .catch((err) => {
+ throw new JobStatePollError(job.id, err)
+ })
+
+ return jobState.trim()
+ } else {
+ return currentState
+ }
+}
+
+const needsRetry = (state: string) =>
+ state === 'running' ||
+ state === '' ||
+ state === 'pending' ||
+ state === 'unavailable'
+
+const doPoll = async (
+ requestClient: RequestClient,
+ postedJob: Job,
+ currentState: string,
+ debug: boolean,
+ pollCount: number,
+ authConfig?: AuthConfig,
+ pollOptions?: PollOptions,
+ logStream?: WriteStream
+): Promise<{ state: string; pollCount: number }> => {
+ let pollInterval = 300
+ let maxPollCount = 1000
+ let maxErrorCount = 5
+ let errorCount = 0
+ let state = currentState
+ let printedState = ''
+ let startLogLine = 0
+
+ const logger = process.logger || console
+
+ if (pollOptions) {
+ pollInterval = pollOptions.pollInterval || pollInterval
+ maxPollCount = pollOptions.maxPollCount || maxPollCount
+ }
+
+ const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
+ if (!stateLink) {
+ throw new Error(`Job state link was not found.`)
+ }
+
+ while (needsRetry(state) && pollCount <= maxPollCount) {
+ state = await getJobState(
+ requestClient,
+ postedJob,
+ state,
+ debug,
+ authConfig
+ ).catch((err) => {
+ errorCount++
+ if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
+ throw err
+ }
+ logger.error(
+ `Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
+ err
+ )
+ return 'unavailable'
+ })
+
+ pollCount++
+
+ if (pollOptions?.streamLog) {
+ const jobUrl = postedJob.links.find((l: Link) => l.rel === 'self')
+ const { result: job } = await requestClient.get(
+ jobUrl!.href,
+ authConfig?.access_token
+ )
+
+ const endLogLine = job.logStatistics?.lineCount ?? 1000000
+
+ const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
+ if (saveLog) {
+ await saveLog(
+ postedJob,
+ requestClient,
+ startLogLine,
+ endLogLine,
+ logStream,
+ authConfig?.access_token
+ )
+ }
+
+ startLogLine += endLogLine
+ }
+
+ if (debug && printedState !== state) {
+ logger.info('Polling job status...')
+ logger.info(`Current job state: ${state}`)
+
+ printedState = state
+ }
+
+ if (state != 'unavailable' && errorCount > 0) {
+ errorCount = 0
+ }
+
+ await delay(pollInterval)
+ }
+
+ return { state, pollCount }
+}
+
+const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
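+
+// Usage sketch (comment only; `requestClient`, `postedJob` and `authConfig` are
+// hypothetical values supplied by the caller):
+//
+//   const state = await pollJobState(requestClient, postedJob, true, authConfig, {
+//     maxPollCount: 200,
+//     pollInterval: 500,
+//     streamLog: true,
+//     logFolderPath: './logs'
+//   })
+//   // resolves with the final state, e.g. 'completed', 'failed' or 'error'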
diff --git a/src/api/viya/saveLog.ts b/src/api/viya/saveLog.ts
new file mode 100644
index 0000000..2b5ec08
--- /dev/null
+++ b/src/api/viya/saveLog.ts
@@ -0,0 +1,55 @@
+import { Job } from '../..'
+import { RequestClient } from '../../request/RequestClient'
+import { fetchLog } from '../../utils'
+import { WriteStream } from '../../types'
+import { writeStream } from './writeStream'
+
+/**
+ * Appends logs to a supplied write stream.
+ * This is useful for getting quick feedback on longer running jobs.
+ * @param job - the job to fetch logs for
+ * @param requestClient - the pre-configured HTTP request client
+ * @param startLine - the line at which to start fetching the log
+ * @param endLine - the line at which to stop fetching the log
+ * @param logFileStream - the write stream to which the log is appended
+ * @param accessToken - an optional access token for authentication/authorization
+ * The access token is not required when fetching logs from the browser.
+ */
+export async function saveLog(
+ job: Job,
+ requestClient: RequestClient,
+ startLine: number,
+ endLine: number,
+ logFileStream?: WriteStream,
+ accessToken?: string
+) {
+ if (!accessToken) {
+ throw new Error(
+ `Logs for job ${job.id} cannot be fetched without a valid access token.`
+ )
+ }
+
+ if (!logFileStream) {
+ throw new Error(
+ `Logs for job ${job.id} cannot be written without a valid write stream.`
+ )
+ }
+
+ const logger = process.logger || console
+ const jobLogUrl = job.links.find((l) => l.rel === 'log')
+
+ if (!jobLogUrl) {
+ throw new Error(`Log URL for job ${job.id} was not found.`)
+ }
+
+ const log = await fetchLog(
+ requestClient,
+ accessToken,
+ `${jobLogUrl.href}/content`,
+ startLine,
+ endLine
+ )
+
+ logger.info(`Writing logs to ${logFileStream.path}`)
+ await writeStream(logFileStream, log || '')
+}
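+
+// Usage sketch (comment only; the stream, line range and token below are hypothetical):
+//
+//   const stream = await getFileStream(job, './logs')
+//   await saveLog(job, requestClient, 0, 1000, stream, accessToken)
+//   stream.end()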
diff --git a/src/api/viya/spec/executeScript.spec.ts b/src/api/viya/spec/executeScript.spec.ts
new file mode 100644
index 0000000..9eb80c4
--- /dev/null
+++ b/src/api/viya/spec/executeScript.spec.ts
@@ -0,0 +1,675 @@
+import { RequestClient } from '../../../request/RequestClient'
+import { SessionManager } from '../../../SessionManager'
+import { executeScript } from '../executeScript'
+import { mockSession, mockAuthConfig, mockJob } from './mockResponses'
+import * as pollJobStateModule from '../pollJobState'
+import * as uploadTablesModule from '../uploadTables'
+import * as getTokensModule from '../../../auth/getTokens'
+import * as formatDataModule from '../../../utils/formatDataForRequest'
+import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
+import { PollOptions } from '../../../types'
+import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
+import { Logger, LogLevel } from '@sasjs/utils'
+
+const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+const defaultPollOptions: PollOptions = {
+ maxPollCount: 100,
+ pollInterval: 500,
+ streamLog: false
+}
+
+describe('executeScript', () => {
+ beforeEach(() => {
+ ;(process as any).logger = new Logger(LogLevel.Off)
+ setupMocks()
+ })
+
+ it('should not try to get fresh tokens if an authConfig is not provided', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context'
+ )
+
+ expect(getTokensModule.getTokens).not.toHaveBeenCalled()
+ })
+
+ it('should try to get fresh tokens if an authConfig is provided', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context',
+ mockAuthConfig
+ )
+
+ expect(getTokensModule.getTokens).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig
+ )
+ })
+
+ it('should get a session from the session manager before executing', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context'
+ )
+
+ expect(sessionManager.getSession).toHaveBeenCalledWith(undefined)
+ })
+
+ it('should handle errors while getting a session', async () => {
+ jest
+ .spyOn(sessionManager, 'getSession')
+ .mockImplementation(() => Promise.reject('Test Error'))
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context'
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while getting session.')
+ })
+
+ it('should fetch the PID when printPid is true', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context',
+ mockAuthConfig,
+ null,
+ false,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(sessionManager.getVariable).toHaveBeenCalledWith(
+ mockSession.id,
+ 'SYSJOBID',
+ mockAuthConfig.access_token
+ )
+ })
+
+ it('should handle errors while getting the job PID', async () => {
+ jest
+ .spyOn(sessionManager, 'getVariable')
+ .mockImplementation(() => Promise.reject('Test Error'))
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context',
+ mockAuthConfig,
+ null,
+ false,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while getting session variable.')
+ })
+
+ it('should use the file upload approach when data contains semicolons', async () => {
+ jest
+ .spyOn(uploadTablesModule, 'uploadTables')
+ .mockImplementation(() =>
+ Promise.resolve([{ tableName: 'test', file: { id: 1 } }])
+ )
+
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar;' },
+ false,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(uploadTablesModule.uploadTables).toHaveBeenCalledWith(
+ requestClient,
+ { foo: 'bar;' },
+ mockAuthConfig.access_token
+ )
+ })
+
+ it('should format data as CSV when it does not contain semicolons', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put hello'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(formatDataModule.formatDataForRequest).toHaveBeenCalledWith({
+ foo: 'bar'
+ })
+ })
+
+ it('should submit a job for execution via the compute API', async () => {
+ jest
+ .spyOn(formatDataModule, 'formatDataForRequest')
+ .mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
+
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(requestClient.post).toHaveBeenCalledWith(
+ `/compute/sessions/${mockSession.id}/jobs`,
+ {
+ name: 'exec-test',
+ description: 'Powered by SASjs',
+ code: ['%put "hello";'],
+ variables: {
+ SYS_JES_JOB_URI: '',
+ _program: 'test/test',
+ sasjs_tables: 'foo',
+ sasjs0data: 'bar'
+ },
+ arguments: {
+ _contextName: 'test context',
+ _OMITJSONLISTING: true,
+ _OMITJSONLOG: true,
+ _OMITSESSIONRESULTS: true,
+ _OMITTEXTLISTING: true,
+ _OMITTEXTLOG: true
+ }
+ },
+ mockAuthConfig.access_token
+ )
+ })
+
+ it('should set the correct variables when debug is true', async () => {
+ jest
+ .spyOn(formatDataModule, 'formatDataForRequest')
+ .mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
+
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(requestClient.post).toHaveBeenCalledWith(
+ `/compute/sessions/${mockSession.id}/jobs`,
+ {
+ name: 'exec-test',
+ description: 'Powered by SASjs',
+ code: ['%put "hello";'],
+ variables: {
+ SYS_JES_JOB_URI: '',
+ _program: 'test/test',
+ sasjs_tables: 'foo',
+ sasjs0data: 'bar',
+ _DEBUG: 131
+ },
+ arguments: {
+ _contextName: 'test context',
+ _OMITJSONLISTING: true,
+ _OMITJSONLOG: true,
+ _OMITSESSIONRESULTS: false,
+ _OMITTEXTLISTING: true,
+ _OMITTEXTLOG: false
+ }
+ },
+ mockAuthConfig.access_token
+ )
+ })
+
+ it('should handle errors during job submission', async () => {
+ jest
+ .spyOn(requestClient, 'post')
+ .mockImplementation(() => Promise.reject('Test Error'))
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while posting job')
+ })
+
+ it('should immediately return the session when waitForResult is false', async () => {
+ const result = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ false,
+ defaultPollOptions,
+ true
+ )
+
+ expect(result).toEqual(mockSession)
+ })
+
+ it('should poll for job completion when waitForResult is true', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ )
+
+ expect(pollJobStateModule.pollJobState).toHaveBeenCalledWith(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ defaultPollOptions
+ )
+ })
+
+ it('should handle general errors when polling for job status', async () => {
+ jest
+ .spyOn(pollJobStateModule, 'pollJobState')
+ .mockImplementation(() => Promise.reject('Poll Error'))
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while polling job status.')
+ })
+
+ it('should fetch the log and append it to the error in case of a 5113 error code', async () => {
+ jest
+ .spyOn(pollJobStateModule, 'pollJobState')
+ .mockImplementation(() =>
+ Promise.reject({ response: { data: 'err=5113,' } })
+ )
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig.access_token,
+ mockJob.links.find((l) => l.rel === 'up')!.href + '/log',
+ 1000000
+ )
+ expect(error.log).toEqual('Test Log')
+ })
+
+ it('should fetch the logs for the job if debug is true and a log URL is available', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ )
+
+ expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig.access_token,
+ mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
+ mockJob.logStatistics.lineCount
+ )
+ })
+
+ it('should not fetch the logs for the job if debug is false', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ )
+
+ expect(fetchLogsModule.fetchLogByChunks).not.toHaveBeenCalled()
+ })
+
+ it('should throw a ComputeJobExecutionError if the job has failed', async () => {
+ jest
+ .spyOn(pollJobStateModule, 'pollJobState')
+ .mockImplementation(() => Promise.resolve('failed'))
+
+ const error: ComputeJobExecutionError = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig.access_token,
+ mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
+ mockJob.logStatistics.lineCount
+ )
+
+ expect(error).toBeInstanceOf(ComputeJobExecutionError)
+ expect(error.log).toEqual('Test Log')
+ expect(error.job).toEqual(mockJob)
+ })
+
+ it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
+ jest
+ .spyOn(pollJobStateModule, 'pollJobState')
+ .mockImplementation(() => Promise.resolve('error'))
+
+ const error: ComputeJobExecutionError = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ true,
+ false,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig.access_token,
+ mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
+ mockJob.logStatistics.lineCount
+ )
+
+ expect(error).toBeInstanceOf(ComputeJobExecutionError)
+ expect(error.log).toEqual('Test Log')
+ expect(error.job).toEqual(mockJob)
+ })
+
+ it('should fetch the result if expectWebout is true', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ true,
+ true,
+ defaultPollOptions,
+ true
+ )
+
+ expect(requestClient.get).toHaveBeenCalledWith(
+ `/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
+ mockAuthConfig.access_token,
+ 'text/plain'
+ )
+ })
+
+ it('should fetch the logs if the webout file was not found', async () => {
+ jest.spyOn(requestClient, 'get').mockImplementation((url, ...rest) => {
+ if (url.includes('_webout')) {
+ return Promise.reject(new NotFoundError(url))
+ }
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ })
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ true,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(requestClient.get).toHaveBeenCalledWith(
+ `/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
+ mockAuthConfig.access_token,
+ 'text/plain'
+ )
+
+ expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig.access_token,
+ mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
+ mockJob.logStatistics.lineCount
+ )
+
+ expect(error.status).toEqual(500)
+ expect(error.log).toEqual('Test Log')
+ })
+
+ it('should clear the session after execution is complete', async () => {
+ await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ true,
+ true,
+ defaultPollOptions,
+ true
+ )
+
+ expect(sessionManager.clearSession).toHaveBeenCalledWith(
+ mockSession.id,
+ mockAuthConfig.access_token
+ )
+ })
+
+ it('should handle errors while clearing a session', async () => {
+ jest
+ .spyOn(sessionManager, 'clearSession')
+ .mockImplementation(() => Promise.reject('Clear Session Error'))
+
+ const error = await executeScript(
+ requestClient,
+ sessionManager,
+ 'test',
+ 'test',
+ ['%put "hello";'],
+ 'test context',
+ mockAuthConfig,
+ { foo: 'bar' },
+ false,
+ true,
+ true,
+ defaultPollOptions,
+ true
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while clearing session.')
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../../request/RequestClient')
+ jest.mock('../../../SessionManager')
+ jest.mock('../../../auth/getTokens')
+ jest.mock('../pollJobState')
+ jest.mock('../uploadTables')
+ jest.mock('../../../utils/formatDataForRequest')
+ jest.mock('../../../utils/fetchLogByChunks')
+
+ jest
+ .spyOn(requestClient, 'post')
+ .mockImplementation(() => Promise.resolve({ result: mockJob, etag: '' }))
+ jest
+ .spyOn(requestClient, 'get')
+ .mockImplementation(() =>
+ Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ )
+ jest
+ .spyOn(requestClient, 'delete')
+ .mockImplementation(() => Promise.resolve({ result: {}, etag: '' }))
+ jest
+ .spyOn(getTokensModule, 'getTokens')
+ .mockImplementation(() => Promise.resolve(mockAuthConfig))
+ jest
+ .spyOn(pollJobStateModule, 'pollJobState')
+ .mockImplementation(() => Promise.resolve('completed'))
+ jest
+ .spyOn(sessionManager, 'getVariable')
+ .mockImplementation(() =>
+ Promise.resolve({ result: { value: 'test' }, etag: 'test', status: 200 })
+ )
+ jest
+ .spyOn(sessionManager, 'getSession')
+ .mockImplementation(() => Promise.resolve(mockSession))
+ jest
+ .spyOn(sessionManager, 'clearSession')
+ .mockImplementation(() => Promise.resolve())
+ jest
+ .spyOn(formatDataModule, 'formatDataForRequest')
+ .mockImplementation(() => ({ sasjs_tables: 'test', sasjs0data: 'test' }))
+ jest
+ .spyOn(fetchLogsModule, 'fetchLogByChunks')
+ .mockImplementation(() => Promise.resolve('Test Log'))
+}
diff --git a/src/api/viya/spec/getFileStream.spec.ts b/src/api/viya/spec/getFileStream.spec.ts
new file mode 100644
index 0000000..9ab766b
--- /dev/null
+++ b/src/api/viya/spec/getFileStream.spec.ts
@@ -0,0 +1,41 @@
+import { Logger, LogLevel } from '@sasjs/utils/logger'
+import * as path from 'path'
+import * as fileModule from '@sasjs/utils/file'
+import { getFileStream } from '../getFileStream'
+import { mockJob } from './mockResponses'
+import { WriteStream } from '../../../types'
+
+describe('getFileStream', () => {
+ beforeEach(() => {
+ ;(process as any).logger = new Logger(LogLevel.Off)
+ setupMocks()
+ })
+ it('should use the given log path if it points to a file', async () => {
+ const { createWriteStream } = require('@sasjs/utils/file')
+
+ await getFileStream(mockJob, path.join(__dirname, 'test.log'))
+
+ expect(createWriteStream).toHaveBeenCalledWith(
+ path.join(__dirname, 'test.log')
+ )
+ })
+
+ it('should generate a log file path with a timestamp if it points to a folder', async () => {
+ const { createWriteStream } = require('@sasjs/utils/file')
+
+ await getFileStream(mockJob, __dirname)
+
+ expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
+ expect(createWriteStream).toHaveBeenCalledWith(
+ expect.stringContaining(path.join(__dirname, 'test job-20'))
+ )
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('@sasjs/utils/file/file')
+ jest
+ .spyOn(fileModule, 'createWriteStream')
+ .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
+}
diff --git a/src/api/viya/spec/mockResponses.ts b/src/api/viya/spec/mockResponses.ts
new file mode 100644
index 0000000..22580f7
--- /dev/null
+++ b/src/api/viya/spec/mockResponses.ts
@@ -0,0 +1,73 @@
+import { AuthConfig } from '@sasjs/utils/types'
+import { Job, Session } from '../../../types'
+
+export const mockSession: Session = {
+ id: 's35510n',
+ state: 'idle',
+ links: [],
+ attributes: {
+ sessionInactiveTimeout: 1
+ },
+ creationTimeStamp: new Date().valueOf().toString()
+}
+
+export const mockJob: Job = {
+ id: 'j0b',
+ name: 'test job',
+ uri: '/j0b',
+ createdBy: 'test user',
+ results: {
+ '_webout.json': 'test'
+ },
+ logStatistics: {
+ lineCount: 100,
+ modifiedTimeStamp: new Date().valueOf().toString()
+ },
+ links: [
+ {
+ rel: 'log',
+ href: '/log',
+ method: 'GET',
+ type: 'log',
+ uri: 'log'
+ },
+ {
+ rel: 'self',
+ href: '/job',
+ method: 'GET',
+ type: 'job',
+ uri: 'job'
+ },
+ {
+ rel: 'state',
+ href: '/state',
+ method: 'GET',
+ type: 'state',
+ uri: 'state'
+ },
+ {
+ rel: 'up',
+ href: '/job',
+ method: 'GET',
+ type: 'up',
+ uri: 'job'
+ }
+ ]
+}
+
+export const mockAuthConfig: AuthConfig = {
+ client: 'cl13nt',
+ secret: '53cr3t',
+ access_token: 'acc355',
+ refresh_token: 'r3fr35h'
+}
+
+export class MockStream {
+ _write(chunk: string, _: any, next: Function) {
+ next()
+ }
+
+ reset() {}
+
+ destroy() {}
+}
diff --git a/src/api/viya/spec/pollJobState.spec.ts b/src/api/viya/spec/pollJobState.spec.ts
new file mode 100644
index 0000000..74f39e1
--- /dev/null
+++ b/src/api/viya/spec/pollJobState.spec.ts
@@ -0,0 +1,346 @@
+import { Logger, LogLevel } from '@sasjs/utils'
+import { RequestClient } from '../../../request/RequestClient'
+import { mockAuthConfig, mockJob } from './mockResponses'
+import { pollJobState } from '../pollJobState'
+import * as getTokensModule from '../../../auth/getTokens'
+import * as saveLogModule from '../saveLog'
+import * as getFileStreamModule from '../getFileStream'
+import * as isNodeModule from '../../../utils/isNode'
+import { PollOptions } from '../../../types'
+import { WriteStream } from 'fs'
+
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+const defaultPollOptions: PollOptions = {
+ maxPollCount: 100,
+ pollInterval: 500,
+ streamLog: false
+}
+
+describe('pollJobState', () => {
+ beforeEach(() => {
+ ;(process as any).logger = new Logger(LogLevel.Off)
+ setupMocks()
+ })
+
+ it('should get valid tokens if the authConfig has been provided', async () => {
+ await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ defaultPollOptions
+ )
+
+ expect(getTokensModule.getTokens).toHaveBeenCalledWith(
+ requestClient,
+ mockAuthConfig
+ )
+ })
+
+ it('should not attempt to get tokens if the authConfig has not been provided', async () => {
+ await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ undefined,
+ defaultPollOptions
+ )
+
+ expect(getTokensModule.getTokens).not.toHaveBeenCalled()
+ })
+
+ it('should throw an error if the job does not have a state link', async () => {
+ const error = await pollJobState(
+ requestClient,
+ { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
+ false,
+ undefined,
+ defaultPollOptions
+ ).catch((e) => e)
+
+ expect((error as Error).message).toContain('Job state link was not found.')
+ })
+
+ it('should attempt to refresh tokens before each poll', async () => {
+ mockSimplePoll()
+
+ await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ defaultPollOptions
+ )
+
+ expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
+ })
+
+ it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
+ mockSimplePoll()
+ const { saveLog } = require('../saveLog')
+
+ await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
+ ...defaultPollOptions,
+ streamLog: true
+ })
+
+ expect(saveLog).toHaveBeenCalledTimes(2)
+ })
+
+ it('should create a write stream in Node.js environment when streamLog is true', async () => {
+ mockSimplePoll()
+ const { getFileStream } = require('../getFileStream')
+ const { saveLog } = require('../saveLog')
+
+ await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
+ ...defaultPollOptions,
+ streamLog: true
+ })
+
+ expect(getFileStream).toHaveBeenCalled()
+ expect(saveLog).toHaveBeenCalledTimes(2)
+ })
+
+ it('should not create a write stream in a non-Node.js environment', async () => {
+ mockSimplePoll()
+ jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
+ const { saveLog } = require('../saveLog')
+ const { getFileStream } = require('../getFileStream')
+
+ await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
+ ...defaultPollOptions,
+ streamLog: true
+ })
+
+ expect(getFileStream).not.toHaveBeenCalled()
+ expect(saveLog).not.toHaveBeenCalled()
+ })
+
+ it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {
+ mockSimplePoll()
+
+ await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ defaultPollOptions
+ )
+
+ expect(saveLogModule.saveLog).not.toHaveBeenCalled()
+ })
+
+ it('should return the current status when the max poll count is reached', async () => {
+ mockRunningPoll()
+
+ const state = await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ {
+ ...defaultPollOptions,
+ maxPollCount: 1
+ }
+ )
+
+ expect(state).toEqual('running')
+ })
+
+ it('should poll with a larger interval for longer running jobs', async () => {
+ mockLongPoll()
+
+ const state = await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ mockAuthConfig,
+ {
+ ...defaultPollOptions,
+ maxPollCount: 200,
+ pollInterval: 10
+ }
+ )
+
+ expect(state).toEqual('completed')
+ }, 200000)
+
+ it('should continue polling until the job completes or errors', async () => {
+ mockSimplePoll(1)
+
+ const state = await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ undefined,
+ defaultPollOptions
+ )
+
+ expect(requestClient.get).toHaveBeenCalledTimes(2)
+ expect(state).toEqual('completed')
+ })
+
+ it('should print the state to the console when debug is on', async () => {
+ jest.spyOn((process as any).logger, 'info')
+ mockSimplePoll()
+
+ await pollJobState(
+ requestClient,
+ mockJob,
+ true,
+ undefined,
+ defaultPollOptions
+ )
+
+ expect((process as any).logger.info).toHaveBeenCalledTimes(4)
+ expect((process as any).logger.info).toHaveBeenNthCalledWith(
+ 1,
+ 'Polling job status...'
+ )
+ expect((process as any).logger.info).toHaveBeenNthCalledWith(
+ 2,
+ 'Current job state: running'
+ )
+ expect((process as any).logger.info).toHaveBeenNthCalledWith(
+ 3,
+ 'Polling job status...'
+ )
+ expect((process as any).logger.info).toHaveBeenNthCalledWith(
+ 4,
+ 'Current job state: completed'
+ )
+ })
+
+ it('should continue polling when there is a single error in between', async () => {
+ mockPollWithSingleError()
+
+ const state = await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ undefined,
+ defaultPollOptions
+ )
+
+ expect(requestClient.get).toHaveBeenCalledTimes(2)
+ expect(state).toEqual('completed')
+ })
+
+ it('should throw an error when the error count exceeds the set value of 5', async () => {
+ mockErroredPoll()
+
+ const error = await pollJobState(
+ requestClient,
+ mockJob,
+ false,
+ undefined,
+ defaultPollOptions
+ ).catch((e) => e)
+
+ expect(error.message).toEqual(
+ 'Error while polling job state for job j0b: Status Error'
+ )
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../../request/RequestClient')
+ jest.mock('../../../auth/getTokens')
+ jest.mock('../saveLog')
+ jest.mock('../getFileStream')
+ jest.mock('../../../utils/isNode')
+
+ jest
+ .spyOn(requestClient, 'get')
+ .mockImplementation(() =>
+ Promise.resolve({ result: 'completed', etag: '', status: 200 })
+ )
+ jest
+ .spyOn(getTokensModule, 'getTokens')
+ .mockImplementation(() => Promise.resolve(mockAuthConfig))
+ jest
+ .spyOn(saveLogModule, 'saveLog')
+ .mockImplementation(() => Promise.resolve())
+ jest
+ .spyOn(getFileStreamModule, 'getFileStream')
+ .mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
+ jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
+}
+
+const mockSimplePoll = (runningCount = 2) => {
+ let count = 0
+ jest.spyOn(requestClient, 'get').mockImplementation((url) => {
+ count++
+ if (url.includes('job')) {
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ }
+ return Promise.resolve({
+ result:
+ count === 0
+ ? 'pending'
+ : count <= runningCount
+ ? 'running'
+ : 'completed',
+ etag: '',
+ status: 200
+ })
+ })
+}
+
+const mockRunningPoll = () => {
+ let count = 0
+ jest.spyOn(requestClient, 'get').mockImplementation((url) => {
+ count++
+ if (url.includes('job')) {
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ }
+ return Promise.resolve({
+ result: count === 0 ? 'pending' : 'running',
+ etag: '',
+ status: 200
+ })
+ })
+}
+
+const mockLongPoll = () => {
+ let count = 0
+ jest.spyOn(requestClient, 'get').mockImplementation((url) => {
+ count++
+ if (url.includes('job')) {
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ }
+ return Promise.resolve({
+ result: count <= 102 ? 'running' : 'completed',
+ etag: '',
+ status: 200
+ })
+ })
+}
+
+const mockPollWithSingleError = () => {
+ let count = 0
+ jest.spyOn(requestClient, 'get').mockImplementation((url) => {
+ count++
+ if (url.includes('job')) {
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ }
+ if (count === 1) {
+ return Promise.reject('Status Error')
+ }
+ return Promise.resolve({
+ result: count === 0 ? 'pending' : 'completed',
+ etag: '',
+ status: 200
+ })
+ })
+}
+
+const mockErroredPoll = () => {
+ jest.spyOn(requestClient, 'get').mockImplementation((url) => {
+ if (url.includes('job')) {
+ return Promise.resolve({ result: mockJob, etag: '', status: 200 })
+ }
+ return Promise.reject('Status Error')
+ })
+}
diff --git a/src/api/viya/spec/saveLog.spec.ts b/src/api/viya/spec/saveLog.spec.ts
new file mode 100644
index 0000000..261438e
--- /dev/null
+++ b/src/api/viya/spec/saveLog.spec.ts
@@ -0,0 +1,73 @@
+import { Logger, LogLevel } from '@sasjs/utils'
+import { RequestClient } from '../../../request/RequestClient'
+import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
+import * as writeStreamModule from '../writeStream'
+import { saveLog } from '../saveLog'
+import { mockJob } from './mockResponses'
+import { WriteStream } from '../../../types'
+
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+const stream = {} as unknown as WriteStream
+
+describe('saveLog', () => {
+ beforeEach(() => {
+ ;(process as any).logger = new Logger(LogLevel.Off)
+ setupMocks()
+ })
+
+ it('should throw an error when a valid access token is not provided', async () => {
+ const error = await saveLog(mockJob, requestClient, 0, 100, stream).catch(
+ (e) => e
+ )
+
+ expect(error.message).toContain(
+ `Logs for job ${mockJob.id} cannot be fetched without a valid access token.`
+ )
+ })
+
+ it('should throw an error when the log URL is not available', async () => {
+ const error = await saveLog(
+ { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'log') },
+ requestClient,
+ 0,
+ 100,
+ stream,
+ 't0k3n'
+ ).catch((e) => e)
+
+ expect(error.message).toContain(
+ `Log URL for job ${mockJob.id} was not found.`
+ )
+ })
+
+ it('should fetch and save logs to the given path', async () => {
+ await saveLog(mockJob, requestClient, 0, 100, stream, 't0k3n')
+
+ expect(fetchLogsModule.fetchLog).toHaveBeenCalledWith(
+ requestClient,
+ 't0k3n',
+ '/log/content',
+ 0,
+ 100
+ )
+ expect(writeStreamModule.writeStream).toHaveBeenCalledWith(
+ stream,
+ 'Test Log'
+ )
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../../request/RequestClient')
+ jest.mock('../../../utils/fetchLogByChunks')
+ jest.mock('@sasjs/utils')
+ jest.mock('../writeStream')
+
+ jest
+ .spyOn(fetchLogsModule, 'fetchLog')
+ .mockImplementation(() => Promise.resolve('Test Log'))
+ jest
+ .spyOn(writeStreamModule, 'writeStream')
+ .mockImplementation(() => Promise.resolve())
+}
diff --git a/src/api/viya/spec/uploadTables.spec.ts b/src/api/viya/spec/uploadTables.spec.ts
new file mode 100644
index 0000000..529c6e2
--- /dev/null
+++ b/src/api/viya/spec/uploadTables.spec.ts
@@ -0,0 +1,67 @@
+import { RequestClient } from '../../../request/RequestClient'
+import * as convertToCsvModule from '../../../utils/convertToCsv'
+import { uploadTables } from '../uploadTables'
+
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+
+describe('uploadTables', () => {
+ beforeEach(() => {
+ setupMocks()
+ })
+
+ it('should return a list of uploaded files', async () => {
+ const data = { foo: 'bar' }
+
+ const files = await uploadTables(requestClient, data, 't0k3n')
+
+ expect(files).toEqual([{ tableName: 'foo', file: 'test-file' }])
+ expect(requestClient.uploadFile).toHaveBeenCalledTimes(1)
+ expect(requestClient.uploadFile).toHaveBeenCalledWith(
+ '/files/files#rawUpload',
+ 'Test CSV',
+ 't0k3n'
+ )
+ })
+
+ it('should throw an error when the CSV exceeds the maximum length', async () => {
+ const data = { foo: 'bar' }
+ jest
+ .spyOn(convertToCsvModule, 'convertToCSV')
+ .mockImplementation(() => 'ERROR: LARGE STRING LENGTH')
+
+ const error = await uploadTables(requestClient, data, 't0k3n').catch(
+ (e) => e
+ )
+
+ expect(requestClient.uploadFile).not.toHaveBeenCalled()
+ expect(error.message).toEqual(
+ 'The max length of a string value in SASjs is 32765 characters.'
+ )
+ })
+
+ it('should throw an error when the file upload fails', async () => {
+ const data = { foo: 'bar' }
+ jest
+ .spyOn(requestClient, 'uploadFile')
+ .mockImplementation(() => Promise.reject('Upload Error'))
+
+ const error = await uploadTables(requestClient, data, 't0k3n').catch(
+ (e) => e
+ )
+
+ expect(error).toContain('Error while uploading file.')
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../../utils/convertToCsv')
+ jest
+ .spyOn(convertToCsvModule, 'convertToCSV')
+ .mockImplementation(() => 'Test CSV')
+ jest
+ .spyOn(requestClient, 'uploadFile')
+ .mockImplementation(() =>
+ Promise.resolve({ result: 'test-file', etag: '' })
+ )
+}
diff --git a/src/api/viya/spec/writeStream.spec.ts b/src/api/viya/spec/writeStream.spec.ts
new file mode 100644
index 0000000..358c82a
--- /dev/null
+++ b/src/api/viya/spec/writeStream.spec.ts
@@ -0,0 +1,25 @@
+import { WriteStream } from '../../../types'
+import { writeStream } from '../writeStream'
+import 'jest-extended'
+
+describe('writeStream', () => {
+ const stream: WriteStream = {
+ write: jest.fn(),
+ path: 'test'
+ }
+
+ it('should resolve when the stream is written successfully', async () => {
+ expect(writeStream(stream, 'test')).toResolve()
+
+ expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
+ })
+
+ it('should reject when the write errors out', async () => {
+ jest
+ .spyOn(stream, 'write')
+ .mockImplementation((_, callback) => callback(new Error('Test Error')))
+ const error = await writeStream(stream, 'test').catch((e) => e)
+
+ expect(error.message).toEqual('Test Error')
+ })
+})
diff --git a/src/api/viya/uploadTables.ts b/src/api/viya/uploadTables.ts
new file mode 100644
index 0000000..b9e4402
--- /dev/null
+++ b/src/api/viya/uploadTables.ts
@@ -0,0 +1,37 @@
+import { prefixMessage } from '@sasjs/utils/error'
+import { RequestClient } from '../../request/RequestClient'
+import { convertToCSV } from '../../utils/convertToCsv'
+
+/**
+ * Uploads tables to SAS as specially formatted CSVs.
+ * This is more compact than JSON, and easier to read within SAS.
+ * @param requestClient - the pre-configured HTTP request client
+ * @param data - the JSON representation of the data to be uploaded
+ * @param accessToken - an optional access token for authentication/authorization
+ * The access token is not required when uploading tables from the browser.
+ */
+export async function uploadTables(
+ requestClient: RequestClient,
+ data: any,
+ accessToken?: string
+) {
+ const uploadedFiles = []
+
+ for (const tableName in data) {
+ const csv = convertToCSV(data[tableName])
+ if (csv === 'ERROR: LARGE STRING LENGTH') {
+ throw new Error(
+ 'The max length of a string value in SASjs is 32765 characters.'
+ )
+ }
+
+ const uploadResponse = await requestClient
+ .uploadFile(`/files/files#rawUpload`, csv, accessToken)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while uploading file. ')
+ })
+
+ uploadedFiles.push({ tableName, file: uploadResponse.result })
+ }
+ return uploadedFiles
+}
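+
+// Usage sketch (comment only; the table name and data below are hypothetical):
+//
+//   const files = await uploadTables(requestClient, { mytable: [{ col1: 'a;b' }] }, accessToken)
+//   // -> [{ tableName: 'mytable', file: <file resource returned by the upload> }]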
diff --git a/src/api/viya/writeStream.ts b/src/api/viya/writeStream.ts
new file mode 100644
index 0000000..0baaaa0
--- /dev/null
+++ b/src/api/viya/writeStream.ts
@@ -0,0 +1,15 @@
+import { WriteStream } from '../../types'
+
+export const writeStream = async (
+ stream: WriteStream,
+ content: string
+): Promise<void> => {
+ return new Promise((resolve, reject) => {
+ stream.write(content + '\n', (e) => {
+ if (e) {
+ return reject(e)
+ }
+ return resolve()
+ })
+ })
+}
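+
+// Usage sketch (comment only): appends one line to an already-open stream and
+// resolves once the underlying write callback has fired.
+//
+//   await writeStream(logFileStream, 'NOTE: polling iteration complete')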
diff --git a/src/auth/getAccessToken.ts b/src/auth/getAccessToken.ts
new file mode 100644
index 0000000..d51833e
--- /dev/null
+++ b/src/auth/getAccessToken.ts
@@ -0,0 +1,53 @@
+import { SasAuthResponse } from '@sasjs/utils/types'
+import { prefixMessage } from '@sasjs/utils/error'
+import * as NodeFormData from 'form-data'
+import { RequestClient } from '../request/RequestClient'
+
+/**
+ * Exchanges the auth code for an access token for the given client.
+ * @param requestClient - the pre-configured HTTP request client
+ * @param clientId - the client ID to authenticate with.
+ * @param clientSecret - the client secret to authenticate with.
+ * @param authCode - the auth code received from the server.
+ */
+export async function getAccessToken(
+ requestClient: RequestClient,
+ clientId: string,
+ clientSecret: string,
+ authCode: string
+): Promise<SasAuthResponse> {
+ const url = '/SASLogon/oauth/token'
+ let token
+ if (typeof Buffer === 'undefined') {
+ token = btoa(clientId + ':' + clientSecret)
+ } else {
+ token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
+ }
+ const headers = {
+ Authorization: 'Basic ' + token
+ }
+
+ let formData
+ if (typeof FormData === 'undefined') {
+ formData = new NodeFormData()
+ } else {
+ formData = new FormData()
+ }
+ formData.append('grant_type', 'authorization_code')
+ formData.append('code', authCode)
+
+ const authResponse = await requestClient
+ .post(
+ url,
+ formData,
+ undefined,
+ 'multipart/form-data; boundary=' + (formData as any)._boundary,
+ headers
+ )
+ .then((res) => res.result as SasAuthResponse)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while getting access token')
+ })
+
+ return authResponse
+}
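+
+// Usage sketch (comment only; the client, secret and auth code values are hypothetical):
+//
+//   const auth = await getAccessToken(requestClient, 'myClientId', 'myClientSecret', authCode)
+//   // auth.access_token and auth.refresh_token can then be stored in an AuthConfig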
diff --git a/src/auth/getTokens.ts b/src/auth/getTokens.ts
new file mode 100644
index 0000000..fe7779d
--- /dev/null
+++ b/src/auth/getTokens.ts
@@ -0,0 +1,40 @@
+import {
+ isAccessTokenExpiring,
+ isRefreshTokenExpiring,
+ hasTokenExpired
+} from '@sasjs/utils/auth'
+import { AuthConfig } from '@sasjs/utils/types'
+import { RequestClient } from '../request/RequestClient'
+import { refreshTokens } from './refreshTokens'
+
+/**
+ * Returns the auth configuration, refreshing the tokens if necessary.
+ * @param requestClient - the pre-configured HTTP request client
+ * @param authConfig - an object containing a client ID, secret, access token and refresh token
+ */
+export async function getTokens(
+ requestClient: RequestClient,
+ authConfig: AuthConfig
+): Promise<AuthConfig> {
+ const logger = process.logger || console
+ let { access_token, refresh_token, client, secret } = authConfig
+ if (
+ isAccessTokenExpiring(access_token) ||
+ isRefreshTokenExpiring(refresh_token)
+ ) {
+ if (hasTokenExpired(refresh_token)) {
+ const error =
+ 'Unable to obtain new access token. Your refresh token has expired.'
+ logger.error(error)
+ throw new Error(error)
+ }
+ logger.info('Refreshing access and refresh tokens.')
+ ;({ access_token, refresh_token } = await refreshTokens(
+ requestClient,
+ client,
+ secret,
+ refresh_token
+ ))
+ }
+ return { access_token, refresh_token, client, secret }
+}
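+
+// Usage sketch (comment only; `authConfig` is a hypothetical AuthConfig object):
+//
+//   const { access_token } = await getTokens(requestClient, authConfig)
+//   // expiring tokens are refreshed transparently; an expired refresh token throws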
diff --git a/src/auth/refreshTokens.ts b/src/auth/refreshTokens.ts
new file mode 100644
index 0000000..5871d63
--- /dev/null
+++ b/src/auth/refreshTokens.ts
@@ -0,0 +1,49 @@
+import { SasAuthResponse } from '@sasjs/utils/types'
+import { prefixMessage } from '@sasjs/utils/error'
+import * as NodeFormData from 'form-data'
+import { RequestClient } from '../request/RequestClient'
+
+/**
+ * Exchanges the refresh token for an access token for the given client.
+ * @param requestClient - the pre-configured HTTP request client
+ * @param clientId - the client ID to authenticate with.
+ * @param clientSecret - the client secret to authenticate with.
+ * @param refreshToken - the refresh token received from the server.
+ */
+export async function refreshTokens(
+ requestClient: RequestClient,
+ clientId: string,
+ clientSecret: string,
+ refreshToken: string
+) {
+ const url = '/SASLogon/oauth/token'
+ let token
+ token =
+ typeof Buffer === 'undefined'
+ ? btoa(clientId + ':' + clientSecret)
+ : Buffer.from(clientId + ':' + clientSecret).toString('base64')
+
+ const headers = {
+ Authorization: 'Basic ' + token
+ }
+
+ const formData =
+ typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+ formData.append('grant_type', 'refresh_token')
+ formData.append('refresh_token', refreshToken)
+
+ const authResponse = await requestClient
+ .post(
+ url,
+ formData,
+ undefined,
+ 'multipart/form-data; boundary=' + (formData as any)._boundary,
+ headers
+ )
+ .then((res) => res.result)
+ .catch((err) => {
+ throw prefixMessage(err, 'Error while refreshing tokens')
+ })
+
+ return authResponse
+}
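+
+// Usage sketch (comment only; the client and secret values are hypothetical):
+//
+//   const { access_token, refresh_token } = await refreshTokens(
+//     requestClient,
+//     'myClientId',
+//     'myClientSecret',
+//     currentRefreshToken
+//   )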
diff --git a/src/auth/spec/getAccessToken.spec.ts b/src/auth/spec/getAccessToken.spec.ts
new file mode 100644
index 0000000..e4fa00f
--- /dev/null
+++ b/src/auth/spec/getAccessToken.spec.ts
@@ -0,0 +1,75 @@
+import { AuthConfig } from '@sasjs/utils'
+import * as NodeFormData from 'form-data'
+import { generateToken, mockAuthResponse } from './mockResponses'
+import { RequestClient } from '../../request/RequestClient'
+import { getAccessToken } from '../getAccessToken'
+
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+
+describe('getAccessToken', () => {
+ it('should attempt to refresh tokens', async () => {
+ setupMocks()
+ const access_token = generateToken(30)
+ const refresh_token = generateToken(30)
+ const authConfig: AuthConfig = {
+ access_token,
+ refresh_token,
+ client: 'cl13nt',
+ secret: 's3cr3t'
+ }
+ jest
+ .spyOn(requestClient, 'post')
+ .mockImplementation(() =>
+ Promise.resolve({ result: mockAuthResponse, etag: '' })
+ )
+ const token = Buffer.from(
+ authConfig.client + ':' + authConfig.secret
+ ).toString('base64')
+
+ await getAccessToken(
+ requestClient,
+ authConfig.client,
+ authConfig.secret,
+ authConfig.refresh_token
+ )
+
+ expect(requestClient.post).toHaveBeenCalledWith(
+ '/SASLogon/oauth/token',
+ expect.any(NodeFormData),
+ undefined,
+ expect.stringContaining('multipart/form-data; boundary='),
+ {
+ Authorization: 'Basic ' + token
+ }
+ )
+ })
+
+ it('should handle errors while refreshing tokens', async () => {
+ setupMocks()
+ const access_token = generateToken(30)
+ const refresh_token = generateToken(30)
+ const authConfig: AuthConfig = {
+ access_token,
+ refresh_token,
+ client: 'cl13nt',
+ secret: 's3cr3t'
+ }
+ jest
+ .spyOn(requestClient, 'post')
+ .mockImplementation(() => Promise.reject('Token Error'))
+
+ const error = await getAccessToken(
+ requestClient,
+ authConfig.client,
+ authConfig.secret,
+ authConfig.refresh_token
+ ).catch((e) => e)
+
+ expect(error).toContain('Error while getting access token')
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../request/RequestClient')
+}
diff --git a/src/auth/spec/getTokens.spec.ts b/src/auth/spec/getTokens.spec.ts
new file mode 100644
index 0000000..de4397c
--- /dev/null
+++ b/src/auth/spec/getTokens.spec.ts
@@ -0,0 +1,79 @@
+import { AuthConfig } from '@sasjs/utils'
+import * as refreshTokensModule from '../refreshTokens'
+import { generateToken, mockAuthResponse } from './mockResponses'
+import { getTokens } from '../getTokens'
+import { RequestClient } from '../../request/RequestClient'
+
+const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
+
+describe('getTokens', () => {
+ it('should attempt to refresh tokens if the access token is expiring', async () => {
+ setupMocks()
+ const access_token = generateToken(30)
+ const refresh_token = generateToken(86400000)
+ const authConfig: AuthConfig = {
+ access_token,
+ refresh_token,
+ client: 'cl13nt',
+ secret: 's3cr3t'
+ }
+
+ await getTokens(requestClient, authConfig)
+
+ expect(refreshTokensModule.refreshTokens).toHaveBeenCalledWith(
+ requestClient,
+ authConfig.client,
+ authConfig.secret,
+ authConfig.refresh_token
+ )
+ })
+
+ it('should attempt to refresh tokens if the refresh token is expiring', async () => {
+ setupMocks()
+ const access_token = generateToken(86400000)
+ const refresh_token = generateToken(30)
+ const authConfig: AuthConfig = {
+ access_token,
+ refresh_token,
+ client: 'cl13nt',
+ secret: 's3cr3t'
+ }
+
+ await getTokens(requestClient, authConfig)
+
+ expect(refreshTokensModule.refreshTokens).toHaveBeenCalledWith(
+ requestClient,
+ authConfig.client,
+ authConfig.secret,
+ authConfig.refresh_token
+ )
+ })
+
+ it('should throw an error if the refresh token has already expired', async () => {
+ setupMocks()
+ const access_token = generateToken(86400000)
+ const refresh_token = generateToken(-36000)
+ const authConfig: AuthConfig = {
+ access_token,
+ refresh_token,
+ client: 'cl13nt',
+ secret: 's3cr3t'
+ }
+ const expectedError =
+ 'Unable to obtain new access token. Your refresh token has expired.'
+
+ const error = await getTokens(requestClient, authConfig).catch((e) => e)
+
+ expect(error.message).toEqual(expectedError)
+ })
+})
+
+const setupMocks = () => {
+ jest.restoreAllMocks()
+ jest.mock('../../request/RequestClient')
+ jest.mock('../refreshTokens')
+
+ jest
+ .spyOn(refreshTokensModule, 'refreshTokens')
+ .mockImplementation(() => Promise.resolve(mockAuthResponse))
+}
diff --git a/src/auth/spec/mockResponses.ts b/src/auth/spec/mockResponses.ts
index 4ffcfb2..e15391a 100644
--- a/src/auth/spec/mockResponses.ts
+++ b/src/auth/spec/mockResponses.ts
@@ -1,2 +1,24 @@
+import { SasAuthResponse } from '@sasjs/utils/types'
+
export const mockLoginAuthoriseRequiredResponse = `