Mirror of https://github.com/sasjs/adapter.git (synced 2025-12-11 01:14:36 +00:00)

Compare commits: v2.2.6...debug-sasj (413 commits)
[Commit list: the compare view listed the 413 commits between `v2.2.6` and `debug-sasj`, but only abbreviated commit SHAs were captured in this export — no authors, dates, or messages — so the table is not reproduced here.]
.all-contributorsrc — new file (103 lines)

```json
{
  "projectName": "adapter",
  "projectOwner": "sasjs",
  "repoType": "github",
  "repoHost": "https://github.com",
  "files": [
    "README.md"
  ],
  "imageSize": 100,
  "commit": false,
  "commitConvention": "angular",
  "contributors": [
    {
      "login": "krishna-acondy",
      "name": "Krishna Acondy",
      "avatar_url": "https://avatars.githubusercontent.com/u/2980428?v=4",
      "profile": "https://krishna-acondy.io/",
      "contributions": [
        "code",
        "infra",
        "blog",
        "content",
        "ideas",
        "video"
      ]
    },
    {
      "login": "YuryShkoda",
      "name": "Yury Shkoda",
      "avatar_url": "https://avatars.githubusercontent.com/u/25773492?v=4",
      "profile": "https://www.erudicat.com/",
      "contributions": [
        "code",
        "infra",
        "ideas",
        "test",
        "video"
      ]
    },
    {
      "login": "medjedovicm",
      "name": "Mihajlo Medjedovic",
      "avatar_url": "https://avatars.githubusercontent.com/u/18329105?v=4",
      "profile": "https://github.com/medjedovicm",
      "contributions": [
        "code",
        "infra",
        "test",
        "review"
      ]
    },
    {
      "login": "allanbowe",
      "name": "Allan Bowe",
      "avatar_url": "https://avatars.githubusercontent.com/u/4420615?v=4",
      "profile": "https://github.com/allanbowe",
      "contributions": [
        "code",
        "review",
        "test",
        "mentoring",
        "maintenance"
      ]
    },
    {
      "login": "saadjutt01",
      "name": "Muhammad Saad ",
      "avatar_url": "https://avatars.githubusercontent.com/u/8914650?v=4",
      "profile": "https://github.com/saadjutt01",
      "contributions": [
        "code",
        "review",
        "test",
        "mentoring",
        "infra"
      ]
    },
    {
      "login": "sabhas",
      "name": "Sabir Hassan",
      "avatar_url": "https://avatars.githubusercontent.com/u/82647447?v=4",
      "profile": "https://github.com/sabhas",
      "contributions": [
        "code",
        "review",
        "test",
        "ideas"
      ]
    },
    {
      "login": "VladislavParhomchik",
      "name": "VladislavParhomchik",
      "avatar_url": "https://avatars.githubusercontent.com/u/83717836?v=4",
      "profile": "https://github.com/VladislavParhomchik",
      "contributions": [
        "test",
        "review"
      ]
    }
  ],
  "contributorsPerLine": 7,
  "skipCi": true
}
```
.git-hooks/commit-msg — new executable file (18 lines)

```sh
#!/bin/sh
RED="\033[1;31m"
GREEN="\033[1;32m"

# Get the commit message (the parameter we're given is just the path to the
# temporary file which holds the message).
commit_message=$(cat "$1")

if (echo "$commit_message" | grep -Eq "^(build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)(\([a-z0-9 -\*]+\))?!?: .+$") then
  echo "${GREEN} ✔ Commit message meets Conventional Commit standards"
  exit 0
fi

echo "${RED}❌ Commit message does not meet the Conventional Commit standard!"
echo "An example of a valid message is:"
echo "  feat(login): add the 'remember me' button"
echo "ℹ More details at: https://www.conventionalcommits.org/en/v1.0.0/#summary"
exit 1
```
.github/dependabot.yml — new file (7 lines; indentation reconstructed)

```yaml
version: 2
updates:
  - package-ecosystem: npm
    directory: "/"
    schedule:
      interval: daily
    open-pull-requests-limit: 10
```
.github/reviewer-lottery.yml — adds the `sabhas` reviewer and a new `SASjs QA` group (indentation reconstructed)

```yaml
@@ -7,3 +7,8 @@ groups:
      - saadjutt01
      - medjedovicm
      - allanbowe
      - sabhas
  - name: SASjs QA
    reviewers: 1
    usernames:
      - VladislavParhomchik
```
.github/workflows/build.yml — Node version bumped to 15.x and a coverage-report step added (removed/added markers reconstructed from the hunk counts)

```diff
@@ -13,7 +13,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [12.x]
+        node-version: [15.x]

     steps:
       - uses: actions/checkout@v2
@@ -27,6 +27,10 @@ jobs:
         run: npm run lint
       - name: Run unit tests
         run: npm test
+      - name: Generate coverage report
+        uses: artiomtr/jest-coverage-report-action@v2.0-rc.2
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
       - name: Build Package
         run: npm run package:lib
         env:
```
.npmignore — new file (5 lines)

```
sasjs-tests/
docs/
.github/
*.md
*.spec.ts
```
CHANGELOG.md (file name inferred; header not captured)

@@ -1,5 +1,9 @@

# Change Log

Since March 2020 the changelog is managed by github releases - see [https://github.com/sasjs/adapter/releases](https://github.com/sasjs/adapter/releases).

## Changes up to 5th March 2020

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

<a name="1.9.0"></a>
CODE_OF_CONDUCT.md (file name inferred; header not captured) — the hunk updates the Contributor Covenant from v1.4 to v2.0; removed (v1.4) and added (v2.0) lines were captured in sequence, without diff markers.

@@ -2,75 +2,127 @@

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:
Examples of behavior that contributes to a positive environment for our
community include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior by participants include:
Examples of unacceptable behavior include:

* The use of sexualized language or imagery and unwelcome sexual attention or
  advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
  address, without explicit permission
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Our Responsibilities
## Enforcement Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at support@macropeople.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
reported to the community leaders responsible for enforcement at
https://sasapps.io/contact-us.
All complaints will be reviewed and investigated promptly and fairly.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
LICENSE — copyright year updated

```diff
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2020 Macro People
+Copyright (c) 2021 Macro People

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
```
Pull request template (file name not captured) — removed and added checklist items appear in sequence

@@ -12,7 +12,9 @@ What code changes have been made to achieve the intent.

## Checks

- [ ] Code is formatted correctly (`npm run lint:fix`).
- [ ] All unit tests are passing (`npm test`).
No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.

- [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya
README.md (223 lines changed) — removed and added lines appear in sequence; badge images were stripped by the export and are left as captured

@@ -1,12 +1,28 @@

[](https://www.jsdelivr.com/package/npm/@sasjs/adapter)

# @sasjs/adapter

[![npm package][npm-image]][npm-url]
[![Github Workflow][githubworkflow-image]][githubworkflow-url]
[![Dependency Status][dependency-image]][dependency-url]
[]()

[](/LICENSE)

[](https://gitpod.io/#https://github.com/sasjs/adapter)

[npm-image]:https://img.shields.io/npm/v/@sasjs/adapter.svg
[npm-url]:http://npmjs.org/package/@sasjs/adapter
[githubworkflow-image]:https://github.com/sasjs/adapter/actions/workflows/build.yml/badge.svg
[githubworkflow-url]:https://github.com/sasjs/adapter/blob/main/.github/workflows/build.yml
[dependency-image]:https://david-dm.org/sasjs/adapter.svg
[dependency-url]:https://github.com/sasjs/adapter/blob/main/package.json

SASjs is a open-source framework for building Web Apps on SAS® platforms. You can use as much or as little of it as you like. This repository contains the JS adapter, the part that handles the to/from SAS communication on the client side. There are 3 ways to install it:

1 - `npm install @sasjs/adapter` - for use in a node project

2 - [Download](https://cdn.jsdelivr.net/npm/@sasjs/adapter@1/index.js) and use a copy of the latest JS file
2 - [Download](https://cdn.jsdelivr.net/npm/@sasjs/adapter@2/index.js) and use a copy of the latest JS file

3 - Reference directly from the CDN - in which case click [here](https://www.jsdelivr.com/package/npm/@sasjs/adapter?tab=collection) and select "SRI" to get the script tag with the integrity hash.

@@ -41,8 +57,209 @@ parmcards4;

You now have a simple web app with a backend service!

## Detailed Overview

The SASjs adapter is a JS library and a set of SAS Macros that handle the communication between the frontend app and backend SAS services.

There are three parts to consider:

1. JS request / response
2. SAS inputs / outputs
3. Configuration

### JS Request / Response

To install the library you can simply run `npm i @sasjs/adapter` or include a `<script>` tag with a reference to our [CDN](https://www.jsdelivr.com/package/npm/@sasjs/adapter).

Full technical documentation is available [here](https://adapter.sasjs.io). The main parts are:

### Instantiation
The following code will instantiate an instance of the adapter:

```javascript
let sasJs = new SASjs.default(
  {
    appLoc: "/Your/SAS/Folder",
    serverType: "SAS9"
  }
);
```
If you've installed it via NPM, you can import it as a default import like so:
```
import SASjs from '@sasjs/adapter';
```
You can then instantiate it with:
```
const sasJs = new SASjs({your config})
```

More on the config later.

### SAS Logon
The login process can be handled directly, as below, or as a callback function to a SAS request.

```javascript
sasJs.logIn('USERNAME','PASSWORD'
).then((response) => {
  if (response.isLoggedIn === true) {
    console.log('do stuff')
  } else {
    console.log('do other stuff')
  }
}
```

### Request / Response
A simple request can be sent to SAS in the following fashion:

```javascript
sasJs.request("/path/to/my/service", dataObject)
  .then((response) => {
    // all tables are in the response object, eg:
    console.log(response.tablewith2cols1row[0].COL1.value)
  })
```
We supply the path to the SAS service, and a data object. The data object can be null (for services with no input), or can contain one or more tables in the following format:

```javascript
let dataObject={
  "tablewith2cols1row": [{
    "col1": "val1",
    "col2": 42
  }],
  "tablewith1col2rows": [{
    "col": "row1"
  }, {
    "col": "row2"
  }]
};
```

There are optional parameters such as a config object and a callback login function.

The response object will contain returned tables and columns. Table names are always lowercase, and column names uppercase.

The adapter will also cache the logs (if debug enabled) and even the work tables. For performance, it is best to keep debug mode off.
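As a concrete illustration of those optional parameters, here is a minimal sketch (not part of the diff above): it assumes the extra arguments to `request` are a per-request config override and a login callback, as the prose describes.

```javascript
// Sketch only - the config-override and login-callback parameters are assumptions
// based on the prose above, not copied from the repository.
sasJs.request(
  '/path/to/my/service',
  dataObject,
  { debug: true }, // optional per-request config override
  () => sasJs.logIn('USERNAME', 'PASSWORD') // optional callback if a login is required
).then((response) => {
  // table names come back lowercase, column names uppercase
  console.log(response.tablewith2cols1row[0].COL1.value)
})
```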
## SAS Inputs / Outputs

The SAS side is handled by a number of macros in the [macro core](https://github.com/sasjs/core) library.

The following snippet shows the process of SAS tables arriving / leaving:
```sas
/* fetch all input tables sent from frontend - they arrive as work tables */
%webout(FETCH)

/* some sas code */
data some sas tables;
  set from js;
run;

%webout(OPEN) /* open the JSON to be returned */
%webout(OBJ,some) /* `some` table is sent in object format */
%webout(ARR,sas) /* `sas` table is sent in array format, smaller filesize */
%webout(OBJ,tables,fmt=N) /* unformatted (raw) data */
%webout(OBJ,tables,label=newtable) /* rename tables on export */
%webout(CLOSE) /* close the JSON and send some extra useful variables too */

```

## Configuration

Configuration on the client side involves passing an object on startup, which can also be passed with each request. Technical documentation on the SASjsConfig class is available [here](https://adapter.sasjs.io/classes/types.sasjsconfig.html). The main config items are:

* `appLoc` - this is the folder under which the SAS services will be created.
* `serverType` - either `SAS9` or `SASVIYA`.
* `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
* `debug` - if `true` then SAS Logs and extra debug information is returned.
* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
* `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.

The adapter supports a number of approaches for interfacing with Viya (`serverType` is `SASVIYA`). For maximum performance, be sure to [configure your compute context](https://sasjs.io/guide-viya/#shared-account-and-server-re-use) with `reuseServerProcesses` as `true` and a system account in `runServerAs`. This functionality is available since Viya 3.5. This configuration is supported when [creating contexts using the CLI](https://sasjs.io/sasjs-cli-context/#sasjs-context-create).
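Putting the options above together, a startup config might look like the following sketch (illustrative placeholder values only; this block is not part of the README diff):

```javascript
// Illustrative only - every value below is a placeholder, not taken from the repository.
import SASjs from '@sasjs/adapter'

const sasJs = new SASjs({
  appLoc: '/Your/Path',                       // folder under which the SAS services live
  serverType: 'SASVIYA',                      // or 'SAS9'
  serverUrl: 'https://your-sas-server.com',   // may be omitted if served from the SAS Web Server
  debug: false,                               // keep off for performance
  useComputeApi: true,                        // true = Compute API, false = JES API, undefined = JES Web App
  contextName: 'SAS Job Execution compute context'
})
```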
### Using JES Web App

In this setup, all requests are routed through the JES web app, at `YOURSERVER/SASJobExecution?_program=/your/program`. This is the most reliable method, and also the slowest. One request is made to the JES app, and remaining requests (getting job uri, session spawning, passing parameters, running the program, fetching the log) are handled by the SAS server inside the JES app.

```
{
  appLoc:"/Your/Path",
  serverType:"SASVIYA",
  contextName: 'yourComputeContext'
}
```

Note - to use the web approach, the `useComputeApi` property must be `undefined` or `null`.

### Using the JES API
Here we are running Jobs using the Job Execution Service except this time we are making the requests directly using the REST API instead of through the JES Web App. This is helpful when we need to call web services outside of a browser (eg with the SASjs CLI or other commandline tools). To save one network request, the adapter prefetches the JOB URIs and passes them in the `__job` parameter. Depending on your network bandwidth, it may or may not be faster than the JES Web approach.

This approach (`useComputeApi: false`) also ensures that jobs are displayed in Environment Manager.

```
{
  appLoc:"/Your/Path",
  serverType:"SASVIYA",
  useComputeApi: false,
  contextName: 'yourComputeContext'
}
```

### Using the Compute API
This approach is by far the fastest, as a result of the optimisations we have built into the adapter. With this configuration, in the first sasjs request, we take a URI map of the services in the target folder, and create a session manager. This manager will spawn a additional session every time a request is made. Subsequent requests will use the existing 'hot' session, if it exists. Sessions are always deleted after every use, which actually makes this _less_ resource intensive than a typical JES web app, in which all sessions are kept alive by default for 15 minutes.

With this approach (`useComputeApi: true`), the requests/logs will _not_ appear in the list in Environment manager.

```
{
  appLoc:"/Your/Path",
  serverType:"SASVIYA",
  useComputeApi: true,
  contextName: 'yourComputeContext'
}
```

# More resources

For more information and examples specific to this adapter you can check out the [user guide](https://sasjs.io/sasjs-adapter/) or the [technical](http://adapter.sasjs.io/) documentation.

For more information on building web apps in general, check out these [resources](https://sasjs.io/training/resources/) or contact the [author](https://www.linkedin.com/in/allanbowe/) directly.

If you are a SAS 9 or SAS Viya customer you can also request a copy of [Data Controller](https://datacontroller.io) - free for up to 5 users, this tool makes use of all parts of the SASjs framework.

## Star Gazing

If you find this library useful, help us grow our star graph!

(star chart image not captured in this export)

## Contributors ✨
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->

Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):

<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
  <tr>
    <td align="center"><a href="https://krishna-acondy.io/"><img src="https://avatars.githubusercontent.com/u/2980428?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Krishna Acondy</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=krishna-acondy" title="Code">💻</a> <a href="#infra-krishna-acondy" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#blog-krishna-acondy" title="Blogposts">📝</a> <a href="#content-krishna-acondy" title="Content">🖋</a> <a href="#ideas-krishna-acondy" title="Ideas, Planning, & Feedback">🤔</a> <a href="#video-krishna-acondy" title="Videos">📹</a></td>
    <td align="center"><a href="https://www.erudicat.com/"><img src="https://avatars.githubusercontent.com/u/25773492?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Yury Shkoda</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=YuryShkoda" title="Code">💻</a> <a href="#infra-YuryShkoda" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#ideas-YuryShkoda" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/sasjs/adapter/commits?author=YuryShkoda" title="Tests">⚠️</a> <a href="#video-YuryShkoda" title="Videos">📹</a></td>
    <td align="center"><a href="https://github.com/medjedovicm"><img src="https://avatars.githubusercontent.com/u/18329105?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Mihajlo Medjedovic</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Code">💻</a> <a href="#infra-medjedovicm" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Tests">⚠️</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Amedjedovicm" title="Reviewed Pull Requests">👀</a></td>
    <td align="center"><a href="https://github.com/allanbowe"><img src="https://avatars.githubusercontent.com/u/4420615?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Allan Bowe</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=allanbowe" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Aallanbowe" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=allanbowe" title="Tests">⚠️</a> <a href="#mentoring-allanbowe" title="Mentoring">🧑🏫</a> <a href="#maintenance-allanbowe" title="Maintenance">🚧</a></td>
    <td align="center"><a href="https://github.com/saadjutt01"><img src="https://avatars.githubusercontent.com/u/8914650?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Muhammad Saad </b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=saadjutt01" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Asaadjutt01" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=saadjutt01" title="Tests">⚠️</a> <a href="#mentoring-saadjutt01" title="Mentoring">🧑🏫</a> <a href="#infra-saadjutt01" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
    <td align="center"><a href="https://github.com/sabhas"><img src="https://avatars.githubusercontent.com/u/82647447?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sabir Hassan</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=sabhas" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Asabhas" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=sabhas" title="Tests">⚠️</a> <a href="#ideas-sabhas" title="Ideas, Planning, & Feedback">🤔</a></td>
    <td align="center"><a href="https://github.com/VladislavParhomchik"><img src="https://avatars.githubusercontent.com/u/83717836?v=4?s=100" width="100px;" alt=""/><br /><sub><b>VladislavParhomchik</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=VladislavParhomchik" title="Tests">⚠️</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3AVladislavParhomchik" title="Reviewed Pull Requests">👀</a></td>
  </tr>
</table>

<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->

<!-- ALL-CONTRIBUTORS-LIST:END -->

This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!
checkNodeVersion.js — new file (16 lines)

```javascript
const result = process.versions
if (result && result.node) {
  if (parseInt(result.node) < 14) {
    console.log(
      '\x1b[31m%s\x1b[0m',
      `❌ Process failed due to Node Version,\nPlease install and use Node Version >= 14\nYour current Node Version is: ${result.node}`
    )
    process.exit(1)
  }
} else {
  console.log(
    '\x1b[31m%s\x1b[0m',
    'Something went wrong while checking Node version'
  )
  process.exit(1)
}
```
Diffs for the generated documentation and the root lockfile were suppressed in this export (lines too long / file too large). The affected files include the new typedoc pages for the error classes (docs/classes/types_errors.authorizeerror.html, computejobexecutionerror, errorresponse, internalservererror, jobexecutionerror, loginrequirederror, notfounderror), docs/index.html, docs/interfaces/job_execution.waitingrequstpromise.html, docs/interfaces/types.logstatistics.html, docs/modules/types_errors.html, several other docs/ pages whose names were not captured, and the regenerated package-lock.json (26,040 lines).
package.json (53 lines changed) — removed and added lines appear in sequence (each old line is followed by its replacement)

```
@@ -1,17 +1,21 @@
{
  "name": "@sasjs/adapter",
  "description": "JavaScript adapter for SAS",
  "homepage": "https://adapter.sasjs.io",
  "scripts": {
    "build": "rimraf build && rimraf node && mkdir node && cp -r src/* node && webpack && rimraf build/src && rimraf node",
    "package:lib": "npm run build && cp ./package.json build && cd build && npm version \"5.0.0\" && npm pack",
    "preinstall": "node checkNodeVersion",
    "prebuild": "node checkNodeVersion",
    "build": "rimraf build && rimraf node && mkdir node && copyfiles -u 1 \"./src/**/*\" ./node && webpack && rimraf build/src && rimraf node",
    "package:lib": "npm run build && copyfiles ./package.json ./checkNodeVersion.js build && cd build && npm version \"5.0.0\" && npm pack",
    "publish:lib": "npm run build && cd build && npm publish",
    "lint:fix": "npx prettier --write 'src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}'",
    "lint": "npx prettier --check 'src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}'",
    "lint:fix": "npx prettier --write \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --write \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
    "lint": "npx prettier --check \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --check \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
    "test": "jest --silent --coverage",
    "prepublishOnly": "cp -r ./build/* . && rm -rf ./build",
    "postpublish": "git clean -fd",
    "semantic-release": "semantic-release",
    "typedoc": "typedoc"
    "typedoc": "typedoc",
    "prepare": "git rev-parse --git-dir && git config core.hooksPath ./.git-hooks && git config core.autocrlf false || true"
  },
  "publishConfig": {
    "access": "public"
@@ -36,31 +40,40 @@
  },
  "license": "ISC",
  "devDependencies": {
    "@types/jest": "^26.0.20",
    "@types/axios": "^0.14.0",
    "@types/form-data": "^2.5.0",
    "@types/jest": "^27.0.1",
    "@types/mime": "^2.0.3",
    "@types/tough-cookie": "^4.0.1",
    "copyfiles": "^2.4.1",
    "cp": "^0.2.0",
    "dotenv": "^8.2.0",
    "jest": "^26.6.3",
    "dotenv": "^10.0.0",
    "jest": "^27.1.0",
    "jest-extended": "^0.11.5",
    "node-polyfill-webpack-plugin": "^1.1.4",
    "path": "^0.12.7",
    "process": "^0.11.10",
    "rimraf": "^3.0.2",
    "semantic-release": "^17.4.1",
    "terser-webpack-plugin": "^4.2.3",
    "ts-jest": "^25.5.1",
    "ts-loader": "^8.0.17",
    "semantic-release": "^17.4.7",
    "terser-webpack-plugin": "^5.2.0",
    "ts-jest": "^27.0.3",
    "ts-loader": "^9.2.2",
    "tslint": "^6.1.3",
    "tslint-config-prettier": "^1.18.0",
    "typedoc": "^0.20.30",
    "typedoc-neo-theme": "^1.1.0",
    "typedoc": "^0.21.9",
    "typedoc-neo-theme": "^1.1.1",
    "typedoc-plugin-external-module-name": "^4.0.6",
    "typescript": "^3.9.9",
    "webpack": "^5.24.4",
    "webpack-cli": "^4.5.0"
    "typescript": "4.3.5",
    "webpack": "^5.44.0",
    "webpack-cli": "^4.7.2"
  },
  "main": "index.js",
  "dependencies": {
    "@sasjs/utils": "^2.6.3",
    "@sasjs/utils": "^2.30.0",
    "axios": "^0.21.1",
    "form-data": "^3.0.0",
    "https": "^1.0.0"
    "axios-cookiejar-support": "^1.0.1",
    "form-data": "^4.0.0",
    "https": "^1.0.0",
    "tough-cookie": "^4.0.0"
  }
}
```
.prettierrc (file name inferred; header not captured) — semicolons disabled and single quotes enabled

```diff
@@ -1,6 +1,6 @@
 {
   "trailingComma": "none",
   "tabWidth": 2,
-  "semi": true,
-  "singleQuote": false
+  "semi": false,
+  "singleQuote": true
 }
```
sasjs-tests/README.md (file name inferred from content) — this diff also changes the plain ``` fences to ```bash / ```sas fences; only the updated fence form is shown below

@@ -6,7 +6,7 @@ When developing on `@sasjs/adapter`, it's good practice to run the test suite ag

You can use the provided `update:adapter` NPM script for this.

```bash
npm run update:adapter
```

@@ -37,10 +37,23 @@ To be able to run the `deploy` script, two environment variables need to be set:

So you can run the script like so:

```bash
SSH_ACCOUNT=me@my-sas-server.com DEPLOY_PATH=/var/www/html/my-folder/sasjs-tests npm run deploy
```

If you are on `WINDOWS`, you will first need to install one dependency:
```bash
npm i -g copyfiles
```
and then run to build:
```bash
npm run update:adapter && npm run build
```
when it finishes run to deploy:
```bash
scp -rp ./build/* me@my-sas-server.com:/var/www/html/my-folder/sasjs-tests
```

If you'd like to deploy just `sasjs-tests` without changing the adapter version, you can use the `deploy:tests` script, while also setting the same environment variables as above.

## 3. Creating the required SAS services

@@ -49,12 +62,12 @@ The below services need to be created on your SAS server, at the location specif

### SAS 9

```sas
filename mc url "https://raw.githubusercontent.com/sasjs/core/main/all.sas";
%inc mc;
filename ft15f001 temp;
parmcards4;
%webout(FETCH)
%webout(OPEN)
%macro x();
%do i=1 %to &_webin_file_count; %webout(OBJ,&&_webin_name&i) %end;
@@ -63,6 +76,7 @@ parmcards4;
;;;;
%mm_createwebservice(path=/Public/app/common,name=sendObj)
parmcards4;
%webout(FETCH)
%webout(OPEN)
%macro x();
%do i=1 %to &_webin_file_count; %webout(ARR,&&_webin_name&i) %end;
@@ -70,11 +84,24 @@ parmcards4;
%webout(CLOSE)
;;;;
%mm_createwebservice(path=/Public/app/common,name=sendArr)
parmcards4;
let he who hath understanding, reckon the number of the beast
;;;;
%mm_createwebservice(path=/Public/app/common,name=makeErr)
parmcards4;
%webout(OPEN)
data _null_;
  file _webout;
  put ' the discovery channel ';
run;
%webout(CLOSE)
;;;;
%mm_createwebservice(path=/Public/app/common,name=invalidJSON)
```

### SAS Viya

```sas
filename mc url "https://raw.githubusercontent.com/sasjs/core/main/all.sas";
%inc mc;
filename ft15f001 temp;
@@ -113,6 +140,15 @@ If you can trust yourself when all men doubt you,
But make allowance for their doubting too;
;;;;
%mp_createwebservice(path=/Public/app/common,name=makeErr)
parmcards4;
%webout(OPEN)
data _null_;
  file _webout;
  put ' the discovery channel ';
run;
%webout(CLOSE)
;;;;
%mp_createwebservice(path=/Public/app/common,name=invalidJSON)
```

You should now be able to access the tests in your browser at the deployed path on your server.

sasjs-tests/package-lock.json (8,229 lines, generated) — diff suppressed because it is too large.
sasjs-tests/package.json (file name inferred from content) — removed and added lines appear in sequence

```
@@ -4,10 +4,10 @@
  "homepage": ".",
  "private": true,
  "dependencies": {
    "@sasjs/adapter": "^2.2.4",
    "@sasjs/adapter": "file:../build/sasjs-adapter-5.0.0.tgz",
    "@sasjs/test-framework": "^1.4.0",
    "@types/jest": "^26.0.20",
    "@types/node": "^14.14.25",
    "@types/node": "^14.14.41",
    "@types/react": "^17.0.1",
    "@types/react-dom": "^17.0.0",
    "@types/react-router-dom": "^5.1.7",
@@ -23,8 +23,9 @@
    "test": "react-scripts test",
    "eject": "react-scripts eject",
    "update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
    "deploy:tests": "npm run build && rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH",
    "deploy": "npm run update:adapter && npm run deploy:tests"
    "deploy:tests": "rsync -avhe ssh ./build/* --delete sabhas@sas.analytium.co.uk:/var/www/html/sabhas/sasjs-test || npm run deploy:tests-win",
    "deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
    "deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
  },
  "eslintConfig": {
    "extends": "react-app"
```
sasjs-tests config.json (file name inferred from content) — the blank `serverUrl` is replaced with a hard-coded server

```
@@ -2,7 +2,7 @@
  "userName": "",
  "password": "",
  "sasJsConfig": {
    "serverUrl": "",
    "serverUrl": "https://sas.analytium.co.uk/",
    "appLoc": "/Public/app",
    "serverType": "SASVIYA",
    "debug": false,
```
sasjs-tests/src/App.tsx (file name inferred from content) — quote/semicolon style updated and most test suites temporarily commented out; removed and added lines appear in sequence

```
@@ -1,34 +1,39 @@
import React, { ReactElement, useState, useContext, useEffect } from "react";
import { TestSuiteRunner, TestSuite, AppContext } from "@sasjs/test-framework";
import { basicTests } from "./testSuites/Basic";
import { sendArrTests, sendObjTests } from "./testSuites/RequestData";
import { specialCaseTests } from "./testSuites/SpecialCases";
import { sasjsRequestTests } from "./testSuites/SasjsRequests";
import "@sasjs/test-framework/dist/index.css";
import { computeTests } from "./testSuites/Compute";
import React, { ReactElement, useState, useContext, useEffect } from 'react'
import { TestSuiteRunner, TestSuite, AppContext } from '@sasjs/test-framework'
import { basicTests } from './testSuites/Basic'
import { sendArrTests, sendObjTests } from './testSuites/RequestData'
import { specialCaseTests } from './testSuites/SpecialCases'
import { sasjsRequestTests } from './testSuites/SasjsRequests'
import '@sasjs/test-framework/dist/index.css'
import { computeTests } from './testSuites/Compute'

const App = (): ReactElement<{}> => {
  const { adapter, config } = useContext(AppContext);
  const [testSuites, setTestSuites] = useState<TestSuite[]>([]);
  const { adapter, config } = useContext(AppContext)
  const [testSuites, setTestSuites] = useState<TestSuite[]>([])

  useEffect(() => {
    if (adapter) {
      setTestSuites([
        basicTests(adapter, config.userName, config.password),
        sendArrTests(adapter),
        sendObjTests(adapter),
        specialCaseTests(adapter),
        sasjsRequestTests(adapter),
        computeTests(adapter)
      ]);
      const testSuites = [
        // basicTests(adapter, config.userName, config.password),
        // sendArrTests(adapter),
        // sendObjTests(adapter),
        // specialCaseTests(adapter),
        sasjsRequestTests(adapter)
      ]

      // if (adapter.getSasjsConfig().serverType === 'SASVIYA') {
      //   testSuites.push(computeTests(adapter))
      // }

      setTestSuites(testSuites)
    }
  }, [adapter, config]);
  }, [adapter, config])

  return (
    <div className="app">
      {adapter && testSuites && <TestSuiteRunner testSuites={testSuites} />}
    </div>
  );
};
  )
}

export default App;
export default App
```
sasjs-tests/src/Login.tsx (file name inferred from content) — quote/semicolon style updated; removed and added lines appear in sequence

```
@@ -1,22 +1,22 @@
import React, { ReactElement, useState, useCallback, useContext } from "react";
import "./Login.scss";
import { AppContext } from "@sasjs/test-framework";
import { Redirect } from "react-router-dom";
import React, { ReactElement, useState, useCallback, useContext } from 'react'
import './Login.scss'
import { AppContext } from '@sasjs/test-framework'
import { Redirect } from 'react-router-dom'

const Login = (): ReactElement<{}> => {
  const [username, setUsername] = useState("");
  const [password, setPassword] = useState("");
  const appContext = useContext(AppContext);
  const [username, setUsername] = useState('')
  const [password, setPassword] = useState('')
  const appContext = useContext(AppContext)

  const handleSubmit = useCallback(
    (e) => {
      e.preventDefault();
      e.preventDefault()
      appContext.adapter.logIn(username, password).then((res) => {
        appContext.setIsLoggedIn(res.isLoggedIn);
      });
        appContext.setIsLoggedIn(res.isLoggedIn)
      })
    },
    [username, password, appContext]
  );
  )

  return !appContext.isLoggedIn ? (
    <div className="login-container">
@@ -48,7 +48,7 @@ const Login = (): ReactElement<{}> => {
    </div>
  ) : (
    <Redirect to="/" />
  );
};
  )
}

export default Login;
export default Login
```
@@ -1,23 +1,23 @@
|
||||
import React, { ReactElement, useContext, FunctionComponent } from "react";
|
||||
import { Redirect, Route } from "react-router-dom";
|
||||
import { AppContext } from "@sasjs/test-framework";
|
||||
import React, { ReactElement, useContext, FunctionComponent } from 'react'
|
||||
import { Redirect, Route } from 'react-router-dom'
|
||||
import { AppContext } from '@sasjs/test-framework'
|
||||
|
||||
interface PrivateRouteProps {
|
||||
component: FunctionComponent;
|
||||
exact?: boolean;
|
||||
path: string;
|
||||
component: FunctionComponent
|
||||
exact?: boolean
|
||||
path: string
|
||||
}
|
||||
|
||||
const PrivateRoute = (
|
||||
props: PrivateRouteProps
|
||||
): ReactElement<PrivateRouteProps> => {
|
||||
const { component, path, exact } = props;
|
||||
const appContext = useContext(AppContext);
|
||||
const { component, path, exact } = props
|
||||
const appContext = useContext(AppContext)
|
||||
return appContext.isLoggedIn ? (
|
||||
<Route component={component} path={path} exact={exact} />
|
||||
) : (
|
||||
<Redirect to="/login" />
|
||||
);
|
||||
};
|
||||
)
|
||||
}
|
||||
|
||||
export default PrivateRoute;
|
||||
export default PrivateRoute
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import React from "react";
|
||||
import ReactDOM from "react-dom";
|
||||
import { Route, HashRouter, Switch } from "react-router-dom";
|
||||
import "./index.scss";
|
||||
import * as serviceWorker from "./serviceWorker";
|
||||
import { AppProvider } from "@sasjs/test-framework";
|
||||
import PrivateRoute from "./PrivateRoute";
|
||||
import Login from "./Login";
|
||||
import App from "./App";
|
||||
import React from 'react'
|
||||
import ReactDOM from 'react-dom'
|
||||
import { Route, HashRouter, Switch } from 'react-router-dom'
|
||||
import './index.scss'
|
||||
import * as serviceWorker from './serviceWorker'
|
||||
import { AppProvider } from '@sasjs/test-framework'
|
||||
import PrivateRoute from './PrivateRoute'
|
||||
import Login from './Login'
|
||||
import App from './App'
|
||||
|
||||
ReactDOM.render(
|
||||
<AppProvider>
|
||||
@@ -17,10 +17,10 @@ ReactDOM.render(
|
||||
</Switch>
|
||||
</HashRouter>
|
||||
</AppProvider>,
|
||||
document.getElementById("root")
|
||||
);
|
||||
document.getElementById('root')
|
||||
)
|
||||
|
||||
// If you want your app to work offline and load faster, you can change
|
||||
// unregister() to register() below. Note this comes with some pitfalls.
|
||||
// Learn more about service workers: https://bit.ly/CRA-PWA
|
||||
serviceWorker.unregister();
|
||||
serviceWorker.unregister()
|
||||
|
||||
@@ -11,46 +11,46 @@
|
||||
// opt-in, read https://bit.ly/CRA-PWA
|
||||
|
||||
const isLocalhost = Boolean(
|
||||
window.location.hostname === "localhost" ||
|
||||
window.location.hostname === 'localhost' ||
|
||||
// [::1] is the IPv6 localhost address.
|
||||
window.location.hostname === "[::1]" ||
|
||||
window.location.hostname === '[::1]' ||
|
||||
// 127.0.0.0/8 are considered localhost for IPv4.
|
||||
window.location.hostname.match(
|
||||
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
|
||||
)
|
||||
);
|
||||
)
|
||||
|
||||
export function register(config) {
|
||||
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
|
||||
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
|
||||
// The URL constructor is available in all browsers that support SW.
|
||||
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
|
||||
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href)
|
||||
if (publicUrl.origin !== window.location.origin) {
|
||||
// Our service worker won't work if PUBLIC_URL is on a different origin
|
||||
// from what our page is served on. This might happen if a CDN is used to
|
||||
// serve assets; see https://github.com/facebook/create-react-app/issues/2374
|
||||
return;
|
||||
return
|
||||
}
|
||||
|
||||
window.addEventListener("load", () => {
|
||||
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
|
||||
window.addEventListener('load', () => {
|
||||
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`
|
||||
|
||||
if (isLocalhost) {
|
||||
// This is running on localhost. Let's check if a service worker still exists or not.
|
||||
checkValidServiceWorker(swUrl, config);
|
||||
checkValidServiceWorker(swUrl, config)
|
||||
|
||||
// Add some additional logging to localhost, pointing developers to the
|
||||
// service worker/PWA documentation.
|
||||
navigator.serviceWorker.ready.then(() => {
|
||||
console.log(
|
||||
"This web app is being served cache-first by a service " +
|
||||
"worker. To learn more, visit https://bit.ly/CRA-PWA"
|
||||
);
|
||||
});
|
||||
'This web app is being served cache-first by a service ' +
|
||||
'worker. To learn more, visit https://bit.ly/CRA-PWA'
|
||||
)
|
||||
})
|
||||
} else {
|
||||
// Is not localhost. Just register service worker
|
||||
registerValidSW(swUrl, config);
|
||||
registerValidSW(swUrl, config)
|
||||
}
|
||||
});
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,83 +59,83 @@ function registerValidSW(swUrl, config) {
|
||||
.register(swUrl)
|
||||
.then((registration) => {
|
||||
registration.onupdatefound = () => {
|
||||
const installingWorker = registration.installing;
|
||||
const installingWorker = registration.installing
|
||||
if (installingWorker == null) {
|
||||
return;
|
||||
return
|
||||
}
|
||||
installingWorker.onstatechange = () => {
|
||||
if (installingWorker.state === "installed") {
|
||||
if (installingWorker.state === 'installed') {
|
||||
if (navigator.serviceWorker.controller) {
|
||||
// At this point, the updated precached content has been fetched,
|
||||
// but the previous service worker will still serve the older
|
||||
// content until all client tabs are closed.
|
||||
console.log(
|
||||
"New content is available and will be used when all " +
|
||||
"tabs for this page are closed. See https://bit.ly/CRA-PWA."
|
||||
);
|
||||
'New content is available and will be used when all ' +
|
||||
'tabs for this page are closed. See https://bit.ly/CRA-PWA.'
|
||||
)
|
||||
|
||||
// Execute callback
|
||||
if (config && config.onUpdate) {
|
||||
config.onUpdate(registration);
|
||||
config.onUpdate(registration)
|
||||
}
|
||||
} else {
|
||||
// At this point, everything has been precached.
|
||||
// It's the perfect time to display a
|
||||
// "Content is cached for offline use." message.
|
||||
console.log("Content is cached for offline use.");
|
||||
console.log('Content is cached for offline use.')
|
||||
|
||||
// Execute callback
|
||||
if (config && config.onSuccess) {
|
||||
config.onSuccess(registration);
|
||||
config.onSuccess(registration)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error during service worker registration:", error);
|
||||
});
|
||||
console.error('Error during service worker registration:', error)
|
||||
})
|
||||
}
|
||||
|
||||
function checkValidServiceWorker(swUrl, config) {
|
||||
// Check if the service worker can be found. If it can't reload the page.
|
||||
fetch(swUrl, {
|
||||
headers: { "Service-Worker": "script" }
|
||||
headers: { 'Service-Worker': 'script' }
|
||||
})
|
||||
.then((response) => {
|
||||
// Ensure service worker exists, and that we really are getting a JS file.
|
||||
const contentType = response.headers.get("content-type");
|
||||
const contentType = response.headers.get('content-type')
|
||||
if (
|
||||
response.status === 404 ||
|
||||
(contentType != null && contentType.indexOf("javascript") === -1)
|
||||
(contentType != null && contentType.indexOf('javascript') === -1)
|
||||
) {
|
||||
// No service worker found. Probably a different app. Reload the page.
|
||||
navigator.serviceWorker.ready.then((registration) => {
|
||||
registration.unregister().then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
});
|
||||
window.location.reload()
|
||||
})
|
||||
})
|
||||
} else {
|
||||
// Service worker found. Proceed as normal.
|
||||
registerValidSW(swUrl, config);
|
||||
registerValidSW(swUrl, config)
|
||||
}
|
||||
})
|
||||
.catch(() => {
|
||||
console.log(
|
||||
"No internet connection found. App is running in offline mode."
|
||||
);
|
||||
});
|
||||
'No internet connection found. App is running in offline mode.'
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
export function unregister() {
|
||||
if ("serviceWorker" in navigator) {
|
||||
if ('serviceWorker' in navigator) {
|
||||
navigator.serviceWorker.ready
|
||||
.then((registration) => {
|
||||
registration.unregister();
|
||||
registration.unregister()
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error(error.message);
|
||||
});
|
||||
console.error(error.message)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
// allows you to do things like:
|
||||
// expect(element).toHaveTextContent(/react/i)
|
||||
// learn more: https://github.com/testing-library/jest-dom
|
||||
import "@testing-library/jest-dom/extend-expect";
|
||||
import '@testing-library/jest-dom/extend-expect'
|
||||
|
||||
@@ -1,97 +1,102 @@
|
||||
import SASjs, { SASjsConfig } from "@sasjs/adapter";
|
||||
import { TestSuite } from "@sasjs/test-framework";
|
||||
import { ServerType } from "@sasjs/utils/types";
|
||||
import SASjs, { SASjsConfig } from '@sasjs/adapter'
|
||||
import { TestSuite } from '@sasjs/test-framework'
|
||||
import { ServerType } from '@sasjs/utils/types'
|
||||
|
||||
const stringData: any = { table1: [{ col1: "first col value" }] };
|
||||
const stringData: any = { table1: [{ col1: 'first col value' }] }
|
||||
|
||||
const defaultConfig: SASjsConfig = {
|
||||
serverUrl: window.location.origin,
|
||||
pathSAS9: "/SASStoredProcess/do",
|
||||
pathSASViya: "/SASJobExecution",
|
||||
appLoc: "/Public/seedapp",
|
||||
pathSAS9: '/SASStoredProcess/do',
|
||||
pathSASViya: '/SASJobExecution',
|
||||
appLoc: '/Public/seedapp',
|
||||
serverType: ServerType.SasViya,
|
||||
debug: false,
|
||||
contextName: "SAS Job Execution compute context",
|
||||
contextName: 'SAS Job Execution compute context',
|
||||
useComputeApi: false,
|
||||
allowInsecureRequests: false
|
||||
};
|
||||
}
|
||||
|
||||
const customConfig = {
|
||||
serverUrl: "http://url.com",
|
||||
pathSAS9: "sas9",
|
||||
pathSASViya: "viya",
|
||||
appLoc: "/Public/seedapp",
|
||||
serverUrl: 'http://url.com',
|
||||
pathSAS9: 'sas9',
|
||||
pathSASViya: 'viya',
|
||||
appLoc: '/Public/seedapp',
|
||||
serverType: ServerType.Sas9,
|
||||
debug: false
|
||||
};
|
||||
}
|
||||
|
||||
export const basicTests = (
|
||||
adapter: SASjs,
|
||||
userName: string,
|
||||
password: string
|
||||
): TestSuite => ({
|
||||
name: "Basic Tests",
|
||||
name: 'Basic Tests',
|
||||
tests: [
|
||||
{
|
||||
title: "Log in",
|
||||
description: "Should log the user in",
|
||||
title: 'Log in',
|
||||
description: 'Should log the user in',
|
||||
test: async () => {
|
||||
return adapter.logIn(userName, password);
|
||||
return adapter.logIn(userName, password)
|
||||
},
|
||||
assertion: (response: any) =>
|
||||
response && response.isLoggedIn && response.userName === userName
|
||||
},
|
||||
{
|
||||
title: "Multiple Log in attempts",
|
||||
title: 'Multiple Log in attempts',
|
||||
description:
|
||||
"Should fail on first attempt and should log the user in on second attempt",
|
||||
'Should fail on first attempt and should log the user in on second attempt',
|
||||
test: async () => {
|
||||
await adapter.logOut();
|
||||
await adapter.logIn("invalid", "invalid");
|
||||
return adapter.logIn(userName, password);
|
||||
await adapter.logOut()
|
||||
await adapter.logIn('invalid', 'invalid')
|
||||
return adapter.logIn(userName, password)
|
||||
},
|
||||
assertion: (response: any) =>
|
||||
response && response.isLoggedIn && response.userName === userName
|
||||
},
|
||||
{
|
||||
title: "Trigger login callback",
|
||||
title: 'Trigger login callback',
|
||||
description:
|
||||
"Should trigger required login callback and after successful login, it should finish the request",
|
||||
'Should trigger required login callback and after successful login, it should finish the request',
|
||||
test: async () => {
|
||||
await adapter.logOut();
|
||||
|
||||
return await adapter.request("common/sendArr", stringData, null, () => {
|
||||
adapter.logIn(userName, password);
|
||||
});
|
||||
await adapter.logOut()
|
||||
|
||||
return await adapter.request(
|
||||
'common/sendArr',
|
||||
stringData,
|
||||
undefined,
|
||||
() => {
|
||||
adapter.logIn(userName, password)
|
||||
}
|
||||
)
|
||||
},
|
||||
assertion: (response: any) => {
|
||||
return response.table1[0][0] === stringData.table1[0].col1;
|
||||
return response.table1[0][0] === stringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Request with debug on",
|
||||
title: 'Request with debug on',
|
||||
description:
|
||||
"Should complete successful request with debugging switched on",
|
||||
'Should complete successful request with debugging switched on',
|
||||
test: async () => {
|
||||
const config = {
|
||||
debug: true
|
||||
}
|
||||
|
||||
return await adapter.request("common/sendArr", stringData, config)
|
||||
return await adapter.request('common/sendArr', stringData, config)
|
||||
},
|
||||
assertion: (response: any) => {
|
||||
return response.table1[0][0] === stringData.table1[0].col1;
|
||||
return response.table1[0][0] === stringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Default config",
|
||||
title: 'Default config',
|
||||
description:
|
||||
"Should instantiate with default config when none is provided",
|
||||
'Should instantiate with default config when none is provided',
|
||||
test: async () => {
|
||||
return Promise.resolve(new SASjs());
|
||||
return Promise.resolve(new SASjs())
|
||||
},
|
||||
assertion: (sasjsInstance: SASjs) => {
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig();
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig()
|
||||
|
||||
return (
|
||||
sasjsConfig.serverUrl === defaultConfig.serverUrl &&
|
||||
@@ -100,17 +105,17 @@ export const basicTests = (
|
||||
sasjsConfig.appLoc === defaultConfig.appLoc &&
|
||||
sasjsConfig.serverType === defaultConfig.serverType &&
|
||||
sasjsConfig.debug === defaultConfig.debug
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Custom config",
|
||||
description: "Should use fully custom config whenever supplied",
|
||||
title: 'Custom config',
|
||||
description: 'Should use fully custom config whenever supplied',
|
||||
test: async () => {
|
||||
return Promise.resolve(new SASjs(customConfig));
|
||||
return Promise.resolve(new SASjs(customConfig))
|
||||
},
|
||||
assertion: (sasjsInstance: SASjs) => {
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig();
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig()
|
||||
return (
|
||||
sasjsConfig.serverUrl === customConfig.serverUrl &&
|
||||
sasjsConfig.pathSAS9 === customConfig.pathSAS9 &&
|
||||
@@ -118,28 +123,51 @@ export const basicTests = (
|
||||
sasjsConfig.appLoc === customConfig.appLoc &&
|
||||
sasjsConfig.serverType === customConfig.serverType &&
|
||||
sasjsConfig.debug === customConfig.debug
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Config overrides",
|
||||
description: "Should override default config with supplied properties",
|
||||
title: 'Config overrides',
|
||||
description: 'Should override default config with supplied properties',
|
||||
test: async () => {
|
||||
return Promise.resolve(
|
||||
new SASjs({ serverUrl: "http://test.com", debug: false })
|
||||
);
|
||||
new SASjs({ serverUrl: 'http://test.com', debug: false })
|
||||
)
|
||||
},
|
||||
assertion: (sasjsInstance: SASjs) => {
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig();
|
||||
const sasjsConfig = sasjsInstance.getSasjsConfig()
|
||||
return (
|
||||
sasjsConfig.serverUrl === "http://test.com" &&
|
||||
sasjsConfig.serverUrl === 'http://test.com' &&
|
||||
sasjsConfig.pathSAS9 === defaultConfig.pathSAS9 &&
|
||||
sasjsConfig.pathSASViya === defaultConfig.pathSASViya &&
|
||||
sasjsConfig.appLoc === defaultConfig.appLoc &&
|
||||
sasjsConfig.serverType === defaultConfig.serverType &&
|
||||
sasjsConfig.debug === false
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Request with extra attributes on JES approach',
|
||||
description:
|
||||
'Should complete successful request with extra attributes present in response',
|
||||
test: async () => {
|
||||
const config = {
|
||||
useComputeApi: false
|
||||
}
|
||||
|
||||
return await adapter.request(
|
||||
'common/sendArr',
|
||||
stringData,
|
||||
config,
|
||||
undefined,
|
||||
undefined,
|
||||
['file', 'data']
|
||||
)
|
||||
},
|
||||
assertion: (response: any) => {
|
||||
const responseKeys: any = Object.keys(response)
|
||||
return responseKeys.includes('file') && responseKeys.includes('data')
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
@@ -1,106 +1,100 @@
|
||||
import SASjs from "@sasjs/adapter";
|
||||
import { TestSuite } from "@sasjs/test-framework";
|
||||
import SASjs from '@sasjs/adapter'
|
||||
import { TestSuite } from '@sasjs/test-framework'
|
||||
|
||||
export const computeTests = (adapter: SASjs): TestSuite => ({
|
||||
name: "Compute",
|
||||
name: 'Compute',
|
||||
tests: [
|
||||
{
|
||||
title: "Start Compute Job - not waiting for result",
|
||||
description: "Should start a compute job and return the session",
|
||||
title: 'Start Compute Job - not waiting for result',
|
||||
description: 'Should start a compute job and return the session',
|
||||
test: () => {
|
||||
const data: any = { table1: [{ col1: "first col value" }] };
|
||||
return adapter.startComputeJob("/Public/app/common/sendArr", data);
|
||||
const data: any = { table1: [{ col1: 'first col value' }] }
|
||||
return adapter.startComputeJob('/Public/app/common/sendArr', data)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const expectedProperties = ["id", "applicationName", "attributes"];
|
||||
return validate(expectedProperties, res);
|
||||
const expectedProperties = ['id', 'applicationName', 'attributes']
|
||||
return validate(expectedProperties, res)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Start Compute Job - waiting for result",
|
||||
description: "Should start a compute job and return the job",
|
||||
title: 'Start Compute Job - waiting for result',
|
||||
description: 'Should start a compute job and return the job',
|
||||
test: () => {
|
||||
const data: any = { table1: [{ col1: "first col value" }] };
|
||||
const data: any = { table1: [{ col1: 'first col value' }] }
|
||||
return adapter.startComputeJob(
|
||||
"/Public/app/common/sendArr",
|
||||
'/Public/app/common/sendArr',
|
||||
data,
|
||||
{},
|
||||
"",
|
||||
undefined,
|
||||
true
|
||||
);
|
||||
)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const expectedProperties = [
|
||||
"id",
|
||||
"state",
|
||||
"creationTimeStamp",
|
||||
"jobConditionCode"
|
||||
];
|
||||
return validate(expectedProperties, res.job);
|
||||
'id',
|
||||
'state',
|
||||
'creationTimeStamp',
|
||||
'jobConditionCode'
|
||||
]
|
||||
return validate(expectedProperties, res.job)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Execute Script Viya - complete job",
|
||||
description: "Should execute sas file and return log",
|
||||
title: 'Execute Script Viya - complete job',
|
||||
description: 'Should execute sas file and return log',
|
||||
test: () => {
|
||||
const fileLines = [
|
||||
`data;`,
|
||||
`do x=1 to 100;`,
|
||||
`output;`,
|
||||
`end;`,
|
||||
`run;`
|
||||
];
|
||||
const fileLines = [`data;`, `do x=1 to 100;`, `output;`, `end;`, `run;`]
|
||||
|
||||
return adapter.executeScriptSASViya(
|
||||
"sasCode.sas",
|
||||
'sasCode.sas',
|
||||
fileLines,
|
||||
"SAS Studio compute context",
|
||||
'SAS Studio compute context',
|
||||
undefined,
|
||||
true
|
||||
);
|
||||
)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const expectedLogContent = `1 data;\\n2 do x=1 to 100;\\n3 output;\\n4 end;\\n5 run;\\n\\n`;
|
||||
const expectedLogContent = `1 data;\\n2 do x=1 to 100;\\n3 output;\\n4 end;\\n5 run;\\n\\n`
|
||||
|
||||
return validateLog(expectedLogContent, res.log);
|
||||
return validateLog(expectedLogContent, res.log)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Execute Script Viya - failed job",
|
||||
description: "Should execute sas file and return log",
|
||||
title: 'Execute Script Viya - failed job',
|
||||
description: 'Should execute sas file and return log',
|
||||
test: () => {
|
||||
const fileLines = [`%abort;`];
|
||||
const fileLines = [`%abort;`]
|
||||
|
||||
return adapter
|
||||
.executeScriptSASViya(
|
||||
"sasCode.sas",
|
||||
'sasCode.sas',
|
||||
fileLines,
|
||||
"SAS Studio compute context",
|
||||
'SAS Studio compute context',
|
||||
undefined,
|
||||
true
|
||||
)
|
||||
.catch((err: any) => err);
|
||||
.catch((err: any) => err)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const expectedLogContent = `1 %abort;\\nERROR: The %ABORT statement is not valid in open code.\\n`;
|
||||
const expectedLogContent = `1 %abort;\\nERROR: The %ABORT statement is not valid in open code.\\n`
|
||||
|
||||
return validateLog(expectedLogContent, res.log);
|
||||
return validateLog(expectedLogContent, res.log)
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
const validateLog = (text: string, log: string): boolean => {
|
||||
const isValid = JSON.stringify(log).includes(text);
|
||||
const isValid = JSON.stringify(log).includes(text)
|
||||
|
||||
return isValid;
|
||||
};
|
||||
return isValid
|
||||
}
|
||||
|
||||
const validate = (expectedProperties: string[], data: any): boolean => {
|
||||
const actualProperties = Object.keys(data);
|
||||
const actualProperties = Object.keys(data)
|
||||
|
||||
const isValid = expectedProperties.every((property) =>
|
||||
actualProperties.includes(property)
|
||||
);
|
||||
return isValid;
|
||||
};
|
||||
)
|
||||
return isValid
|
||||
}
|
||||
|
||||
@@ -1,111 +1,112 @@
|
||||
import SASjs from "@sasjs/adapter";
|
||||
import { TestSuite } from "@sasjs/test-framework";
|
||||
import SASjs from '@sasjs/adapter'
|
||||
import { TestSuite } from '@sasjs/test-framework'
|
||||
|
||||
const stringData: any = { table1: [{ col1: "first col value" }] };
|
||||
const numericData: any = { table1: [{ col1: 3.14159265 }] };
|
||||
const stringData: any = { table1: [{ col1: 'first col value' }] }
|
||||
const numericData: any = { table1: [{ col1: 3.14159265 }] }
|
||||
const multiColumnData: any = {
|
||||
table1: [{ col1: 42, col2: 1.618, col3: "x", col4: "x" }]
|
||||
};
|
||||
table1: [{ col1: 42, col2: 1.618, col3: 'x', col4: 'x' }]
|
||||
}
|
||||
const multipleRowsWithNulls: any = {
|
||||
table1: [
|
||||
{ col1: 42, col2: null, col3: "x", col4: "" },
|
||||
{ col1: 42, col2: null, col3: "x", col4: "" },
|
||||
{ col1: 42, col2: null, col3: "x", col4: "" },
|
||||
{ col1: 42, col2: 1.62, col3: "x", col4: "x" },
|
||||
{ col1: 42, col2: 1.62, col3: "x", col4: "x" }
|
||||
{ col1: 42, col2: null, col3: 'x', col4: '' },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: '' },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: '' },
|
||||
{ col1: 42, col2: 1.62, col3: 'x', col4: 'x' },
|
||||
{ col1: 42, col2: 1.62, col3: 'x', col4: 'x' }
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
const multipleColumnsWithNulls: any = {
|
||||
table1: [
|
||||
{ col1: 42, col2: null, col3: "x", col4: null },
|
||||
{ col1: 42, col2: null, col3: "x", col4: null },
|
||||
{ col1: 42, col2: null, col3: "x", col4: null },
|
||||
{ col1: 42, col2: null, col3: "x", col4: "" },
|
||||
{ col1: 42, col2: null, col3: "x", col4: "" }
|
||||
{ col1: 42, col2: null, col3: 'x', col4: null },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: null },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: null },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: '' },
|
||||
{ col1: 42, col2: null, col3: 'x', col4: '' }
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
const getLongStringData = (length = 32764) => {
|
||||
let x = "X";
|
||||
let x = 'X'
|
||||
for (let i = 1; i <= length; i++) {
|
||||
x = x + "X";
|
||||
x = x + 'X'
|
||||
}
|
||||
const data: any = { table1: [{ col1: x }] };
|
||||
return data;
|
||||
};
|
||||
const data: any = { table1: [{ col1: x }] }
|
||||
return data
|
||||
}
|
||||
|
||||
const getLargeObjectData = () => {
|
||||
const data = { table1: [{ big: "data" }] };
|
||||
const data = { table1: [{ big: 'data' }] }
|
||||
|
||||
for (let i = 1; i < 10000; i++) {
|
||||
data.table1.push(data.table1[0]);
|
||||
data.table1.push(data.table1[0])
|
||||
}
|
||||
|
||||
return data;
|
||||
};
|
||||
return data
|
||||
}
|
||||
|
||||
export const sendArrTests = (adapter: SASjs): TestSuite => ({
|
||||
name: "sendArr",
|
||||
name: 'sendArr',
|
||||
tests: [
|
||||
{
|
||||
title: "Absolute paths",
|
||||
description: "Should work with absolute paths to SAS jobs",
|
||||
title: 'Absolute paths',
|
||||
description: 'Should work with absolute paths to SAS jobs',
|
||||
test: () => {
|
||||
return adapter.request("/Public/app/common/sendArr", stringData);
|
||||
return adapter.request('/Public/app/common/sendArr', stringData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return res.table1[0][0] === stringData.table1[0].col1;
|
||||
return res.table1[0][0] === stringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Single string value",
|
||||
description: "Should send an array with a single string value",
|
||||
title: 'Single string value',
|
||||
description: 'Should send an array with a single string value',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", stringData);
|
||||
return adapter.request('common/sendArr', stringData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return res.table1[0][0] === stringData.table1[0].col1;
|
||||
return res.table1[0][0] === stringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Long string value",
|
||||
title: 'Long string value',
|
||||
description:
|
||||
"Should send an array with a long string value under 32765 characters",
|
||||
'Should send an array with a long string value under 32765 characters',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", getLongStringData());
|
||||
return adapter.request('common/sendArr', getLongStringData())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const longStringData = getLongStringData();
|
||||
return res.table1[0][0] === longStringData.table1[0].col1;
|
||||
const longStringData = getLongStringData()
|
||||
return res.table1[0][0] === longStringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Overly long string value",
|
||||
title: 'Overly long string value',
|
||||
description:
|
||||
"Should error out with long string values over 32765 characters",
|
||||
'Should error out with long string values over 32765 characters',
|
||||
test: () => {
|
||||
const data = getLongStringData(32767);
|
||||
return adapter.request("common/sendArr", data).catch((e) => e);
|
||||
const data = getLongStringData(32767)
|
||||
return adapter.request('common/sendArr', data).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) => {
|
||||
return !!error && !!error.error && !!error.error.message;
|
||||
return !!error && !!error.error && !!error.error.message
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Single numeric value",
|
||||
description: "Should send an array with a single numeric value",
|
||||
title: 'Single numeric value',
|
||||
description: 'Should send an array with a single numeric value',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", numericData);
|
||||
return adapter.request('common/sendArr', numericData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return res.table1[0][0] === numericData.table1[0].col1;
|
||||
return res.table1[0][0] === numericData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple columns",
|
||||
description: "Should handle data with multiple columns",
|
||||
title: 'Multiple columns',
|
||||
description: 'Should handle data with multiple columns',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", multiColumnData);
|
||||
return adapter.request('common/sendArr', multiColumnData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -113,143 +114,189 @@ export const sendArrTests = (adapter: SASjs): TestSuite => ({
|
||||
res.table1[0][1] === multiColumnData.table1[0].col2 &&
|
||||
res.table1[0][2] === multiColumnData.table1[0].col3 &&
|
||||
res.table1[0][3] === multiColumnData.table1[0].col4
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple rows with nulls",
|
||||
description: "Should handle data with multiple rows with null values",
|
||||
title: 'Multiple rows with nulls',
|
||||
description: 'Should handle data with multiple rows with null values',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", multipleRowsWithNulls);
|
||||
return adapter.request('common/sendArr', multipleRowsWithNulls)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
let result = true;
|
||||
let result = true
|
||||
multipleRowsWithNulls.table1.forEach((_: any, index: number) => {
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][0] === multipleRowsWithNulls.table1[index].col1;
|
||||
res.table1[index][0] === multipleRowsWithNulls.table1[index].col1
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][1] === multipleRowsWithNulls.table1[index].col2;
|
||||
res.table1[index][1] === multipleRowsWithNulls.table1[index].col2
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][2] === multipleRowsWithNulls.table1[index].col3;
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][3] === multipleRowsWithNulls.table1[index].col4;
|
||||
});
|
||||
return result;
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple columns with nulls",
|
||||
description: "Should handle data with multiple columns with null values",
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", multipleColumnsWithNulls);
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
let result = true;
|
||||
multipleColumnsWithNulls.table1.forEach((_: any, index: number) => {
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][0] ===
|
||||
multipleColumnsWithNulls.table1[index].col1;
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][1] ===
|
||||
multipleColumnsWithNulls.table1[index].col2;
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][2] ===
|
||||
multipleColumnsWithNulls.table1[index].col3;
|
||||
res.table1[index][2] === multipleRowsWithNulls.table1[index].col3
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][3] ===
|
||||
(multipleColumnsWithNulls.table1[index].col4 || "");
|
||||
});
|
||||
return result;
|
||||
(multipleRowsWithNulls.table1[index].col4 || ' ')
|
||||
})
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: 'Multiple columns with nulls',
|
||||
description: 'Should handle data with multiple columns with null values',
|
||||
test: () => {
|
||||
return adapter.request('common/sendArr', multipleColumnsWithNulls)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
let result = true
|
||||
multipleColumnsWithNulls.table1.forEach((_: any, index: number) => {
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][0] === multipleColumnsWithNulls.table1[index].col1
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][1] === multipleColumnsWithNulls.table1[index].col2
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][2] === multipleColumnsWithNulls.table1[index].col3
|
||||
result =
|
||||
result &&
|
||||
res.table1[index][3] ===
|
||||
(multipleColumnsWithNulls.table1[index].col4 || ' ')
|
||||
})
|
||||
return result
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
export const sendObjTests = (adapter: SASjs): TestSuite => ({
|
||||
name: "sendObj",
|
||||
name: 'sendObj',
|
||||
tests: [
|
||||
{
|
||||
title: "Invalid column name",
|
||||
description: "Should throw an error",
|
||||
title: 'Table name starts with numeric',
|
||||
description: 'Should throw an error',
|
||||
test: async () => {
|
||||
const invalidData: any = {
|
||||
"1 invalid table": [{ col1: 42 }]
|
||||
};
|
||||
return adapter.request("common/sendObj", invalidData).catch((e) => e);
|
||||
'1InvalidTable': [{ col1: 42 }]
|
||||
}
|
||||
return adapter.request('common/sendObj', invalidData).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) =>
|
||||
!!error && !!error.error && !!error.error.message
|
||||
},
|
||||
{
|
||||
title: "Single string value",
|
||||
description: "Should send an object with a single string value",
|
||||
title: 'Table name contains a space',
|
||||
description: 'Should throw an error',
|
||||
test: async () => {
|
||||
const invalidData: any = {
|
||||
'an invalidTable': [{ col1: 42 }]
|
||||
}
|
||||
return adapter.request('common/sendObj', invalidData).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) =>
|
||||
!!error && !!error.error && !!error.error.message
|
||||
},
|
||||
{
|
||||
title: 'Table name contains a special character',
|
||||
description: 'Should throw an error',
|
||||
test: async () => {
|
||||
const invalidData: any = {
|
||||
'anInvalidTable#': [{ col1: 42 }]
|
||||
}
|
||||
return adapter.request('common/sendObj', invalidData).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) =>
|
||||
!!error && !!error.error && !!error.error.message
|
||||
},
|
||||
{
|
||||
title: 'Table name exceeds max length of 32 characters',
|
||||
description: 'Should throw an error',
|
||||
test: async () => {
|
||||
const invalidData: any = {
|
||||
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: [{ col1: 42 }]
|
||||
}
|
||||
return adapter.request('common/sendObj', invalidData).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) =>
|
||||
!!error && !!error.error && !!error.error.message
|
||||
},
|
||||
{
|
||||
title: "Invalid data object's structure",
|
||||
description: 'Should throw an error',
|
||||
test: async () => {
|
||||
const invalidData: any = {
|
||||
inData: [[{ data: 'value' }]]
|
||||
}
|
||||
return adapter.request('common/sendObj', invalidData).catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) =>
|
||||
!!error && !!error.error && !!error.error.message
|
||||
},
|
||||
{
|
||||
title: 'Single string value',
|
||||
description: 'Should send an object with a single string value',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", stringData);
|
||||
return adapter.request('common/sendObj', stringData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return res.table1[0].COL1 === stringData.table1[0].col1;
|
||||
return res.table1[0].COL1 === stringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Long string value",
|
||||
title: 'Long string value',
|
||||
description:
|
||||
"Should send an object with a long string value under 32765 characters",
|
||||
'Should send an object with a long string value under 32765 characters',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", getLongStringData());
|
||||
return adapter.request('common/sendObj', getLongStringData())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const longStringData = getLongStringData();
|
||||
return res.table1[0].COL1 === longStringData.table1[0].col1;
|
||||
const longStringData = getLongStringData()
|
||||
return res.table1[0].COL1 === longStringData.table1[0].col1
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Overly long string value",
|
||||
title: 'Overly long string value',
|
||||
description:
|
||||
"Should error out with long string values over 32765 characters",
|
||||
'Should error out with long string values over 32765 characters',
|
||||
test: () => {
|
||||
return adapter
|
||||
.request("common/sendObj", getLongStringData(32767))
|
||||
.catch((e) => e);
|
||||
.request('common/sendObj', getLongStringData(32767))
|
||||
.catch((e) => e)
|
||||
},
|
||||
assertion: (error: any) => {
|
||||
return !!error && !!error.error && !!error.error.message;
|
||||
return !!error && !!error.error && !!error.error.message
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Single numeric value",
|
||||
description: "Should send an object with a single numeric value",
|
||||
title: 'Single numeric value',
|
||||
description: 'Should send an object with a single numeric value',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", numericData);
|
||||
return adapter.request('common/sendObj', numericData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return res.table1[0].COL1 === numericData.table1[0].col1;
|
||||
return res.table1[0].COL1 === numericData.table1[0].col1
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
title: "Large data volume",
|
||||
description: "Should send an object with a large amount of data",
|
||||
title: 'Large data volume',
|
||||
description: 'Should send an object with a large amount of data',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", getLargeObjectData());
|
||||
return adapter.request('common/sendObj', getLargeObjectData())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getLargeObjectData();
|
||||
return res.table1[9000].BIG === data.table1[9000].big;
|
||||
const data = getLargeObjectData()
|
||||
return res.table1[9000].BIG === data.table1[9000].big
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple columns",
|
||||
description: "Should handle data with multiple columns",
|
||||
title: 'Multiple columns',
|
||||
description: 'Should handle data with multiple columns',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", multiColumnData);
|
||||
return adapter.request('common/sendObj', multiColumnData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -257,62 +304,63 @@ export const sendObjTests = (adapter: SASjs): TestSuite => ({
|
||||
res.table1[0].COL2 === multiColumnData.table1[0].col2 &&
|
||||
res.table1[0].COL3 === multiColumnData.table1[0].col3 &&
|
||||
res.table1[0].COL4 === multiColumnData.table1[0].col4
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple rows with nulls",
|
||||
description: "Should handle data with multiple rows with null values",
|
||||
title: 'Multiple rows with nulls',
|
||||
description: 'Should handle data with multiple rows with null values',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", multipleRowsWithNulls);
|
||||
return adapter.request('common/sendObj', multipleRowsWithNulls)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
let result = true;
|
||||
let result = true
|
||||
multipleRowsWithNulls.table1.forEach((_: any, index: number) => {
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL1 === multipleRowsWithNulls.table1[index].col1;
|
||||
res.table1[index].COL1 === multipleRowsWithNulls.table1[index].col1
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL2 === multipleRowsWithNulls.table1[index].col2;
|
||||
res.table1[index].COL2 === multipleRowsWithNulls.table1[index].col2
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL3 === multipleRowsWithNulls.table1[index].col3;
|
||||
res.table1[index].COL3 === multipleRowsWithNulls.table1[index].col3
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL4 === multipleRowsWithNulls.table1[index].col4;
|
||||
});
|
||||
return result;
|
||||
res.table1[index].COL4 ===
|
||||
(multipleRowsWithNulls.table1[index].col4 || ' ')
|
||||
})
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple columns with nulls",
|
||||
description: "Should handle data with multiple columns with null values",
|
||||
title: 'Multiple columns with nulls',
|
||||
description: 'Should handle data with multiple columns with null values',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", multipleColumnsWithNulls);
|
||||
return adapter.request('common/sendObj', multipleColumnsWithNulls)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
let result = true;
|
||||
let result = true
|
||||
multipleColumnsWithNulls.table1.forEach((_: any, index: number) => {
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL1 ===
|
||||
multipleColumnsWithNulls.table1[index].col1;
|
||||
multipleColumnsWithNulls.table1[index].col1
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL2 ===
|
||||
multipleColumnsWithNulls.table1[index].col2;
|
||||
multipleColumnsWithNulls.table1[index].col2
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL3 ===
|
||||
multipleColumnsWithNulls.table1[index].col3;
|
||||
multipleColumnsWithNulls.table1[index].col3
|
||||
result =
|
||||
result &&
|
||||
res.table1[index].COL4 ===
|
||||
(multipleColumnsWithNulls.table1[index].col4 || "");
|
||||
});
|
||||
return result;
|
||||
(multipleColumnsWithNulls.table1[index].col4 || ' ')
|
||||
})
|
||||
return result
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
@@ -1,49 +1,49 @@
|
||||
import SASjs from "@sasjs/adapter";
|
||||
import { TestSuite } from "@sasjs/test-framework";
|
||||
import SASjs from '@sasjs/adapter'
|
||||
import { TestSuite } from '@sasjs/test-framework'
|
||||
|
||||
const data: any = { table1: [{ col1: "first col value" }] };
|
||||
const data: any = { table1: [{ col1: 'first col value' }] }
|
||||
|
||||
export const sasjsRequestTests = (adapter: SASjs): TestSuite => ({
|
||||
name: "SASjs Requests",
|
||||
name: 'SASjs Requests',
|
||||
tests: [
|
||||
{
|
||||
title: "WORK tables",
|
||||
description: "Should get WORK tables after request",
|
||||
title: 'WORK tables',
|
||||
description: 'Should get WORK tables after request',
|
||||
test: async () => {
|
||||
return adapter.request("common/sendArr", data);
|
||||
return adapter.request('common/sendArr', data)
|
||||
},
|
||||
assertion: () => {
|
||||
const requests = adapter.getSasRequests();
|
||||
const requests = adapter.getSasRequests()
|
||||
if (adapter.getSasjsConfig().debug) {
|
||||
return requests[0].SASWORK !== null;
|
||||
return requests[0].SASWORK !== null
|
||||
} else {
|
||||
return requests[0].SASWORK === null;
|
||||
return requests[0].SASWORK === null
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Make error and capture log",
|
||||
title: 'Make error and capture log',
|
||||
description:
|
||||
"Should make an error and capture log, in the same time it is testing if debug override is working",
|
||||
'Should make an error and capture log, in the same time it is testing if debug override is working',
|
||||
test: async () => {
|
||||
return adapter
|
||||
.request("common/makeErr", data, { debug: true })
|
||||
.request('common/makeErr', data, { debug: true })
|
||||
.catch(() => {
|
||||
const sasRequests = adapter.getSasRequests();
|
||||
const sasRequests = adapter.getSasRequests()
|
||||
const makeErrRequest: any =
|
||||
sasRequests.find((req) => req.serviceLink.includes("makeErr")) ||
|
||||
null;
|
||||
sasRequests.find((req) => req.serviceLink.includes('makeErr')) ||
|
||||
null
|
||||
|
||||
if (!makeErrRequest) return false;
|
||||
if (!makeErrRequest) return false
|
||||
|
||||
return !!(
|
||||
makeErrRequest.logFile && makeErrRequest.logFile.length > 0
|
||||
);
|
||||
});
|
||||
)
|
||||
})
|
||||
},
|
||||
assertion: (response) => {
|
||||
return response;
|
||||
return response
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
@@ -1,91 +1,92 @@
|
||||
import SASjs from "@sasjs/adapter";
|
||||
import { TestSuite } from "@sasjs/test-framework";
|
||||
import SASjs from '@sasjs/adapter'
|
||||
import { TestSuite } from '@sasjs/test-framework'
|
||||
|
||||
const specialCharData: any = {
|
||||
table1: [
|
||||
{
|
||||
tab: "\t",
|
||||
lf: "\n",
|
||||
cr: "\r",
|
||||
semicolon: ";semi",
|
||||
percent: "%",
|
||||
tab: '\t',
|
||||
lf: '\n',
|
||||
cr: '\r',
|
||||
semicolon: ';semi',
|
||||
percent: '%',
|
||||
singleQuote: "'",
|
||||
doubleQuote: '"',
|
||||
crlf: "\r\n",
|
||||
euro: "€euro",
|
||||
banghash: "!#banghash"
|
||||
crlf: '\r\n',
|
||||
euro: '€euro',
|
||||
banghash: '!#banghash',
|
||||
dot: '.'
|
||||
}
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
const moreSpecialCharData: any = {
|
||||
table1: [
|
||||
{
|
||||
speech0: '"speech',
|
||||
pct: "%percent",
|
||||
pct: '%percent',
|
||||
speech: '"speech',
|
||||
slash: "\\slash",
|
||||
slashWithSpecial: "\\\tslash",
|
||||
macvar: "&sysuserid",
|
||||
chinese: "传/傳chinese",
|
||||
sigma: "Σsigma",
|
||||
at: "@at",
|
||||
serbian: "Српски",
|
||||
dollar: "$"
|
||||
slash: '\\slash',
|
||||
slashWithSpecial: '\\\tslash',
|
||||
macvar: '&sysuserid',
|
||||
chinese: '传/傳chinese',
|
||||
sigma: 'Σsigma',
|
||||
at: '@at',
|
||||
serbian: 'Српски',
|
||||
dollar: '$'
|
||||
}
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
const getWideData = () => {
|
||||
const cols: any = {};
|
||||
const cols: any = {}
|
||||
for (let i = 1; i <= 10000; i++) {
|
||||
cols["col" + i] = "test" + i;
|
||||
cols['col' + i] = 'test' + i
|
||||
}
|
||||
|
||||
const data: any = {
|
||||
table1: [cols]
|
||||
};
|
||||
}
|
||||
|
||||
return data;
|
||||
};
|
||||
return data
|
||||
}
|
||||
|
||||
const getTables = () => {
|
||||
const tables: any = {};
|
||||
const tables: any = {}
|
||||
|
||||
for (let i = 1; i <= 100; i++) {
|
||||
tables["table" + i] = [{ col1: "x", col2: "x", col3: "x", col4: "x" }];
|
||||
tables['table' + i] = [{ col1: 'x', col2: 'x', col3: 'x', col4: 'x' }]
|
||||
}
|
||||
return tables;
|
||||
};
|
||||
return tables
|
||||
}
|
||||
|
||||
const getLargeDataset = () => {
|
||||
const rows: any = [];
|
||||
const rows: any = []
|
||||
const colData: string =
|
||||
"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx";
|
||||
'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||
for (let i = 1; i <= 10000; i++) {
|
||||
rows.push({ col1: colData, col2: colData, col3: colData, col4: colData });
|
||||
rows.push({ col1: colData, col2: colData, col3: colData, col4: colData })
|
||||
}
|
||||
|
||||
const data: any = {
|
||||
table1: rows
|
||||
};
|
||||
}
|
||||
|
||||
return data;
|
||||
};
|
||||
return data
|
||||
}
|
||||
|
||||
const errorAndCsrfData: any = {
|
||||
error: [{ col1: "q", col2: "w", col3: "e", col4: "r" }],
|
||||
_csrf: [{ col1: "q", col2: "w", col3: "e", col4: "r" }]
|
||||
};
|
||||
error: [{ col1: 'q', col2: 'w', col3: 'e', col4: 'r' }],
|
||||
_csrf: [{ col1: 'q', col2: 'w', col3: 'e', col4: 'r' }]
|
||||
}
|
||||
|
||||
export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
name: "Special Cases",
|
||||
name: 'Special Cases',
|
||||
tests: [
|
||||
{
|
||||
title: "Common special characters",
|
||||
description: "Should handle common special characters",
|
||||
title: 'Common special characters',
|
||||
description: 'Should handle common special characters',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", specialCharData);
|
||||
return adapter.request('common/sendArr', specialCharData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -96,17 +97,18 @@ export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
res.table1[0][4] === specialCharData.table1[0].percent &&
|
||||
res.table1[0][5] === specialCharData.table1[0].singleQuote &&
|
||||
res.table1[0][6] === specialCharData.table1[0].doubleQuote &&
|
||||
res.table1[0][7] === "\n" &&
|
||||
res.table1[0][7] === '\n' &&
|
||||
res.table1[0][8] === specialCharData.table1[0].euro &&
|
||||
res.table1[0][9] === specialCharData.table1[0].banghash
|
||||
);
|
||||
res.table1[0][9] === specialCharData.table1[0].banghash &&
|
||||
res.table1[0][10] === specialCharData.table1[0].dot
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Other special characters",
|
||||
description: "Should handle other special characters",
|
||||
title: 'Other special characters',
|
||||
description: 'Should handle other special characters',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", moreSpecialCharData);
|
||||
return adapter.request('common/sendArr', moreSpecialCharData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -121,50 +123,50 @@ export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
res.table1[0][8] === moreSpecialCharData.table1[0].at &&
|
||||
res.table1[0][9] === moreSpecialCharData.table1[0].serbian &&
|
||||
res.table1[0][10] === moreSpecialCharData.table1[0].dollar
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Wide table with sendArr",
|
||||
description: "Should handle data with 10000 columns",
|
||||
title: 'Wide table with sendArr',
|
||||
description: 'Should handle data with 10000 columns',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", getWideData());
|
||||
return adapter.request('common/sendArr', getWideData())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getWideData();
|
||||
let result = true;
|
||||
const data = getWideData()
|
||||
let result = true
|
||||
for (let i = 0; i <= 10; i++) {
|
||||
result =
|
||||
result && res.table1[0][i] === data.table1[0]["col" + (i + 1)];
|
||||
result && res.table1[0][i] === data.table1[0]['col' + (i + 1)]
|
||||
}
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Wide table with sendObj",
|
||||
description: "Should handle data with 10000 columns",
|
||||
title: 'Wide table with sendObj',
|
||||
description: 'Should handle data with 10000 columns',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", getWideData());
|
||||
return adapter.request('common/sendObj', getWideData())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getWideData();
|
||||
let result = true;
|
||||
const data = getWideData()
|
||||
let result = true
|
||||
for (let i = 0; i <= 10; i++) {
|
||||
result =
|
||||
result &&
|
||||
res.table1[0]["COL" + (i + 1)] === data.table1[0]["col" + (i + 1)];
|
||||
res.table1[0]['COL' + (i + 1)] === data.table1[0]['col' + (i + 1)]
|
||||
}
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Multiple tables",
|
||||
description: "Should handle data with 100 tables",
|
||||
title: 'Multiple tables',
|
||||
description: 'Should handle data with 100 tables',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", getTables());
|
||||
return adapter.request('common/sendArr', getTables())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getTables();
|
||||
const data = getTables()
|
||||
return (
|
||||
res.table1[0][0] === data.table1[0].col1 &&
|
||||
res.table1[0][1] === data.table1[0].col2 &&
|
||||
@@ -174,45 +176,45 @@ export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
res.table50[0][1] === data.table50[0].col2 &&
|
||||
res.table50[0][2] === data.table50[0].col3 &&
|
||||
res.table50[0][3] === data.table50[0].col4
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Large dataset with sendObj",
|
||||
description: "Should handle 5mb of data",
|
||||
title: 'Large dataset with sendObj',
|
||||
description: 'Should handle 5mb of data',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", getLargeDataset());
|
||||
return adapter.request('common/sendObj', getLargeDataset())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getLargeDataset();
|
||||
let result = true;
|
||||
const data = getLargeDataset()
|
||||
let result = true
|
||||
for (let i = 0; i <= 10; i++) {
|
||||
result = result && res.table1[i][0] === data.table1[i][0];
|
||||
result = result && res.table1[i][0] === data.table1[i][0]
|
||||
}
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Large dataset with sendArr",
|
||||
description: "Should handle 5mb of data",
|
||||
title: 'Large dataset with sendArr',
|
||||
description: 'Should handle 5mb of data',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", getLargeDataset());
|
||||
return adapter.request('common/sendArr', getLargeDataset())
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
const data = getLargeDataset();
|
||||
let result = true;
|
||||
const data = getLargeDataset()
|
||||
let result = true
|
||||
for (let i = 0; i <= 10; i++) {
|
||||
result =
|
||||
result && res.table1[i][0] === Object.values(data.table1[i])[0];
|
||||
result && res.table1[i][0] === Object.values(data.table1[i])[0]
|
||||
}
|
||||
return result;
|
||||
return result
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Error and _csrf tables with sendArr",
|
||||
description: "Should handle error and _csrf tables",
|
||||
title: 'Error and _csrf tables with sendArr',
|
||||
description: 'Should handle error and _csrf tables',
|
||||
test: () => {
|
||||
return adapter.request("common/sendArr", errorAndCsrfData);
|
||||
return adapter.request('common/sendArr', errorAndCsrfData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -224,14 +226,14 @@ export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
res._csrf[0][1] === errorAndCsrfData._csrf[0].col2 &&
|
||||
res._csrf[0][2] === errorAndCsrfData._csrf[0].col3 &&
|
||||
res._csrf[0][3] === errorAndCsrfData._csrf[0].col4
|
||||
);
|
||||
)
|
||||
}
|
||||
},
|
||||
{
|
||||
title: "Error and _csrf tables with sendObj",
|
||||
description: "Should handle error and _csrf tables",
|
||||
title: 'Error and _csrf tables with sendObj',
|
||||
description: 'Should handle error and _csrf tables',
|
||||
test: () => {
|
||||
return adapter.request("common/sendObj", errorAndCsrfData);
|
||||
return adapter.request('common/sendObj', errorAndCsrfData)
|
||||
},
|
||||
assertion: (res: any) => {
|
||||
return (
|
||||
@@ -243,8 +245,8 @@ export const specialCaseTests = (adapter: SASjs): TestSuite => ({
|
||||
res._csrf[0].COL2 === errorAndCsrfData._csrf[0].col2 &&
|
||||
res._csrf[0].COL3 === errorAndCsrfData._csrf[0].col3 &&
|
||||
res._csrf[0].COL4 === errorAndCsrfData._csrf[0].col4
|
||||
);
|
||||
)
|
||||
}
|
||||
}
|
||||
]
|
||||
});
|
||||
})
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
export const assert = (
|
||||
expression: boolean | (() => boolean),
|
||||
message = "Assertion failed"
|
||||
message = 'Assertion failed'
|
||||
) => {
|
||||
let result;
|
||||
let result
|
||||
try {
|
||||
if (typeof expression === "boolean") {
|
||||
result = expression;
|
||||
if (typeof expression === 'boolean') {
|
||||
result = expression
|
||||
} else {
|
||||
result = expression();
|
||||
result = expression()
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(message);
|
||||
throw new Error(message);
|
||||
console.error(message)
|
||||
throw new Error(message)
|
||||
}
|
||||
if (!!result) {
|
||||
return;
|
||||
return
|
||||
} else {
|
||||
console.error(message);
|
||||
throw new Error(message);
|
||||
console.error(message)
|
||||
throw new Error(message)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Context, EditContextInput, ContextAllAttributes } from './types'
|
||||
import { isUrl } from './utils'
|
||||
import { prefixMessage } from '@sasjs/utils/error'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
import { AuthConfig } from '@sasjs/utils/types'
|
||||
|
||||
export class ContextManager {
|
||||
private defaultComputeContexts = [
|
||||
@@ -314,9 +315,7 @@ export class ContextManager {
|
||||
contextId: string,
|
||||
accessToken?: string
|
||||
): Promise<ContextAllAttributes> {
|
||||
const {
|
||||
result: context
|
||||
} = await this.requestClient
|
||||
const { result: context } = await this.requestClient
|
||||
.get<ContextAllAttributes>(
|
||||
`${this.serverUrl}/compute/contexts/${contextId}`,
|
||||
accessToken
|
||||
@@ -330,12 +329,12 @@ export class ContextManager {
|
||||
|
||||
public async getExecutableContexts(
|
||||
executeScript: Function,
|
||||
accessToken?: string
|
||||
authConfig?: AuthConfig
|
||||
) {
|
||||
const { result: contexts } = await this.requestClient
|
||||
.get<{ items: Context[] }>(
|
||||
`${this.serverUrl}/compute/contexts?limit=10000`,
|
||||
accessToken
|
||||
authConfig?.access_token
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while fetching compute contexts.')
|
||||
@@ -352,7 +351,7 @@ export class ContextManager {
|
||||
`test-${context.name}`,
|
||||
linesOfCode,
|
||||
context.name,
|
||||
accessToken,
|
||||
authConfig,
|
||||
null,
|
||||
false,
|
||||
true,
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
import { isUrl } from './utils'
|
||||
import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
|
||||
import { UploadFile } from './types/UploadFile'
|
||||
import { ErrorResponse, LoginRequiredError } from './types'
|
||||
import { ErrorResponse, LoginRequiredError } from './types/errors'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
import { ServerType } from '@sasjs/utils/types'
|
||||
import SASjs from './SASjs'
|
||||
import { Server } from 'https'
|
||||
import { SASjsConfig } from './types'
|
||||
import { config } from 'process'
|
||||
|
||||
export class FileUploader {
|
||||
constructor(
|
||||
private appLoc: string,
|
||||
serverUrl: string,
|
||||
private sasjsConfig: SASjsConfig,
|
||||
private jobsPath: string,
|
||||
private requestClient: RequestClient
|
||||
) {
|
||||
if (serverUrl) isUrl(serverUrl)
|
||||
if (this.sasjsConfig.serverUrl) isUrl(this.sasjsConfig.serverUrl)
|
||||
}
|
||||
|
||||
public uploadFile(sasJob: string, files: UploadFile[], params: any) {
|
||||
@@ -29,8 +33,8 @@ export class FileUploader {
|
||||
}
|
||||
}
|
||||
|
||||
const program = this.appLoc
|
||||
? this.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
|
||||
const program = this.sasjsConfig.appLoc
|
||||
? this.sasjsConfig.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
|
||||
: sasJob
|
||||
const uploadUrl = `${this.jobsPath}/?${
|
||||
'_program=' + program
|
||||
@@ -44,6 +48,12 @@ export class FileUploader {
|
||||
|
||||
const csrfToken = this.requestClient.getCsrfToken('file')
|
||||
if (csrfToken) formData.append('_csrf', csrfToken.value)
|
||||
if (this.sasjsConfig.debug) formData.append('_debug', '131')
|
||||
if (
|
||||
this.sasjsConfig.serverType === ServerType.SasViya &&
|
||||
this.sasjsConfig.contextName
|
||||
)
|
||||
formData.append('_contextname', this.sasjsConfig.contextName)
|
||||
|
||||
const headers = {
|
||||
'cache-control': 'no-cache',
|
||||
@@ -51,11 +61,38 @@ export class FileUploader {
|
||||
'Content-Type': 'text/plain'
|
||||
}
|
||||
|
||||
// currently only web approach is supported for file upload
|
||||
// therefore log is part of response with debug enabled and must be parsed
|
||||
return this.requestClient
|
||||
.post(uploadUrl, formData, undefined, 'application/json', headers)
|
||||
.then((res) =>
|
||||
typeof res.result === 'string' ? JSON.parse(res.result) : res.result
|
||||
.post(
|
||||
uploadUrl,
|
||||
formData,
|
||||
undefined,
|
||||
'application/json',
|
||||
headers,
|
||||
this.sasjsConfig.debug,
|
||||
true,
|
||||
sasJob
|
||||
)
|
||||
.then(async (res) => {
|
||||
if (
|
||||
this.sasjsConfig.serverType === ServerType.SasViya &&
|
||||
this.sasjsConfig.debug
|
||||
) {
|
||||
const jsonResponse = await parseSasViyaDebugResponse(
|
||||
res.result as string,
|
||||
this.requestClient,
|
||||
this.sasjsConfig.serverUrl
|
||||
)
|
||||
return jsonResponse
|
||||
}
|
||||
|
||||
return typeof res.result === 'string'
|
||||
? getValidJson(res.result)
|
||||
: res.result
|
||||
|
||||
//TODO: append to SASjs requests
|
||||
})
|
||||
.catch((err: Error) => {
|
||||
if (err instanceof LoginRequiredError) {
|
||||
return Promise.reject(
|
||||
|
||||
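The comment in this hunk is the behavioural crux: because file upload currently only goes through the web approach, enabling _debug on SAS Viya returns the SAS log with the webout JSON embedded in it, so the JSON must be extracted before it can be returned. A rough illustration of that idea only; the marker strings and helper name below are assumptions, not the adapter's actual parseSasViyaDebugResponse implementation:

// Illustration: assumes the JSON payload sits between two known marker lines
// in the returned log text. The real helper may fetch a separate log URL and
// use different delimiters entirely.
const extractWeboutFromLog = (logText: string): unknown => {
  const begin = logText.indexOf('>>weboutBEGIN<<') // assumed marker
  const end = logText.indexOf('>>weboutEND<<') // assumed marker
  if (begin === -1 || end === -1) {
    throw new Error('No webout content found in the debug log')
  }
  return JSON.parse(logText.slice(begin + '>>weboutBEGIN<<'.length, end).trim())
}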
@@ -1,4 +1,6 @@
|
||||
import axios, { AxiosInstance } from 'axios'
|
||||
import { generateTimestamp } from '@sasjs/utils/time'
|
||||
import * as NodeFormData from 'form-data'
|
||||
import { Sas9RequestClient } from './request/Sas9RequestClient'
|
||||
import { isUrl } from './utils'
|
||||
|
||||
/**
|
||||
@@ -6,11 +8,15 @@ import { isUrl } from './utils'
|
||||
*
|
||||
*/
|
||||
export class SAS9ApiClient {
|
||||
private httpClient: AxiosInstance
|
||||
private requestClient: Sas9RequestClient
|
||||
|
||||
constructor(private serverUrl: string) {
|
||||
constructor(
|
||||
private serverUrl: string,
|
||||
private jobsPath: string,
|
||||
allowInsecureRequests: boolean
|
||||
) {
|
||||
if (serverUrl) isUrl(serverUrl)
|
||||
this.httpClient = axios.create({ baseURL: this.serverUrl })
|
||||
this.requestClient = new Sas9RequestClient(serverUrl, allowInsecureRequests)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -33,27 +39,61 @@ export class SAS9ApiClient {
|
||||
/**
|
||||
* Executes code on a SAS9 server.
|
||||
* @param linesOfCode - an array of code lines to execute.
|
||||
* @param serverName - the server to execute the code on.
|
||||
* @param repositoryName - the repository to execute the code in.
|
||||
* @param userName - the user name to log into the current SAS server.
|
||||
* @param password - the password to log into the current SAS server.
|
||||
*/
|
||||
public async executeScript(
|
||||
linesOfCode: string[],
|
||||
serverName: string,
|
||||
repositoryName: string
|
||||
userName: string,
|
||||
password: string
|
||||
) {
|
||||
const requestPayload = linesOfCode.join('\n')
|
||||
await this.requestClient.login(userName, password, this.jobsPath)
|
||||
|
||||
const executeScriptResponse = await this.httpClient.put(
|
||||
`/sas/servers/${serverName}/cmd?repositoryName=${repositoryName}`,
|
||||
`command=${requestPayload}`,
|
||||
{
|
||||
headers: {
|
||||
Accept: 'application/json'
|
||||
},
|
||||
responseType: 'text'
|
||||
}
|
||||
// This piece of code forces a webout to prevent Stored Process Errors.
|
||||
const forceOutputCode = [
|
||||
'data _null_;',
|
||||
'file _webout;',
|
||||
`put 'Executed sasjs run';`,
|
||||
'run;'
|
||||
]
|
||||
const formData = generateFileUploadForm(
|
||||
[...linesOfCode, ...forceOutputCode].join('\n')
|
||||
)
|
||||
|
||||
return executeScriptResponse.data
|
||||
const codeInjectorPath = `/User Folders/${userName}/My Folder/sasjs/runner`
|
||||
const contentType =
|
||||
'multipart/form-data; boundary=' + formData.getBoundary()
|
||||
const contentLength = formData.getLengthSync()
|
||||
|
||||
const headers = {
|
||||
'cache-control': 'no-cache',
|
||||
Accept: '*/*',
|
||||
'Content-Type': contentType,
|
||||
'Content-Length': contentLength,
|
||||
Connection: 'keep-alive'
|
||||
}
|
||||
const storedProcessUrl = `${this.jobsPath}/?${
|
||||
'_program=' + codeInjectorPath + '&_debug=log'
|
||||
}`
|
||||
const response = await this.requestClient.post(
|
||||
storedProcessUrl,
|
||||
formData,
|
||||
undefined,
|
||||
contentType,
|
||||
headers
|
||||
)
|
||||
|
||||
return response.result as string
|
||||
}
|
||||
}
|
||||
|
||||
const generateFileUploadForm = (data: any): NodeFormData => {
|
||||
const formData = new NodeFormData()
|
||||
const filename = `sasjs-execute-sas9-${generateTimestamp('')}.sas`
|
||||
formData.append(filename, data, {
|
||||
filename,
|
||||
contentType: 'text/plain'
|
||||
})
|
||||
|
||||
return formData
|
||||
}
|
||||
|
||||
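To see the rewritten SAS9 execution flow end to end, here is a hedged usage sketch. It assumes an adapter instance already configured for a SAS9 server; the code lines, user name and password are placeholders, and the call runs inside an async function:

// The injected runner stored process forces a _webout (see the data step in
// the hunk above), so the returned string contains the execution output/log.
const log = await adapter.executeScriptSAS9(
  ['%put Hello from SAS9;', 'data _null_; put "done"; run;'],
  'sasdemo', // userName (placeholder)
  'secret' // password (placeholder)
)
console.log(log)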
src/SASViyaApiClient.spec.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
import { Logger, LogLevel } from '@sasjs/utils/logger'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
import { SASViyaApiClient } from './SASViyaApiClient'
|
||||
import { Folder } from './types'
|
||||
import { RootFolderNotFoundError } from './types/errors'
|
||||
|
||||
const mockFolder: Folder = {
|
||||
id: '1',
|
||||
uri: '/folder',
|
||||
links: [],
|
||||
memberCount: 1
|
||||
}
|
||||
|
||||
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
|
||||
const sasViyaApiClient = new SASViyaApiClient(
|
||||
'https://test.com',
|
||||
'/test',
|
||||
'test context',
|
||||
requestClient
|
||||
)
|
||||
|
||||
describe('SASViyaApiClient', () => {
|
||||
beforeEach(() => {
|
||||
;(process as any).logger = new Logger(LogLevel.Off)
|
||||
setupMocks()
|
||||
})
|
||||
|
||||
it('should throw an error when the root folder is not found on the server', async () => {
|
||||
jest
|
||||
.spyOn(requestClient, 'get')
|
||||
.mockImplementation(() => Promise.reject('Not Found'))
|
||||
const error = await sasViyaApiClient
|
||||
.createFolder('test', '/foo')
|
||||
.catch((e) => e)
|
||||
expect(error).toBeInstanceOf(RootFolderNotFoundError)
|
||||
})
|
||||
})
|
||||
|
||||
const setupMocks = () => {
|
||||
jest
|
||||
.spyOn(requestClient, 'get')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve({ result: mockFolder, etag: '', status: 200 })
|
||||
)
|
||||
|
||||
jest
|
||||
.spyOn(requestClient, 'post')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve({ result: mockFolder, etag: '', status: 200 })
|
||||
)
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
import { convertToCSV, isRelativePath, isUri, isUrl } from './utils'
|
||||
import { isRelativePath, isUri, isUrl } from './utils'
|
||||
import * as NodeFormData from 'form-data'
|
||||
import {
|
||||
Job,
|
||||
@@ -6,22 +6,24 @@ import {
|
||||
Context,
|
||||
ContextAllAttributes,
|
||||
Folder,
|
||||
File,
|
||||
EditContextInput,
|
||||
JobDefinition,
|
||||
PollOptions,
|
||||
ComputeJobExecutionError,
|
||||
JobExecutionError
|
||||
PollOptions
|
||||
} from './types'
|
||||
import { formatDataForRequest } from './utils/formatDataForRequest'
|
||||
import { JobExecutionError, RootFolderNotFoundError } from './types/errors'
|
||||
import { SessionManager } from './SessionManager'
|
||||
import { ContextManager } from './ContextManager'
|
||||
import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
|
||||
import { Logger, LogLevel } from '@sasjs/utils/logger'
|
||||
import { SasAuthResponse, MacroVar, AuthConfig } from '@sasjs/utils/types'
|
||||
import { isAuthorizeFormRequired } from './auth/isAuthorizeFormRequired'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
import { NotFoundError } from './types/NotFoundError'
|
||||
import { SasAuthResponse } from '@sasjs/utils/types'
|
||||
import { prefixMessage } from '@sasjs/utils/error'
|
||||
import { pollJobState } from './api/viya/pollJobState'
|
||||
import { getTokens } from './auth/getTokens'
|
||||
import { uploadTables } from './api/viya/uploadTables'
|
||||
import { executeScript } from './api/viya/executeScript'
|
||||
import { getAccessToken } from './auth/getAccessToken'
|
||||
import { refreshTokens } from './auth/refreshTokens'
|
||||
|
||||
/**
|
||||
* A client for interfacing with the SAS Viya REST API.
|
||||
@@ -120,14 +122,14 @@ export class SASViyaApiClient {
|
||||
|
||||
/**
|
||||
* Returns all compute contexts on this server that the user has access to.
|
||||
* @param accessToken - an access token for an authorized user.
|
||||
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
|
||||
*/
|
||||
public async getExecutableContexts(accessToken?: string) {
|
||||
public async getExecutableContexts(authConfig?: AuthConfig) {
|
||||
const bindedExecuteScript = this.executeScript.bind(this)
|
||||
|
||||
return await this.contextManager.getExecutableContexts(
|
||||
bindedExecuteScript,
|
||||
accessToken
|
||||
authConfig
|
||||
)
|
||||
}
|
||||
|
||||
@@ -157,13 +159,6 @@ export class SASViyaApiClient {
|
||||
throw new Error(`Execution context ${contextName} not found.`)
|
||||
}
|
||||
|
||||
const createSessionRequest = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
}
|
||||
const { result: createdSession } = await this.requestClient.post<Session>(
|
||||
`/compute/contexts/${executionContext.id}/sessions`,
|
||||
{},
|
||||
@@ -256,246 +251,44 @@ export class SASViyaApiClient {
|
||||
* @param jobPath - the path to the file being submitted for execution.
|
||||
* @param linesOfCode - an array of code lines to execute.
|
||||
* @param contextName - the context to execute the code in.
|
||||
* @param accessToken - an access token for an authorized user.
|
||||
* @param authConfig - an object containing an access token, refresh token, client ID and secret.
|
||||
* @param data - execution data.
|
||||
* @param debug - when set to true, the log will be returned.
|
||||
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
|
||||
* @param waitForResult - when set to true, function will return the session
|
||||
* @param pollOptions - an object that represents the poll interval (milliseconds) and the maximum number of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the process ID (PID) of the started job.
|
||||
* @param variables - an object that represents macro variables.
|
||||
*/
|
||||
public async executeScript(
|
||||
jobPath: string,
|
||||
linesOfCode: string[],
|
||||
contextName: string,
|
||||
accessToken?: string,
|
||||
authConfig?: AuthConfig,
|
||||
data = null,
|
||||
debug: boolean = false,
|
||||
expectWebout = false,
|
||||
waitForResult = true,
|
||||
pollOptions?: PollOptions,
|
||||
printPid = false
|
||||
printPid = false,
|
||||
variables?: MacroVar
|
||||
): Promise<any> {
|
||||
try {
|
||||
const headers: any = {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
|
||||
if (accessToken) headers.Authorization = `Bearer ${accessToken}`
|
||||
|
||||
let executionSessionId: string
|
||||
|
||||
const session = await this.sessionManager
|
||||
.getSession(accessToken)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting session. ')
|
||||
})
|
||||
|
||||
executionSessionId = session!.id
|
||||
|
||||
if (printPid) {
|
||||
const { result: jobIdVariable } = await this.sessionManager
|
||||
.getVariable(executionSessionId, 'SYSJOBID', accessToken)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting session variable. ')
|
||||
})
|
||||
|
||||
if (jobIdVariable && jobIdVariable.value) {
|
||||
const relativeJobPath = this.rootFolderName
|
||||
? jobPath.split(this.rootFolderName).join('').replace(/^\//, '')
|
||||
: jobPath
|
||||
|
||||
const logger = new Logger(debug ? LogLevel.Debug : LogLevel.Info)
|
||||
|
||||
logger.info(
|
||||
`Triggered '${relativeJobPath}' with PID ${
|
||||
jobIdVariable.value
|
||||
} at ${timestampToYYYYMMDDHHMMSS()}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const jobArguments: { [key: string]: any } = {
|
||||
_contextName: contextName,
|
||||
_OMITJSONLISTING: true,
|
||||
_OMITJSONLOG: true,
|
||||
_OMITSESSIONRESULTS: true,
|
||||
_OMITTEXTLISTING: true,
|
||||
_OMITTEXTLOG: true
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
jobArguments['_OMITTEXTLOG'] = false
|
||||
jobArguments['_OMITSESSIONRESULTS'] = false
|
||||
jobArguments['_DEBUG'] = 131
|
||||
}
|
||||
|
||||
let fileName
|
||||
|
||||
if (isRelativePath(jobPath)) {
|
||||
fileName = `exec-${
|
||||
jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
|
||||
}`
|
||||
} else {
|
||||
const jobPathParts = jobPath.split('/')
|
||||
fileName = jobPathParts.pop()
|
||||
}
|
||||
|
||||
let jobVariables: any = {
|
||||
SYS_JES_JOB_URI: '',
|
||||
_program: isRelativePath(jobPath)
|
||||
? this.rootFolderName + '/' + jobPath
|
||||
: jobPath
|
||||
}
|
||||
|
||||
let files: any[] = []
|
||||
|
||||
if (data) {
|
||||
if (JSON.stringify(data).includes(';')) {
|
||||
files = await this.uploadTables(data, accessToken).catch((err) => {
|
||||
throw prefixMessage(err, 'Error while uploading tables. ')
|
||||
})
|
||||
|
||||
jobVariables['_webin_file_count'] = files.length
|
||||
|
||||
files.forEach((fileInfo, index) => {
|
||||
jobVariables[
|
||||
`_webin_fileuri${index + 1}`
|
||||
] = `/files/files/${fileInfo.file.id}`
|
||||
jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
|
||||
})
|
||||
} else {
|
||||
jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
|
||||
}
|
||||
}
|
||||
|
||||
// Execute job in session
|
||||
const jobRequestBody = {
|
||||
name: fileName,
|
||||
description: 'Powered by SASjs',
|
||||
code: linesOfCode,
|
||||
variables: jobVariables,
|
||||
arguments: jobArguments
|
||||
}
|
||||
|
||||
const { result: postedJob, etag } = await this.requestClient
|
||||
.post<Job>(
|
||||
`/compute/sessions/${executionSessionId}/jobs`,
|
||||
jobRequestBody,
|
||||
accessToken
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while posting job. ')
|
||||
})
|
||||
|
||||
if (!waitForResult) return session
|
||||
|
||||
if (debug) {
|
||||
console.log(`Job has been submitted for '${fileName}'.`)
|
||||
console.log(
|
||||
`You can monitor the job progress at '${this.serverUrl}${
|
||||
postedJob.links.find((l: any) => l.rel === 'state')!.href
|
||||
}'.`
|
||||
)
|
||||
}
|
||||
|
||||
const jobStatus = await this.pollJobState(
|
||||
postedJob,
|
||||
etag,
|
||||
accessToken,
|
||||
pollOptions
|
||||
).catch((err) => {
|
||||
throw prefixMessage(err, 'Error while polling job status. ')
|
||||
})
|
||||
|
||||
const { result: currentJob } = await this.requestClient
|
||||
.get<Job>(
|
||||
`/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
|
||||
accessToken
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting job. ')
|
||||
})
|
||||
|
||||
let jobResult
|
||||
let log
|
||||
|
||||
const logLink = currentJob.links.find((l) => l.rel === 'log')
|
||||
|
||||
if (debug && logLink) {
|
||||
log = await this.requestClient
|
||||
.get<any>(`${logLink.href}/content?limit=10000`, accessToken)
|
||||
.then((res: any) =>
|
||||
res.result.items.map((i: any) => i.line).join('\n')
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting log. ')
|
||||
})
|
||||
}
|
||||
|
||||
if (jobStatus === 'failed' || jobStatus === 'error') {
|
||||
return Promise.reject(new ComputeJobExecutionError(currentJob, log))
|
||||
}
|
||||
|
||||
let resultLink
|
||||
|
||||
if (expectWebout) {
|
||||
resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
|
||||
} else {
|
||||
return { job: currentJob, log }
|
||||
}
|
||||
|
||||
if (resultLink) {
|
||||
jobResult = await this.requestClient
|
||||
.get<any>(resultLink, accessToken, 'text/plain')
|
||||
.catch(async (e) => {
|
||||
if (e instanceof NotFoundError) {
|
||||
if (logLink) {
|
||||
log = await this.requestClient
|
||||
.get<any>(`${logLink.href}/content?limit=10000`, accessToken)
|
||||
.then((res: any) =>
|
||||
res.result.items.map((i: any) => i.line).join('\n')
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting log. ')
|
||||
})
|
||||
|
||||
return Promise.reject({
|
||||
status: 500,
|
||||
log
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
result: JSON.stringify(e)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
await this.sessionManager
|
||||
.clearSession(executionSessionId, accessToken)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while clearing session. ')
|
||||
})
|
||||
|
||||
return { result: jobResult?.result, log }
|
||||
} catch (e) {
|
||||
if (e && e.status === 404) {
|
||||
return this.executeScript(
|
||||
jobPath,
|
||||
linesOfCode,
|
||||
contextName,
|
||||
accessToken,
|
||||
data,
|
||||
debug,
|
||||
false,
|
||||
true
|
||||
)
|
||||
} else {
|
||||
throw prefixMessage(e, 'Error while executing script. ')
|
||||
}
|
||||
}
|
||||
return executeScript(
|
||||
this.requestClient,
|
||||
this.sessionManager,
|
||||
this.rootFolderName,
|
||||
jobPath,
|
||||
linesOfCode,
|
||||
contextName,
|
||||
authConfig,
|
||||
data,
|
||||
debug,
|
||||
expectWebout,
|
||||
waitForResult,
|
||||
pollOptions,
|
||||
printPid,
|
||||
variables
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -509,6 +302,50 @@ export class SASViyaApiClient {
|
||||
.then((res) => res.result)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file. Path to or URI of the parent folder is required.
|
||||
* @param fileName - the name of the new file.
|
||||
* @param contentBuffer - the content of the new file in Buffer.
|
||||
* @param parentFolderPath - the full path to the parent folder. If not
|
||||
* provided, the parentFolderUri must be provided.
|
||||
* @param parentFolderUri - the URI (eg /folders/folders/UUID) of the parent
|
||||
* folder. If not provided, the parentFolderPath must be provided.
|
||||
* @param accessToken - an access token for authorizing the request.
|
||||
*/
|
||||
public async createFile(
|
||||
fileName: string,
|
||||
contentBuffer: Buffer,
|
||||
parentFolderPath?: string,
|
||||
parentFolderUri?: string,
|
||||
accessToken?: string
|
||||
): Promise<File> {
|
||||
if (!parentFolderPath && !parentFolderUri) {
|
||||
throw new Error('Path or URI of the parent folder is required.')
|
||||
}
|
||||
|
||||
if (!parentFolderUri && parentFolderPath) {
|
||||
parentFolderUri = await this.getFolderUri(parentFolderPath, accessToken)
|
||||
}
|
||||
|
||||
const headers = {
|
||||
Accept: 'application/vnd.sas.file+json',
|
||||
'Content-Disposition': `filename="${fileName}";`
|
||||
}
|
||||
|
||||
const formData = new NodeFormData()
|
||||
formData.append('file', contentBuffer, fileName)
|
||||
|
||||
return (
|
||||
await this.requestClient.post<File>(
|
||||
`/files/files?parentFolderUri=${parentFolderUri}&typeDefName=file#rawUpload`,
|
||||
formData,
|
||||
accessToken,
|
||||
'multipart/form-data; boundary=' + (formData as any)._boundary,
|
||||
headers
|
||||
)
|
||||
).result
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a folder. Path to or URI of the parent folder is required.
|
||||
* @param folderName - the name of the new folder.
|
||||
@@ -526,6 +363,7 @@ export class SASViyaApiClient {
|
||||
accessToken?: string,
|
||||
isForced?: boolean
|
||||
): Promise<Folder> {
|
||||
const logger = process.logger || console
|
||||
if (!parentFolderPath && !parentFolderUri) {
|
||||
throw new Error('Path or URI of the parent folder is required.')
|
||||
}
|
||||
@@ -533,7 +371,7 @@ export class SASViyaApiClient {
|
||||
if (!parentFolderUri && parentFolderPath) {
|
||||
parentFolderUri = await this.getFolderUri(parentFolderPath, accessToken)
|
||||
if (!parentFolderUri) {
|
||||
console.log(
|
||||
logger.info(
|
||||
`Parent folder at path '${parentFolderPath}' is not present.`
|
||||
)
|
||||
|
||||
@@ -543,9 +381,13 @@ export class SASViyaApiClient {
|
||||
)
|
||||
const newFolderName = `${parentFolderPath.split('/').pop()}`
|
||||
if (newParentFolderPath === '') {
|
||||
throw new Error('Root folder has to be present on the server.')
|
||||
throw new RootFolderNotFoundError(
|
||||
parentFolderPath,
|
||||
this.serverUrl,
|
||||
accessToken
|
||||
)
|
||||
}
|
||||
console.log(
|
||||
logger.info(
|
||||
`Creating parent folder:\n'${newFolderName}' in '${newParentFolderPath}'`
|
||||
)
|
||||
const parentFolder = await this.createFolder(
|
||||
@@ -554,7 +396,7 @@ export class SASViyaApiClient {
|
||||
undefined,
|
||||
accessToken
|
||||
)
|
||||
console.log(
|
||||
logger.info(
|
||||
`Parent folder '${newFolderName}' has been successfully created.`
|
||||
)
|
||||
parentFolderUri = `/folders/folders/${parentFolder.id}`
|
||||
@@ -571,16 +413,15 @@ export class SASViyaApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
const {
|
||||
result: createFolderResponse
|
||||
} = await this.requestClient.post<Folder>(
|
||||
`/folders/folders?parentFolderUri=${parentFolderUri}`,
|
||||
{
|
||||
name: folderName,
|
||||
type: 'folder'
|
||||
},
|
||||
accessToken
|
||||
)
|
||||
const { result: createFolderResponse } =
|
||||
await this.requestClient.post<Folder>(
|
||||
`/folders/folders?parentFolderUri=${parentFolderUri}`,
|
||||
{
|
||||
name: folderName,
|
||||
type: 'folder'
|
||||
},
|
||||
accessToken
|
||||
)
|
||||
|
||||
// update folder map with newly created folder.
|
||||
await this.populateFolderMap(
|
||||
@@ -683,39 +524,7 @@ export class SASViyaApiClient {
|
||||
clientSecret: string,
|
||||
authCode: string
|
||||
): Promise<SasAuthResponse> {
|
||||
const url = this.serverUrl + '/SASLogon/oauth/token'
|
||||
let token
|
||||
if (typeof Buffer === 'undefined') {
|
||||
token = btoa(clientId + ':' + clientSecret)
|
||||
} else {
|
||||
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
|
||||
}
|
||||
const headers = {
|
||||
Authorization: 'Basic ' + token
|
||||
}
|
||||
|
||||
let formData
|
||||
if (typeof FormData === 'undefined') {
|
||||
formData = new NodeFormData()
|
||||
formData.append('grant_type', 'authorization_code')
|
||||
formData.append('code', authCode)
|
||||
} else {
|
||||
formData = new FormData()
|
||||
formData.append('grant_type', 'authorization_code')
|
||||
formData.append('code', authCode)
|
||||
}
|
||||
|
||||
const authResponse = await this.requestClient
|
||||
.post(
|
||||
url,
|
||||
formData,
|
||||
undefined,
|
||||
'multipart/form-data; boundary=' + (formData as any)._boundary,
|
||||
headers
|
||||
)
|
||||
.then((res) => res.result as SasAuthResponse)
|
||||
|
||||
return authResponse
|
||||
return getAccessToken(this.requestClient, clientId, clientSecret, authCode)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -729,39 +538,12 @@ export class SASViyaApiClient {
|
||||
clientSecret: string,
|
||||
refreshToken: string
|
||||
) {
|
||||
const url = this.serverUrl + '/SASLogon/oauth/token'
|
||||
let token
|
||||
if (typeof Buffer === 'undefined') {
|
||||
token = btoa(clientId + ':' + clientSecret)
|
||||
} else {
|
||||
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
|
||||
}
|
||||
const headers = {
|
||||
Authorization: 'Basic ' + token
|
||||
}
|
||||
|
||||
let formData
|
||||
if (typeof FormData === 'undefined') {
|
||||
formData = new NodeFormData()
|
||||
formData.append('grant_type', 'refresh_token')
|
||||
formData.append('refresh_token', refreshToken)
|
||||
} else {
|
||||
formData = new FormData()
|
||||
formData.append('grant_type', 'refresh_token')
|
||||
formData.append('refresh_token', refreshToken)
|
||||
}
|
||||
|
||||
const authResponse = await this.requestClient
|
||||
.post<SasAuthResponse>(
|
||||
url,
|
||||
formData,
|
||||
undefined,
|
||||
'multipart/form-data; boundary=' + (formData as any)._boundary,
|
||||
headers
|
||||
)
|
||||
.then((res) => res.result)
|
||||
|
||||
return authResponse
|
||||
return refreshTokens(
|
||||
this.requestClient,
|
||||
clientId,
|
||||
clientSecret,
|
||||
refreshToken
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -792,18 +574,25 @@ export class SASViyaApiClient {
|
||||
* @param expectWebout - a boolean indicating whether to expect a _webout response.
|
||||
* @param pollOptions - an object that represents the poll interval (milliseconds) and the maximum number of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the process ID (PID) of the started job.
|
||||
* @param variables - an object that represents macro variables.
|
||||
*/
|
||||
public async executeComputeJob(
|
||||
sasJob: string,
|
||||
contextName: string,
|
||||
debug?: boolean,
|
||||
data?: any,
|
||||
accessToken?: string,
|
||||
authConfig?: AuthConfig,
|
||||
waitForResult = true,
|
||||
expectWebout = false,
|
||||
pollOptions?: PollOptions,
|
||||
printPid = false
|
||||
printPid = false,
|
||||
variables?: MacroVar
|
||||
) {
|
||||
let access_token = (authConfig || {}).access_token
|
||||
if (authConfig) {
|
||||
;({ access_token } = await getTokens(this.requestClient, authConfig))
|
||||
}
|
||||
|
||||
if (isRelativePath(sasJob) && !this.rootFolderName) {
|
||||
throw new Error(
|
||||
'Relative paths cannot be used without specifying a root folder name'
|
||||
@@ -817,7 +606,7 @@ export class SASViyaApiClient {
|
||||
? `${this.rootFolderName}/${folderPath}`
|
||||
: folderPath
|
||||
|
||||
await this.populateFolderMap(fullFolderPath, accessToken).catch((err) => {
|
||||
await this.populateFolderMap(fullFolderPath, access_token).catch((err) => {
|
||||
throw prefixMessage(err, 'Error while populating folder map. ')
|
||||
})
|
||||
|
||||
@@ -829,12 +618,6 @@ export class SASViyaApiClient {
|
||||
)
|
||||
}
|
||||
|
||||
const headers: any = { 'Content-Type': 'application/json' }
|
||||
|
||||
if (!!accessToken) {
|
||||
headers.Authorization = `Bearer ${accessToken}`
|
||||
}
|
||||
|
||||
const jobToExecute = jobFolder?.find((item) => item.name === jobName)
|
||||
|
||||
if (!jobToExecute) {
|
||||
@@ -852,12 +635,10 @@ export class SASViyaApiClient {
|
||||
throw new Error(`URI of job definition was not found.`)
|
||||
}
|
||||
|
||||
const {
|
||||
result: jobDefinition
|
||||
} = await this.requestClient
|
||||
const { result: jobDefinition } = await this.requestClient
|
||||
.get<JobDefinition>(
|
||||
`${this.serverUrl}${jobDefinitionLink.href}`,
|
||||
accessToken
|
||||
access_token
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting job definition. ')
|
||||
@@ -877,13 +658,14 @@ export class SASViyaApiClient {
|
||||
sasJob,
|
||||
linesToExecute,
|
||||
contextName,
|
||||
accessToken,
|
||||
authConfig,
|
||||
data,
|
||||
debug,
|
||||
expectWebout,
|
||||
waitForResult,
|
||||
pollOptions,
|
||||
printPid
|
||||
printPid,
|
||||
variables
|
||||
)
|
||||
}
|
||||
|
||||
@@ -900,8 +682,12 @@ export class SASViyaApiClient {
|
||||
contextName: string,
|
||||
debug: boolean,
|
||||
data?: any,
|
||||
accessToken?: string
|
||||
authConfig?: AuthConfig
|
||||
) {
|
||||
let access_token = (authConfig || {}).access_token
|
||||
if (authConfig) {
|
||||
;({ access_token } = await getTokens(this.requestClient, authConfig))
|
||||
}
|
||||
if (isRelativePath(sasJob) && !this.rootFolderName) {
|
||||
throw new Error(
|
||||
'Relative paths cannot be used without specifying a root folder name.'
|
||||
@@ -914,7 +700,7 @@ export class SASViyaApiClient {
|
||||
const fullFolderPath = isRelativePath(sasJob)
|
||||
? `${this.rootFolderName}/${folderPath}`
|
||||
: folderPath
|
||||
await this.populateFolderMap(fullFolderPath, accessToken)
|
||||
await this.populateFolderMap(fullFolderPath, access_token)
|
||||
|
||||
const jobFolder = this.folderMap.get(fullFolderPath)
|
||||
if (!jobFolder) {
|
||||
@@ -927,7 +713,7 @@ export class SASViyaApiClient {
|
||||
|
||||
let files: any[] = []
|
||||
if (data && Object.keys(data).length) {
|
||||
files = await this.uploadTables(data, accessToken)
|
||||
files = await this.uploadTables(data, access_token)
|
||||
}
|
||||
|
||||
if (!jobToExecute) {
|
||||
@@ -939,7 +725,7 @@ export class SASViyaApiClient {
|
||||
|
||||
const { result: jobDefinition } = await this.requestClient.get<Job>(
|
||||
`${this.serverUrl}${jobDefinitionLink}`,
|
||||
accessToken
|
||||
access_token
|
||||
)
|
||||
|
||||
const jobArguments: { [key: string]: any } = {
|
||||
@@ -972,21 +758,19 @@ export class SASViyaApiClient {
|
||||
jobDefinition,
|
||||
arguments: jobArguments
|
||||
}
|
||||
const { result: postedJob, etag } = await this.requestClient.post<Job>(
|
||||
const { result: postedJob } = await this.requestClient.post<Job>(
|
||||
`${this.serverUrl}/jobExecution/jobs?_action=wait`,
|
||||
postJobRequestBody,
|
||||
accessToken
|
||||
access_token
|
||||
)
|
||||
const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
|
||||
(err) => {
|
||||
throw prefixMessage(err, 'Error while polling job status. ')
|
||||
}
|
||||
)
|
||||
const jobStatus = await this.pollJobState(
|
||||
postedJob,
|
||||
etag,
|
||||
accessToken
|
||||
).catch((err) => {
|
||||
throw prefixMessage(err, 'Error while polling job status. ')
|
||||
})
|
||||
const { result: currentJob } = await this.requestClient.get<Job>(
|
||||
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
|
||||
accessToken
|
||||
access_token
|
||||
)
|
||||
|
||||
let jobResult
|
||||
@@ -997,13 +781,25 @@ export class SASViyaApiClient {
|
||||
if (resultLink) {
|
||||
jobResult = await this.requestClient.get<any>(
|
||||
`${this.serverUrl}${resultLink}/content`,
|
||||
accessToken,
|
||||
'text/plain'
|
||||
access_token,
|
||||
'text/plain',
|
||||
{},
|
||||
debug,
|
||||
true,
|
||||
sasJob
|
||||
)
|
||||
}
|
||||
if (debug && logLink) {
|
||||
log = await this.requestClient
|
||||
.get<any>(`${this.serverUrl}${logLink.href}/content`, accessToken)
|
||||
.get<any>(
|
||||
`${this.serverUrl}${logLink.href}/content`,
|
||||
access_token,
|
||||
'application/json',
|
||||
{},
|
||||
debug,
|
||||
true,
|
||||
sasJob
|
||||
)
|
||||
.then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
|
||||
}
|
||||
if (jobStatus === 'failed') {
|
||||
@@ -1049,122 +845,22 @@ export class SASViyaApiClient {
|
||||
this.folderMap.set(path, itemsAtRoot)
|
||||
}
|
||||
|
||||
// REFACTOR: set default value for 'pollOptions' attribute
|
||||
private async pollJobState(
|
||||
postedJob: any,
|
||||
etag: string | null,
|
||||
accessToken?: string,
|
||||
postedJob: Job,
|
||||
authConfig?: AuthConfig,
|
||||
pollOptions?: PollOptions
|
||||
) {
|
||||
let POLL_INTERVAL = 300
|
||||
let MAX_POLL_COUNT = 1000
|
||||
|
||||
if (pollOptions) {
|
||||
POLL_INTERVAL = pollOptions.POLL_INTERVAL || POLL_INTERVAL
|
||||
MAX_POLL_COUNT = pollOptions.MAX_POLL_COUNT || MAX_POLL_COUNT
|
||||
}
|
||||
|
||||
let postedJobState = ''
|
||||
let pollCount = 0
|
||||
const headers: any = {
|
||||
'Content-Type': 'application/json',
|
||||
'If-None-Match': etag
|
||||
}
|
||||
if (accessToken) {
|
||||
headers.Authorization = `Bearer ${accessToken}`
|
||||
}
|
||||
const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
|
||||
if (!stateLink) {
|
||||
Promise.reject(`Job state link was not found.`)
|
||||
}
|
||||
|
||||
const { result: state } = await this.requestClient
|
||||
.get<string>(
|
||||
`${this.serverUrl}${stateLink.href}?_action=wait&wait=30`,
|
||||
accessToken,
|
||||
'text/plain'
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting job state. ')
|
||||
})
|
||||
|
||||
const currentState = state.trim()
|
||||
if (currentState === 'completed') {
|
||||
return Promise.resolve(currentState)
|
||||
}
|
||||
|
||||
return new Promise(async (resolve, _) => {
|
||||
let printedState = ''
|
||||
|
||||
const interval = setInterval(async () => {
|
||||
if (
|
||||
postedJobState === 'running' ||
|
||||
postedJobState === '' ||
|
||||
postedJobState === 'pending'
|
||||
) {
|
||||
if (stateLink) {
|
||||
const { result: jobState } = await this.requestClient
|
||||
.get<string>(
|
||||
`${this.serverUrl}${stateLink.href}?_action=wait&wait=30`,
|
||||
accessToken,
|
||||
'text/plain'
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(
|
||||
err,
|
||||
'Error while getting job state after interval. '
|
||||
)
|
||||
})
|
||||
|
||||
postedJobState = jobState.trim()
|
||||
|
||||
if (this.debug && printedState !== postedJobState) {
|
||||
console.log('Polling job status...')
|
||||
console.log(`Current job state: ${postedJobState}`)
|
||||
|
||||
printedState = postedJobState
|
||||
}
|
||||
|
||||
pollCount++
|
||||
|
||||
if (pollCount >= MAX_POLL_COUNT) {
|
||||
resolve(postedJobState)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
clearInterval(interval)
|
||||
resolve(postedJobState)
|
||||
}
|
||||
}, POLL_INTERVAL)
|
||||
})
|
||||
return pollJobState(
|
||||
this.requestClient,
|
||||
postedJob,
|
||||
this.debug,
|
||||
authConfig,
|
||||
pollOptions
|
||||
)
|
||||
}
|
||||
|
||||
private async uploadTables(data: any, accessToken?: string) {
|
||||
const uploadedFiles = []
|
||||
const headers: any = {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
if (accessToken) {
|
||||
headers.Authorization = `Bearer ${accessToken}`
|
||||
}
|
||||
|
||||
for (const tableName in data) {
|
||||
const csv = convertToCSV(data[tableName])
|
||||
if (csv === 'ERROR: LARGE STRING LENGTH') {
|
||||
throw new Error(
|
||||
'The max length of a string value in SASjs is 32765 characters.'
|
||||
)
|
||||
}
|
||||
|
||||
const uploadResponse = await this.requestClient
|
||||
.uploadFile(`${this.serverUrl}/files/files#rawUpload`, csv, accessToken)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while uploading file. ')
|
||||
})
|
||||
|
||||
uploadedFiles.push({ tableName, file: uploadResponse.result })
|
||||
}
|
||||
return uploadedFiles
|
||||
return uploadTables(this.requestClient, data, accessToken)
|
||||
}
|
||||
|
||||
private async getFolderDetails(
|
||||
@@ -1253,14 +949,6 @@ export class SASViyaApiClient {
|
||||
? sourceFolder
|
||||
: await this.getFolderUri(sourceFolder, accessToken)
|
||||
|
||||
const requestInfo = {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: 'Bearer ' + accessToken
|
||||
}
|
||||
}
|
||||
|
||||
const { result: members } = await this.requestClient.get<{ items: any[] }>(
|
||||
`${this.serverUrl}${sourceFolderUri}/members?limit=${limit}`,
|
||||
accessToken
|
||||
@@ -1310,6 +998,9 @@ export class SASViyaApiClient {
|
||||
accessToken
|
||||
)
|
||||
|
||||
if (!sourceFolderUri) {
|
||||
return undefined
|
||||
}
|
||||
const sourceFolderId = sourceFolderUri?.split('/').pop()
|
||||
|
||||
const { result: folder } = await this.requestClient
|
||||
|
||||
src/SASjs.ts (323 changed lines)
@@ -4,14 +4,21 @@ import { SASViyaApiClient } from './SASViyaApiClient'
|
||||
import { SAS9ApiClient } from './SAS9ApiClient'
|
||||
import { FileUploader } from './FileUploader'
|
||||
import { AuthManager } from './auth'
|
||||
import { ServerType } from '@sasjs/utils/types'
|
||||
import {
|
||||
ServerType,
|
||||
MacroVar,
|
||||
AuthConfig,
|
||||
ExtraResponseAttributes
|
||||
} from '@sasjs/utils/types'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
import {
|
||||
JobExecutor,
|
||||
WebJobExecutor,
|
||||
ComputeJobExecutor,
|
||||
JesJobExecutor
|
||||
JesJobExecutor,
|
||||
Sas9JobExecutor
|
||||
} from './job-execution'
|
||||
import { ErrorResponse } from './types/errors'
|
||||
|
||||
const defaultConfig: SASjsConfig = {
|
||||
serverUrl: '',
|
||||
@@ -21,7 +28,7 @@ const defaultConfig: SASjsConfig = {
|
||||
serverType: ServerType.SasViya,
|
||||
debug: false,
|
||||
contextName: 'SAS Job Execution compute context',
|
||||
useComputeApi: false,
|
||||
useComputeApi: null,
|
||||
allowInsecureRequests: false
|
||||
}
|
||||
|
||||
@@ -40,8 +47,10 @@ export default class SASjs {
|
||||
private webJobExecutor: JobExecutor | null = null
|
||||
private computeJobExecutor: JobExecutor | null = null
|
||||
private jesJobExecutor: JobExecutor | null = null
|
||||
private sas9JobExecutor: JobExecutor | null = null
|
||||
|
||||
constructor(config?: any) {
|
||||
console.log('from SASjs constructor')
|
||||
this.sasjsConfig = {
|
||||
...defaultConfig,
|
||||
...config
|
||||
@@ -56,15 +65,15 @@ export default class SASjs {
|
||||
|
||||
public async executeScriptSAS9(
|
||||
linesOfCode: string[],
|
||||
serverName: string,
|
||||
repositoryName: string
|
||||
userName: string,
|
||||
password: string
|
||||
) {
|
||||
this.isMethodSupported('executeScriptSAS9', ServerType.Sas9)
|
||||
|
||||
return await this.sas9ApiClient?.executeScript(
|
||||
linesOfCode,
|
||||
serverName,
|
||||
repositoryName
|
||||
userName,
|
||||
password
|
||||
)
|
||||
}
|
||||
|
||||
@@ -99,12 +108,12 @@ export default class SASjs {
|
||||
|
||||
/**
|
||||
* Gets executable compute contexts.
|
||||
* @param accessToken - an access token for an authorized user.
|
||||
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
|
||||
*/
|
||||
public async getExecutableContexts(accessToken: string) {
|
||||
public async getExecutableContexts(authConfig: AuthConfig) {
|
||||
this.isMethodSupported('getExecutableContexts', ServerType.SasViya)
|
||||
|
||||
return await this.sasViyaApiClient!.getExecutableContexts(accessToken)
|
||||
return await this.sasViyaApiClient!.getExecutableContexts(authConfig)
|
||||
}
|
||||
|
||||
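Because getExecutableContexts now accepts an AuthConfig rather than a bare access token, a call site looks roughly like the sketch below, assuming sasjs is an instance of this adapter. Only access_token is confirmed by this diff; the remaining field names follow the @sasjs/utils AuthConfig type and should be treated as assumptions:

const contexts = await sasjs.getExecutableContexts({
  access_token: '...', // placeholder token
  refresh_token: '...', // assumed field name
  client: 'myClientId', // assumed field name
  secret: 'myClientSecret' // assumed field name
})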
/**
|
||||
@@ -236,14 +245,14 @@ export default class SASjs {
|
||||
* @param fileName - name of the file to run. It will be converted to the path of the file being submitted for execution.
|
||||
* @param linesOfCode - lines of sas code from the file to run.
|
||||
* @param contextName - context name on which code will be run on the server.
|
||||
* @param accessToken - (optional) the access token for authorizing the request.
|
||||
* @param authConfig - (optional) the access token, refresh token, client and secret for authorizing the request.
|
||||
* @param debug - (optional) if true, the global debug config will be overridden
|
||||
*/
|
||||
public async executeScriptSASViya(
|
||||
fileName: string,
|
||||
linesOfCode: string[],
|
||||
contextName: string,
|
||||
accessToken?: string,
|
||||
authConfig?: AuthConfig,
|
||||
debug?: boolean
|
||||
) {
|
||||
this.isMethodSupported('executeScriptSASViya', ServerType.SasViya)
|
||||
@@ -257,14 +266,14 @@ export default class SASjs {
|
||||
fileName,
|
||||
linesOfCode,
|
||||
contextName,
|
||||
accessToken,
|
||||
authConfig,
|
||||
null,
|
||||
debug ? debug : this.sasjsConfig.debug
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a folder at SAS file system.
|
||||
* Creates a folder in the logical SAS folder tree
|
||||
* @param folderName - name of the folder to be created.
|
||||
* @param parentFolderPath - the full path (eg `/Public/example/myFolder`) of the parent folder.
|
||||
* @param parentFolderUri - the URI of the parent folder.
|
||||
@@ -296,6 +305,40 @@ export default class SASjs {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a file in the logical SAS folder tree
|
||||
* @param fileName - name of the file to be created.
|
||||
* @param content - content of the file to be created.
|
||||
* @param parentFolderPath - the full path (eg `/Public/example/myFolder`) of the parent folder.
|
||||
* @param parentFolderUri - the URI of the parent folder.
|
||||
* @param accessToken - the access token for authorizing the request.
|
||||
* @param sasApiClient - a client for interfacing with SAS API.
|
||||
*/
|
||||
public async createFile(
|
||||
fileName: string,
|
||||
content: Buffer,
|
||||
parentFolderPath: string,
|
||||
parentFolderUri?: string,
|
||||
accessToken?: string,
|
||||
sasApiClient?: SASViyaApiClient
|
||||
) {
|
||||
if (sasApiClient)
|
||||
return await sasApiClient.createFile(
|
||||
fileName,
|
||||
content,
|
||||
parentFolderPath,
|
||||
parentFolderUri,
|
||||
accessToken
|
||||
)
|
||||
return await this.sasViyaApiClient!.createFile(
|
||||
fileName,
|
||||
content,
|
||||
parentFolderPath,
|
||||
parentFolderUri,
|
||||
accessToken
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a folder from the SAS file system.
|
||||
* @param folderPath - path of the folder to be fetched.
|
||||
@@ -502,16 +545,22 @@ export default class SASjs {
|
||||
* Process). Is prepended at runtime with the value of `appLoc`.
|
||||
* @param files - array of files to be uploaded, including File object and file name.
|
||||
* @param params - request URL parameters.
|
||||
* @param overrideSasjsConfig - object to override existing config (optional)
|
||||
*/
|
||||
public uploadFile(sasJob: string, files: UploadFile[], params: any) {
|
||||
const fileUploader =
|
||||
this.fileUploader ||
|
||||
new FileUploader(
|
||||
this.sasjsConfig.appLoc,
|
||||
this.sasjsConfig.serverUrl,
|
||||
this.jobsPath,
|
||||
this.requestClient!
|
||||
)
|
||||
public uploadFile(
|
||||
sasJob: string,
|
||||
files: UploadFile[],
|
||||
params: any,
|
||||
overrideSasjsConfig?: any
|
||||
) {
|
||||
const fileUploader = overrideSasjsConfig
|
||||
? new FileUploader(
|
||||
{ ...this.sasjsConfig, ...overrideSasjsConfig },
|
||||
this.jobsPath,
|
||||
this.requestClient!
|
||||
)
|
||||
: this.fileUploader ||
|
||||
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
|
||||
|
||||
return fileUploader.uploadFile(sasJob, files, params)
|
||||
}
|
||||
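A quick sketch of the new per-call override in use, assuming sasjs is an instance of this adapter. The job path, file contents and parameter names are placeholders, and the UploadFile field names (file, fileName) are assumptions based on the surrounding description:

await sasjs.uploadFile(
  'common/uploadJob', // placeholder Stored Process path, prefixed with appLoc at runtime
  [{ file: new File(['a,b\n1,2'], 'data.csv'), fileName: 'data.csv' }], // browser File object
  { someParam: 'value' },
  { debug: true, contextName: 'My compute context' } // overrides applied to this call only
)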
@@ -530,47 +579,143 @@ export default class SASjs {
|
||||
* @param config - provide any changes to the config here, for instance to
|
||||
* enable/disable `debug`. Any change provided will override the global config,
|
||||
* for that particular function call.
|
||||
* @param loginRequiredCallback - provide a function here to be called if the
|
||||
* @param loginRequiredCallback - a function that is called if the
|
||||
* user is not logged in (eg to display a login form). The request will be
|
||||
* resubmitted after logon.
|
||||
* resubmitted after successful login.
|
||||
* When using a `loginRequiredCallback`, the call to the request will look, for example, like so:
|
||||
* `await request(sasJobPath, data, config, () => setIsLoggedIn(false))`
|
||||
* If you are not passing in any data and configuration, it will look like so:
|
||||
* `await request(sasJobPath, {}, {}, () => setIsLoggedIn(false))`
|
||||
* @param extraResponseAttributes - an array of predefined values that are used
* to provide extra attributes (with the same names as those values) to be added to the response.
* Supported values are declared in the ExtraResponseAttributes type.
|
||||
*/
|
||||
public async request(
|
||||
sasJob: string,
|
||||
data: any,
|
||||
config: any = {},
|
||||
loginRequiredCallback?: any,
|
||||
accessToken?: string
|
||||
data: { [key: string]: any } | null,
|
||||
config: { [key: string]: any } = {},
|
||||
loginRequiredCallback?: () => any,
|
||||
authConfig?: AuthConfig,
|
||||
extraResponseAttributes: ExtraResponseAttributes[] = []
|
||||
) {
|
||||
config = {
|
||||
...this.sasjsConfig,
|
||||
...config
|
||||
}
|
||||
|
||||
if (config.serverType === ServerType.SasViya && config.contextName) {
|
||||
if (config.useComputeApi) {
|
||||
return await this.computeJobExecutor!.execute(
|
||||
sasJob,
|
||||
data,
|
||||
config,
|
||||
loginRequiredCallback,
|
||||
accessToken
|
||||
)
|
||||
const validationResult = this.validateInput(data)
|
||||
|
||||
if (validationResult.status) {
|
||||
if (
|
||||
config.serverType !== ServerType.Sas9 &&
|
||||
config.useComputeApi !== undefined &&
|
||||
config.useComputeApi !== null
|
||||
) {
|
||||
if (config.useComputeApi) {
|
||||
console.log(615)
|
||||
return await this.computeJobExecutor!.execute(
|
||||
sasJob,
|
||||
data,
|
||||
config,
|
||||
loginRequiredCallback,
|
||||
authConfig
|
||||
)
|
||||
} else {
|
||||
if (!config.contextName)
|
||||
config = {
|
||||
...config,
|
||||
contextName: 'SAS Job Execution compute context'
|
||||
}
|
||||
return await this.jesJobExecutor!.execute(
|
||||
sasJob,
|
||||
data,
|
||||
config,
|
||||
loginRequiredCallback,
|
||||
authConfig,
|
||||
extraResponseAttributes
|
||||
)
|
||||
}
|
||||
} else if (
|
||||
config.serverType === ServerType.Sas9 &&
|
||||
config.username &&
|
||||
config.password
|
||||
) {
|
||||
return await this.sas9JobExecutor!.execute(sasJob, data, config)
|
||||
} else {
|
||||
return await this.jesJobExecutor!.execute(
|
||||
return await this.webJobExecutor!.execute(
|
||||
sasJob,
|
||||
data,
|
||||
config,
|
||||
loginRequiredCallback,
|
||||
accessToken
|
||||
authConfig,
|
||||
extraResponseAttributes
|
||||
)
|
||||
}
|
||||
} else {
|
||||
return await this.webJobExecutor!.execute(
|
||||
sasJob,
|
||||
data,
|
||||
config,
|
||||
loginRequiredCallback
|
||||
)
|
||||
return Promise.reject(new ErrorResponse(validationResult.msg))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This function validates the input data structure and table naming convention
|
||||
*
|
||||
* @param data - a JSON object that contains one or more tables; it can also be null
|
||||
* @returns An object which contains two attributes: 1) status: boolean, 2) msg: string
|
||||
*/
|
||||
private validateInput(data: { [key: string]: any } | null): {
|
||||
status: boolean
|
||||
msg: string
|
||||
} {
|
||||
if (data === null) return { status: true, msg: '' }
|
||||
for (const key in data) {
|
||||
if (!key.match(/^[a-zA-Z_]/)) {
|
||||
return {
|
||||
status: false,
|
||||
msg: 'First letter of table should be alphabet or underscore.'
|
||||
}
|
||||
}
|
||||
|
||||
if (!key.match(/^[a-zA-Z_][a-zA-Z0-9_]*$/)) {
|
||||
return { status: false, msg: 'Table name should be alphanumeric.' }
|
||||
}
|
||||
|
||||
if (key.length > 32) {
|
||||
return {
|
||||
status: false,
|
||||
msg: 'Maximum length for table name could be 32 characters.'
|
||||
}
|
||||
}
|
||||
|
||||
if (this.getType(data[key]) !== 'Array') {
|
||||
return {
|
||||
status: false,
|
||||
msg: 'Parameter data contains invalid table structure.'
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < data[key].length; i++) {
|
||||
if (this.getType(data[key][i]) !== 'object') {
|
||||
return {
|
||||
status: false,
|
||||
msg: `Table ${key} contains invalid structure.`
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return { status: true, msg: '' }
|
||||
}
|
||||
|
||||
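The rules enforced above (name starts with a letter or underscore, alphanumeric characters only, at most 32 characters, and each table is an array of row objects) are easiest to read off an example payload; the table and column names below are invented:

// Passes validation: valid table name, rows are plain objects
const good = { mytable: [{ col1: 42, col2: 'x' }] }

// Fails: table name starts with a digit
const badName = { '1table': [{ col1: 42 }] }

// Fails: the table value is not an array of row objects
const badShape = { mytable: 'not-a-table' }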
/**
|
||||
* This function returns the type of a variable.
*
* @param data - can be anything: a string, array, object, etc.
* @returns a string describing the type of the input parameter
|
||||
*/
|
||||
private getType(data: any): string {
|
||||
if (Array.isArray(data)) {
|
||||
return 'Array'
|
||||
} else {
|
||||
return typeof data
|
||||
}
|
||||
}
|
||||
|
||||
@@ -611,7 +756,11 @@ export default class SASjs {
|
||||
)
|
||||
sasApiClient.debug = this.sasjsConfig.debug
|
||||
} else if (this.sasjsConfig.serverType === ServerType.Sas9) {
|
||||
sasApiClient = new SAS9ApiClient(serverUrl)
|
||||
sasApiClient = new SAS9ApiClient(
|
||||
serverUrl,
|
||||
this.jobsPath,
|
||||
this.sasjsConfig.allowInsecureRequests
|
||||
)
|
||||
}
|
||||
} else {
|
||||
let sasClientConfig: any = null
|
||||
@@ -653,20 +802,22 @@ export default class SASjs {
|
||||
* @param config - provide any changes to the config here, for instance to
|
||||
* enable/disable `debug`. Any change provided will override the global config,
|
||||
* for that particular function call.
|
||||
* @param accessToken - a valid access token that is authorised to execute compute jobs.
|
||||
* @param authConfig - a valid client, secret, refresh and access tokens that are authorised to execute compute jobs.
|
||||
* The access token is not required when the user is authenticated via the browser.
|
||||
* @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
|
||||
* @param pollOptions - an object that represents the poll interval (milliseconds) and the maximum number of attempts. Object example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the process ID (PID) of the started job.
|
||||
* @param variables - an object that represents macro variables.
|
||||
*/
|
||||
public async startComputeJob(
|
||||
sasJob: string,
|
||||
data: any,
|
||||
config: any = {},
|
||||
accessToken?: string,
|
||||
authConfig?: AuthConfig,
|
||||
waitForResult?: boolean,
|
||||
pollOptions?: PollOptions,
|
||||
printPid = false
|
||||
printPid = false,
|
||||
variables?: MacroVar
|
||||
) {
|
||||
config = {
|
||||
...this.sasjsConfig,
|
||||
@@ -685,11 +836,12 @@ export default class SASjs {
|
||||
config.contextName,
|
||||
config.debug,
|
||||
data,
|
||||
accessToken,
|
||||
authConfig,
|
||||
!!waitForResult,
|
||||
false,
|
||||
pollOptions,
|
||||
printPid
|
||||
printPid,
|
||||
variables
|
||||
)
|
||||
}
|
||||
|
||||
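With the widened signature above, a server-side call to startComputeJob now threads through an AuthConfig and optional macro variables. A hedged sketch, assuming sasjs is an instance of this adapter and authConfig is built elsewhere; the job path, data, context name and macro variable are placeholders:

const result = await sasjs.startComputeJob(
  '/Public/app/services/myJob', // placeholder job path
  { intable: [{ col1: 1 }] }, // input table
  { contextName: 'SAS Studio compute context' }, // per-call config override
  authConfig, // AuthConfig with access/refresh tokens, client and secret
  false, // waitForResult
  { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }, // pollOptions
  true, // printPid
  { MYMACRO: 'value' } // macro variables (MacroVar)
)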
@@ -705,25 +857,43 @@ export default class SASjs {
|
||||
* @param accessToken - an access token for an authorized user.
|
||||
*/
|
||||
public async fetchLogFileContent(logUrl: string, accessToken?: string) {
|
||||
return await this.requestClient!.get(logUrl, accessToken).then((res) =>
|
||||
JSON.stringify(res.result)
|
||||
)
|
||||
return await this.requestClient!.get(logUrl, accessToken).then((res) => {
|
||||
if (!res)
|
||||
return Promise.reject(
|
||||
new ErrorResponse(
|
||||
'Error while fetching log. Response was not provided.'
|
||||
)
|
||||
)
|
||||
|
||||
try {
|
||||
const result = JSON.stringify(res.result)
|
||||
|
||||
return result
|
||||
} catch (err) {
|
||||
return Promise.reject(
|
||||
new ErrorResponse(
|
||||
'Error while fetching log. The result is not valid.',
|
||||
err
|
||||
)
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* This method returns an array of SASjsRequest.
|
||||
* @returns SASjsRequest[]
|
||||
*/
|
||||
public getSasRequests() {
|
||||
const requests = [
|
||||
...this.webJobExecutor!.getRequests(),
|
||||
...this.computeJobExecutor!.getRequests(),
|
||||
...this.jesJobExecutor!.getRequests()
|
||||
]
|
||||
console.log('from getSASRequests')
|
||||
const requests = this.requestClient!.getRequests()
|
||||
const sortedRequests = requests.sort(compareTimestamps)
|
||||
console.log('sortedRequests', sortedRequests)
|
||||
return sortedRequests
|
||||
}
|
||||
|
||||
public clearSasRequests() {
|
||||
this.webJobExecutor!.clearRequests()
|
||||
this.computeJobExecutor!.clearRequests()
|
||||
this.jesJobExecutor!.clearRequests()
|
||||
this.requestClient!.clearRequests()
|
||||
}
|
||||
|
||||
private setupConfiguration() {
|
||||
@@ -782,12 +952,16 @@ export default class SASjs {
|
||||
if (this.sasjsConfig.serverType === ServerType.Sas9) {
|
||||
if (this.sas9ApiClient)
|
||||
this.sas9ApiClient!.setConfig(this.sasjsConfig.serverUrl)
|
||||
else this.sas9ApiClient = new SAS9ApiClient(this.sasjsConfig.serverUrl)
|
||||
else
|
||||
this.sas9ApiClient = new SAS9ApiClient(
|
||||
this.sasjsConfig.serverUrl,
|
||||
this.jobsPath,
|
||||
this.sasjsConfig.allowInsecureRequests
|
||||
)
|
||||
}
|
||||
|
||||
this.fileUploader = new FileUploader(
|
||||
this.sasjsConfig.appLoc,
|
||||
this.sasjsConfig.serverUrl,
|
||||
this.sasjsConfig,
|
||||
this.jobsPath,
|
||||
this.requestClient
|
||||
)
|
||||
@@ -800,6 +974,13 @@ export default class SASjs {
|
||||
this.sasViyaApiClient!
|
||||
)
|
||||
|
||||
this.sas9JobExecutor = new Sas9JobExecutor(
|
||||
this.sasjsConfig.serverUrl,
|
||||
this.sasjsConfig.serverType!,
|
||||
this.jobsPath,
|
||||
this.sasjsConfig.allowInsecureRequests
|
||||
)
|
||||
|
||||
this.computeJobExecutor = new ComputeJobExecutor(
|
||||
this.sasjsConfig.serverUrl,
|
||||
this.sasViyaApiClient!
|
||||
@@ -830,6 +1011,16 @@ export default class SASjs {
|
||||
isForced
|
||||
)
|
||||
break
|
||||
case 'file':
|
||||
await this.createFile(
|
||||
member.name,
|
||||
member.code,
|
||||
parentFolder,
|
||||
undefined,
|
||||
accessToken,
|
||||
sasApiClient
|
||||
)
|
||||
break
|
||||
case 'service':
|
||||
await this.createJobDefinition(
|
||||
member.name,
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
import { Session, Context, CsrfToken, SessionVariable } from './types'
|
||||
import { Session, Context, SessionVariable } from './types'
|
||||
import { NoSessionStateError } from './types/errors'
|
||||
import { asyncForEach, isUrl } from './utils'
|
||||
import { prefixMessage } from '@sasjs/utils/error'
|
||||
import { RequestClient } from './request/RequestClient'
|
||||
|
||||
const MAX_SESSION_COUNT = 1
|
||||
const RETRY_LIMIT: number = 3
|
||||
let RETRY_COUNT: number = 0
|
||||
const INTERNAL_SAS_ERROR = {
|
||||
status: 304,
|
||||
message: 'Not Modified'
|
||||
}
|
||||
|
||||
export class SessionManager {
|
||||
private loggedErrors: NoSessionStateError[] = []
|
||||
|
||||
constructor(
|
||||
private serverUrl: string,
|
||||
private contextName: string,
|
||||
@@ -91,10 +88,7 @@ export class SessionManager {
|
||||
}
|
||||
|
||||
private async createAndWaitForSession(accessToken?: string) {
|
||||
const {
|
||||
result: createdSession,
|
||||
etag
|
||||
} = await this.requestClient
|
||||
const { result: createdSession, etag } = await this.requestClient
|
||||
.post<Session>(
|
||||
`${this.serverUrl}/compute/contexts/${
|
||||
this.currentContext!.id
|
||||
@@ -160,58 +154,75 @@ export class SessionManager {
|
||||
session: Session,
|
||||
etag: string | null,
|
||||
accessToken?: string
|
||||
) {
|
||||
): Promise<string> {
|
||||
const logger = process.logger || console
|
||||
|
||||
let sessionState = session.state
|
||||
|
||||
const stateLink = session.links.find((l: any) => l.rel === 'state')
|
||||
|
||||
return new Promise(async (resolve, _) => {
|
||||
if (
|
||||
sessionState === 'pending' ||
|
||||
sessionState === 'running' ||
|
||||
sessionState === ''
|
||||
) {
|
||||
if (stateLink) {
|
||||
if (this.debug && !this.printedSessionState.printed) {
|
||||
console.log('Polling session status...')
|
||||
if (
|
||||
sessionState === 'pending' ||
|
||||
sessionState === 'running' ||
|
||||
sessionState === ''
|
||||
) {
|
||||
if (stateLink) {
|
||||
if (this.debug && !this.printedSessionState.printed) {
|
||||
logger.info('Polling session status...')
|
||||
|
||||
this.printedSessionState.printed = true
|
||||
}
|
||||
this.printedSessionState.printed = true
|
||||
}
|
||||
|
||||
const state = await this.getSessionState(
|
||||
const { result: state, responseStatus: responseStatus } =
|
||||
await this.getSessionState(
|
||||
`${this.serverUrl}${stateLink.href}?wait=30`,
|
||||
etag!,
|
||||
accessToken
|
||||
).catch((err) => {
|
||||
throw err
|
||||
throw prefixMessage(err, 'Error while getting session state.')
|
||||
})
|
||||
|
||||
sessionState = state.trim()
|
||||
sessionState = state.trim()
|
||||
|
||||
if (this.debug && this.printedSessionState.state !== sessionState) {
|
||||
console.log(`Current session state is '${sessionState}'`)
|
||||
if (this.debug && this.printedSessionState.state !== sessionState) {
|
||||
logger.info(`Current session state is '${sessionState}'`)
|
||||
|
||||
this.printedSessionState.state = sessionState
|
||||
this.printedSessionState.printed = false
|
||||
}
|
||||
|
||||
// There is an internal error present in SAS Viya 3.5
|
||||
// Retry to wait for a session status in such case of SAS internal error
|
||||
if (
|
||||
sessionState === INTERNAL_SAS_ERROR.message &&
|
||||
RETRY_COUNT < RETRY_LIMIT
|
||||
) {
|
||||
RETRY_COUNT++
|
||||
|
||||
resolve(this.waitForSession(session, etag, accessToken))
|
||||
}
|
||||
|
||||
resolve(sessionState)
|
||||
this.printedSessionState.state = sessionState
|
||||
this.printedSessionState.printed = false
|
||||
}
|
||||
|
||||
if (!sessionState) {
|
||||
const stateError = new NoSessionStateError(
|
||||
responseStatus,
|
||||
this.serverUrl + stateLink.href,
|
||||
session.links.find((l: any) => l.rel === 'log')?.href as string
|
||||
)
|
||||
|
||||
if (
|
||||
!this.loggedErrors.find(
|
||||
(err: NoSessionStateError) =>
|
||||
err.serverResponseStatus === stateError.serverResponseStatus
|
||||
)
|
||||
) {
|
||||
this.loggedErrors.push(stateError)
|
||||
|
||||
logger.info(stateError.message)
|
||||
}
|
||||
|
||||
return await this.waitForSession(session, etag, accessToken)
|
||||
}
|
||||
|
||||
this.loggedErrors = []
|
||||
|
||||
return sessionState
|
||||
} else {
|
||||
resolve(sessionState)
|
||||
throw 'Error while getting session state link.'
|
||||
}
|
||||
})
|
||||
} else {
|
||||
this.loggedErrors = []
|
||||
|
||||
return sessionState
|
||||
}
|
||||
}
|
||||
|
||||
private async getSessionState(
|
||||
@@ -221,11 +232,11 @@ export class SessionManager {
|
||||
) {
|
||||
return await this.requestClient
|
||||
.get(url, accessToken, 'text/plain', { 'If-None-Match': etag })
|
||||
.then((res) => res.result as string)
|
||||
.then((res) => ({
|
||||
result: res.result as string,
|
||||
responseStatus: res.status
|
||||
}))
|
||||
.catch((err) => {
|
||||
if (err.status === INTERNAL_SAS_ERROR.status)
|
||||
return INTERNAL_SAS_ERROR.message
|
||||
|
||||
throw err
|
||||
})
|
||||
}
|
||||
|
||||
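The retry logic above (re-entering waitForSession while SAS Viya 3.5 reports the internal 304 'Not Modified' state, up to RETRY_LIMIT attempts) can be summarised with a small sketch; the getState callback below is illustrative and not part of the adapter's API:

// Sketch only: keep asking for the session state while it reflects the
// internal SAS Viya 3.5 error (HTTP 304 surfaced as 'Not Modified').
const RETRY_LIMIT = 3

async function waitForUsableState(
  getState: () => Promise<string>
): Promise<string> {
  let retryCount = 0
  let state = await getState()
  while (state === 'Not Modified' && retryCount < RETRY_LIMIT) {
    retryCount++
    state = await getState()
  }
  return state
}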
301 src/api/viya/executeScript.ts (Normal file)
@@ -0,0 +1,301 @@
|
||||
import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
|
||||
import { AuthConfig, MacroVar } from '@sasjs/utils/types'
|
||||
import { prefixMessage } from '@sasjs/utils/error'
|
||||
import {
|
||||
PollOptions,
|
||||
Job,
|
||||
ComputeJobExecutionError,
|
||||
NotFoundError
|
||||
} from '../..'
|
||||
import { getTokens } from '../../auth/getTokens'
|
||||
import { RequestClient } from '../../request/RequestClient'
|
||||
import { SessionManager } from '../../SessionManager'
|
||||
import { isRelativePath, fetchLogByChunks } from '../../utils'
|
||||
import { formatDataForRequest } from '../../utils/formatDataForRequest'
|
||||
import { pollJobState } from './pollJobState'
|
||||
import { uploadTables } from './uploadTables'
|
||||
|
||||
/**
|
||||
* Executes code on the current SAS Viya server.
|
||||
* @param jobPath - the path to the file being submitted for execution.
|
||||
* @param linesOfCode - an array of code lines to execute.
|
||||
* @param contextName - the context to execute the code in.
|
||||
* @param authConfig - an object containing an access token, refresh token, client ID and secret.
|
||||
* @param data - execution data.
|
||||
* @param debug - when set to true, the log will be returned.
|
||||
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
|
||||
* @param waitForResult - when set to true, the function waits for the job to finish and returns its result; when false, it returns the session immediately after submitting the job.
|
||||
* @param pollOptions - an object that specifies the poll interval (in milliseconds) and the maximum number of attempts, e.g. { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }.
|
||||
* @param printPid - a boolean that indicates whether the function should print the process ID (PID) of the started job.
|
||||
* @param variables - an object that represents macro variables.
|
||||
*/
|
||||
export async function executeScript(
|
||||
requestClient: RequestClient,
|
||||
sessionManager: SessionManager,
|
||||
rootFolderName: string,
|
||||
jobPath: string,
|
||||
linesOfCode: string[],
|
||||
contextName: string,
|
||||
authConfig?: AuthConfig,
|
||||
data: any = null,
|
||||
debug: boolean = false,
|
||||
expectWebout = false,
|
||||
waitForResult = true,
|
||||
pollOptions?: PollOptions,
|
||||
printPid = false,
|
||||
variables?: MacroVar
|
||||
): Promise<any> {
|
||||
let access_token = (authConfig || {}).access_token
|
||||
if (authConfig) {
|
||||
;({ access_token } = await getTokens(requestClient, authConfig))
|
||||
}
|
||||
|
||||
const logger = process.logger || console
|
||||
|
||||
try {
|
||||
let executionSessionId: string
|
||||
|
||||
const session = await sessionManager
|
||||
.getSession(access_token)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting session. ')
|
||||
})
|
||||
|
||||
executionSessionId = session!.id
|
||||
|
||||
if (printPid) {
|
||||
const { result: jobIdVariable } = await sessionManager
|
||||
.getVariable(executionSessionId, 'SYSJOBID', access_token)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting session variable. ')
|
||||
})
|
||||
|
||||
if (jobIdVariable && jobIdVariable.value) {
|
||||
const relativeJobPath = rootFolderName
|
||||
? jobPath.split(rootFolderName).join('').replace(/^\//, '')
|
||||
: jobPath
|
||||
|
||||
const logger = process.logger || console
|
||||
|
||||
logger.info(
|
||||
`Triggered '${relativeJobPath}' with PID ${
|
||||
jobIdVariable.value
|
||||
} at ${timestampToYYYYMMDDHHMMSS()}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const jobArguments: { [key: string]: any } = {
|
||||
_contextName: contextName,
|
||||
_OMITJSONLISTING: true,
|
||||
_OMITJSONLOG: true,
|
||||
_OMITSESSIONRESULTS: true,
|
||||
_OMITTEXTLISTING: true,
|
||||
_OMITTEXTLOG: true
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
jobArguments['_OMITTEXTLOG'] = false
|
||||
jobArguments['_OMITSESSIONRESULTS'] = false
|
||||
}
|
||||
|
||||
let fileName
|
||||
|
||||
if (isRelativePath(jobPath)) {
|
||||
fileName = `exec-${
|
||||
jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
|
||||
}`
|
||||
} else {
|
||||
const jobPathParts = jobPath.split('/')
|
||||
fileName = jobPathParts.pop()
|
||||
}
|
||||
|
||||
let jobVariables: any = {
|
||||
SYS_JES_JOB_URI: '',
|
||||
_program: isRelativePath(jobPath)
|
||||
? rootFolderName + '/' + jobPath
|
||||
: jobPath
|
||||
}
|
||||
|
||||
if (variables) jobVariables = { ...jobVariables, ...variables }
|
||||
|
||||
if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
|
||||
|
||||
let files: any[] = []
|
||||
|
||||
if (data) {
|
||||
if (JSON.stringify(data).includes(';')) {
|
||||
files = await uploadTables(requestClient, data, access_token).catch(
|
||||
(err) => {
|
||||
throw prefixMessage(err, 'Error while uploading tables. ')
|
||||
}
|
||||
)
|
||||
|
||||
jobVariables['_webin_file_count'] = files.length
|
||||
|
||||
files.forEach((fileInfo, index) => {
|
||||
jobVariables[
|
||||
`_webin_fileuri${index + 1}`
|
||||
] = `/files/files/${fileInfo.file.id}`
|
||||
jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
|
||||
})
|
||||
} else {
|
||||
jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
|
||||
}
|
||||
}
|
||||
|
||||
// Execute job in session
|
||||
const jobRequestBody = {
|
||||
name: fileName,
|
||||
description: 'Powered by SASjs',
|
||||
code: linesOfCode,
|
||||
variables: jobVariables,
|
||||
arguments: jobArguments
|
||||
}
|
||||
|
||||
const { result: postedJob, etag } = await requestClient
|
||||
.post<Job>(
|
||||
`/compute/sessions/${executionSessionId}/jobs`,
|
||||
jobRequestBody,
|
||||
access_token
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while posting job. ')
|
||||
})
|
||||
|
||||
if (!waitForResult) return session
|
||||
|
||||
if (debug) {
|
||||
logger.info(`Job has been submitted for '${fileName}'.`)
|
||||
logger.info(
|
||||
`You can monitor the job progress at '${requestClient.getBaseUrl()}${
|
||||
postedJob.links.find((l: any) => l.rel === 'state')!.href
|
||||
}'.`
|
||||
)
|
||||
}
|
||||
|
||||
const jobStatus = await pollJobState(
|
||||
requestClient,
|
||||
postedJob,
|
||||
debug,
|
||||
authConfig,
|
||||
pollOptions
|
||||
).catch(async (err) => {
|
||||
const error = err?.response?.data
|
||||
const result = /err=[0-9]*,/.exec(error)
|
||||
|
||||
const errorCode = '5113'
|
||||
if (result?.[0]?.slice(4, -1) === errorCode) {
|
||||
const sessionLogUrl =
|
||||
postedJob.links.find((l: any) => l.rel === 'up')!.href + '/log'
|
||||
const logCount = 1000000
|
||||
err.log = await fetchLogByChunks(
|
||||
requestClient,
|
||||
access_token!,
|
||||
sessionLogUrl,
|
||||
logCount
|
||||
)
|
||||
}
|
||||
throw prefixMessage(err, 'Error while polling job status. ')
|
||||
})
|
||||
|
||||
if (authConfig) {
|
||||
;({ access_token } = await getTokens(requestClient, authConfig))
|
||||
}
|
||||
|
||||
const { result: currentJob } = await requestClient
|
||||
.get<Job>(
|
||||
`/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
|
||||
access_token
|
||||
)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while getting job. ')
|
||||
})
|
||||
|
||||
let jobResult
|
||||
let log = ''
|
||||
|
||||
const logLink = currentJob.links.find((l) => l.rel === 'log')
|
||||
|
||||
if (debug && logLink) {
|
||||
const logUrl = `${logLink.href}/content`
|
||||
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
|
||||
log = await fetchLogByChunks(
|
||||
requestClient,
|
||||
access_token!,
|
||||
logUrl,
|
||||
logCount
|
||||
)
|
||||
}
|
||||
|
||||
if (jobStatus === 'failed' || jobStatus === 'error') {
|
||||
throw new ComputeJobExecutionError(currentJob, log)
|
||||
}
|
||||
|
||||
if (!expectWebout) {
|
||||
return { job: currentJob, log }
|
||||
}
|
||||
|
||||
const resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
|
||||
|
||||
jobResult = await requestClient
|
||||
.get<any>(
|
||||
resultLink,
|
||||
access_token,
|
||||
'text/plain',
|
||||
{},
|
||||
debug,
|
||||
true,
|
||||
jobPath
|
||||
)
|
||||
.catch(async (e) => {
|
||||
if (e instanceof NotFoundError) {
|
||||
if (logLink) {
|
||||
const logUrl = `${logLink.href}/content`
|
||||
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
|
||||
log = await fetchLogByChunks(
|
||||
requestClient,
|
||||
access_token!,
|
||||
logUrl,
|
||||
logCount
|
||||
)
|
||||
|
||||
return Promise.reject({
|
||||
status: 500,
|
||||
log
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
result: JSON.stringify(e)
|
||||
}
|
||||
})
|
||||
|
||||
await sessionManager
|
||||
.clearSession(executionSessionId, access_token)
|
||||
.catch((err) => {
|
||||
throw prefixMessage(err, 'Error while clearing session. ')
|
||||
})
|
||||
|
||||
return { result: jobResult?.result, log }
|
||||
} catch (e) {
|
||||
if (e && e.status === 404) {
|
||||
return executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
rootFolderName,
|
||||
jobPath,
|
||||
linesOfCode,
|
||||
contextName,
|
||||
authConfig,
|
||||
data,
|
||||
debug,
|
||||
false,
|
||||
true
|
||||
)
|
||||
} else {
|
||||
throw prefixMessage(e, 'Error while executing script. ')
|
||||
}
|
||||
}
|
||||
}
|
||||
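For orientation, a hedged usage sketch of executeScript as exported above; the requestClient and sessionManager instances, the appLoc value, the job path and the context name are illustrative assumptions rather than adapter defaults:

// Illustrative only: run a few lines of SAS code and print the returned log.
import { executeScript } from './executeScript'

async function runHello(requestClient: any, sessionManager: any) {
  // requestClient: a configured RequestClient; sessionManager: a configured SessionManager.
  const { log } = await executeScript(
    requestClient,
    sessionManager,
    '/Public/app',                       // rootFolderName (illustrative)
    'services/common/hello',             // jobPath (illustrative)
    ['%put Hello from SASjs;'],
    'SAS Job Execution compute context', // contextName (illustrative)
    undefined,                           // authConfig - omitted in this sketch
    null,                                // data
    true                                 // debug: fetch and return the job log
  )
  console.log(log)
}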
17 src/api/viya/getFileStream.ts (Normal file)
@@ -0,0 +1,17 @@
import { isFolder } from '@sasjs/utils/file'
import { generateTimestamp } from '@sasjs/utils/time'
import { Job } from '../../types'

export const getFileStream = async (job: Job, filePath?: string) => {
  const { createWriteStream } = require('@sasjs/utils/file')
  const logPath = filePath || process.cwd()
  const isFolderPath = await isFolder(logPath)
  if (isFolderPath) {
    const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
    const path = require('path')
    const logFilePath = path.join(filePath || process.cwd(), logFileName)
    return await createWriteStream(logFilePath)
  } else {
    return await createWriteStream(logPath)
  }
}
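A quick illustration of the two branches above; the paths are examples only and `job` is assumed to be a Job with a name:

// Folder path: a timestamped '<job.name>-<timestamp>.log' is created inside it.
const folderStream = await getFileStream(job, '/tmp/logs')
// File path: the log is written to exactly that file.
const fileStream = await getFileStream(job, '/tmp/logs/run.log')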
250 src/api/viya/pollJobState.ts (Normal file)
@@ -0,0 +1,250 @@
|
||||
import { AuthConfig } from '@sasjs/utils/types'
|
||||
import { Job, PollOptions } from '../..'
|
||||
import { getTokens } from '../../auth/getTokens'
|
||||
import { RequestClient } from '../../request/RequestClient'
|
||||
import { JobStatePollError } from '../../types/errors'
|
||||
import { Link, WriteStream } from '../../types'
|
||||
import { isNode } from '../../utils'
|
||||
|
||||
export async function pollJobState(
|
||||
requestClient: RequestClient,
|
||||
postedJob: Job,
|
||||
debug: boolean,
|
||||
authConfig?: AuthConfig,
|
||||
pollOptions?: PollOptions
|
||||
) {
|
||||
const logger = process.logger || console
|
||||
|
||||
let pollInterval = 300
|
||||
let maxPollCount = 1000
|
||||
|
||||
const defaultPollOptions: PollOptions = {
|
||||
maxPollCount,
|
||||
pollInterval,
|
||||
streamLog: false
|
||||
}
|
||||
|
||||
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
|
||||
|
||||
const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
|
||||
if (!stateLink) {
|
||||
throw new Error(`Job state link was not found.`)
|
||||
}
|
||||
|
||||
let currentState = await getJobState(
|
||||
requestClient,
|
||||
postedJob,
|
||||
'',
|
||||
debug,
|
||||
authConfig
|
||||
).catch((err) => {
|
||||
logger.error(
|
||||
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
|
||||
err
|
||||
)
|
||||
return 'unavailable'
|
||||
})
|
||||
|
||||
let pollCount = 0
|
||||
|
||||
if (currentState === 'completed') {
|
||||
return Promise.resolve(currentState)
|
||||
}
|
||||
|
||||
let logFileStream
|
||||
if (pollOptions.streamLog && isNode()) {
|
||||
const { getFileStream } = require('./getFileStream')
|
||||
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
|
||||
}
|
||||
|
||||
// Poll up to the first 100 times with the specified poll interval
|
||||
let result = await doPoll(
|
||||
requestClient,
|
||||
postedJob,
|
||||
currentState,
|
||||
debug,
|
||||
pollCount,
|
||||
authConfig,
|
||||
{
|
||||
...pollOptions,
|
||||
maxPollCount:
|
||||
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
|
||||
},
|
||||
logFileStream
|
||||
)
|
||||
|
||||
currentState = result.state
|
||||
pollCount = result.pollCount
|
||||
|
||||
if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
|
||||
return currentState
|
||||
}
|
||||
|
||||
// If we get to this point, this is a long-running job that needs longer polling.
|
||||
// We will resume polling with a bigger interval of 1 minute
|
||||
let longJobPollOptions: PollOptions = {
|
||||
maxPollCount: 24 * 60,
|
||||
pollInterval: 60000,
|
||||
streamLog: false
|
||||
}
|
||||
if (pollOptions) {
|
||||
longJobPollOptions.streamLog = pollOptions.streamLog
|
||||
longJobPollOptions.logFolderPath = pollOptions.logFolderPath
|
||||
}
|
||||
|
||||
result = await doPoll(
|
||||
requestClient,
|
||||
postedJob,
|
||||
currentState,
|
||||
debug,
|
||||
pollCount,
|
||||
authConfig,
|
||||
longJobPollOptions,
|
||||
logFileStream
|
||||
)
|
||||
|
||||
currentState = result.state
|
||||
pollCount = result.pollCount
|
||||
|
||||
if (logFileStream) {
|
||||
logFileStream.end()
|
||||
}
|
||||
|
||||
return currentState
|
||||
}
|
||||
|
||||
const getJobState = async (
|
||||
requestClient: RequestClient,
|
||||
job: Job,
|
||||
currentState: string,
|
||||
debug: boolean,
|
||||
authConfig?: AuthConfig
|
||||
) => {
|
||||
const stateLink = job.links.find((l: any) => l.rel === 'state')
|
||||
if (!stateLink) {
|
||||
throw new Error(`Job state link was not found.`)
|
||||
}
|
||||
|
||||
if (needsRetry(currentState)) {
|
||||
let tokens
|
||||
if (authConfig) {
|
||||
tokens = await getTokens(requestClient, authConfig)
|
||||
}
|
||||
|
||||
const { result: jobState } = await requestClient
|
||||
.get<string>(
|
||||
`${stateLink.href}?_action=wait&wait=300`,
|
||||
tokens?.access_token,
|
||||
'text/plain',
|
||||
{},
|
||||
debug
|
||||
)
|
||||
.catch((err) => {
|
||||
throw new JobStatePollError(job.id, err)
|
||||
})
|
||||
|
||||
return jobState.trim()
|
||||
} else {
|
||||
return currentState
|
||||
}
|
||||
}
|
||||
|
||||
const needsRetry = (state: string) =>
|
||||
state === 'running' ||
|
||||
state === '' ||
|
||||
state === 'pending' ||
|
||||
state === 'unavailable'
|
||||
|
||||
const doPoll = async (
|
||||
requestClient: RequestClient,
|
||||
postedJob: Job,
|
||||
currentState: string,
|
||||
debug: boolean,
|
||||
pollCount: number,
|
||||
authConfig?: AuthConfig,
|
||||
pollOptions?: PollOptions,
|
||||
logStream?: WriteStream
|
||||
): Promise<{ state: string; pollCount: number }> => {
|
||||
let pollInterval = 300
|
||||
let maxPollCount = 1000
|
||||
let maxErrorCount = 5
|
||||
let errorCount = 0
|
||||
let state = currentState
|
||||
let printedState = ''
|
||||
let startLogLine = 0
|
||||
|
||||
const logger = process.logger || console
|
||||
|
||||
if (pollOptions) {
|
||||
pollInterval = pollOptions.pollInterval || pollInterval
|
||||
maxPollCount = pollOptions.maxPollCount || maxPollCount
|
||||
}
|
||||
|
||||
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
|
||||
if (!stateLink) {
|
||||
throw new Error(`Job state link was not found.`)
|
||||
}
|
||||
|
||||
while (needsRetry(state) && pollCount <= maxPollCount) {
|
||||
state = await getJobState(
|
||||
requestClient,
|
||||
postedJob,
|
||||
state,
|
||||
debug,
|
||||
authConfig
|
||||
).catch((err) => {
|
||||
errorCount++
|
||||
if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
|
||||
throw err
|
||||
}
|
||||
logger.error(
|
||||
`Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
|
||||
err
|
||||
)
|
||||
return 'unavailable'
|
||||
})
|
||||
|
||||
pollCount++
|
||||
|
||||
if (pollOptions?.streamLog) {
|
||||
const jobUrl = postedJob.links.find((l: Link) => l.rel === 'self')
|
||||
const { result: job } = await requestClient.get<Job>(
|
||||
jobUrl!.href,
|
||||
authConfig?.access_token
|
||||
)
|
||||
|
||||
const endLogLine = job.logStatistics?.lineCount ?? 1000000
|
||||
|
||||
const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
|
||||
if (saveLog) {
|
||||
await saveLog(
|
||||
postedJob,
|
||||
requestClient,
|
||||
startLogLine,
|
||||
endLogLine,
|
||||
logStream,
|
||||
authConfig?.access_token
|
||||
)
|
||||
}
|
||||
|
||||
startLogLine += endLogLine
|
||||
}
|
||||
|
||||
if (debug && printedState !== state) {
|
||||
logger.info('Polling job status...')
|
||||
logger.info(`Current job state: ${state}`)
|
||||
|
||||
printedState = state
|
||||
}
|
||||
|
||||
if (state != 'unavailable' && errorCount > 0) {
|
||||
errorCount = 0
|
||||
}
|
||||
|
||||
await delay(pollInterval)
|
||||
}
|
||||
|
||||
return { state, pollCount }
|
||||
}
|
||||
|
||||
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
|
||||
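The two-phase polling above is driven entirely by PollOptions: the first (up to) 100 polls use pollInterval, after which long-running jobs are polled once a minute. A hedged usage sketch with illustrative values; requestClient, postedJob and authConfig are assumed to exist in scope:

// Illustrative only: poll a submitted job every 2 seconds and stream its log to ./logs.
import { pollJobState } from './pollJobState'

const state = await pollJobState(
  requestClient,         // a configured RequestClient (assumed)
  postedJob,             // the Job returned when the code was submitted
  true,                  // debug: log state changes
  authConfig,            // needed here so the streamed log can be fetched with a token
  {
    maxPollCount: 200,   // quick polls are capped at 100, then polling drops to once a minute
    pollInterval: 2000,
    streamLog: true,     // Node.js only: appends log chunks to a file as the job runs
    logFolderPath: './logs'
  }
)
// state ends up as 'completed', 'failed', 'error', or the last state observed.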
55 src/api/viya/saveLog.ts (Normal file)
@@ -0,0 +1,55 @@
|
||||
import { Job } from '../..'
|
||||
import { RequestClient } from '../../request/RequestClient'
|
||||
import { fetchLog } from '../../utils'
|
||||
import { WriteStream } from '../../types'
|
||||
import { writeStream } from './writeStream'
|
||||
|
||||
/**
|
||||
* Appends logs to a supplied write stream.
|
||||
* This is useful for getting quick feedback on longer running jobs.
|
||||
* @param job - the job to fetch logs for
|
||||
* @param requestClient - the pre-configured HTTP request client
|
||||
* @param startLine - the line at which to start fetching the log
|
||||
* @param endLine - the line at which to stop fetching the log
|
||||
* @param logFileStream - the write stream to which the log is appended
|
||||
* @param accessToken - an optional access token for authentication/authorization.
|
||||
* The access token is not required when fetching logs from the browser.
|
||||
*/
|
||||
export async function saveLog(
|
||||
job: Job,
|
||||
requestClient: RequestClient,
|
||||
startLine: number,
|
||||
endLine: number,
|
||||
logFileStream?: WriteStream,
|
||||
accessToken?: string
|
||||
) {
|
||||
if (!accessToken) {
|
||||
throw new Error(
|
||||
`Logs for job ${job.id} cannot be fetched without a valid access token.`
|
||||
)
|
||||
}
|
||||
|
||||
if (!logFileStream) {
|
||||
throw new Error(
|
||||
`Logs for job ${job.id} cannot be written without a valid write stream.`
|
||||
)
|
||||
}
|
||||
|
||||
const logger = process.logger || console
|
||||
const jobLogUrl = job.links.find((l) => l.rel === 'log')
|
||||
|
||||
if (!jobLogUrl) {
|
||||
throw new Error(`Log URL for job ${job.id} was not found.`)
|
||||
}
|
||||
|
||||
const log = await fetchLog(
|
||||
requestClient,
|
||||
accessToken,
|
||||
`${jobLogUrl.href}/content`,
|
||||
startLine,
|
||||
endLine
|
||||
)
|
||||
|
||||
logger.info(`Writing logs to ${logFileStream.path}`)
|
||||
await writeStream(logFileStream, log || '')
|
||||
}
|
||||
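As the doc comment describes, saveLog appends a slice of a job's log to an existing write stream. A minimal sketch of a single call, assuming job, requestClient and accessToken are already available; createWriteStream comes from @sasjs/utils/file, as used in getFileStream above:

// Illustrative only: append the first 100 log lines of a job to ./job.log.
import { createWriteStream } from '@sasjs/utils/file'
import { saveLog } from './saveLog'

const logStream = await createWriteStream('job.log')
await saveLog(job, requestClient, 0, 100, logStream, accessToken)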
675 src/api/viya/spec/executeScript.spec.ts (Normal file)
@@ -0,0 +1,675 @@
|
||||
import { RequestClient } from '../../../request/RequestClient'
|
||||
import { SessionManager } from '../../../SessionManager'
|
||||
import { executeScript } from '../executeScript'
|
||||
import { mockSession, mockAuthConfig, mockJob } from './mockResponses'
|
||||
import * as pollJobStateModule from '../pollJobState'
|
||||
import * as uploadTablesModule from '../uploadTables'
|
||||
import * as getTokensModule from '../../../auth/getTokens'
|
||||
import * as formatDataModule from '../../../utils/formatDataForRequest'
|
||||
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
|
||||
import { PollOptions } from '../../../types'
|
||||
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
|
||||
import { Logger, LogLevel } from '@sasjs/utils'
|
||||
|
||||
const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
|
||||
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
|
||||
const defaultPollOptions: PollOptions = {
|
||||
maxPollCount: 100,
|
||||
pollInterval: 500,
|
||||
streamLog: false
|
||||
}
|
||||
|
||||
describe('executeScript', () => {
|
||||
beforeEach(() => {
|
||||
;(process as any).logger = new Logger(LogLevel.Off)
|
||||
setupMocks()
|
||||
})
|
||||
|
||||
it('should not try to get fresh tokens if an authConfig is not provided', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context'
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should try to get fresh tokens if an authConfig is provided', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context',
|
||||
mockAuthConfig
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig
|
||||
)
|
||||
})
|
||||
|
||||
it('should get a session from the session manager before executing', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context'
|
||||
)
|
||||
|
||||
expect(sessionManager.getSession).toHaveBeenCalledWith(undefined)
|
||||
})
|
||||
|
||||
it('should handle errors while getting a session', async () => {
|
||||
jest
|
||||
.spyOn(sessionManager, 'getSession')
|
||||
.mockImplementation(() => Promise.reject('Test Error'))
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context'
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error).toContain('Error while getting session.')
|
||||
})
|
||||
|
||||
it('should fetch the PID when printPid is true', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
null,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(sessionManager.getVariable).toHaveBeenCalledWith(
|
||||
mockSession.id,
|
||||
'SYSJOBID',
|
||||
mockAuthConfig.access_token
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle errors while getting the job PID', async () => {
|
||||
jest
|
||||
.spyOn(sessionManager, 'getVariable')
|
||||
.mockImplementation(() => Promise.reject('Test Error'))
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
null,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error).toContain('Error while getting session variable.')
|
||||
})
|
||||
|
||||
it('should use the file upload approach when data contains semicolons', async () => {
|
||||
jest
|
||||
.spyOn(uploadTablesModule, 'uploadTables')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve([{ tableName: 'test', file: { id: 1 } }])
|
||||
)
|
||||
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar;' },
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(uploadTablesModule.uploadTables).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
{ foo: 'bar;' },
|
||||
mockAuthConfig.access_token
|
||||
)
|
||||
})
|
||||
|
||||
it('should format data as CSV when it does not contain semicolons', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put hello'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(formatDataModule.formatDataForRequest).toHaveBeenCalledWith({
|
||||
foo: 'bar'
|
||||
})
|
||||
})
|
||||
|
||||
it('should submit a job for execution via the compute API', async () => {
|
||||
jest
|
||||
.spyOn(formatDataModule, 'formatDataForRequest')
|
||||
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
|
||||
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(requestClient.post).toHaveBeenCalledWith(
|
||||
`/compute/sessions/${mockSession.id}/jobs`,
|
||||
{
|
||||
name: 'exec-test',
|
||||
description: 'Powered by SASjs',
|
||||
code: ['%put "hello";'],
|
||||
variables: {
|
||||
SYS_JES_JOB_URI: '',
|
||||
_program: 'test/test',
|
||||
sasjs_tables: 'foo',
|
||||
sasjs0data: 'bar'
|
||||
},
|
||||
arguments: {
|
||||
_contextName: 'test context',
|
||||
_OMITJSONLISTING: true,
|
||||
_OMITJSONLOG: true,
|
||||
_OMITSESSIONRESULTS: true,
|
||||
_OMITTEXTLISTING: true,
|
||||
_OMITTEXTLOG: true
|
||||
}
|
||||
},
|
||||
mockAuthConfig.access_token
|
||||
)
|
||||
})
|
||||
|
||||
it('should set the correct variables when debug is true', async () => {
|
||||
jest
|
||||
.spyOn(formatDataModule, 'formatDataForRequest')
|
||||
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
|
||||
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(requestClient.post).toHaveBeenCalledWith(
|
||||
`/compute/sessions/${mockSession.id}/jobs`,
|
||||
{
|
||||
name: 'exec-test',
|
||||
description: 'Powered by SASjs',
|
||||
code: ['%put "hello";'],
|
||||
variables: {
|
||||
SYS_JES_JOB_URI: '',
|
||||
_program: 'test/test',
|
||||
sasjs_tables: 'foo',
|
||||
sasjs0data: 'bar',
|
||||
_DEBUG: 131
|
||||
},
|
||||
arguments: {
|
||||
_contextName: 'test context',
|
||||
_OMITJSONLISTING: true,
|
||||
_OMITJSONLOG: true,
|
||||
_OMITSESSIONRESULTS: false,
|
||||
_OMITTEXTLISTING: true,
|
||||
_OMITTEXTLOG: false
|
||||
}
|
||||
},
|
||||
mockAuthConfig.access_token
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle errors during job submission', async () => {
|
||||
jest
|
||||
.spyOn(requestClient, 'post')
|
||||
.mockImplementation(() => Promise.reject('Test Error'))
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error).toContain('Error while posting job')
|
||||
})
|
||||
|
||||
it('should immediately return the session when waitForResult is false', async () => {
|
||||
const result = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(result).toEqual(mockSession)
|
||||
})
|
||||
|
||||
it('should poll for job completion when waitForResult is true', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(pollJobStateModule.pollJobState).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle general errors when polling for job status', async () => {
|
||||
jest
|
||||
.spyOn(pollJobStateModule, 'pollJobState')
|
||||
.mockImplementation(() => Promise.reject('Poll Error'))
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error).toContain('Error while polling job status.')
|
||||
})
|
||||
|
||||
it('should fetch the log and append it to the error in case of a 5113 error code', async () => {
|
||||
jest
|
||||
.spyOn(pollJobStateModule, 'pollJobState')
|
||||
.mockImplementation(() =>
|
||||
Promise.reject({ response: { data: 'err=5113,' } })
|
||||
)
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig.access_token,
|
||||
mockJob.links.find((l) => l.rel === 'up')!.href + '/log',
|
||||
1000000
|
||||
)
|
||||
expect(error.log).toEqual('Test Log')
|
||||
})
|
||||
|
||||
it('should fetch the logs for the job if debug is true and a log URL is available', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig.access_token,
|
||||
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
|
||||
mockJob.logStatistics.lineCount
|
||||
)
|
||||
})
|
||||
|
||||
it('should not fetch the logs for the job if debug is false', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should throw a ComputeJobExecutionError if the job has failed', async () => {
|
||||
jest
|
||||
.spyOn(pollJobStateModule, 'pollJobState')
|
||||
.mockImplementation(() => Promise.resolve('failed'))
|
||||
|
||||
const error: ComputeJobExecutionError = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig.access_token,
|
||||
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
|
||||
mockJob.logStatistics.lineCount
|
||||
)
|
||||
|
||||
expect(error).toBeInstanceOf(ComputeJobExecutionError)
|
||||
expect(error.log).toEqual('Test Log')
|
||||
expect(error.job).toEqual(mockJob)
|
||||
})
|
||||
|
||||
it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
|
||||
jest
|
||||
.spyOn(pollJobStateModule, 'pollJobState')
|
||||
.mockImplementation(() => Promise.resolve('error'))
|
||||
|
||||
const error: ComputeJobExecutionError = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
true,
|
||||
false,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig.access_token,
|
||||
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
|
||||
mockJob.logStatistics.lineCount
|
||||
)
|
||||
|
||||
expect(error).toBeInstanceOf(ComputeJobExecutionError)
|
||||
expect(error.log).toEqual('Test Log')
|
||||
expect(error.job).toEqual(mockJob)
|
||||
})
|
||||
|
||||
it('should fetch the result if expectWebout is true', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledWith(
|
||||
`/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
|
||||
mockAuthConfig.access_token,
|
||||
'text/plain'
|
||||
)
|
||||
})
|
||||
|
||||
it('should fetch the logs if the webout file was not found', async () => {
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url, ...rest) => {
|
||||
if (url.includes('_webout')) {
|
||||
return Promise.reject(new NotFoundError(url))
|
||||
}
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
})
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledWith(
|
||||
`/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
|
||||
mockAuthConfig.access_token,
|
||||
'text/plain'
|
||||
)
|
||||
|
||||
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig.access_token,
|
||||
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
|
||||
mockJob.logStatistics.lineCount
|
||||
)
|
||||
|
||||
expect(error.status).toEqual(500)
|
||||
expect(error.log).toEqual('Test Log')
|
||||
})
|
||||
|
||||
it('should clear the session after execution is complete', async () => {
|
||||
await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
)
|
||||
|
||||
expect(sessionManager.clearSession).toHaveBeenCalledWith(
|
||||
mockSession.id,
|
||||
mockAuthConfig.access_token
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle errors while clearing a session', async () => {
|
||||
jest
|
||||
.spyOn(sessionManager, 'clearSession')
|
||||
.mockImplementation(() => Promise.reject('Clear Session Error'))
|
||||
|
||||
const error = await executeScript(
|
||||
requestClient,
|
||||
sessionManager,
|
||||
'test',
|
||||
'test',
|
||||
['%put "hello";'],
|
||||
'test context',
|
||||
mockAuthConfig,
|
||||
{ foo: 'bar' },
|
||||
false,
|
||||
true,
|
||||
true,
|
||||
defaultPollOptions,
|
||||
true
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error).toContain('Error while clearing session.')
|
||||
})
|
||||
})
|
||||
|
||||
const setupMocks = () => {
|
||||
jest.restoreAllMocks()
|
||||
jest.mock('../../../request/RequestClient')
|
||||
jest.mock('../../../SessionManager')
|
||||
jest.mock('../../../auth/getTokens')
|
||||
jest.mock('../pollJobState')
|
||||
jest.mock('../uploadTables')
|
||||
jest.mock('../../../utils/formatDataForRequest')
|
||||
jest.mock('../../../utils/fetchLogByChunks')
|
||||
|
||||
jest
|
||||
.spyOn(requestClient, 'post')
|
||||
.mockImplementation(() => Promise.resolve({ result: mockJob, etag: '' }))
|
||||
jest
|
||||
.spyOn(requestClient, 'get')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
)
|
||||
jest
|
||||
.spyOn(requestClient, 'delete')
|
||||
.mockImplementation(() => Promise.resolve({ result: {}, etag: '' }))
|
||||
jest
|
||||
.spyOn(getTokensModule, 'getTokens')
|
||||
.mockImplementation(() => Promise.resolve(mockAuthConfig))
|
||||
jest
|
||||
.spyOn(pollJobStateModule, 'pollJobState')
|
||||
.mockImplementation(() => Promise.resolve('completed'))
|
||||
jest
|
||||
.spyOn(sessionManager, 'getVariable')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve({ result: { value: 'test' }, etag: 'test', status: 200 })
|
||||
)
|
||||
jest
|
||||
.spyOn(sessionManager, 'getSession')
|
||||
.mockImplementation(() => Promise.resolve(mockSession))
|
||||
jest
|
||||
.spyOn(sessionManager, 'clearSession')
|
||||
.mockImplementation(() => Promise.resolve())
|
||||
jest
|
||||
.spyOn(formatDataModule, 'formatDataForRequest')
|
||||
.mockImplementation(() => ({ sasjs_tables: 'test', sasjs0data: 'test' }))
|
||||
jest
|
||||
.spyOn(fetchLogsModule, 'fetchLogByChunks')
|
||||
.mockImplementation(() => Promise.resolve('Test Log'))
|
||||
}
|
||||
41 src/api/viya/spec/getFileStream.spec.ts (Normal file)
@@ -0,0 +1,41 @@
|
||||
import { Logger, LogLevel } from '@sasjs/utils/logger'
|
||||
import * as path from 'path'
|
||||
import * as fileModule from '@sasjs/utils/file'
|
||||
import { getFileStream } from '../getFileStream'
|
||||
import { mockJob } from './mockResponses'
|
||||
import { WriteStream } from '../../../types'
|
||||
|
||||
describe('getFileStream', () => {
|
||||
beforeEach(() => {
|
||||
;(process as any).logger = new Logger(LogLevel.Off)
|
||||
setupMocks()
|
||||
})
|
||||
it('should use the given log path if it points to a file', async () => {
|
||||
const { createWriteStream } = require('@sasjs/utils/file')
|
||||
|
||||
await getFileStream(mockJob, path.join(__dirname, 'test.log'))
|
||||
|
||||
expect(createWriteStream).toHaveBeenCalledWith(
|
||||
path.join(__dirname, 'test.log')
|
||||
)
|
||||
})
|
||||
|
||||
it('should generate a log file path with a timestamp if it points to a folder', async () => {
|
||||
const { createWriteStream } = require('@sasjs/utils/file')
|
||||
|
||||
await getFileStream(mockJob, __dirname)
|
||||
|
||||
expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
|
||||
expect(createWriteStream).toHaveBeenCalledWith(
|
||||
expect.stringContaining(path.join(__dirname, 'test job-20'))
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
const setupMocks = () => {
|
||||
jest.restoreAllMocks()
|
||||
jest.mock('@sasjs/utils/file/file')
|
||||
jest
|
||||
.spyOn(fileModule, 'createWriteStream')
|
||||
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
|
||||
}
|
||||
73 src/api/viya/spec/mockResponses.ts (Normal file)
@@ -0,0 +1,73 @@
|
||||
import { AuthConfig } from '@sasjs/utils/types'
|
||||
import { Job, Session } from '../../../types'
|
||||
|
||||
export const mockSession: Session = {
|
||||
id: 's35510n',
|
||||
state: 'idle',
|
||||
links: [],
|
||||
attributes: {
|
||||
sessionInactiveTimeout: 1
|
||||
},
|
||||
creationTimeStamp: new Date().valueOf().toString()
|
||||
}
|
||||
|
||||
export const mockJob: Job = {
|
||||
id: 'j0b',
|
||||
name: 'test job',
|
||||
uri: '/j0b',
|
||||
createdBy: 'test user',
|
||||
results: {
|
||||
'_webout.json': 'test'
|
||||
},
|
||||
logStatistics: {
|
||||
lineCount: 100,
|
||||
modifiedTimeStamp: new Date().valueOf().toString()
|
||||
},
|
||||
links: [
|
||||
{
|
||||
rel: 'log',
|
||||
href: '/log',
|
||||
method: 'GET',
|
||||
type: 'log',
|
||||
uri: 'log'
|
||||
},
|
||||
{
|
||||
rel: 'self',
|
||||
href: '/job',
|
||||
method: 'GET',
|
||||
type: 'job',
|
||||
uri: 'job'
|
||||
},
|
||||
{
|
||||
rel: 'state',
|
||||
href: '/state',
|
||||
method: 'GET',
|
||||
type: 'state',
|
||||
uri: 'state'
|
||||
},
|
||||
{
|
||||
rel: 'up',
|
||||
href: '/job',
|
||||
method: 'GET',
|
||||
type: 'up',
|
||||
uri: 'job'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
export const mockAuthConfig: AuthConfig = {
|
||||
client: 'cl13nt',
|
||||
secret: '53cr3t',
|
||||
access_token: 'acc355',
|
||||
refresh_token: 'r3fr35h'
|
||||
}
|
||||
|
||||
export class MockStream {
|
||||
_write(chunk: string, _: any, next: Function) {
|
||||
next()
|
||||
}
|
||||
|
||||
reset() {}
|
||||
|
||||
destroy() {}
|
||||
}
|
||||
346 src/api/viya/spec/pollJobState.spec.ts (Normal file)
@@ -0,0 +1,346 @@
|
||||
import { Logger, LogLevel } from '@sasjs/utils'
|
||||
import { RequestClient } from '../../../request/RequestClient'
|
||||
import { mockAuthConfig, mockJob } from './mockResponses'
|
||||
import { pollJobState } from '../pollJobState'
|
||||
import * as getTokensModule from '../../../auth/getTokens'
|
||||
import * as saveLogModule from '../saveLog'
|
||||
import * as getFileStreamModule from '../getFileStream'
|
||||
import * as isNodeModule from '../../../utils/isNode'
|
||||
import { PollOptions } from '../../../types'
|
||||
import { WriteStream } from 'fs'
|
||||
|
||||
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
|
||||
const defaultPollOptions: PollOptions = {
|
||||
maxPollCount: 100,
|
||||
pollInterval: 500,
|
||||
streamLog: false
|
||||
}
|
||||
|
||||
describe('pollJobState', () => {
|
||||
beforeEach(() => {
|
||||
;(process as any).logger = new Logger(LogLevel.Off)
|
||||
setupMocks()
|
||||
})
|
||||
|
||||
it('should get valid tokens if the authConfig has been provided', async () => {
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
|
||||
requestClient,
|
||||
mockAuthConfig
|
||||
)
|
||||
})
|
||||
|
||||
it('should not attempt to get tokens if the authConfig has not been provided', async () => {
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should throw an error if the job does not have a state link', async () => {
|
||||
const error = await pollJobState(
|
||||
requestClient,
|
||||
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
).catch((e) => e)
|
||||
|
||||
expect((error as Error).message).toContain('Job state link was not found.')
|
||||
})
|
||||
|
||||
it('should attempt to refresh tokens before each poll', async () => {
|
||||
mockSimplePoll()
|
||||
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
|
||||
})
|
||||
|
||||
it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
|
||||
mockSimplePoll()
|
||||
const { saveLog } = require('../saveLog')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
expect(saveLog).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('should create a write stream in Node.js environment when streamLog is true', async () => {
|
||||
mockSimplePoll()
|
||||
const { getFileStream } = require('../getFileStream')
|
||||
const { saveLog } = require('../saveLog')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
expect(getFileStream).toHaveBeenCalled()
|
||||
expect(saveLog).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('should not create a write stream in a non-Node.js environment', async () => {
|
||||
mockSimplePoll()
|
||||
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
|
||||
const { saveLog } = require('../saveLog')
|
||||
const { getFileStream } = require('../getFileStream')
|
||||
|
||||
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
|
||||
...defaultPollOptions,
|
||||
streamLog: true
|
||||
})
|
||||
|
||||
expect(getFileStream).not.toHaveBeenCalled()
|
||||
expect(saveLog).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {
|
||||
mockSimplePoll()
|
||||
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(saveLogModule.saveLog).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return the current status when the max poll count is reached', async () => {
|
||||
mockRunningPoll()
|
||||
|
||||
const state = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
{
|
||||
...defaultPollOptions,
|
||||
maxPollCount: 1
|
||||
}
|
||||
)
|
||||
|
||||
expect(state).toEqual('running')
|
||||
})
|
||||
|
||||
it('should poll with a larger interval for longer running jobs', async () => {
|
||||
mockLongPoll()
|
||||
|
||||
const state = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
mockAuthConfig,
|
||||
{
|
||||
...defaultPollOptions,
|
||||
maxPollCount: 200,
|
||||
pollInterval: 10
|
||||
}
|
||||
)
|
||||
|
||||
expect(state).toEqual('completed')
|
||||
}, 200000)
|
||||
|
||||
it('should continue polling until the job completes or errors', async () => {
|
||||
mockSimplePoll(1)
|
||||
|
||||
const state = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledTimes(2)
|
||||
expect(state).toEqual('completed')
|
||||
})
|
||||
|
||||
it('should print the state to the console when debug is on', async () => {
|
||||
jest.spyOn((process as any).logger, 'info')
|
||||
mockSimplePoll()
|
||||
|
||||
await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
true,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect((process as any).logger.info).toHaveBeenCalledTimes(4)
|
||||
expect((process as any).logger.info).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
'Polling job status...'
|
||||
)
|
||||
expect((process as any).logger.info).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
'Current job state: running'
|
||||
)
|
||||
expect((process as any).logger.info).toHaveBeenNthCalledWith(
|
||||
3,
|
||||
'Polling job status...'
|
||||
)
|
||||
expect((process as any).logger.info).toHaveBeenNthCalledWith(
|
||||
4,
|
||||
'Current job state: completed'
|
||||
)
|
||||
})
|
||||
|
||||
it('should continue polling when there is a single error in between', async () => {
|
||||
mockPollWithSingleError()
|
||||
|
||||
const state = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
)
|
||||
|
||||
expect(requestClient.get).toHaveBeenCalledTimes(2)
|
||||
expect(state).toEqual('completed')
|
||||
})
|
||||
|
||||
it('should throw an error when the error count exceeds the set value of 5', async () => {
|
||||
mockErroredPoll()
|
||||
|
||||
const error = await pollJobState(
|
||||
requestClient,
|
||||
mockJob,
|
||||
false,
|
||||
undefined,
|
||||
defaultPollOptions
|
||||
).catch((e) => e)
|
||||
|
||||
expect(error.message).toEqual(
|
||||
'Error while polling job state for job j0b: Status Error'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
const setupMocks = () => {
|
||||
jest.restoreAllMocks()
|
||||
jest.mock('../../../request/RequestClient')
|
||||
jest.mock('../../../auth/getTokens')
|
||||
jest.mock('../saveLog')
|
||||
jest.mock('../getFileStream')
|
||||
jest.mock('../../../utils/isNode')
|
||||
|
||||
jest
|
||||
.spyOn(requestClient, 'get')
|
||||
.mockImplementation(() =>
|
||||
Promise.resolve({ result: 'completed', etag: '', status: 200 })
|
||||
)
|
||||
jest
|
||||
.spyOn(getTokensModule, 'getTokens')
|
||||
.mockImplementation(() => Promise.resolve(mockAuthConfig))
|
||||
jest
|
||||
.spyOn(saveLogModule, 'saveLog')
|
||||
.mockImplementation(() => Promise.resolve())
|
||||
jest
|
||||
.spyOn(getFileStreamModule, 'getFileStream')
|
||||
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
|
||||
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
|
||||
}
|
||||
|
||||
const mockSimplePoll = (runningCount = 2) => {
|
||||
let count = 0
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
return Promise.resolve({
|
||||
result:
|
||||
count === 0
|
||||
? 'pending'
|
||||
: count <= runningCount
|
||||
? 'running'
|
||||
: 'completed',
|
||||
etag: '',
|
||||
status: 200
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const mockRunningPoll = () => {
|
||||
let count = 0
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
return Promise.resolve({
|
||||
result: count === 0 ? 'pending' : 'running',
|
||||
etag: '',
|
||||
status: 200
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const mockLongPoll = () => {
|
||||
let count = 0
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
return Promise.resolve({
|
||||
result: count <= 102 ? 'running' : 'completed',
|
||||
etag: '',
|
||||
status: 200
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const mockPollWithSingleError = () => {
|
||||
let count = 0
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
count++
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
if (count === 1) {
|
||||
return Promise.reject('Status Error')
|
||||
}
|
||||
return Promise.resolve({
|
||||
result: count === 0 ? 'pending' : 'completed',
|
||||
etag: '',
|
||||
status: 200
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const mockErroredPoll = () => {
|
||||
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
|
||||
if (url.includes('job')) {
|
||||
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
|
||||
}
|
||||
return Promise.reject('Status Error')
|
||||
})
|
||||
}
|
||||
73 src/api/viya/spec/saveLog.spec.ts (Normal file)
@@ -0,0 +1,73 @@
|
||||
import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'
import { saveLog } from '../saveLog'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'

const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const stream = {} as unknown as WriteStream

describe('saveLog', () => {
  beforeEach(() => {
    ;(process as any).logger = new Logger(LogLevel.Off)
    setupMocks()
  })

  it('should throw an error when a valid access token is not provided', async () => {
    const error = await saveLog(mockJob, requestClient, 0, 100, stream).catch(
      (e) => e
    )

    expect(error.message).toContain(
      `Logs for job ${mockJob.id} cannot be fetched without a valid access token.`
    )
  })

  it('should throw an error when the log URL is not available', async () => {
    const error = await saveLog(
      { ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'log') },
      requestClient,
      0,
      100,
      stream,
      't0k3n'
    ).catch((e) => e)

    expect(error.message).toContain(
      `Log URL for job ${mockJob.id} was not found.`
    )
  })

  it('should fetch and save logs to the given path', async () => {
    await saveLog(mockJob, requestClient, 0, 100, stream, 't0k3n')

    expect(fetchLogsModule.fetchLog).toHaveBeenCalledWith(
      requestClient,
      't0k3n',
      '/log/content',
      0,
      100
    )
    expect(writeStreamModule.writeStream).toHaveBeenCalledWith(
      stream,
      'Test Log'
    )
  })
})

const setupMocks = () => {
  jest.restoreAllMocks()
  jest.mock('../../../request/RequestClient')
  jest.mock('../../../utils/fetchLogByChunks')
  jest.mock('@sasjs/utils')
  jest.mock('../writeStream')

  jest
    .spyOn(fetchLogsModule, 'fetchLog')
    .mockImplementation(() => Promise.resolve('Test Log'))
  jest
    .spyOn(writeStreamModule, 'writeStream')
    .mockImplementation(() => Promise.resolve())
}
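Read together, the assertions in this spec imply the rough control flow inside saveLog. The sketch below is inferred from those assertions only and is not the adapter's actual implementation; the helper signatures are assumptions taken from how the spec mocks and calls them:

// Sketch inferred from the spec above, not real adapter code.
// Hypothetical stand-ins for the adapter's fetchLog and writeStream helpers.
declare const fetchLog: (
  client: unknown,
  accessToken: string,
  logUrl: string,
  start: number,
  end: number
) => Promise<string>
declare const writeStream: (stream: unknown, content: string) => Promise<void>

const saveLogSketch = async (
  job: { id: string; links: { rel: string; href: string }[] },
  client: unknown,
  start: number,
  end: number,
  stream: unknown,
  accessToken?: string
) => {
  if (!accessToken) {
    throw new Error(
      `Logs for job ${job.id} cannot be fetched without a valid access token.`
    )
  }
  const logLink = job.links.find((l) => l.rel === 'log')
  if (!logLink) {
    throw new Error(`Log URL for job ${job.id} was not found.`)
  }
  // Fetch the requested log chunk, then hand the text to the write stream.
  const log = await fetchLog(client, accessToken, `${logLink.href}/content`, start, end)
  await writeStream(stream, log)
}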
67  src/api/viya/spec/uploadTables.spec.ts  Normal file
@@ -0,0 +1,67 @@
import { RequestClient } from '../../../request/RequestClient'
import * as convertToCsvModule from '../../../utils/convertToCsv'
import { uploadTables } from '../uploadTables'

const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()

describe('uploadTables', () => {
  beforeEach(() => {
    setupMocks()
  })

  it('should return a list of uploaded files', async () => {
    const data = { foo: 'bar' }

    const files = await uploadTables(requestClient, data, 't0k3n')

    expect(files).toEqual([{ tableName: 'foo', file: 'test-file' }])
    expect(requestClient.uploadFile).toHaveBeenCalledTimes(1)
    expect(requestClient.uploadFile).toHaveBeenCalledWith(
      '/files/files#rawUpload',
      'Test CSV',
      't0k3n'
    )
  })

  it('should throw an error when the CSV exceeds the maximum length', async () => {
    const data = { foo: 'bar' }
    jest
      .spyOn(convertToCsvModule, 'convertToCSV')
      .mockImplementation(() => 'ERROR: LARGE STRING LENGTH')

    const error = await uploadTables(requestClient, data, 't0k3n').catch(
      (e) => e
    )

    expect(requestClient.uploadFile).not.toHaveBeenCalled()
    expect(error.message).toEqual(
      'The max length of a string value in SASjs is 32765 characters.'
    )
  })

  it('should throw an error when the file upload fails', async () => {
    const data = { foo: 'bar' }
    jest
      .spyOn(requestClient, 'uploadFile')
      .mockImplementation(() => Promise.reject('Upload Error'))

    const error = await uploadTables(requestClient, data, 't0k3n').catch(
      (e) => e
    )

    expect(error).toContain('Error while uploading file.')
  })
})

const setupMocks = () => {
  jest.restoreAllMocks()
  jest.mock('../../../utils/convertToCsv')
  jest
    .spyOn(convertToCsvModule, 'convertToCSV')
    .mockImplementation(() => 'Test CSV')
  jest
    .spyOn(requestClient, 'uploadFile')
    .mockImplementation(() =>
      Promise.resolve({ result: 'test-file', etag: '' })
    )
}
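The uploadTables assertions likewise outline the expected behaviour: each table is converted to CSV, oversized values are rejected before any upload, and every successful upload contributes a { tableName, file } entry. A rough illustration inferred from the spec (not the adapter's real code; the declared names and signatures are assumptions):

// Illustrative only; shapes are inferred from the assertions above.
declare const convertToCSV: (table: unknown) => string
declare const client: {
  uploadFile: (
    url: string,
    content: string,
    accessToken: string
  ) => Promise<{ result: string; etag: string }>
}

const uploadTablesSketch = async (
  data: Record<string, unknown>,
  accessToken: string
) => {
  const uploadedFiles = []
  for (const tableName of Object.keys(data)) {
    const csv = convertToCSV(data[tableName])
    // The spec mocks convertToCSV returning this sentinel to signal an oversized value.
    if (csv === 'ERROR: LARGE STRING LENGTH') {
      throw new Error(
        'The max length of a string value in SASjs is 32765 characters.'
      )
    }
    const { result: file } = await client
      .uploadFile('/files/files#rawUpload', csv, accessToken)
      .catch((err) => {
        // The spec asserts on a string containing this prefix.
        throw `Error while uploading file. ${err}`
      })
    uploadedFiles.push({ tableName, file })
  }
  return uploadedFiles
}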
25  src/api/viya/spec/writeStream.spec.ts  Normal file
@@ -0,0 +1,25 @@
import { WriteStream } from '../../../types'
import { writeStream } from '../writeStream'
import 'jest-extended'

describe('writeStream', () => {
  const stream: WriteStream = {
    write: jest.fn(),
    path: 'test'
  }

  it('should resolve when the stream is written successfully', async () => {
    expect(writeStream(stream, 'test')).toResolve()

    expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
  })

  it('should reject when the write errors out', async () => {
    jest
      .spyOn(stream, 'write')
      .mockImplementation((_, callback) => callback(new Error('Test Error')))
    const error = await writeStream(stream, 'test').catch((e) => e)

    expect(error.message).toEqual('Test Error')
  })
})
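This spec pins down the WriteStream contract used by the tests above: write(chunk, callback) is called with a trailing newline appended, and the promise settles according to the callback's error argument. A plausible promise wrapper matching those assertions (an inference for illustration, not necessarily the adapter's actual implementation):

// Inferred from the assertions above; the interface and names are assumptions.
interface WriteStreamLike {
  write: (chunk: string, callback: (err?: Error | null) => void) => void
  path: string
}

const writeStreamSketch = (stream: WriteStreamLike, content: string) =>
  new Promise<void>((resolve, reject) => {
    // Append a newline and surface any write error through the promise.
    stream.write(`${content}\n`, (err) => (err ? reject(err) : resolve()))
  })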
Some files were not shown because too many files have changed in this diff.