mirror of https://github.com/sasjs/adapter.git synced 2025-12-11 01:14:36 +00:00

Compare commits

...

269 Commits

Author SHA1 Message Date
Allan Bowe
2d0a73e74d Merge pull request #480 from sasjs/issue-477
fix: update error message when folder not found
2021-07-28 08:37:26 +03:00
Krishna Acondy
7b7a80c502 chore(root-folder-not-found): add test 2021-07-27 08:20:30 +01:00
Krishna Acondy
1ace15a308 fix(root-folder-not-found): create RootFolderNotFoundError class 2021-07-27 07:52:19 +01:00
Allan Bowe
e1b3ef7c8c Merge pull request #495 from sasjs/contributors
chore: contributors
2021-07-26 20:26:15 +03:00
710056bded fix: create a utility throwError and add test case for it 2021-07-26 15:30:19 +05:00
26f008d527 chore: remove console log statement 2021-07-26 11:09:31 +05:00
56ebc7be3b chore: merge master into issue-477 2021-07-26 11:06:13 +05:00
Allan Bowe
0ea66f6d37 Merge pull request #494 from sasjs/fix-browser-issue
fix(browser): only import file I/O functions when running in Node.js environments
2021-07-25 10:00:51 +03:00
Allan Bowe
cb30ed2b98 Merge branch 'master' into contributors 2021-07-24 23:14:16 +03:00
Allan Bowe
dfbe2d8f94 chore: contributors 2021-07-24 21:31:51 +03:00
Krishna Acondy
eac9da22bf chore(test): fix assertion 2021-07-24 10:27:31 +01:00
Krishna Acondy
626fc2e15f fix(path): make log file path platform-agnostic 2021-07-24 09:53:39 +01:00
Krishna Acondy
87e2edbd6c chore(test): fix long poll count 2021-07-24 00:12:11 +01:00
Krishna Acondy
7cf681bea3 chore(tests): fix tests 2021-07-23 22:24:48 +01:00
Krishna Acondy
281a145bef fix(node): only create and write file stream if running in node 2021-07-23 22:24:41 +01:00
Krishna Acondy
15d5f9ec91 chore(paths): fix import paths 2021-07-23 22:24:21 +01:00
Krishna Acondy
0a6c5a0ec4 fix(fs): replace fs imports with locally defined WriteStream interface 2021-07-23 22:24:04 +01:00
Krishna Acondy
2a9526d056 fix(node): add util to check if running in node 2021-07-23 22:23:05 +01:00
Allan Bowe
c2ff28c323 Update PULL_REQUEST_TEMPLATE.md 2021-07-23 13:04:38 +03:00
Allan Bowe
50710ee1df Merge pull request #476 from sasjs/issue-170
fix: file upload with debug enabled
2021-07-23 11:41:06 +03:00
Krishna Acondy
062ba91c17 Merge pull request #486 from sasjs/fix-poll-logic
fix(poll): add default poll options
2021-07-22 14:53:03 +01:00
6dd1d47bb2 fix: merge main into issue-477 and fixed conflicts 2021-07-22 16:13:46 +05:00
e70a9645ef fix: remove jwtDecode import statement 2021-07-22 15:56:22 +05:00
aeabc29e55 fix: remove serverurl argument from createFolder method and move decode token to utils project 2021-07-22 15:47:37 +05:00
Krishna Acondy
9600fa2512 fix(poll): add default poll options 2021-07-22 11:31:10 +01:00
Krishna Acondy
7951817480 Merge pull request #485 from sasjs/log-file-paths
fix(stream-log): use filepath if provided
2021-07-22 09:57:31 +01:00
Krishna Acondy
405eea1d6c chore(infra): set minimum node version to 15 2021-07-22 09:41:30 +01:00
Krishna Acondy
e3f189eed4 chore(test): fix test 2021-07-22 09:31:32 +01:00
Krishna Acondy
0bb42c5e3c fix(streamlog): use filepath if provided 2021-07-22 09:25:55 +01:00
Allan Bowe
c02eac196e Merge pull request #483 from sasjs/all-contributors/add-medjedovicm
docs: add medjedovicm as a contributor for code
2021-07-21 18:55:43 +03:00
Allan Bowe
3fb0d863e9 Update README.md 2021-07-21 18:55:01 +03:00
allcontributors[bot]
6d573d3897 docs: create .all-contributorsrc [skip ci] 2021-07-21 15:53:39 +00:00
allcontributors[bot]
33280d7a5b docs: update README.md [skip ci] 2021-07-21 15:53:38 +00:00
Allan Bowe
507722da0d Merge pull request #465 from sasjs/stream-job-logs
feat(stream-logs): Save logs to file during job status poll
2021-07-21 18:49:50 +03:00
Krishna Acondy
c8e029cff4 chore(deps): bump utils 2021-07-21 08:37:45 +01:00
Krishna Acondy
7bd2e31f3b chore(cleanup): remove console logs 2021-07-21 08:13:45 +01:00
Krishna Acondy
cfa0c8b9af chore(refactor): only fetch job if streaming logs, fix tests, add JSDoc comments 2021-07-21 08:12:34 +01:00
Krishna Acondy
df9c1c643f chore(merge): pull in changes from master 2021-07-20 09:26:34 +01:00
Krishna Acondy
5c8d311ae8 chore(streamlog): optimise polling mechanism 2021-07-20 09:25:39 +01:00
e1a76bc45a fix: update error message when folder not found 2021-07-19 21:53:58 +05:00
85e5ade93a fix: handle the case when array is passed in getValidJson method 2021-07-19 13:01:18 +05:00
4a61fb8f7f chore: update variable name from config to ovverrideSasjsConfig 2021-07-19 13:00:06 +05:00
5347aeba09 fix: replace isValidJson with getValidJson 2021-07-18 23:24:22 +05:00
Sabir Hassan
7ac7c5e52b Merge branch 'master' into issue-170 2021-07-18 21:56:33 +05:00
5098342dfe fix: retrieve content from the iframe in first response when viya Web approach used with debug enabled 2021-07-18 21:39:57 +05:00
c69be8ffc3 fix: move parseSasViyaDebugResponse method to utils folder 2021-07-18 21:37:08 +05:00
69999d8e8b fix: update fileUpload method to override existing config 2021-07-18 21:34:16 +05:00
Muhammad Saad
bec4180dcf Merge pull request #467 from sasjs/removed-url-package
fix: removed url package
2021-07-16 17:02:24 +05:00
Saad Jutt
1bb7807c25 chore(merge): Merge branch 'master' into removed-url-package 2021-07-16 04:12:20 +05:00
Allan Bowe
816f1d19d4 Merge pull request #471 from sasjs/windows-tests
Windows tests
2021-07-15 16:22:49 +03:00
d38d032309 chore: readme updates 2021-07-15 13:01:12 +02:00
d2a90c77fd chore: readme update 2021-07-15 10:57:44 +02:00
8a0f14b780 chore: windows fallback 2021-07-15 10:43:30 +02:00
f6cb2c4fac chore: sasjs-tests windows 2021-07-15 10:41:10 +02:00
Krishna Acondy
1594f0c7db chore(merge): pull in changes from master 2021-07-15 07:33:44 +01:00
Allan Bowe
7cb2a43f95 Merge pull request #462 from sasjs/json-fix
fix: invalid json checking
2021-07-14 20:46:16 +03:00
Allan Bowe
6e85c7a588 Merge branch 'master' into json-fix 2021-07-14 20:44:06 +03:00
Allan Bowe
a68f6962fd Merge pull request #450 from sasjs/allanbowe-patch-1
Update README.md
2021-07-14 20:42:09 +03:00
Allan Bowe
a650ba15dd Merge branch 'master' into allanbowe-patch-1 2021-07-14 20:41:51 +03:00
Allan Bowe
6ca1b489fc Merge pull request #466 from sasjs/session-state-fix
fix(session): provide more info if could not get session state
2021-07-14 20:41:25 +03:00
Yury Shkoda
a5c9f11c75 test(session): cover case when could not get session state 2021-07-14 14:17:20 +03:00
Krishna Acondy
1ff3937d11 chore(deps): update dependencies 2021-07-14 08:03:54 +01:00
Krishna Acondy
d4725d2e54 chore(refactor): change property name in PollOptions 2021-07-14 07:50:25 +01:00
Saad Jutt
db578564ba fix: removed url package 2021-07-13 17:11:49 +05:00
Yury Shkoda
d4ebef4290 fix(session): provide more info if could not get session state 2021-07-13 14:50:46 +03:00
Krishna Acondy
b9f368193d chore(refactor): add more tests 2021-07-13 08:12:15 +01:00
Krishna Acondy
4257ec78aa chore(ci): add coverage report to build workflow 2021-07-12 20:45:09 +01:00
Krishna Acondy
a0fbe1a740 chore(ci): add coverage report action 2021-07-12 20:42:49 +01:00
Krishna Acondy
123b9fb535 chore(refactor): split up and add tests for core functionality 2021-07-12 20:31:17 +01:00
Krishna Acondy
f57c7b8f7d chore(deps): up utils version 2021-07-12 20:30:42 +01:00
89590f9a37 chore: only removed 2021-07-12 14:42:19 +02:00
5d61bebc9e fix: isValidJson function returns the JSON parsed 2021-07-12 14:29:43 +02:00
99afa6e7e4 style: lint 2021-07-12 12:58:27 +02:00
b590a9f41b chore(tests): testing the isValidJson function 2021-07-12 12:58:04 +02:00
4466ee30d2 chore: preventing double parse of invalid json check 2021-07-12 11:02:01 +02:00
db372950b4 chore: type fix 2021-07-09 15:17:33 +02:00
46f5e07f11 fix: invalid json checking 2021-07-09 13:36:12 +02:00
Krishna Acondy
1c90f4f455 chore(*): remove log 2021-07-09 09:29:57 +01:00
Krishna Acondy
0114a80e38 chore(execute): add tests for executeScript 2021-07-09 09:17:49 +01:00
Krishna Acondy
13be2f9c70 chore(*): remove unused dependencies and variables, fix imports 2021-07-09 09:17:26 +01:00
Krishna Acondy
e396091aa7 chore(merge): pull in changes from master 2021-07-08 09:04:49 +01:00
Krishna Acondy
a00cb1ebec Merge pull request #461 from sasjs/fix-utils-imports
fix(imports): change imports from main barrel into internal barrels
2021-07-08 08:58:14 +01:00
Krishna Acondy
7b1264d140 fix(imports): change imports from main barrel into internal barrels 2021-07-08 08:46:28 +01:00
Krishna Acondy
04ccbf6843 feat(log): write logs to file when polling for job status 2021-07-07 10:02:14 +01:00
Allan Bowe
369b9fb023 Merge branch 'master' into allanbowe-patch-1 2021-07-05 12:02:01 +03:00
Allan Bowe
76487b00e9 Merge pull request #453 from sasjs/support-contribute-on-windows
fix: updated 'prepare' + using copyfiles instead of cp
2021-07-05 12:01:38 +03:00
Saad Jutt
2d0515e25b chore(merge): Merged master branch 2021-07-05 13:17:08 +05:00
Saad Jutt
b132b99586 fix: globstars support on mac with copyfiles 2021-07-05 13:11:40 +05:00
Krishna Acondy
5a7b4a1de4 Merge pull request #447 from sasjs/token-expiry-utils
fix(auth-utils): move auth functions to utils library, fix webpack config
2021-07-05 08:17:59 +01:00
Saad Jutt
6cac008b61 fix: updated 'prepare' + using copyfiles instead of cp 2021-07-05 04:06:04 +05:00
Allan Bowe
929ec6eb1c Update README.md 2021-07-02 12:50:03 +03:00
Krishna Acondy
5a35237de5 fix(build): add node polyfill plugin and stub fs and readline when building for the browser 2021-07-01 09:11:03 +01:00
Krishna Acondy
5d77bbba8b fix(auth): use token functions from utils library 2021-07-01 09:10:32 +01:00
Yury Shkoda
eda021b6a5 Merge pull request #431 from sasjs/issue-409
fix: if response does not contain valid JSON throw error #409
2021-07-01 08:03:30 +03:00
Yury Shkoda
259c479ef0 Merge branch 'master' into issue-409 2021-07-01 07:58:23 +03:00
Krishna Acondy
a962b8e7cf Merge pull request #445 from sasjs/handle-304-status
fix(session): fixed polling session state, refresh token before server calls
2021-06-30 18:07:44 +01:00
Krishna Acondy
eb0e7247a6 fix(scripts): change git hook script to prepare 2021-06-30 18:05:52 +01:00
ccc77cb9d1 chore: remove console.log statements 2021-06-30 21:42:46 +05:00
Krishna Acondy
5cb5bbdb55 fix(execution): refresh tokens before fetching results 2021-06-30 15:19:12 +01:00
Yury Shkoda
ac6cd7be82 fix(session): fixed polling session state 2021-06-30 16:55:09 +03:00
Sabir Hassan
63f5f4d03d Merge branch 'master' into issue-409 2021-06-30 15:35:43 +05:00
Krishna Acondy
a164fb7df9 Merge pull request #432 from sasjs/job-refresh-tokens
fix(job-execution): refresh access token if it has expired during job status checks
2021-06-30 11:10:34 +01:00
Krishna Acondy
336ba207cf chore(deps): upgrade dependencies 2021-06-30 07:35:21 +01:00
Krishna Acondy
3cfd45cc62 chore(merge): pull in changes from master 2021-06-30 07:26:15 +01:00
Yury Shkoda
f7fb917282 Merge pull request #441 from sasjs/allanbowe-patch-1
Update README.md
2021-06-30 08:23:17 +03:00
Allan Bowe
a182037883 Update README.md 2021-06-29 15:36:37 +03:00
Krishna Acondy
f9e79fb756 chore(*): remove unused variables 2021-06-29 10:23:35 +01:00
Krishna Acondy
aaf0eef62b chore(tests): fix method arguments 2021-06-29 07:32:47 +01:00
Krishna Acondy
fafa0c3567 Merge branch 'master' into job-refresh-tokens 2021-06-28 08:50:46 +01:00
Allan Bowe
4a6845ad6a Merge pull request #437 from sasjs/issue-420
fix: on viya when calling api pass debug parameter to correct section
2021-06-27 13:19:33 +03:00
Sabir Hassan
61d66c6f82 Merge branch 'master' into issue-420 2021-06-25 14:09:34 +05:00
123fbc7235 fix: on viya when calling api pass debug parameter to correct section #420 2021-06-25 13:49:45 +05:00
Krishna Acondy
eae8694a29 Merge branch 'master' into job-refresh-tokens 2021-06-25 09:15:33 +01:00
Krishna Acondy
2b16be3aef chore(*): refactor to use logger if available 2021-06-25 09:14:29 +01:00
Sabir Hassan
d8d4da9c9a Merge branch 'master' into issue-409 2021-06-24 16:15:40 +05:00
VladislavParhomchik
0b755b7304 Merge pull request #436 from sasjs/allanbowe-patch-1
chore(*): Update PULL_REQUEST_TEMPLATE.md
2021-06-24 10:22:13 +03:00
Allan Bowe
0816b7b1f9 Update PULL_REQUEST_TEMPLATE.md 2021-06-24 10:04:29 +03:00
Krishna Acondy
97d45e87ec chore(merge): pull in changes from master 2021-06-24 07:21:12 +01:00
Krishna Acondy
57ef0647b5 fix(auth): refresh access tokens if expiring during job status check 2021-06-24 07:20:54 +01:00
Allan Bowe
a34eebba44 Merge pull request #433 from sasjs/issue-308
Web approach including context name, file upload including debug and context name
2021-06-24 00:07:43 +03:00
857e39eb33 chore(git): Merge branch 'issue-308' of github.com:sasjs/adapter into issue-308 2021-06-23 11:15:06 +02:00
9bd7d84975 style: lint 2021-06-23 11:14:54 +02:00
731e38bce3 Merge branch 'master' into issue-308 2021-06-23 11:10:42 +02:00
Krishna Acondy
2ee6c45d16 Merge branch 'master' into job-refresh-tokens 2021-06-23 08:29:22 +01:00
Allan Bowe
b80283f8af Merge pull request #421 from sasjs/dependabot/npm_and_yarn/semantic-release-17.4.4
chore(deps-dev): bump semantic-release from 17.4.3 to 17.4.4
2021-06-22 22:19:38 +03:00
Allan Bowe
291e23e40a Update README.md 2021-06-22 18:53:12 +03:00
d53d1e1e6a chore: tests fix 2021-06-22 17:05:13 +02:00
8cf249e8fd style: lint 2021-06-22 16:51:43 +02:00
5d7cfe1e6c chore: fixing useComputeApi defaults 2021-06-22 16:51:14 +02:00
abc15fb3ab chore: fix file upload call 2021-06-22 13:23:18 +02:00
8cc4270e48 fix: web approach contextname, upload file: context name and debug parameter 2021-06-22 13:19:11 +02:00
Krishna Acondy
56b2ba026a chore(merge): pull in changes from master 2021-06-22 07:41:42 +01:00
Krishna Acondy
8beda1ad6c fix(*): pass in authConfig in place of accessToken 2021-06-22 07:38:12 +01:00
Krishna Acondy
b18b471549 fix(job-execution): refresh access token if it has expired during job status checks 2021-06-21 08:59:12 +01:00
93c9a34591 fix: if response is not valid json throw error #409 2021-06-21 00:45:37 +05:00
dependabot[bot]
9493492dea chore(deps-dev): bump semantic-release from 17.4.3 to 17.4.4
Bumps [semantic-release](https://github.com/semantic-release/semantic-release) from 17.4.3 to 17.4.4.
- [Release notes](https://github.com/semantic-release/semantic-release/releases)
- [Commits](https://github.com/semantic-release/semantic-release/compare/v17.4.3...v17.4.4)

---
updated-dependencies:
- dependency-name: semantic-release
  dependency-type: direct:development
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2021-06-18 08:42:31 +00:00
VladislavParhomchik
9b976d48ca Merge pull request #423 from sasjs/issue-394
fix: SAS9 performs CAS Authentication after login
2021-06-18 11:41:13 +03:00
Saad Jutt
00b19de497 chore: sasjs-tests package-lock.json updated 2021-06-17 19:35:24 +05:00
Saad Jutt
f4cdd2d607 fix: CAS Authentication upon SAS9 login 2021-06-17 19:33:58 +05:00
Yury Shkoda
cdc0c12ec4 chore(lint): fixed lint scriptes on Windows 2021-06-17 15:35:36 +03:00
Saad Jutt
bc6f109c48 fix: make duplicate request only if payload is present 2021-06-17 14:37:46 +05:00
Saad Jutt
cfab64cfa0 fix: first request after login redirects from server 2021-06-17 08:31:40 +05:00
Krishna Acondy
d4c8c58552 Merge pull request #422 from sasjs/assign-qa-reviewer
chore(reviewers): add Sabir and Vlad as reviewers
2021-06-16 07:50:58 +01:00
Krishna Acondy
2b8cb51a50 chore(reviewers): add Sabir to list of devs, create separate QA list with Vlad 2021-06-16 07:42:23 +01:00
Krishna Acondy
e068d3263c Merge pull request #419 from sasjs/issue-327
fix(*): SASWORK is not being parsed correctly
2021-06-15 08:41:58 +01:00
630f2e9c37 fix: test regarding Request with extra attributes on JES approach fixed 2021-06-15 11:29:21 +05:00
51ac6b052b fix: test case which check extra attributes on JES approach fixed 2021-06-14 23:21:17 +05:00
c32258eb3c fix: code modified in appendRequest method fixes #327 2021-06-14 23:18:26 +05:00
Allan Bowe
88f50e3c74 Update README.md 2021-06-14 21:11:18 +03:00
Krishna Acondy
bfe5ac0ff7 Merge pull request #417 from sasjs/force-sas9-webout
fix(sas9): force webout output when executing arbitrary code on SAS9
2021-06-14 09:17:32 +01:00
Krishna Acondy
d50f5a030a chore(lint): fix formatting 2021-06-14 09:12:11 +01:00
Krishna Acondy
c320caec99 fix(sas9): force webout output when executing arbitrary code on SAS9 2021-06-14 09:10:26 +01:00
Allan Bowe
16a5b2b012 Merge pull request #414 from sasjs/issue-276
fix: Issue 276
2021-06-13 21:20:18 +03:00
Allan Bowe
2951e0cc2d Merge branch 'master' into issue-276 2021-06-13 21:04:56 +03:00
Allan Bowe
6bb4a7ea18 Update SASjs.ts
fix grammar
2021-06-13 21:01:15 +03:00
Allan Bowe
2827978fe5 Merge pull request #390 from sasjs/service-pack-with-file-resource
feat: create file resource while deploying service pack for viya
2021-06-13 14:52:09 +03:00
Saad Jutt
541c19c1a4 chore(merge): Merge branch 'service-pack-with-file-resource' of github.com:sasjs/adapter into service-pack-with-file-resource 2021-06-13 16:26:27 +05:00
Saad Jutt
c5e995f8d6 chore: TSDoc comments updated 2021-06-13 16:25:04 +05:00
Allan Bowe
8bf36da566 Merge branch 'master' into service-pack-with-file-resource 2021-06-13 11:56:54 +03:00
ccb4ec6e03 chore: code refactored for better readability 2021-06-11 22:53:06 +05:00
06ebb52bc9 chore(merge): merge master into issue-276 2021-06-10 22:12:36 +05:00
Yury Shkoda
6e23a0362f Merge pull request #411 from sasjs/issue-408
feat: select extra attributes in JES response
2021-06-10 19:38:16 +03:00
a59d78bcf7 chore(git): Merge branch 'master' into issue-408 2021-06-10 15:06:10 +02:00
33d4ee92a7 chore: updated utils and comment 2021-06-10 15:03:51 +02:00
dadce3d4c9 chore: added extra attributes type from @sasjs/utils 2021-06-10 14:22:31 +02:00
Saad Jutt
b61cf34723 chore(merge): Merge branch 'master' into service-pack-with-file-resource 2021-06-10 16:55:35 +05:00
Saad Jutt
22445d1268 fix: uploading file Buffer with FormData 2021-06-10 16:49:20 +05:00
Allan Bowe
cba9dacb37 Merge branch 'master' into issue-276 2021-06-10 14:03:14 +03:00
Yury Shkoda
a055b36c5c Merge pull request #389 from sasjs/issue-381
fix: sas fails with verifying credentials
2021-06-10 13:42:21 +03:00
06895cc9f8 style: lint 2021-06-10 12:08:56 +02:00
24496a997a chore: addressing comments 2021-06-10 12:08:16 +02:00
6419686269 chore: lint fixes 2021-06-09 17:28:27 +00:00
Sabir Hassan
4554c9100c Merge branch 'master' into issue-276 2021-06-09 16:51:49 +05:00
919c83c143 chore: lint fixes 2021-06-09 16:40:29 +05:00
00ba2957fb Merge branch 'master' into issue-381 2021-06-09 13:10:06 +02:00
5beda6547a Merge branch 'master' into issue-408 2021-06-09 13:09:59 +02:00
bd49b3757a chore(git): Merge branch 'master' into issue-408 2021-06-09 13:05:48 +02:00
Yury Shkoda
b32352a369 Merge pull request #413 from sasjs/webpack-fix
fix(webpack): removed process plugin from nodeConfig
2021-06-09 14:04:47 +03:00
b306f11148 chore(git): Merge branch 'master' into issue-381 2021-06-09 13:04:47 +02:00
Yury Shkoda
8c4955cb65 chore(git): merge branch 'master' of https://github.com/sasjs/adapter into webpack-fix 2021-06-09 13:58:59 +03:00
Yury Shkoda
155f2bb0e8 fix(webpack): removed process plugin from nodeConfig 2021-06-09 13:53:27 +03:00
3ca971134a Merge pull request #366 from sasjs/snyk-upgrade-0c3cac4dc7e5009cbff727c995cc3ebe
[Snyk] Upgrade @types/node from 14.14.25 to 14.14.41
2021-06-09 11:06:22 +02:00
488d8b9316 chore(git): Merge branch 'master' into issue-381 2021-06-09 10:38:25 +02:00
c20bdba4ae Merge branch 'master' into snyk-upgrade-0c3cac4dc7e5009cbff727c995cc3ebe 2021-06-09 10:36:10 +02:00
0be2d69aee Merge pull request #404 from sasjs/dependabot/npm_and_yarn/ts-jest-27.0.3
chore(deps-dev): bump ts-jest from 27.0.2 to 27.0.3
2021-06-09 10:33:18 +02:00
a6e67c3478 chore(merge): branch 'master' into dependabot/npm_and_yarn/ts-jest-27.0.3 2021-06-09 10:28:05 +02:00
5968988984 Merge pull request #405 from sasjs/dependabot/npm_and_yarn/webpack-cli-4.7.2
chore(deps-dev): bump webpack-cli from 4.7.0 to 4.7.2
2021-06-09 10:24:12 +02:00
31cd01610a Merge branch 'master' into dependabot/npm_and_yarn/webpack-cli-4.7.2 2021-06-09 10:21:58 +02:00
a67824762c Merge pull request #412 from sasjs/dependabot/npm_and_yarn/sasjs/utils-2.18.0
chore(deps): bump @sasjs/utils from 2.17.1 to 2.18.0
2021-06-09 10:21:37 +02:00
dependabot-preview[bot]
0336541d40 chore(deps-dev): bump webpack-cli from 4.7.0 to 4.7.2
Bumps [webpack-cli](https://github.com/webpack/webpack-cli) from 4.7.0 to 4.7.2.
- [Release notes](https://github.com/webpack/webpack-cli/releases)
- [Changelog](https://github.com/webpack/webpack-cli/blob/master/CHANGELOG.md)
- [Commits](https://github.com/webpack/webpack-cli/compare/webpack-cli@4.7.0...webpack-cli@4.7.2)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-06-09 08:01:57 +00:00
dependabot-preview[bot]
01de3836d7 chore(deps): bump @sasjs/utils from 2.17.1 to 2.18.0
Bumps [@sasjs/utils](https://github.com/sasjs/utils) from 2.17.1 to 2.18.0.
- [Release notes](https://github.com/sasjs/utils/releases)
- [Commits](https://github.com/sasjs/utils/compare/v2.17.1...v2.18.0)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-06-09 08:01:54 +00:00
Krishna Acondy
c571bb8490 Merge pull request #342 from sasjs/dependabot/add-v2-config-file
Upgrade to GitHub-native Dependabot
2021-06-09 08:59:58 +01:00
Krishna Acondy
5b4d354ea2 chore(*): remove ignores 2021-06-09 08:53:57 +01:00
Krishna Acondy
b0ce0dc40a Merge branch 'master' into dependabot/add-v2-config-file 2021-06-09 08:53:24 +01:00
88f70a7966 chore: merge 2021-06-08 17:01:41 +02:00
89ff323206 style: lint 2021-06-08 16:55:10 +02:00
d4357d939e test: extra attributes on JES 2021-06-08 16:54:46 +02:00
Allan Bowe
6cb76f0b5c chore: merge fix 2021-06-08 13:18:16 +00:00
Allan Bowe
ba2baa36c0 chore: updating merge conflicts 2021-06-08 13:14:29 +00:00
Yury Shkoda
e36cd785e8 Merge pull request #410 from sasjs/macro-vars
feat(variables): added macro variables to executeComputeJob method
2021-06-08 14:50:33 +03:00
2fa3a353fa feat: select extra attributes in JES response 2021-06-08 13:25:08 +02:00
Yury Shkoda
bdb1ffb2ef chore(cleanup): removed console.log 2021-06-08 13:40:35 +03:00
Yury Shkoda
84090661cf chore(git): Merge branch 'master' of https://github.com/sasjs/adapter into macro-vars 2021-06-08 13:31:46 +03:00
Yury Shkoda
68e14bbf05 feat(variables): added macro variables to executeComputeJob method 2021-06-08 13:03:02 +03:00
Allan Bowe
e4f23334d3 Merge pull request #407 from sasjs/fix-built-package
fix(build): provide process module for compatibility with browser
2021-06-08 11:03:46 +03:00
Krishna Acondy
5593963b89 fix(build): provide process module for compatibility with browser 2021-06-08 08:42:48 +01:00
Krishna Acondy
81c9138b93 Merge branch 'master' into dependabot/npm_and_yarn/ts-jest-27.0.3 2021-06-07 09:09:13 +01:00
Krishna Acondy
83fa82108b Merge pull request #401 from sasjs/dependabot/npm_and_yarn/sasjs/utils-2.17.1
chore(deps): bump @sasjs/utils from 2.10.2 to 2.17.1
2021-06-07 09:08:59 +01:00
dependabot-preview[bot]
76039c3ec7 chore(deps-dev): bump ts-jest from 27.0.2 to 27.0.3
Bumps [ts-jest](https://github.com/kulshekhar/ts-jest) from 27.0.2 to 27.0.3.
- [Release notes](https://github.com/kulshekhar/ts-jest/releases)
- [Changelog](https://github.com/kulshekhar/ts-jest/blob/master/CHANGELOG.md)
- [Commits](https://github.com/kulshekhar/ts-jest/compare/v27.0.2...v27.0.3)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-06-07 08:07:47 +00:00
Krishna Acondy
9b57c9ca1c Merge branch 'master' into snyk-upgrade-0c3cac4dc7e5009cbff727c995cc3ebe 2021-06-07 09:05:35 +01:00
dependabot-preview[bot]
4018cf95ba chore(deps): bump @sasjs/utils from 2.10.2 to 2.17.1
Bumps [@sasjs/utils](https://github.com/sasjs/utils) from 2.10.2 to 2.17.1.
- [Release notes](https://github.com/sasjs/utils/releases)
- [Commits](https://github.com/sasjs/utils/compare/v2.10.2...v2.17.1)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-06-07 07:58:28 +00:00
Krishna Acondy
173b6e3e8d Merge pull request #400 from sasjs/sas9-execute-code
feat(sas9-support): execute arbitrary code on SAS9 servers
2021-06-07 08:56:56 +01:00
Krishna Acondy
0ed5447aff chore(sas9-api): fix filename 2021-06-07 08:45:50 +01:00
Krishna Acondy
6344a906d8 chore(tests): fix tests - remove done callback 2021-06-07 08:37:44 +01:00
Allan Bowe
b2c135ae61 Merge branch 'master' into issue-381 2021-06-07 10:34:16 +03:00
Krishna Acondy
2032aacba3 chore(deps): update package versions 2021-06-04 08:59:15 +01:00
Krishna Acondy
fadccfc94c chore(refactor): upgrade utils, refactor to use timestamp generator 2021-06-04 08:40:27 +01:00
Krishna Acondy
551e4e43c1 feat(sas9-support): execute arbitrary code on SAS9 using SASjs runner 2021-06-04 08:37:50 +01:00
sabir_hassan
1867658cde fix: add validations for table name and table structure #276 2021-06-03 15:08:48 +05:00
3fff4f9c4d Merge pull request #395 from sasjs/makeErr
fix: adding makeErr for SAS 9 in sajss-tests
2021-06-02 15:46:39 +02:00
Allan Bowe
3f119432db fix: adding makeErr for SAS 9 in sajss-tests 2021-06-02 16:44:09 +03:00
0b18fddc3e chore: merge 2021-06-02 11:06:34 +02:00
19503e0b31 style: lint 2021-06-02 11:01:19 +02:00
d8bdc02f09 chore: sasjs-tests compute only on viya, login order fix 2021-06-02 11:00:08 +02:00
2d0833061f chore: merge branch 'master' into issue-381 2021-06-01 11:52:52 +02:00
Yury Shkoda
5dfc4e4086 Merge branch 'master' into issue-381 2021-05-31 08:03:44 +03:00
Saad Jutt
c5824a8a8d fix: using mime package to determine content-type 2021-05-30 23:47:31 +05:00
Allan Bowe
2147c59314 Merge pull request #388 from sasjs/sas9-auth-error
fix(sas9-support): Throw error when invalid credentials are supplied
2021-05-30 08:51:24 +03:00
Saad Jutt
56a1960fff feat: create file resource while deploying service pack for viya 2021-05-30 05:58:17 +05:00
b8c9522a55 chore: packages 2021-05-28 16:58:56 +02:00
b461cff731 Merge branch 'master' into issue-381 2021-05-28 15:24:01 +02:00
728167fd71 test: fix 2021-05-28 15:22:57 +02:00
460575b462 fix: when sas fails with verifying credentials, resend request with new csrf token 2021-05-28 15:05:44 +02:00
Krishna Acondy
b247da249a chore(git-hooks): allow numbers in commit message 2021-05-28 08:52:18 +01:00
Krishna Acondy
e79089b880 fix(sas9-support): throw error with invalid credentials 2021-05-28 08:52:00 +01:00
Krishna Acondy
fe907e1c43 Merge pull request #384 from sasjs/sas9-support
feat(sas9-support): add support for SAS9 job execution outside of the browser
2021-05-28 07:46:47 +01:00
Allan Bowe
e95e894365 Merge branch 'master' into sas9-support 2021-05-27 12:29:30 +03:00
Allan Bowe
82414d8b8b Merge pull request #379 from sasjs/dependabot/npm_and_yarn/sasjs/utils-2.14.0
chore(deps): bump @sasjs/utils from 2.10.2 to 2.14.0
2021-05-27 12:29:13 +03:00
Allan Bowe
456fa68f0f Merge branch 'master' into dependabot/npm_and_yarn/sasjs/utils-2.14.0 2021-05-27 11:55:31 +03:00
Allan Bowe
076adc1f6a Merge pull request #334 from sasjs/dependabot/npm_and_yarn/typedoc-0.20.36
chore(deps-dev): bump typedoc from 0.20.35 to 0.20.36
2021-05-27 11:54:52 +03:00
Krishna Acondy
9676488ff2 chore(refactor): remove unnecessary variables, use jobs path from config 2021-05-27 08:40:50 +01:00
Krishna Acondy
e9affb862d chore(merge): update branch 2021-05-27 08:32:11 +01:00
Krishna Acondy
e04371510e chore(update): update branch with changes from master 2021-05-27 08:30:20 +01:00
dependabot-preview[bot]
19657a1c12 chore(deps-dev): bump typedoc from 0.20.35 to 0.20.36
Bumps [typedoc](https://github.com/TypeStrong/TypeDoc) from 0.20.35 to 0.20.36.
- [Release notes](https://github.com/TypeStrong/TypeDoc/releases)
- [Commits](https://github.com/TypeStrong/TypeDoc/compare/v0.20.35...v0.20.36)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-05-27 07:22:22 +00:00
dependabot-preview[bot]
6424c82ac9 chore(deps): bump @sasjs/utils from 2.10.2 to 2.14.0
Bumps [@sasjs/utils](https://github.com/sasjs/utils) from 2.10.2 to 2.14.0.
- [Release notes](https://github.com/sasjs/utils/releases)
- [Commits](https://github.com/sasjs/utils/compare/v2.10.2...v2.14.0)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-05-27 07:22:21 +00:00
Allan Bowe
fcab18191f Merge pull request #382 from sasjs/dependabot/npm_and_yarn/browserslist-4.16.6
chore(deps): [security] bump browserslist from 4.16.4 to 4.16.6
2021-05-27 10:20:21 +03:00
Krishna Acondy
f157612a0e Merge branch 'master' into sas9-support 2021-05-27 08:16:49 +01:00
Krishna Acondy
b8cb7d52e7 chore(*): remove unused loader 2021-05-27 08:08:47 +01:00
Krishna Acondy
d8d1968162 chore(*): fix formatting 2021-05-27 08:06:21 +01:00
Krishna Acondy
0e1d1f1d99 chore(dep): remove unused dependency 2021-05-27 08:04:19 +01:00
Krishna Acondy
0b055dd05f feat(sas9-support): add support for SAS9 via username/password login 2021-05-27 08:00:15 +01:00
dependabot-preview[bot]
ba91c29ba8 chore(deps): [security] bump browserslist from 4.16.4 to 4.16.6
Bumps [browserslist](https://github.com/browserslist/browserslist) from 4.16.4 to 4.16.6. **This update includes a security fix.**
- [Release notes](https://github.com/browserslist/browserslist/releases)
- [Changelog](https://github.com/browserslist/browserslist/blob/main/CHANGELOG.md)
- [Commits](https://github.com/browserslist/browserslist/compare/4.16.4...4.16.6)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2021-05-26 08:24:56 +00:00
Allan Bowe
bd19457c2a Merge branch 'master' of github.com:sasjs/adapter 2021-05-26 11:23:13 +03:00
Allan Bowe
b0570e1cd9 chore: automated commit 2021-05-26 11:23:08 +03:00
Allan Bowe
a5f1b59f7b Merge pull request #374 from sasjs/git-hooks
feat(git): enabled git hook enforcing conventional commits
2021-05-20 09:23:25 +03:00
Yury Shkoda
01ca29fc01 feat(git): enabled git hook enforcing conventional commits 2021-05-20 08:22:40 +03:00
Allan Bowe
ed9648fdf9 chore: automated commit 2021-05-16 22:02:23 +03:00
Allan Bowe
7e17aa6eb3 chore: automated commit 2021-05-16 22:00:10 +03:00
Allan Bowe
9caee9941a Merge pull request #371 from sasjs/qualityfixes
fix: readme badges
2021-05-16 21:59:15 +03:00
Allan Bowe
e309e7a4f4 fix: readme badges 2021-05-16 21:56:26 +03:00
Allan Bowe
c47441d6d4 Merge pull request #370 from sasjs/qualityfixes
fix: metadata in package.json
2021-05-16 21:41:11 +03:00
Allan Bowe
1844bc48ac Merge branch 'master' into qualityfixes 2021-05-16 21:40:40 +03:00
Allan Bowe
7a5adebdb5 fix: metadata in package.json 2021-05-16 21:40:14 +03:00
Allan Bowe
b39f0c577b Merge pull request #369 from sasjs/qualityfixes
Qualityfixes
2021-05-16 21:34:55 +03:00
Allan Bowe
15f4065cd8 fix: metadata updates (readme, changelog, url in package.json) 2021-05-16 20:25:56 +03:00
Allan Bowe
4c67665b4d fix: adding npmignore (should reduce the bundle size from 8.88mb) 2021-05-16 13:52:13 +03:00
snyk-bot
55e64ae9d6 fix: upgrade @types/node from 14.14.25 to 14.14.41
Snyk has created this PR to upgrade @types/node from 14.14.25 to 14.14.41.

See this package in npm:
https://www.npmjs.com/package/@types/node

See this project in Snyk:
https://app.snyk.io/org/allanbowe/project/acbafb55-1a7a-485d-a36b-42650bb03cf6?utm_source=github&utm_medium=upgrade-pr
2021-05-15 21:55:56 +00:00
Krishna Acondy
f8c6318a88 chore(*): attempt SAS9 job executor 2021-05-11 08:15:48 +01:00
dependabot-preview[bot]
9b32b28aa7 Upgrade to GitHub-native Dependabot 2021-04-29 15:44:24 +00:00
81 changed files with 42877 additions and 4672 deletions

.all-contributorsrc (new file, 103 lines)

@@ -0,0 +1,103 @@
{
"projectName": "adapter",
"projectOwner": "sasjs",
"repoType": "github",
"repoHost": "https://github.com",
"files": [
"README.md"
],
"imageSize": 100,
"commit": false,
"commitConvention": "angular",
"contributors": [
{
"login": "krishna-acondy",
"name": "Krishna Acondy",
"avatar_url": "https://avatars.githubusercontent.com/u/2980428?v=4",
"profile": "https://krishna-acondy.io/",
"contributions": [
"code",
"infra",
"blog",
"content",
"ideas",
"video"
]
},
{
"login": "YuryShkoda",
"name": "Yury Shkoda",
"avatar_url": "https://avatars.githubusercontent.com/u/25773492?v=4",
"profile": "https://www.erudicat.com/",
"contributions": [
"code",
"infra",
"ideas",
"test",
"video"
]
},
{
"login": "medjedovicm",
"name": "Mihajlo Medjedovic",
"avatar_url": "https://avatars.githubusercontent.com/u/18329105?v=4",
"profile": "https://github.com/medjedovicm",
"contributions": [
"code",
"infra",
"test",
"review"
]
},
{
"login": "allanbowe",
"name": "Allan Bowe",
"avatar_url": "https://avatars.githubusercontent.com/u/4420615?v=4",
"profile": "https://github.com/allanbowe",
"contributions": [
"code",
"review",
"test",
"mentoring",
"maintenance"
]
},
{
"login": "saadjutt01",
"name": "Muhammad Saad ",
"avatar_url": "https://avatars.githubusercontent.com/u/8914650?v=4",
"profile": "https://github.com/saadjutt01",
"contributions": [
"code",
"review",
"test",
"mentoring",
"infra"
]
},
{
"login": "sabhas",
"name": "Sabir Hassan",
"avatar_url": "https://avatars.githubusercontent.com/u/82647447?v=4",
"profile": "https://github.com/sabhas",
"contributions": [
"code",
"review",
"test",
"ideas"
]
},
{
"login": "VladislavParhomchik",
"name": "VladislavParhomchik",
"avatar_url": "https://avatars.githubusercontent.com/u/83717836?v=4",
"profile": "https://github.com/VladislavParhomchik",
"contributions": [
"test",
"review"
]
}
],
"contributorsPerLine": 7,
"skipCi": true
}

.git-hooks/commit-msg (new executable file, 18 lines)

@@ -0,0 +1,18 @@
#!/bin/sh
RED="\033[1;31m"
GREEN="\033[1;32m"
# Get the commit message (the parameter we're given is just the path to the
# temporary file which holds the message).
commit_message=$(cat "$1")
if (echo "$commit_message" | grep -Eq "^(build|chore|ci|docs|feat|fix|perf|refactor|revert|style|test)(\([a-z0-9 -\*]+\))?!?: .+$") then
echo "${GREEN} ✔ Commit message meets Conventional Commit standards"
exit 0
fi
echo "${RED}❌ Commit message does not meet the Conventional Commit standard!"
echo "An example of a valid message is:"
echo " feat(login): add the 'remember me' button"
echo " More details at: https://www.conventionalcommits.org/en/v1.0.0/#summary"
exit 1

.github/dependabot.yml (new vendored file, 7 lines)

@@ -0,0 +1,7 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/"
schedule:
interval: daily
open-pull-requests-limit: 10

(file name not shown)

@@ -7,3 +7,8 @@ groups:
- saadjutt01
- medjedovicm
- allanbowe
- sabhas
- name: SASjs QA
reviewers: 1
usernames:
- VladislavParhomchik

(file name not shown)

@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
node-version: [12.x]
node-version: [15.x]
steps:
- uses: actions/checkout@v2
@@ -27,6 +27,10 @@ jobs:
run: npm run lint
- name: Run unit tests
run: npm test
- name: Generate coverage report
uses: artiomtr/jest-coverage-report-action@v2.0-rc.2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Package
run: npm run package:lib
env:

.npmignore (new file, 4 lines)

@@ -0,0 +1,4 @@
sasjs-tests/
docs/
.github/
CONTRIBUTING.md

(file name not shown)

@@ -1,5 +1,9 @@
# Change Log
Since March 2020 the changelog is managed by github releases - see [https://github.com/sasjs/adapter/releases](https://github.com/sasjs/adapter/releases).
## Changes up to 5th March 2020
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
<a name="1.9.0"></a>

(file name not shown)

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2020 Macro People
Copyright (c) 2021 Macro People
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

(file name not shown)

@@ -12,7 +12,9 @@ What code changes have been made to achieve the intent.
## Checks
- [ ] Code is formatted correctly (`npm run lint:fix`).
- [ ] All unit tests are passing (`npm test`).
No PR (that involves a non-trivial code change) should be merged, unless all items below are confirmed! If an urgent fix is needed - use a tar file.
- [ ] All `sasjs-cli` unit tests are passing (`npm test`).
- [ ] All `sasjs-tests` are passing (instructions available [here](https://github.com/sasjs/adapter/blob/master/sasjs-tests/README.md)).
- [ ] [Data Controller](https://datacontroller.io) builds and is functional on both SAS 9 and Viya

(file name not shown)

@@ -1,7 +1,23 @@
[![](https://data.jsdelivr.com/v1/package/npm/@sasjs/adapter/badge)](https://www.jsdelivr.com/package/npm/@sasjs/adapter)
# @sasjs/adapter
[![npm package][npm-image]][npm-url]
[![Github Workflow][githubworkflow-image]][githubworkflow-url]
[![Dependency Status][dependency-image]][dependency-url]
[![npm](https://img.shields.io/npm/dt/@sasjs/adapter)]()
![Snyk Vulnerabilities for npm package](https://img.shields.io/snyk/vulnerabilities/npm/@sasjs/adapter)
[![License](https://img.shields.io/apm/l/atomic-design-ui.svg)](/LICENSE)
![GitHub top language](https://img.shields.io/github/languages/top/sasjs/adapter)
![GitHub issues](https://img.shields.io/github/issues/sasjs/adapter)
[![Gitpod ready-to-code](https://img.shields.io/badge/Gitpod-ready--to--code-908a85?logo=gitpod)](https://gitpod.io/#https://github.com/sasjs/adapter)
[npm-image]:https://img.shields.io/npm/v/@sasjs/adapter.svg
[npm-url]:http://npmjs.org/package/@sasjs/adapter
[githubworkflow-image]:https://github.com/sasjs/adapter/actions/workflows/build.yml/badge.svg
[githubworkflow-url]:https://github.com/sasjs/adapter/blob/main/.github/workflows/build.yml
[dependency-image]:https://david-dm.org/sasjs/adapter.svg
[dependency-url]:https://github.com/sasjs/adapter/blob/main/package.json
SASjs is an open-source framework for building Web Apps on SAS® platforms. You can use as much or as little of it as you like. This repository contains the JS adapter, the part that handles the to/from SAS communication on the client side. There are 3 ways to install it:
1 - `npm install @sasjs/adapter` - for use in a node project
@@ -156,35 +172,43 @@ Configuration on the client side involves passing an object on startup, which ca
* `serverType` - either `SAS9` or `SASVIYA`.
* `serverUrl` - the location (including http protocol and port) of the SAS Server. Can be omitted, eg if serving directly from the SAS Web Server, or in streaming mode.
* `debug` - if `true` then SAS Logs and extra debug information is returned.
* `useComputeApi` - if `true` and the serverType is `SASVIYA` then the REST APIs will be called directly (rather than using the JES web service).
* `contextName` - if missing or blank, and `useComputeApi` is `true` and `serverType` is `SASVIYA` then the JES API will be used.
* `useComputeApi` - Only relevant when the serverType is `SASVIYA`. If `true` the [Compute API](#using-the-compute-api) is used. If `false` the [JES API](#using-the-jes-api) is used. If `null` or `undefined` the [Web](#using-jes-web-app) approach is used.
* `contextName` - Compute context on which the requests will be called. If missing or not provided, defaults to `Job Execution Compute context`.
The adapter supports a number of approaches for interfacing with Viya (`serverType` is `SASVIYA`). For maximum performance, be sure to [configure your compute context](https://sasjs.io/guide-viya/#shared-account-and-server-re-use) with `reuseServerProcesses` as `true` and a system account in `runServerAs`. This functionality is available since Viya 3.5. This configuration is supported when [creating contexts using the CLI](https://sasjs.io/sasjs-cli-context/#sasjs-context-create).
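For orientation, here is a minimal sketch of how the options above might be passed when instantiating the adapter. It is illustrative only: the `appLoc`, `serverUrl`, context name and service path are placeholders, and depending on the adapter version `serverType` may need to be supplied as the `ServerType` enum from `@sasjs/utils` rather than a string. The `SASjs` class and `request` method are the same ones used in the sasjs-tests suite.
```typescript
import SASjs from '@sasjs/adapter'

// Illustrative sketch only: appLoc, serverUrl, contextName and the service
// path are placeholders, not values taken from this repository.
const adapter = new SASjs({
  appLoc: '/Your/Path', // root folder of your deployed SAS services
  serverType: 'SASVIYA', // or 'SAS9'; some versions expect the ServerType enum
  serverUrl: 'https://your-sas-server.com', // may be omitted when served from the SAS web server
  debug: false, // true returns the SAS log with each response
  useComputeApi: true, // true = Compute API, false = JES API, undefined/null = JES web app
  contextName: 'yourComputeContext' // defaults to the Job Execution compute context
})

// Calls the service at <appLoc>/common/sendObj with one input table.
adapter
  .request('common/sendObj', { sometable: [{ col1: 42 }] })
  .then((response) => console.log(response))
  .catch((err) => console.error(err))
```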
### Using JES Web App
In this setup, all requests are routed through the JES web app, at `YOURSERVER/SASJobExecution`. This is the most reliable method, and also the slowest. One request is made to the JES app, and remaining requests (getting job uri, session spawning, passing parameters, running the program, fetching the log) are made on the SAS server by the JES app.
```
{
appLoc:"/Your/Path",
serverType:"SASVIYA"
}
```
### Using the JES API
Here we are running Jobs using the Job Execution Service except this time we are making the requests directly using the REST API instead of through the JES Web App. This is helpful when we need to call web services outside of a browser (eg with the SASjs CLI or other commandline tools). To save one network request, the adapter prefetches the JOB URIs and passes them in the `__job` parameter.
In this setup, all requests are routed through the JES web app, at `YOURSERVER/SASJobExecution?_program=/your/program`. This is the most reliable method, and also the slowest. One request is made to the JES app, and remaining requests (getting job uri, session spawning, passing parameters, running the program, fetching the log) are handled by the SAS server inside the JES app.
```
{
appLoc:"/Your/Path",
serverType:"SASVIYA",
useComputeApi: true
contextName: 'yourComputeContext'
}
```
Note - to use the web approach, the `useComputeApi` property must be `undefined` or `null`.
### Using the JES API
Here we are running Jobs using the Job Execution Service except this time we are making the requests directly using the REST API instead of through the JES Web App. This is helpful when we need to call web services outside of a browser (eg with the SASjs CLI or other commandline tools). To save one network request, the adapter prefetches the JOB URIs and passes them in the `__job` parameter. Depending on your network bandwidth, it may or may not be faster than the JES Web approach.
This approach (`useComputeApi: false`) also ensures that jobs are displayed in Environment Manager.
```
{
appLoc:"/Your/Path",
serverType:"SASVIYA",
useComputeApi: false,
contextName: 'yourComputeContext'
}
```
### Using the Compute API
This approach is by far the fastest, as a result of the optimisations we have built into the adapter. With this configuration, in the first sasjs request, we take a URI map of the services in the target folder, and create a session manager - which spawns an extra session. The next time a request is made, the adapter will use the 'hot' session. Sessions are deleted after every use, which actually makes this _less_ resource intensive than a typical JES web app, in which all sessions are kept alive by default for 15 minutes.
This approach is by far the fastest, as a result of the optimisations we have built into the adapter. With this configuration, in the first sasjs request, we take a URI map of the services in the target folder, and create a session manager. This manager will spawn an additional session every time a request is made. Subsequent requests will use the existing 'hot' session, if it exists. Sessions are always deleted after every use, which actually makes this _less_ resource intensive than a typical JES web app, in which all sessions are kept alive by default for 15 minutes.
With this approach (`useComputeApi: true`), the requests/logs will _not_ appear in the list in Environment manager.
```
{
@@ -198,8 +222,44 @@ This approach is by far the fastest, as a result of the optimisations we have bu
# More resources
For more information and examples specific to this adapter you can check out the [user guide](https://sasjs.io/sasjs-adapter/) or the [technical](http://adapter.sasjs.io/) documentation.
For more information on building web apps in general, check out these [resources](https://sasjs.io/training/resources/) or contact the [author](https://www.linkedin.com/in/allanbowe/) directly.
If you are a SAS 9 or SAS Viya customer you can also request a copy of [Data Controller](https://datacontroller.io) - free for up to 5 users, this tool makes use of all parts of the SASjs framework.
## Star Gazing
If you find this library useful, help us grow our star graph!
![](https://starchart.cc/sasjs/adapter.svg)
## Contributors ✨
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[![All Contributors](https://img.shields.io/badge/all_contributors-7-orange.svg?style=flat-square)](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tr>
<td align="center"><a href="https://krishna-acondy.io/"><img src="https://avatars.githubusercontent.com/u/2980428?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Krishna Acondy</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=krishna-acondy" title="Code">💻</a> <a href="#infra-krishna-acondy" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#blog-krishna-acondy" title="Blogposts">📝</a> <a href="#content-krishna-acondy" title="Content">🖋</a> <a href="#ideas-krishna-acondy" title="Ideas, Planning, & Feedback">🤔</a> <a href="#video-krishna-acondy" title="Videos">📹</a></td>
<td align="center"><a href="https://www.erudicat.com/"><img src="https://avatars.githubusercontent.com/u/25773492?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Yury Shkoda</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=YuryShkoda" title="Code">💻</a> <a href="#infra-YuryShkoda" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="#ideas-YuryShkoda" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/sasjs/adapter/commits?author=YuryShkoda" title="Tests">⚠️</a> <a href="#video-YuryShkoda" title="Videos">📹</a></td>
<td align="center"><a href="https://github.com/medjedovicm"><img src="https://avatars.githubusercontent.com/u/18329105?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Mihajlo Medjedovic</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Code">💻</a> <a href="#infra-medjedovicm" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a> <a href="https://github.com/sasjs/adapter/commits?author=medjedovicm" title="Tests">⚠️</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Amedjedovicm" title="Reviewed Pull Requests">👀</a></td>
<td align="center"><a href="https://github.com/allanbowe"><img src="https://avatars.githubusercontent.com/u/4420615?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Allan Bowe</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=allanbowe" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Aallanbowe" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=allanbowe" title="Tests">⚠️</a> <a href="#mentoring-allanbowe" title="Mentoring">🧑‍🏫</a> <a href="#maintenance-allanbowe" title="Maintenance">🚧</a></td>
<td align="center"><a href="https://github.com/saadjutt01"><img src="https://avatars.githubusercontent.com/u/8914650?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Muhammad Saad </b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=saadjutt01" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Asaadjutt01" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=saadjutt01" title="Tests">⚠️</a> <a href="#mentoring-saadjutt01" title="Mentoring">🧑‍🏫</a> <a href="#infra-saadjutt01" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
<td align="center"><a href="https://github.com/sabhas"><img src="https://avatars.githubusercontent.com/u/82647447?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Sabir Hassan</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=sabhas" title="Code">💻</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3Asabhas" title="Reviewed Pull Requests">👀</a> <a href="https://github.com/sasjs/adapter/commits?author=sabhas" title="Tests">⚠️</a> <a href="#ideas-sabhas" title="Ideas, Planning, & Feedback">🤔</a></td>
<td align="center"><a href="https://github.com/VladislavParhomchik"><img src="https://avatars.githubusercontent.com/u/83717836?v=4?s=100" width="100px;" alt=""/><br /><sub><b>VladislavParhomchik</b></sub></a><br /><a href="https://github.com/sasjs/adapter/commits?author=VladislavParhomchik" title="Tests">⚠️</a> <a href="https://github.com/sasjs/adapter/pulls?q=is%3Apr+reviewed-by%3AVladislavParhomchik" title="Reviewed Pull Requests">👀</a></td>
</tr>
</table>
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->
<!-- ALL-CONTRIBUTORS-LIST:END -->
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!

package-lock.json (generated, 20481 changed lines): file diff suppressed because it is too large

(file name not shown)

@@ -1,17 +1,19 @@
{
"name": "@sasjs/adapter",
"description": "JavaScript adapter for SAS",
"homepage": "https://adapter.sasjs.io",
"scripts": {
"build": "rimraf build && rimraf node && mkdir node && cp -r src/* node && webpack && rimraf build/src && rimraf node",
"package:lib": "npm run build && cp ./package.json build && cd build && npm version \"5.0.0\" && npm pack",
"build": "rimraf build && rimraf node && mkdir node && copyfiles -u 1 \"./src/**/*\" ./node && webpack && rimraf build/src && rimraf node",
"package:lib": "npm run build && copyfiles ./package.json build && cd build && npm version \"5.0.0\" && npm pack",
"publish:lib": "npm run build && cd build && npm publish",
"lint:fix": "npx prettier --write 'src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}' && npx prettier --write 'sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}'",
"lint": "npx prettier --check 'src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}' && npx prettier --check 'sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}'",
"lint:fix": "npx prettier --write \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --write \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
"lint": "npx prettier --check \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --check \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
"test": "jest --silent --coverage",
"prepublishOnly": "cp -r ./build/* . && rm -rf ./build",
"postpublish": "git clean -fd",
"semantic-release": "semantic-release",
"typedoc": "typedoc"
"typedoc": "typedoc",
"prepare": "git rev-parse --git-dir && git config core.hooksPath ./.git-hooks && git config core.autocrlf false || true"
},
"publishConfig": {
"access": "public"
@@ -36,31 +38,43 @@
},
"license": "ISC",
"devDependencies": {
"@types/jest": "^26.0.22",
"@types/axios": "^0.14.0",
"@types/form-data": "^2.5.0",
"@types/jest": "^26.0.24",
"@types/mime": "^2.0.3",
"@types/tough-cookie": "^4.0.1",
"copyfiles": "^2.4.1",
"cp": "^0.2.0",
"dotenv": "^8.2.0",
"jest": "^26.6.3",
"dotenv": "^10.0.0",
"jest": "^27.0.6",
"jest-extended": "^0.11.5",
"node-polyfill-webpack-plugin": "^1.1.4",
"path": "^0.12.7",
"process": "^0.11.10",
"rimraf": "^3.0.2",
"semantic-release": "^17.4.2",
"terser-webpack-plugin": "^4.2.3",
"ts-jest": "^25.5.1",
"ts-loader": "^9.1.2",
"semantic-release": "^17.4.4",
"terser-webpack-plugin": "^5.1.4",
"ts-jest": "^27.0.3",
"ts-loader": "^9.2.2",
"tslint": "^6.1.3",
"tslint-config-prettier": "^1.18.0",
"typedoc": "^0.20.35",
"typedoc-neo-theme": "^1.1.0",
"typedoc": "^0.21.4",
"typedoc-neo-theme": "^1.1.1",
"typedoc-plugin-external-module-name": "^4.0.6",
"typescript": "^3.9.9",
"webpack": "^5.33.2",
"webpack-cli": "^4.7.0"
"typescript": "^4.3.5",
"webpack": "^5.44.0",
"webpack-cli": "^4.7.2"
},
"main": "index.js",
"dependencies": {
"@sasjs/utils": "^2.10.2",
"@sasjs/utils": "^2.27.1",
"axios": "^0.21.1",
"axios-cookiejar-support": "^1.0.1",
"form-data": "^4.0.0",
"https": "^1.0.0"
"https": "^1.0.0",
"tough-cookie": "^4.0.0"
},
"engines": {
"node": ">=15"
}
}

(file name not shown)

@@ -6,7 +6,7 @@ When developing on `@sasjs/adapter`, it's good practice to run the test suite ag
You can use the provided `update:adapter` NPM script for this.
```
```bash
npm run update:adapter
```
@@ -37,10 +37,23 @@ To be able to run the `deploy` script, two environment variables need to be set:
So you can run the script like so:
```
```bash
SSH_ACCOUNT=me@my-sas-server.com DEPLOY_PATH=/var/www/html/my-folder/sasjs-tests npm run deploy
```
If you are on `WINDOWS`, you will first need to install one dependency:
```bash
npm i -g copyfiles
```
and then run to build:
```bash
npm run update:adapter && npm run build
```
when it finishes run to deploy:
```bash
scp -rp ./build/* me@my-sas-server.com:/var/www/html/my-folder/sasjs-tests
```
If you'd like to deploy just `sasjs-tests` without changing the adapter version, you can use the `deploy:tests` script, while also setting the same environment variables as above.
## 3. Creating the required SAS services
@@ -49,12 +62,12 @@ The below services need to be created on your SAS server, at the location specif
### SAS 9
```
```sas
filename mc url "https://raw.githubusercontent.com/sasjs/core/main/all.sas";
%inc mc;
filename ft15f001 temp;
parmcards4;
%webout(FETCH)
%webout(OPEN)
%macro x();
%do i=1 %to &_webin_file_count; %webout(OBJ,&&_webin_name&i) %end;
@@ -63,6 +76,7 @@ parmcards4;
;;;;
%mm_createwebservice(path=/Public/app/common,name=sendObj)
parmcards4;
%webout(FETCH)
%webout(OPEN)
%macro x();
%do i=1 %to &_webin_file_count; %webout(ARR,&&_webin_name&i) %end;
@@ -70,11 +84,24 @@ parmcards4;
%webout(CLOSE)
;;;;
%mm_createwebservice(path=/Public/app/common,name=sendArr)
parmcards4;
let he who hath understanding, reckon the number of the beast
;;;;
%mm_createwebservice(path=/Public/app/common,name=makeErr)
parmcards4;
%webout(OPEN)
data _null_;
file _webout;
put ' the discovery channel ';
run;
%webout(CLOSE)
;;;;
%mm_createwebservice(path=/Public/app/common,name=invalidJSON)
```
### SAS Viya
```
```sas
filename mc url "https://raw.githubusercontent.com/sasjs/core/main/all.sas";
%inc mc;
filename ft15f001 temp;
@@ -113,6 +140,15 @@ If you can trust yourself when all men doubt you,
But make allowance for their doubting too;
;;;;
%mp_createwebservice(path=/Public/app/common,name=makeErr)
parmcards4;
%webout(OPEN)
data _null_;
file _webout;
put ' the discovery channel ';
run;
%webout(CLOSE)
;;;;
%mp_createwebservice(path=/Public/app/common,name=invalidJSON)
```
You should now be able to access the tests in your browser at the deployed path on your server.
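As a rough, hypothetical smoke check (not part of the test suite itself), the two deliberately broken services above can also be exercised directly from the adapter. This assumes an already-authenticated Viya session and the `/Public/app` appLoc used in these examples; both requests are expected to reject rather than resolve, though the exact error shape depends on the adapter version.
```typescript
import SASjs from '@sasjs/adapter'

// Hypothetical check against the makeErr and invalidJSON services created above.
// Assumes an authenticated session and the /Public/app appLoc used in this README.
const adapter = new SASjs({ appLoc: '/Public/app', serverType: 'SASVIYA' })

const expectRejection = async (service: string) => {
  try {
    await adapter.request(service, { sometable: [{ col1: 42 }] })
    console.log(`unexpected: ${service} resolved`)
  } catch (err) {
    console.log(`${service} rejected as expected:`, err)
  }
}

expectRejection('common/makeErr')
expectRejection('common/invalidJSON')
```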

File diff suppressed because it is too large (file name not shown)

(file name not shown)

@@ -7,7 +7,7 @@
"@sasjs/adapter": "file:../build/sasjs-adapter-5.0.0.tgz",
"@sasjs/test-framework": "^1.4.0",
"@types/jest": "^26.0.20",
"@types/node": "^14.14.25",
"@types/node": "^14.14.41",
"@types/react": "^17.0.1",
"@types/react-dom": "^17.0.0",
"@types/react-router-dom": "^5.1.7",
@@ -23,7 +23,8 @@
"test": "react-scripts test",
"eject": "react-scripts eject",
"update:adapter": "cd .. && npm run package:lib && cd sasjs-tests && npm i ../build/sasjs-adapter-5.0.0.tgz",
"deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH",
"deploy:tests": "rsync -avhe ssh ./build/* --delete $SSH_ACCOUNT:$DEPLOY_PATH || npm run deploy:tests-win",
"deploy:tests-win": "scp %DEPLOY_PATH% ./build/*",
"deploy": "npm run update:adapter && npm run build && npm run deploy:tests"
},
"eslintConfig": {

(file name not shown)

@@ -13,14 +13,19 @@ const App = (): ReactElement<{}> => {
useEffect(() => {
if (adapter) {
setTestSuites([
const testSuites = [
basicTests(adapter, config.userName, config.password),
sendArrTests(adapter),
sendObjTests(adapter),
specialCaseTests(adapter),
sasjsRequestTests(adapter),
computeTests(adapter)
])
sasjsRequestTests(adapter)
]
if (adapter.getSasjsConfig().serverType === 'SASVIYA') {
testSuites.push(computeTests(adapter))
}
setTestSuites(testSuites)
}
}, [adapter, config])

(file name not shown)

@@ -145,6 +145,29 @@ export const basicTests = (
sasjsConfig.debug === false
)
}
},
{
title: 'Request with extra attributes on JES approach',
description:
'Should complete successful request with extra attributes present in response',
test: async () => {
const config = {
useComputeApi: false
}
return await adapter.request(
'common/sendArr',
stringData,
config,
undefined,
undefined,
['file', 'data']
)
},
assertion: (response: any) => {
const responseKeys: any = Object.keys(response)
return responseKeys.includes('file') && responseKeys.includes('data')
}
}
]
})

(file name not shown)

@@ -25,7 +25,7 @@ export const computeTests = (adapter: SASjs): TestSuite => ({
'/Public/app/common/sendArr',
data,
{},
'',
undefined,
true
)
},

(file name not shown)

@@ -176,11 +176,59 @@ export const sendObjTests = (adapter: SASjs): TestSuite => ({
name: 'sendObj',
tests: [
{
title: 'Invalid column name',
title: 'Table name starts with numeric',
description: 'Should throw an error',
test: async () => {
const invalidData: any = {
'1 invalid table': [{ col1: 42 }]
'1InvalidTable': [{ col1: 42 }]
}
return adapter.request('common/sendObj', invalidData).catch((e) => e)
},
assertion: (error: any) =>
!!error && !!error.error && !!error.error.message
},
{
title: 'Table name contains a space',
description: 'Should throw an error',
test: async () => {
const invalidData: any = {
'an invalidTable': [{ col1: 42 }]
}
return adapter.request('common/sendObj', invalidData).catch((e) => e)
},
assertion: (error: any) =>
!!error && !!error.error && !!error.error.message
},
{
title: 'Table name contains a special character',
description: 'Should throw an error',
test: async () => {
const invalidData: any = {
'anInvalidTable#': [{ col1: 42 }]
}
return adapter.request('common/sendObj', invalidData).catch((e) => e)
},
assertion: (error: any) =>
!!error && !!error.error && !!error.error.message
},
{
title: 'Table name exceeds max length of 32 characters',
description: 'Should throw an error',
test: async () => {
const invalidData: any = {
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx: [{ col1: 42 }]
}
return adapter.request('common/sendObj', invalidData).catch((e) => e)
},
assertion: (error: any) =>
!!error && !!error.error && !!error.error.message
},
{
title: "Invalid data object's structure",
description: 'Should throw an error',
test: async () => {
const invalidData: any = {
inData: [[{ data: 'value' }]]
}
return adapter.request('common/sendObj', invalidData).catch((e) => e)
},

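Taken together, these tests pin down the table-naming rules that are now enforced client-side. A sketch of payloads that pass and fail validation (the table name `mytable` is illustrative):

```ts
// Passes validation: the name starts with a letter or underscore, contains only
// alphanumeric characters and underscores, is at most 32 characters long, and
// maps to an array of row objects.
const validData = { mytable: [{ col1: 42 }, { col1: 43 }] }

// Each of these is rejected by the adapter before any request is sent:
const startsWithNumber = { '1InvalidTable': [{ col1: 42 }] }
const containsSpace = { 'an invalidTable': [{ col1: 42 }] }
const specialCharacter = { 'anInvalidTable#': [{ col1: 42 }] }
const invalidStructure = { inData: [[{ data: 'value' }]] } // rows must be plain objects
```
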
View File

@@ -2,6 +2,7 @@ import { Context, EditContextInput, ContextAllAttributes } from './types'
import { isUrl } from './utils'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from './request/RequestClient'
import { AuthConfig } from '@sasjs/utils/types'
export class ContextManager {
private defaultComputeContexts = [
@@ -328,12 +329,12 @@ export class ContextManager {
public async getExecutableContexts(
executeScript: Function,
accessToken?: string
authConfig?: AuthConfig
) {
const { result: contexts } = await this.requestClient
.get<{ items: Context[] }>(
`${this.serverUrl}/compute/contexts?limit=10000`,
accessToken
authConfig?.access_token
)
.catch((err) => {
throw prefixMessage(err, 'Error while fetching compute contexts.')
@@ -350,7 +351,7 @@ export class ContextManager {
`test-${context.name}`,
linesOfCode,
context.name,
accessToken,
authConfig,
null,
false,
true,

View File

@@ -1,16 +1,20 @@
import { isUrl } from './utils'
import { isUrl, getValidJson, parseSasViyaDebugResponse } from './utils'
import { UploadFile } from './types/UploadFile'
import { ErrorResponse, LoginRequiredError } from './types/errors'
import { RequestClient } from './request/RequestClient'
import { ServerType } from '@sasjs/utils/types'
import SASjs from './SASjs'
import { Server } from 'https'
import { SASjsConfig } from './types'
import { config } from 'process'
export class FileUploader {
constructor(
private appLoc: string,
serverUrl: string,
private sasjsConfig: SASjsConfig,
private jobsPath: string,
private requestClient: RequestClient
) {
if (serverUrl) isUrl(serverUrl)
if (this.sasjsConfig.serverUrl) isUrl(this.sasjsConfig.serverUrl)
}
public uploadFile(sasJob: string, files: UploadFile[], params: any) {
@@ -29,8 +33,8 @@ export class FileUploader {
}
}
const program = this.appLoc
? this.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
const program = this.sasjsConfig.appLoc
? this.sasjsConfig.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
: sasJob
const uploadUrl = `${this.jobsPath}/?${
'_program=' + program
@@ -44,6 +48,12 @@ export class FileUploader {
const csrfToken = this.requestClient.getCsrfToken('file')
if (csrfToken) formData.append('_csrf', csrfToken.value)
if (this.sasjsConfig.debug) formData.append('_debug', '131')
if (
this.sasjsConfig.serverType === ServerType.SasViya &&
this.sasjsConfig.contextName
)
formData.append('_contextname', this.sasjsConfig.contextName)
const headers = {
'cache-control': 'no-cache',
@@ -53,9 +63,30 @@ export class FileUploader {
return this.requestClient
.post(uploadUrl, formData, undefined, 'application/json', headers)
.then((res) =>
typeof res.result === 'string' ? JSON.parse(res.result) : res.result
)
.then(async (res) => {
// for web approach on Viya
if (
this.sasjsConfig.debug &&
(this.sasjsConfig.useComputeApi === null ||
this.sasjsConfig.useComputeApi === undefined) &&
this.sasjsConfig.serverType === ServerType.SasViya
) {
const jsonResponse = await parseSasViyaDebugResponse(
res.result as string,
this.requestClient,
this.sasjsConfig.serverUrl
)
return typeof jsonResponse === 'string'
? getValidJson(jsonResponse)
: jsonResponse
}
return typeof res.result === 'string'
? getValidJson(res.result)
: res.result
//TODO: append to SASjs requests
})
.catch((err: Error) => {
if (err instanceof LoginRequiredError) {
return Promise.reject(

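A sketch of how the reworked FileUploader is driven through `SASjs.uploadFile`, with debug enabled so the Viya debug response is parsed back into JSON. The `UploadFile` field names shown here are assumptions to verify against the adapter's `UploadFile` type:

```ts
import SASjs from '@sasjs/adapter'

const sasjs = new SASjs({
  serverUrl: 'https://your-sas-server.com', // placeholder
  appLoc: '/Public/app',
  serverType: 'SASVIYA',
  contextName: 'SAS Job Execution compute context',
  debug: true // appends _debug=131 to the upload request
})

function uploadCsv(file: File) {
  // Assumed UploadFile shape: { file, fileName } - check the package types.
  const files: any[] = [{ file, fileName: file.name }]
  return sasjs.uploadFile('common/sendArr', files, null)
}
```
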
View File

@@ -1,4 +1,6 @@
import axios, { AxiosInstance } from 'axios'
import { generateTimestamp } from '@sasjs/utils/time'
import * as NodeFormData from 'form-data'
import { Sas9RequestClient } from './request/Sas9RequestClient'
import { isUrl } from './utils'
/**
@@ -6,11 +8,11 @@ import { isUrl } from './utils'
*
*/
export class SAS9ApiClient {
private httpClient: AxiosInstance
private requestClient: Sas9RequestClient
constructor(private serverUrl: string) {
constructor(private serverUrl: string, private jobsPath: string) {
if (serverUrl) isUrl(serverUrl)
this.httpClient = axios.create({ baseURL: this.serverUrl })
this.requestClient = new Sas9RequestClient(serverUrl, false)
}
/**
@@ -33,27 +35,61 @@ export class SAS9ApiClient {
/**
* Executes code on a SAS9 server.
* @param linesOfCode - an array of code lines to execute.
* @param serverName - the server to execute the code on.
* @param repositoryName - the repository to execute the code in.
* @param userName - the user name to log into the current SAS server.
* @param password - the password to log into the current SAS server.
*/
public async executeScript(
linesOfCode: string[],
serverName: string,
repositoryName: string
userName: string,
password: string
) {
const requestPayload = linesOfCode.join('\n')
await this.requestClient.login(userName, password, this.jobsPath)
const executeScriptResponse = await this.httpClient.put(
`/sas/servers/${serverName}/cmd?repositoryName=${repositoryName}`,
`command=${requestPayload}`,
{
headers: {
Accept: 'application/json'
},
responseType: 'text'
}
// This piece of code forces a webout to prevent Stored Process Errors.
const forceOutputCode = [
'data _null_;',
'file _webout;',
`put 'Executed sasjs run';`,
'run;'
]
const formData = generateFileUploadForm(
[...linesOfCode, ...forceOutputCode].join('\n')
)
return executeScriptResponse.data
const codeInjectorPath = `/User Folders/${userName}/My Folder/sasjs/runner`
const contentType =
'multipart/form-data; boundary=' + formData.getBoundary()
const contentLength = formData.getLengthSync()
const headers = {
'cache-control': 'no-cache',
Accept: '*/*',
'Content-Type': contentType,
'Content-Length': contentLength,
Connection: 'keep-alive'
}
const storedProcessUrl = `${this.jobsPath}/?${
'_program=' + codeInjectorPath + '&_debug=log'
}`
const response = await this.requestClient.post(
storedProcessUrl,
formData,
undefined,
contentType,
headers
)
return response.result as string
}
}
const generateFileUploadForm = (data: any): NodeFormData => {
const formData = new NodeFormData()
const filename = `sasjs-execute-sas9-${generateTimestamp('')}.sas`
formData.append(filename, data, {
filename,
contentType: 'text/plain'
})
return formData
}

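From the adapter's public surface this is driven via `executeScriptSAS9`, which now takes credentials instead of a server/repository pair. A minimal sketch (the server URL and credentials are placeholders; the code is uploaded and run through the `sasjs/runner` stored process described above, and `ServerType` is the enum the adapter itself imports from `@sasjs/utils/types`):

```ts
import SASjs from '@sasjs/adapter'
import { ServerType } from '@sasjs/utils/types'

const sasjs = new SASjs({
  serverUrl: 'https://your-sas9-server.com', // placeholder
  serverType: ServerType.Sas9
})

async function runSas9Code() {
  const log = await sasjs.executeScriptSAS9(
    ['%put Hello from SASjs;'],
    'myUserName', // placeholder credentials
    'myPassword'
  )
  console.log(log)
}
```
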
View File

@@ -0,0 +1,51 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { RequestClient } from './request/RequestClient'
import { SASViyaApiClient } from './SASViyaApiClient'
import { Folder } from './types'
import { RootFolderNotFoundError } from './types/errors'
const mockFolder: Folder = {
id: '1',
uri: '/folder',
links: [],
memberCount: 1
}
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const sasViyaApiClient = new SASViyaApiClient(
'https://test.com',
'/test',
'test context',
requestClient
)
describe('SASViyaApiClient', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should throw an error when the root folder is not found on the server', async () => {
jest
.spyOn(requestClient, 'get')
.mockImplementation(() => Promise.reject('Not Found'))
const error = await sasViyaApiClient
.createFolder('test', '/foo')
.catch((e) => e)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
})
})
const setupMocks = () => {
jest
.spyOn(requestClient, 'get')
.mockImplementation(() =>
Promise.resolve({ result: mockFolder, etag: '', status: 200 })
)
jest
.spyOn(requestClient, 'post')
.mockImplementation(() =>
Promise.resolve({ result: mockFolder, etag: '', status: 200 })
)
}

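In consuming code the new error surfaces from `createFolder`; a minimal sketch of handling it (the folder name and path mirror the test above; whether `RootFolderNotFoundError` is exported from the package root should be checked before importing it for an `instanceof` test):

```ts
import SASjs from '@sasjs/adapter'

async function createTestFolder(sasjs: SASjs) {
  try {
    await sasjs.createFolder('test', '/foo')
  } catch (e) {
    // The new RootFolderNotFoundError is constructed with the parent folder
    // path and server URL, so the error can say exactly what was not found.
    console.error(e)
  }
}
```
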
View File

@@ -1,10 +1,4 @@
import {
convertToCSV,
isRelativePath,
isUri,
isUrl,
fetchLogByChunks
} from './utils'
import { isRelativePath, isUri, isUrl } from './utils'
import * as NodeFormData from 'form-data'
import {
Job,
@@ -12,24 +6,24 @@ import {
Context,
ContextAllAttributes,
Folder,
File,
EditContextInput,
JobDefinition,
PollOptions
} from './types'
import {
ComputeJobExecutionError,
JobExecutionError,
NotFoundError
} from './types/errors'
import { formatDataForRequest } from './utils/formatDataForRequest'
import { JobExecutionError, RootFolderNotFoundError } from './types/errors'
import { SessionManager } from './SessionManager'
import { ContextManager } from './ContextManager'
import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import { SasAuthResponse, MacroVar, AuthConfig } from '@sasjs/utils/types'
import { isAuthorizeFormRequired } from './auth/isAuthorizeFormRequired'
import { RequestClient } from './request/RequestClient'
import { SasAuthResponse } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import { pollJobState } from './api/viya/pollJobState'
import { getTokens } from './auth/getTokens'
import { uploadTables } from './api/viya/uploadTables'
import { executeScript } from './api/viya/executeScript'
import { getAccessToken } from './auth/getAccessToken'
import { refreshTokens } from './auth/refreshTokens'
/**
* A client for interfacing with the SAS Viya REST API.
@@ -128,14 +122,14 @@ export class SASViyaApiClient {
/**
* Returns all compute contexts on this server that the user has access to.
* @param accessToken - an access token for an authorized user.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
*/
public async getExecutableContexts(accessToken?: string) {
public async getExecutableContexts(authConfig?: AuthConfig) {
const bindedExecuteScript = this.executeScript.bind(this)
return await this.contextManager.getExecutableContexts(
bindedExecuteScript,
accessToken
authConfig
)
}
@@ -165,13 +159,6 @@ export class SASViyaApiClient {
throw new Error(`Execution context ${contextName} not found.`)
}
const createSessionRequest = {
method: 'POST',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json'
}
}
const { result: createdSession } = await this.requestClient.post<Session>(
`/compute/contexts/${executionContext.id}/sessions`,
{},
@@ -264,261 +251,44 @@ export class SASViyaApiClient {
* @param jobPath - the path to the file being submitted for execution.
* @param linesOfCode - an array of code lines to execute.
* @param contextName - the context to execute the code in.
* @param accessToken - an access token for an authorized user.
* @param authConfig - an object containing an access token, refresh token, client ID and secret.
* @param data - execution data.
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, the function waits for and returns the job result; otherwise it returns the session.
* @param pollOptions - an object that represents the poll interval (in milliseconds) and the maximum number of attempts. Example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the PID of the started job.
* @param variables - an object that represents macro variables.
*/
public async executeScript(
jobPath: string,
linesOfCode: string[],
contextName: string,
accessToken?: string,
authConfig?: AuthConfig,
data = null,
debug: boolean = false,
expectWebout = false,
waitForResult = true,
pollOptions?: PollOptions,
printPid = false
printPid = false,
variables?: MacroVar
): Promise<any> {
try {
const headers: any = {
'Content-Type': 'application/json'
}
if (accessToken) headers.Authorization = `Bearer ${accessToken}`
let executionSessionId: string
const session = await this.sessionManager
.getSession(accessToken)
.catch((err) => {
throw prefixMessage(err, 'Error while getting session. ')
})
executionSessionId = session!.id
if (printPid) {
const { result: jobIdVariable } = await this.sessionManager
.getVariable(executionSessionId, 'SYSJOBID', accessToken)
.catch((err) => {
throw prefixMessage(err, 'Error while getting session variable. ')
})
if (jobIdVariable && jobIdVariable.value) {
const relativeJobPath = this.rootFolderName
? jobPath.split(this.rootFolderName).join('').replace(/^\//, '')
: jobPath
const logger = new Logger(debug ? LogLevel.Debug : LogLevel.Info)
logger.info(
`Triggered '${relativeJobPath}' with PID ${
jobIdVariable.value
} at ${timestampToYYYYMMDDHHMMSS()}`
)
}
}
const jobArguments: { [key: string]: any } = {
_contextName: contextName,
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
if (debug) {
jobArguments['_OMITTEXTLOG'] = false
jobArguments['_OMITSESSIONRESULTS'] = false
jobArguments['_DEBUG'] = 131
}
let fileName
if (isRelativePath(jobPath)) {
fileName = `exec-${
jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
}`
} else {
const jobPathParts = jobPath.split('/')
fileName = jobPathParts.pop()
}
let jobVariables: any = {
SYS_JES_JOB_URI: '',
_program: isRelativePath(jobPath)
? this.rootFolderName + '/' + jobPath
: jobPath
}
let files: any[] = []
if (data) {
if (JSON.stringify(data).includes(';')) {
files = await this.uploadTables(data, accessToken).catch((err) => {
throw prefixMessage(err, 'Error while uploading tables. ')
})
jobVariables['_webin_file_count'] = files.length
files.forEach((fileInfo, index) => {
jobVariables[
`_webin_fileuri${index + 1}`
] = `/files/files/${fileInfo.file.id}`
jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
})
} else {
jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
}
}
// Execute job in session
const jobRequestBody = {
name: fileName,
description: 'Powered by SASjs',
code: linesOfCode,
variables: jobVariables,
arguments: jobArguments
}
const { result: postedJob, etag } = await this.requestClient
.post<Job>(
`/compute/sessions/${executionSessionId}/jobs`,
jobRequestBody,
accessToken
)
.catch((err) => {
throw prefixMessage(err, 'Error while posting job. ')
})
if (!waitForResult) return session
if (debug) {
console.log(`Job has been submitted for '${fileName}'.`)
console.log(
`You can monitor the job progress at '${this.serverUrl}${
postedJob.links.find((l: any) => l.rel === 'state')!.href
}'.`
)
}
const jobStatus = await this.pollJobState(
postedJob,
etag,
accessToken,
pollOptions
).catch(async (err) => {
const error = err?.response?.data
const result = /err=[0-9]*,/.exec(error)
const errorCode = '5113'
if (result?.[0]?.slice(4, -1) === errorCode) {
const sessionLogUrl =
postedJob.links.find((l: any) => l.rel === 'up')!.href + '/log'
const logCount = 1000000
err.log = await fetchLogByChunks(
this.requestClient,
accessToken!,
sessionLogUrl,
logCount
)
}
throw prefixMessage(err, 'Error while polling job status. ')
})
const { result: currentJob } = await this.requestClient
.get<Job>(
`/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
accessToken
)
.catch((err) => {
throw prefixMessage(err, 'Error while getting job. ')
})
let jobResult
let log = ''
const logLink = currentJob.links.find((l) => l.rel === 'log')
if (debug && logLink) {
const logUrl = `${logLink.href}/content`
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
log = await fetchLogByChunks(
this.requestClient,
accessToken!,
logUrl,
logCount
)
}
if (jobStatus === 'failed' || jobStatus === 'error') {
return Promise.reject(new ComputeJobExecutionError(currentJob, log))
}
let resultLink
if (expectWebout) {
resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
} else {
return { job: currentJob, log }
}
if (resultLink) {
jobResult = await this.requestClient
.get<any>(resultLink, accessToken, 'text/plain')
.catch(async (e) => {
if (e instanceof NotFoundError) {
if (logLink) {
const logUrl = `${logLink.href}/content`
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
log = await fetchLogByChunks(
this.requestClient,
accessToken!,
logUrl,
logCount
)
return Promise.reject({
status: 500,
log
})
}
}
return {
result: JSON.stringify(e)
}
})
}
await this.sessionManager
.clearSession(executionSessionId, accessToken)
.catch((err) => {
throw prefixMessage(err, 'Error while clearing session. ')
})
return { result: jobResult?.result, log }
} catch (e) {
if (e && e.status === 404) {
return this.executeScript(
jobPath,
linesOfCode,
contextName,
accessToken,
data,
debug,
false,
true
)
} else {
throw prefixMessage(e, 'Error while executing script. ')
}
}
return executeScript(
this.requestClient,
this.sessionManager,
this.rootFolderName,
jobPath,
linesOfCode,
contextName,
authConfig,
data,
debug,
expectWebout,
waitForResult,
pollOptions,
printPid,
variables
)
}
/**
@@ -532,6 +302,50 @@ export class SASViyaApiClient {
.then((res) => res.result)
}
/**
* Creates a file. Path to or URI of the parent folder is required.
* @param fileName - the name of the new file.
* @param contentBuffer - the content of the new file in Buffer.
* @param parentFolderPath - the full path to the parent folder. If not
* provided, the parentFolderUri must be provided.
* @param parentFolderUri - the URI (eg /folders/folders/UUID) of the parent
* folder. If not provided, the parentFolderPath must be provided.
* @param accessToken - an access token for authorizing the request.
*/
public async createFile(
fileName: string,
contentBuffer: Buffer,
parentFolderPath?: string,
parentFolderUri?: string,
accessToken?: string
): Promise<File> {
if (!parentFolderPath && !parentFolderUri) {
throw new Error('Path or URI of the parent folder is required.')
}
if (!parentFolderUri && parentFolderPath) {
parentFolderUri = await this.getFolderUri(parentFolderPath, accessToken)
}
const headers = {
Accept: 'application/vnd.sas.file+json',
'Content-Disposition': `filename="${fileName}";`
}
const formData = new NodeFormData()
formData.append('file', contentBuffer, fileName)
return (
await this.requestClient.post<File>(
`/files/files?parentFolderUri=${parentFolderUri}&typeDefName=file#rawUpload`,
formData,
accessToken,
'multipart/form-data; boundary=' + (formData as any)._boundary,
headers
)
).result
}
/**
* Creates a folder. Path to or URI of the parent folder is required.
* @param folderName - the name of the new folder.
@@ -549,6 +363,7 @@ export class SASViyaApiClient {
accessToken?: string,
isForced?: boolean
): Promise<Folder> {
const logger = process.logger || console
if (!parentFolderPath && !parentFolderUri) {
throw new Error('Path or URI of the parent folder is required.')
}
@@ -556,7 +371,7 @@ export class SASViyaApiClient {
if (!parentFolderUri && parentFolderPath) {
parentFolderUri = await this.getFolderUri(parentFolderPath, accessToken)
if (!parentFolderUri) {
console.log(
logger.info(
`Parent folder at path '${parentFolderPath}' is not present.`
)
@@ -566,9 +381,13 @@ export class SASViyaApiClient {
)
const newFolderName = `${parentFolderPath.split('/').pop()}`
if (newParentFolderPath === '') {
throw new Error('Root folder has to be present on the server.')
throw new RootFolderNotFoundError(
parentFolderPath,
this.serverUrl,
accessToken
)
}
console.log(
logger.info(
`Creating parent folder:\n'${newFolderName}' in '${newParentFolderPath}'`
)
const parentFolder = await this.createFolder(
@@ -577,7 +396,7 @@ export class SASViyaApiClient {
undefined,
accessToken
)
console.log(
logger.info(
`Parent folder '${newFolderName}' has been successfully created.`
)
parentFolderUri = `/folders/folders/${parentFolder.id}`
@@ -705,39 +524,7 @@ export class SASViyaApiClient {
clientSecret: string,
authCode: string
): Promise<SasAuthResponse> {
const url = this.serverUrl + '/SASLogon/oauth/token'
let token
if (typeof Buffer === 'undefined') {
token = btoa(clientId + ':' + clientSecret)
} else {
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
}
const headers = {
Authorization: 'Basic ' + token
}
let formData
if (typeof FormData === 'undefined') {
formData = new NodeFormData()
formData.append('grant_type', 'authorization_code')
formData.append('code', authCode)
} else {
formData = new FormData()
formData.append('grant_type', 'authorization_code')
formData.append('code', authCode)
}
const authResponse = await this.requestClient
.post(
url,
formData,
undefined,
'multipart/form-data; boundary=' + (formData as any)._boundary,
headers
)
.then((res) => res.result as SasAuthResponse)
return authResponse
return getAccessToken(this.requestClient, clientId, clientSecret, authCode)
}
/**
@@ -751,39 +538,12 @@ export class SASViyaApiClient {
clientSecret: string,
refreshToken: string
) {
const url = this.serverUrl + '/SASLogon/oauth/token'
let token
if (typeof Buffer === 'undefined') {
token = btoa(clientId + ':' + clientSecret)
} else {
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
}
const headers = {
Authorization: 'Basic ' + token
}
let formData
if (typeof FormData === 'undefined') {
formData = new NodeFormData()
formData.append('grant_type', 'refresh_token')
formData.append('refresh_token', refreshToken)
} else {
formData = new FormData()
formData.append('grant_type', 'refresh_token')
formData.append('refresh_token', refreshToken)
}
const authResponse = await this.requestClient
.post<SasAuthResponse>(
url,
formData,
undefined,
'multipart/form-data; boundary=' + (formData as any)._boundary,
headers
)
.then((res) => res.result)
return authResponse
return refreshTokens(
this.requestClient,
clientId,
clientSecret,
refreshToken
)
}
/**
@@ -814,18 +574,25 @@ export class SASViyaApiClient {
* @param expectWebout - a boolean indicating whether to expect a _webout response.
* @param pollOptions - an object that represents the poll interval (in milliseconds) and the maximum number of attempts. Example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the PID of the started job.
* @param variables - an object that represents macro variables.
*/
public async executeComputeJob(
sasJob: string,
contextName: string,
debug?: boolean,
data?: any,
accessToken?: string,
authConfig?: AuthConfig,
waitForResult = true,
expectWebout = false,
pollOptions?: PollOptions,
printPid = false
printPid = false,
variables?: MacroVar
) {
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await getTokens(this.requestClient, authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
'Relative paths cannot be used without specifying a root folder name'
@@ -839,7 +606,7 @@ export class SASViyaApiClient {
? `${this.rootFolderName}/${folderPath}`
: folderPath
await this.populateFolderMap(fullFolderPath, accessToken).catch((err) => {
await this.populateFolderMap(fullFolderPath, access_token).catch((err) => {
throw prefixMessage(err, 'Error while populating folder map. ')
})
@@ -851,12 +618,6 @@ export class SASViyaApiClient {
)
}
const headers: any = { 'Content-Type': 'application/json' }
if (!!accessToken) {
headers.Authorization = `Bearer ${accessToken}`
}
const jobToExecute = jobFolder?.find((item) => item.name === jobName)
if (!jobToExecute) {
@@ -877,7 +638,7 @@ export class SASViyaApiClient {
const { result: jobDefinition } = await this.requestClient
.get<JobDefinition>(
`${this.serverUrl}${jobDefinitionLink.href}`,
accessToken
access_token
)
.catch((err) => {
throw prefixMessage(err, 'Error while getting job definition. ')
@@ -897,13 +658,14 @@ export class SASViyaApiClient {
sasJob,
linesToExecute,
contextName,
accessToken,
authConfig,
data,
debug,
expectWebout,
waitForResult,
pollOptions,
printPid
printPid,
variables
)
}
@@ -920,8 +682,12 @@ export class SASViyaApiClient {
contextName: string,
debug: boolean,
data?: any,
accessToken?: string
authConfig?: AuthConfig
) {
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await getTokens(this.requestClient, authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
'Relative paths cannot be used without specifying a root folder name.'
@@ -934,7 +700,7 @@ export class SASViyaApiClient {
const fullFolderPath = isRelativePath(sasJob)
? `${this.rootFolderName}/${folderPath}`
: folderPath
await this.populateFolderMap(fullFolderPath, accessToken)
await this.populateFolderMap(fullFolderPath, access_token)
const jobFolder = this.folderMap.get(fullFolderPath)
if (!jobFolder) {
@@ -947,7 +713,7 @@ export class SASViyaApiClient {
let files: any[] = []
if (data && Object.keys(data).length) {
files = await this.uploadTables(data, accessToken)
files = await this.uploadTables(data, access_token)
}
if (!jobToExecute) {
@@ -959,7 +725,7 @@ export class SASViyaApiClient {
const { result: jobDefinition } = await this.requestClient.get<Job>(
`${this.serverUrl}${jobDefinitionLink}`,
accessToken
access_token
)
const jobArguments: { [key: string]: any } = {
@@ -992,21 +758,19 @@ export class SASViyaApiClient {
jobDefinition,
arguments: jobArguments
}
const { result: postedJob, etag } = await this.requestClient.post<Job>(
const { result: postedJob } = await this.requestClient.post<Job>(
`${this.serverUrl}/jobExecution/jobs?_action=wait`,
postJobRequestBody,
accessToken
access_token
)
const jobStatus = await this.pollJobState(postedJob, authConfig).catch(
(err) => {
throw prefixMessage(err, 'Error while polling job status. ')
}
)
const jobStatus = await this.pollJobState(
postedJob,
etag,
accessToken
).catch((err) => {
throw prefixMessage(err, 'Error while polling job status. ')
})
const { result: currentJob } = await this.requestClient.get<Job>(
`${this.serverUrl}/jobExecution/jobs/${postedJob.id}`,
accessToken
access_token
)
let jobResult
@@ -1017,13 +781,13 @@ export class SASViyaApiClient {
if (resultLink) {
jobResult = await this.requestClient.get<any>(
`${this.serverUrl}${resultLink}/content`,
accessToken,
access_token,
'text/plain'
)
}
if (debug && logLink) {
log = await this.requestClient
.get<any>(`${this.serverUrl}${logLink.href}/content`, accessToken)
.get<any>(`${this.serverUrl}${logLink.href}/content`, access_token)
.then((res: any) => res.result.items.map((i: any) => i.line).join('\n'))
}
if (jobStatus === 'failed') {
@@ -1069,147 +833,22 @@ export class SASViyaApiClient {
this.folderMap.set(path, itemsAtRoot)
}
// REFACTOR: set default value for 'pollOptions' attribute
private async pollJobState(
postedJob: any,
etag: string | null,
accessToken?: string,
postedJob: Job,
authConfig?: AuthConfig,
pollOptions?: PollOptions
) {
let POLL_INTERVAL = 300
let MAX_POLL_COUNT = 1000
let MAX_ERROR_COUNT = 5
if (pollOptions) {
POLL_INTERVAL = pollOptions.POLL_INTERVAL || POLL_INTERVAL
MAX_POLL_COUNT = pollOptions.MAX_POLL_COUNT || MAX_POLL_COUNT
}
let postedJobState = ''
let pollCount = 0
let errorCount = 0
const headers: any = {
'Content-Type': 'application/json',
'If-None-Match': etag
}
if (accessToken) {
headers.Authorization = `Bearer ${accessToken}`
}
const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
Promise.reject(`Job state link was not found.`)
}
const { result: state } = await this.requestClient
.get<string>(
`${this.serverUrl}${stateLink.href}?_action=wait&wait=300`,
accessToken,
'text/plain',
{},
this.debug
)
.catch((err) => {
console.error(
`Error fetching job state from ${this.serverUrl}${stateLink.href}. Starting poll, assuming job to be running.`,
err
)
return { result: 'unavailable' }
})
const currentState = state.trim()
if (currentState === 'completed') {
return Promise.resolve(currentState)
}
return new Promise(async (resolve, _) => {
let printedState = ''
const interval = setInterval(async () => {
if (
postedJobState === 'running' ||
postedJobState === '' ||
postedJobState === 'pending' ||
postedJobState === 'unavailable'
) {
if (stateLink) {
const { result: jobState } = await this.requestClient
.get<string>(
`${this.serverUrl}${stateLink.href}?_action=wait&wait=300`,
accessToken,
'text/plain',
{},
this.debug
)
.catch((err) => {
errorCount++
if (
pollCount >= MAX_POLL_COUNT ||
errorCount >= MAX_ERROR_COUNT
) {
throw prefixMessage(
err,
'Error while getting job state after interval. '
)
}
console.error(
`Error fetching job state from ${this.serverUrl}${stateLink.href}. Resuming poll, assuming job to be running.`,
err
)
return { result: 'unavailable' }
})
postedJobState = jobState.trim()
if (postedJobState != 'unavailable' && errorCount > 0) {
errorCount = 0
}
if (this.debug && printedState !== postedJobState) {
console.log('Polling job status...')
console.log(`Current job state: ${postedJobState}`)
printedState = postedJobState
}
pollCount++
if (pollCount >= MAX_POLL_COUNT) {
resolve(postedJobState)
}
}
} else {
clearInterval(interval)
resolve(postedJobState)
}
}, POLL_INTERVAL)
})
return pollJobState(
this.requestClient,
postedJob,
this.debug,
authConfig,
pollOptions
)
}
private async uploadTables(data: any, accessToken?: string) {
const uploadedFiles = []
const headers: any = {
'Content-Type': 'application/json'
}
if (accessToken) {
headers.Authorization = `Bearer ${accessToken}`
}
for (const tableName in data) {
const csv = convertToCSV(data[tableName])
if (csv === 'ERROR: LARGE STRING LENGTH') {
throw new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
}
const uploadResponse = await this.requestClient
.uploadFile(`${this.serverUrl}/files/files#rawUpload`, csv, accessToken)
.catch((err) => {
throw prefixMessage(err, 'Error while uploading file. ')
})
uploadedFiles.push({ tableName, file: uploadResponse.result })
}
return uploadedFiles
return uploadTables(this.requestClient, data, accessToken)
}
private async getFolderDetails(
@@ -1298,14 +937,6 @@ export class SASViyaApiClient {
? sourceFolder
: await this.getFolderUri(sourceFolder, accessToken)
const requestInfo = {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: 'Bearer ' + accessToken
}
}
const { result: members } = await this.requestClient.get<{ items: any[] }>(
`${this.serverUrl}${sourceFolderUri}/members?limit=${limit}`,
accessToken
@@ -1355,6 +986,9 @@ export class SASViyaApiClient {
accessToken
)
if (!sourceFolderUri) {
return undefined
}
const sourceFolderId = sourceFolderUri?.split('/').pop()
const { result: folder } = await this.requestClient

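The net effect of the `accessToken` to `authConfig` migration is that callers can hand over a full token set and let the adapter refresh it mid-job. A sketch using `executeScriptSASViya` (only `access_token` is read directly in this diff; the remaining `AuthConfig` field names should be checked against `@sasjs/utils/types`):

```ts
import SASjs from '@sasjs/adapter'
import { AuthConfig } from '@sasjs/utils/types'

const sasjs = new SASjs({
  serverUrl: 'https://your-sas-server.com', // placeholder
  appLoc: '/Public/app',
  serverType: 'SASVIYA'
})

async function runViyaCode(authConfig: AuthConfig) {
  // With an AuthConfig the adapter can refresh expired tokens while polling.
  return sasjs.executeScriptSASViya(
    'hello.sas',
    ['%put Hello from the Compute API;'],
    'SAS Job Execution compute context',
    authConfig,
    true // debug: also return the job log
  )
}
```
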
View File

@@ -4,13 +4,19 @@ import { SASViyaApiClient } from './SASViyaApiClient'
import { SAS9ApiClient } from './SAS9ApiClient'
import { FileUploader } from './FileUploader'
import { AuthManager } from './auth'
import { ServerType } from '@sasjs/utils/types'
import {
ServerType,
MacroVar,
AuthConfig,
ExtraResponseAttributes
} from '@sasjs/utils/types'
import { RequestClient } from './request/RequestClient'
import {
JobExecutor,
WebJobExecutor,
ComputeJobExecutor,
JesJobExecutor
JesJobExecutor,
Sas9JobExecutor
} from './job-execution'
import { ErrorResponse } from './types/errors'
@@ -22,7 +28,7 @@ const defaultConfig: SASjsConfig = {
serverType: ServerType.SasViya,
debug: false,
contextName: 'SAS Job Execution compute context',
useComputeApi: false,
useComputeApi: null,
allowInsecureRequests: false
}
@@ -41,6 +47,7 @@ export default class SASjs {
private webJobExecutor: JobExecutor | null = null
private computeJobExecutor: JobExecutor | null = null
private jesJobExecutor: JobExecutor | null = null
private sas9JobExecutor: JobExecutor | null = null
constructor(config?: any) {
this.sasjsConfig = {
@@ -57,15 +64,15 @@ export default class SASjs {
public async executeScriptSAS9(
linesOfCode: string[],
serverName: string,
repositoryName: string
userName: string,
password: string
) {
this.isMethodSupported('executeScriptSAS9', ServerType.Sas9)
return await this.sas9ApiClient?.executeScript(
linesOfCode,
serverName,
repositoryName
userName,
password
)
}
@@ -100,12 +107,12 @@ export default class SASjs {
/**
* Gets executable compute contexts.
* @param accessToken - an access token for an authorized user.
* @param authConfig - an access token, refresh token, client and secret for an authorized user.
*/
public async getExecutableContexts(accessToken: string) {
public async getExecutableContexts(authConfig: AuthConfig) {
this.isMethodSupported('getExecutableContexts', ServerType.SasViya)
return await this.sasViyaApiClient!.getExecutableContexts(accessToken)
return await this.sasViyaApiClient!.getExecutableContexts(authConfig)
}
/**
@@ -237,14 +244,14 @@ export default class SASjs {
* @param fileName - name of the file to run. It will be converted to path to the file being submitted for execution.
* @param linesOfCode - lines of sas code from the file to run.
* @param contextName - context name on which code will be run on the server.
* @param accessToken - (optional) the access token for authorizing the request.
* @param authConfig - (optional) the access token, refresh token, client and secret for authorizing the request.
* @param debug - (optional) if true, the global debug config will be overridden
*/
public async executeScriptSASViya(
fileName: string,
linesOfCode: string[],
contextName: string,
accessToken?: string,
authConfig?: AuthConfig,
debug?: boolean
) {
this.isMethodSupported('executeScriptSASViya', ServerType.SasViya)
@@ -258,14 +265,14 @@ export default class SASjs {
fileName,
linesOfCode,
contextName,
accessToken,
authConfig,
null,
debug ? debug : this.sasjsConfig.debug
)
}
/**
* Creates a folder at SAS file system.
* Creates a folder in the logical SAS folder tree
* @param folderName - name of the folder to be created.
* @param parentFolderPath - the full path (eg `/Public/example/myFolder`) of the parent folder.
* @param parentFolderUri - the URI of the parent folder.
@@ -297,6 +304,40 @@ export default class SASjs {
)
}
/**
* Creates a file in the logical SAS folder tree
* @param fileName - name of the file to be created.
* @param content - content of the file to be created.
* @param parentFolderPath - the full path (eg `/Public/example/myFolder`) of the parent folder.
* @param parentFolderUri - the URI of the parent folder.
* @param accessToken - the access token for authorizing the request.
* @param sasApiClient - a client for interfacing with SAS API.
*/
public async createFile(
fileName: string,
content: Buffer,
parentFolderPath: string,
parentFolderUri?: string,
accessToken?: string,
sasApiClient?: SASViyaApiClient
) {
if (sasApiClient)
return await sasApiClient.createFile(
fileName,
content,
parentFolderPath,
parentFolderUri,
accessToken
)
return await this.sasViyaApiClient!.createFile(
fileName,
content,
parentFolderPath,
parentFolderUri,
accessToken
)
}
/**
* Fetches a folder from the SAS file system.
* @param folderPath - path of the folder to be fetched.
@@ -503,16 +544,22 @@ export default class SASjs {
* Process). Is prepended at runtime with the value of `appLoc`.
* @param files - array of files to be uploaded, including File object and file name.
* @param params - request URL parameters.
* @param overrideSasjsConfig - object to override existing config (optional)
*/
public uploadFile(sasJob: string, files: UploadFile[], params: any) {
const fileUploader =
this.fileUploader ||
new FileUploader(
this.sasjsConfig.appLoc,
this.sasjsConfig.serverUrl,
this.jobsPath,
this.requestClient!
)
public uploadFile(
sasJob: string,
files: UploadFile[],
params: any,
overrideSasjsConfig?: any
) {
const fileUploader = overrideSasjsConfig
? new FileUploader(
{ ...this.sasjsConfig, ...overrideSasjsConfig },
this.jobsPath,
this.requestClient!
)
: this.fileUploader ||
new FileUploader(this.sasjsConfig, this.jobsPath, this.requestClient!)
return fileUploader.uploadFile(sasJob, files, params)
}
@@ -538,44 +585,130 @@ export default class SASjs {
* `await request(sasJobPath, data, config, () => setIsLoggedIn(false))`
* If you are not passing in any data and configuration, it will look like so:
* `await request(sasJobPath, {}, {}, () => setIsLoggedIn(false))`
* @param extraResponseAttributes - an array of predefined values that are used
* to provide extra attributes (with the same names as those values) to be added to the response.
* Supported values are declared in the ExtraResponseAttributes type.
*/
public async request(
sasJob: string,
data: { [key: string]: any },
data: { [key: string]: any } | null,
config: { [key: string]: any } = {},
loginRequiredCallback?: () => any,
accessToken?: string
authConfig?: AuthConfig,
extraResponseAttributes: ExtraResponseAttributes[] = []
) {
config = {
...this.sasjsConfig,
...config
}
if (config.serverType === ServerType.SasViya && config.contextName) {
if (config.useComputeApi) {
return await this.computeJobExecutor!.execute(
sasJob,
data,
config,
loginRequiredCallback,
accessToken
)
const validationResult = this.validateInput(data)
if (validationResult.status) {
if (
config.serverType !== ServerType.Sas9 &&
config.useComputeApi !== undefined &&
config.useComputeApi !== null
) {
if (config.useComputeApi) {
return await this.computeJobExecutor!.execute(
sasJob,
data,
config,
loginRequiredCallback,
authConfig
)
} else {
return await this.jesJobExecutor!.execute(
sasJob,
data,
config,
loginRequiredCallback,
authConfig,
extraResponseAttributes
)
}
} else if (
config.serverType === ServerType.Sas9 &&
config.username &&
config.password
) {
return await this.sas9JobExecutor!.execute(sasJob, data, config)
} else {
return await this.jesJobExecutor!.execute(
return await this.webJobExecutor!.execute(
sasJob,
data,
config,
loginRequiredCallback,
accessToken
authConfig,
extraResponseAttributes
)
}
} else {
return await this.webJobExecutor!.execute(
sasJob,
data,
config,
loginRequiredCallback
)
return Promise.reject(new ErrorResponse(validationResult.msg))
}
}
/**
* This function validates the input data structure and table naming convention
*
* @param data A JSON object that contains one or more tables; it can also be null
* @returns An object which contains two attributes: 1) status: boolean, 2) msg: string
*/
private validateInput(data: { [key: string]: any } | null): {
status: boolean
msg: string
} {
if (data === null) return { status: true, msg: '' }
for (const key in data) {
if (!key.match(/^[a-zA-Z_]/)) {
return {
status: false,
msg: 'First character of a table name must be a letter or underscore.'
}
}
if (!key.match(/^[a-zA-Z_][a-zA-Z0-9_]*$/)) {
return { status: false, msg: 'Table name must contain only alphanumeric characters and underscores.' }
}
if (key.length > 32) {
return {
status: false,
msg: 'Maximum length of a table name is 32 characters.'
}
}
if (this.getType(data[key]) !== 'Array') {
return {
status: false,
msg: 'Parameter data contains invalid table structure.'
}
}
for (let i = 0; i < data[key].length; i++) {
if (this.getType(data[key][i]) !== 'object') {
return {
status: false,
msg: `Table ${key} contains invalid structure.`
}
}
}
}
return { status: true, msg: '' }
}
/**
* This function returns the type of a variable
*
* @param data can be anything: a string, array, object, etc.
* @returns a string describing the type of the input parameter
*/
private getType(data: any): string {
if (Array.isArray(data)) {
return 'Array'
} else {
return typeof data
}
}
@@ -616,7 +749,7 @@ export default class SASjs {
)
sasApiClient.debug = this.sasjsConfig.debug
} else if (this.sasjsConfig.serverType === ServerType.Sas9) {
sasApiClient = new SAS9ApiClient(serverUrl)
sasApiClient = new SAS9ApiClient(serverUrl, this.jobsPath)
}
} else {
let sasClientConfig: any = null
@@ -658,20 +791,22 @@ export default class SASjs {
* @param config - provide any changes to the config here, for instance to
* enable/disable `debug`. Any change provided will override the global config,
* for that particular function call.
* @param accessToken - a valid access token that is authorised to execute compute jobs.
* @param authConfig - a valid client ID, secret, refresh token and access token, authorised to execute compute jobs.
* The access token is not required when the user is authenticated via the browser.
* @param waitForResult - a boolean that indicates whether the function needs to wait for execution to complete.
* @param pollOptions - an object that represents the poll interval (in milliseconds) and the maximum number of attempts. Example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the PID of the started job.
* @param variables - an object that represents macro variables.
*/
public async startComputeJob(
sasJob: string,
data: any,
config: any = {},
accessToken?: string,
authConfig?: AuthConfig,
waitForResult?: boolean,
pollOptions?: PollOptions,
printPid = false
printPid = false,
variables?: MacroVar
) {
config = {
...this.sasjsConfig,
@@ -690,11 +825,12 @@ export default class SASjs {
config.contextName,
config.debug,
data,
accessToken,
authConfig,
!!waitForResult,
false,
pollOptions,
printPid
printPid,
variables
)
}
@@ -805,12 +941,15 @@ export default class SASjs {
if (this.sasjsConfig.serverType === ServerType.Sas9) {
if (this.sas9ApiClient)
this.sas9ApiClient!.setConfig(this.sasjsConfig.serverUrl)
else this.sas9ApiClient = new SAS9ApiClient(this.sasjsConfig.serverUrl)
else
this.sas9ApiClient = new SAS9ApiClient(
this.sasjsConfig.serverUrl,
this.jobsPath
)
}
this.fileUploader = new FileUploader(
this.sasjsConfig.appLoc,
this.sasjsConfig.serverUrl,
this.sasjsConfig,
this.jobsPath,
this.requestClient
)
@@ -823,6 +962,12 @@ export default class SASjs {
this.sasViyaApiClient!
)
this.sas9JobExecutor = new Sas9JobExecutor(
this.sasjsConfig.serverUrl,
this.sasjsConfig.serverType!,
this.jobsPath
)
this.computeJobExecutor = new ComputeJobExecutor(
this.sasjsConfig.serverUrl,
this.sasViyaApiClient!
@@ -853,6 +998,16 @@ export default class SASjs {
isForced
)
break
case 'file':
await this.createFile(
member.name,
member.code,
parentFolder,
undefined,
accessToken,
sasApiClient
)
break
case 'service':
await this.createJobDefinition(
member.name,

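A sketch of the extended `startComputeJob` call with the new `pollOptions` and macro `variables` parameters. The `PollOptions` field names follow the new `pollJobState` implementation (`maxPollCount`, `pollInterval`, `streamLog`, `logFolderPath`); the job path and macro variable names are placeholders:

```ts
import SASjs from '@sasjs/adapter'
import { AuthConfig } from '@sasjs/utils/types'

const sasjs = new SASjs({
  serverUrl: 'https://your-sas-server.com', // placeholder
  appLoc: '/Public/app',
  serverType: 'SASVIYA',
  contextName: 'SAS Job Execution compute context'
})

async function startLongJob(authConfig: AuthConfig) {
  return sasjs.startComputeJob(
    '/Public/app/jobs/longRunningJob', // placeholder job path
    null, // no input tables
    { debug: true },
    authConfig,
    true, // waitForResult
    {
      maxPollCount: 1000,
      pollInterval: 2000,
      streamLog: true, // Node.js only: appends the log to a file while polling
      logFolderPath: './logs' // placeholder; defaults to process.cwd()
    },
    false, // printPid
    { mvar1: 'value1' } // macro variables merged into the job variables
  )
}
```
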
View File

@@ -1,4 +1,5 @@
import { Session, Context, CsrfToken, SessionVariable } from './types'
import { Session, Context, SessionVariable } from './types'
import { NoSessionStateError } from './types/errors'
import { asyncForEach, isUrl } from './utils'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from './request/RequestClient'
@@ -6,10 +7,6 @@ import { RequestClient } from './request/RequestClient'
const MAX_SESSION_COUNT = 1
const RETRY_LIMIT: number = 3
let RETRY_COUNT: number = 0
const INTERNAL_SAS_ERROR = {
status: 304,
message: 'Not Modified'
}
export class SessionManager {
constructor(
@@ -158,11 +155,13 @@ export class SessionManager {
etag: string | null,
accessToken?: string
) {
const logger = process.logger || console
let sessionState = session.state
const stateLink = session.links.find((l: any) => l.rel === 'state')
return new Promise(async (resolve, _) => {
return new Promise(async (resolve, reject) => {
if (
sessionState === 'pending' ||
sessionState === 'running' ||
@@ -170,23 +169,24 @@ export class SessionManager {
) {
if (stateLink) {
if (this.debug && !this.printedSessionState.printed) {
console.log('Polling session status...')
logger.info('Polling session status...')
this.printedSessionState.printed = true
}
const state = await this.getSessionState(
`${this.serverUrl}${stateLink.href}?wait=30`,
etag!,
accessToken
).catch((err) => {
throw err
})
const { result: state, responseStatus: responseStatus } =
await this.getSessionState(
`${this.serverUrl}${stateLink.href}?wait=30`,
etag!,
accessToken
).catch((err) => {
throw prefixMessage(err, 'Error while getting session state.')
})
sessionState = state.trim()
if (this.debug && this.printedSessionState.state !== sessionState) {
console.log(`Current session state is '${sessionState}'`)
logger.info(`Current session state is '${sessionState}'`)
this.printedSessionState.state = sessionState
this.printedSessionState.printed = false
@@ -194,13 +194,21 @@ export class SessionManager {
// There is an internal error present in SAS Viya 3.5
// Retry waiting for a session status in the case of such an internal SAS error
if (
sessionState === INTERNAL_SAS_ERROR.message &&
RETRY_COUNT < RETRY_LIMIT
) {
RETRY_COUNT++
if (!sessionState) {
if (RETRY_COUNT < RETRY_LIMIT) {
RETRY_COUNT++
resolve(this.waitForSession(session, etag, accessToken))
resolve(this.waitForSession(session, etag, accessToken))
} else {
reject(
new NoSessionStateError(
responseStatus,
this.serverUrl + stateLink.href,
session.links.find((l: any) => l.rel === 'log')
?.href as string
)
)
}
}
resolve(sessionState)
@@ -218,11 +226,11 @@ export class SessionManager {
) {
return await this.requestClient
.get(url, accessToken, 'text/plain', { 'If-None-Match': etag })
.then((res) => res.result as string)
.then((res) => ({
result: res.result as string,
responseStatus: res.status
}))
.catch((err) => {
if (err.status === INTERNAL_SAS_ERROR.status)
return INTERNAL_SAS_ERROR.message
throw err
})
}

View File

@@ -0,0 +1,293 @@
import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
import { AuthConfig, MacroVar } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import {
PollOptions,
Job,
ComputeJobExecutionError,
NotFoundError
} from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { SessionManager } from '../../SessionManager'
import { isRelativePath, fetchLogByChunks } from '../../utils'
import { formatDataForRequest } from '../../utils/formatDataForRequest'
import { pollJobState } from './pollJobState'
import { uploadTables } from './uploadTables'
/**
* Executes code on the current SAS Viya server.
* @param jobPath - the path to the file being submitted for execution.
* @param linesOfCode - an array of code lines to execute.
* @param contextName - the context to execute the code in.
* @param authConfig - an object containing an access token, refresh token, client ID and secret.
* @param data - execution data.
* @param debug - when set to true, the log will be returned.
* @param expectWebout - when set to true, the automatic _webout fileref will be checked for content, and that content returned. This fileref is used when the Job contains a SASjs web request (as opposed to executing arbitrary SAS code).
* @param waitForResult - when set to true, the function waits for and returns the job result; otherwise it returns the session.
* @param pollOptions - an object that represents the poll interval (in milliseconds) and the maximum number of attempts. Example: { MAX_POLL_COUNT: 24 * 60 * 60, POLL_INTERVAL: 1000 }.
* @param printPid - a boolean that indicates whether the function should print the PID of the started job.
* @param variables - an object that represents macro variables.
*/
export async function executeScript(
requestClient: RequestClient,
sessionManager: SessionManager,
rootFolderName: string,
jobPath: string,
linesOfCode: string[],
contextName: string,
authConfig?: AuthConfig,
data: any = null,
debug: boolean = false,
expectWebout = false,
waitForResult = true,
pollOptions?: PollOptions,
printPid = false,
variables?: MacroVar
): Promise<any> {
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await getTokens(requestClient, authConfig))
}
const logger = process.logger || console
try {
let executionSessionId: string
const session = await sessionManager
.getSession(access_token)
.catch((err) => {
throw prefixMessage(err, 'Error while getting session. ')
})
executionSessionId = session!.id
if (printPid) {
const { result: jobIdVariable } = await sessionManager
.getVariable(executionSessionId, 'SYSJOBID', access_token)
.catch((err) => {
throw prefixMessage(err, 'Error while getting session variable. ')
})
if (jobIdVariable && jobIdVariable.value) {
const relativeJobPath = rootFolderName
? jobPath.split(rootFolderName).join('').replace(/^\//, '')
: jobPath
const logger = process.logger || console
logger.info(
`Triggered '${relativeJobPath}' with PID ${
jobIdVariable.value
} at ${timestampToYYYYMMDDHHMMSS()}`
)
}
}
const jobArguments: { [key: string]: any } = {
_contextName: contextName,
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
if (debug) {
jobArguments['_OMITTEXTLOG'] = false
jobArguments['_OMITSESSIONRESULTS'] = false
}
let fileName
if (isRelativePath(jobPath)) {
fileName = `exec-${
jobPath.includes('/') ? jobPath.split('/')[1] : jobPath
}`
} else {
const jobPathParts = jobPath.split('/')
fileName = jobPathParts.pop()
}
let jobVariables: any = {
SYS_JES_JOB_URI: '',
_program: isRelativePath(jobPath)
? rootFolderName + '/' + jobPath
: jobPath
}
if (variables) jobVariables = { ...jobVariables, ...variables }
if (debug) jobVariables = { ...jobVariables, _DEBUG: 131 }
let files: any[] = []
if (data) {
if (JSON.stringify(data).includes(';')) {
files = await uploadTables(requestClient, data, access_token).catch(
(err) => {
throw prefixMessage(err, 'Error while uploading tables. ')
}
)
jobVariables['_webin_file_count'] = files.length
files.forEach((fileInfo, index) => {
jobVariables[
`_webin_fileuri${index + 1}`
] = `/files/files/${fileInfo.file.id}`
jobVariables[`_webin_name${index + 1}`] = fileInfo.tableName
})
} else {
jobVariables = { ...jobVariables, ...formatDataForRequest(data) }
}
}
// Execute job in session
const jobRequestBody = {
name: fileName,
description: 'Powered by SASjs',
code: linesOfCode,
variables: jobVariables,
arguments: jobArguments
}
const { result: postedJob, etag } = await requestClient
.post<Job>(
`/compute/sessions/${executionSessionId}/jobs`,
jobRequestBody,
access_token
)
.catch((err) => {
throw prefixMessage(err, 'Error while posting job. ')
})
if (!waitForResult) return session
if (debug) {
logger.info(`Job has been submitted for '${fileName}'.`)
logger.info(
`You can monitor the job progress at '${requestClient.getBaseUrl()}${
postedJob.links.find((l: any) => l.rel === 'state')!.href
}'.`
)
}
const jobStatus = await pollJobState(
requestClient,
postedJob,
debug,
authConfig,
pollOptions
).catch(async (err) => {
const error = err?.response?.data
const result = /err=[0-9]*,/.exec(error)
const errorCode = '5113'
if (result?.[0]?.slice(4, -1) === errorCode) {
const sessionLogUrl =
postedJob.links.find((l: any) => l.rel === 'up')!.href + '/log'
const logCount = 1000000
err.log = await fetchLogByChunks(
requestClient,
access_token!,
sessionLogUrl,
logCount
)
}
throw prefixMessage(err, 'Error while polling job status. ')
})
if (authConfig) {
;({ access_token } = await getTokens(requestClient, authConfig))
}
const { result: currentJob } = await requestClient
.get<Job>(
`/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
access_token
)
.catch((err) => {
throw prefixMessage(err, 'Error while getting job. ')
})
let jobResult
let log = ''
const logLink = currentJob.links.find((l) => l.rel === 'log')
if (debug && logLink) {
const logUrl = `${logLink.href}/content`
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
log = await fetchLogByChunks(
requestClient,
access_token!,
logUrl,
logCount
)
}
if (jobStatus === 'failed' || jobStatus === 'error') {
throw new ComputeJobExecutionError(currentJob, log)
}
if (!expectWebout) {
return { job: currentJob, log }
}
const resultLink = `/compute/sessions/${executionSessionId}/filerefs/_webout/content`
jobResult = await requestClient
.get<any>(resultLink, access_token, 'text/plain')
.catch(async (e) => {
if (e instanceof NotFoundError) {
if (logLink) {
const logUrl = `${logLink.href}/content`
const logCount = currentJob.logStatistics?.lineCount ?? 1000000
log = await fetchLogByChunks(
requestClient,
access_token!,
logUrl,
logCount
)
return Promise.reject({
status: 500,
log
})
}
}
return {
result: JSON.stringify(e)
}
})
await sessionManager
.clearSession(executionSessionId, access_token)
.catch((err) => {
throw prefixMessage(err, 'Error while clearing session. ')
})
return { result: jobResult?.result, log }
} catch (e) {
if (e && e.status === 404) {
return executeScript(
requestClient,
sessionManager,
rootFolderName,
jobPath,
linesOfCode,
contextName,
authConfig,
data,
debug,
false,
true
)
} else {
throw prefixMessage(e, 'Error while executing script. ')
}
}
}

View File

@@ -0,0 +1,17 @@
import { isFolder } from '@sasjs/utils/file'
import { generateTimestamp } from '@sasjs/utils/time'
import { Job } from '../../types'
export const getFileStream = async (job: Job, filePath?: string) => {
const { createWriteStream } = require('@sasjs/utils/file')
const logPath = filePath || process.cwd()
const isFolderPath = await isFolder(logPath)
if (isFolderPath) {
const logFileName = `${job.name || 'job'}-${generateTimestamp()}.log`
const path = require('path')
const logFilePath = path.join(filePath || process.cwd(), logFileName)
return await createWriteStream(logFilePath)
} else {
return await createWriteStream(logPath)
}
}

View File

@@ -0,0 +1,250 @@
import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions } from '../..'
import { getTokens } from '../../auth/getTokens'
import { RequestClient } from '../../request/RequestClient'
import { JobStatePollError } from '../../types/errors'
import { Link, WriteStream } from '../../types'
import { isNode } from '../../utils'
export async function pollJobState(
requestClient: RequestClient,
postedJob: Job,
debug: boolean,
authConfig?: AuthConfig,
pollOptions?: PollOptions
) {
const logger = process.logger || console
let pollInterval = 300
let maxPollCount = 1000
const defaultPollOptions: PollOptions = {
maxPollCount,
pollInterval,
streamLog: false
}
pollOptions = { ...defaultPollOptions, ...(pollOptions || {}) }
const stateLink = postedJob.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
let currentState = await getJobState(
requestClient,
postedJob,
'',
debug,
authConfig
).catch((err) => {
logger.error(
`Error fetching job state from ${stateLink.href}. Starting poll, assuming job to be running.`,
err
)
return 'unavailable'
})
let pollCount = 0
if (currentState === 'completed') {
return Promise.resolve(currentState)
}
let logFileStream
if (pollOptions.streamLog && isNode()) {
const { getFileStream } = require('./getFileStream')
logFileStream = await getFileStream(postedJob, pollOptions.logFolderPath)
}
// Poll up to the first 100 times with the specified poll interval
let result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
{
...pollOptions,
maxPollCount:
pollOptions.maxPollCount <= 100 ? pollOptions.maxPollCount : 100
},
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (!needsRetry(currentState) || pollCount >= pollOptions.maxPollCount) {
return currentState
}
// If we get to this point, this is a long-running job that needs longer polling.
// We will resume polling with a bigger interval of 1 minute
let longJobPollOptions: PollOptions = {
maxPollCount: 24 * 60,
pollInterval: 60000,
streamLog: false
}
if (pollOptions) {
longJobPollOptions.streamLog = pollOptions.streamLog
longJobPollOptions.logFolderPath = pollOptions.logFolderPath
}
result = await doPoll(
requestClient,
postedJob,
currentState,
debug,
pollCount,
authConfig,
longJobPollOptions,
logFileStream
)
currentState = result.state
pollCount = result.pollCount
if (logFileStream) {
logFileStream.end()
}
return currentState
}
const getJobState = async (
requestClient: RequestClient,
job: Job,
currentState: string,
debug: boolean,
authConfig?: AuthConfig
) => {
const stateLink = job.links.find((l: any) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
if (needsRetry(currentState)) {
let tokens
if (authConfig) {
tokens = await getTokens(requestClient, authConfig)
}
const { result: jobState } = await requestClient
.get<string>(
`${stateLink.href}?_action=wait&wait=300`,
tokens?.access_token,
'text/plain',
{},
debug
)
.catch((err) => {
throw new JobStatePollError(job.id, err)
})
return jobState.trim()
} else {
return currentState
}
}
const needsRetry = (state: string) =>
state === 'running' ||
state === '' ||
state === 'pending' ||
state === 'unavailable'
const doPoll = async (
requestClient: RequestClient,
postedJob: Job,
currentState: string,
debug: boolean,
pollCount: number,
authConfig?: AuthConfig,
pollOptions?: PollOptions,
logStream?: WriteStream
): Promise<{ state: string; pollCount: number }> => {
let pollInterval = 300
let maxPollCount = 1000
let maxErrorCount = 5
let errorCount = 0
let state = currentState
let printedState = ''
let startLogLine = 0
const logger = process.logger || console
if (pollOptions) {
pollInterval = pollOptions.pollInterval || pollInterval
maxPollCount = pollOptions.maxPollCount || maxPollCount
}
const stateLink = postedJob.links.find((l: Link) => l.rel === 'state')
if (!stateLink) {
throw new Error(`Job state link was not found.`)
}
while (needsRetry(state) && pollCount <= maxPollCount) {
state = await getJobState(
requestClient,
postedJob,
state,
debug,
authConfig
).catch((err) => {
errorCount++
if (pollCount >= maxPollCount || errorCount >= maxErrorCount) {
throw err
}
logger.error(
`Error fetching job state from ${stateLink.href}. Resuming poll, assuming job to be running.`,
err
)
return 'unavailable'
})
pollCount++
if (pollOptions?.streamLog) {
const jobUrl = postedJob.links.find((l: Link) => l.rel === 'self')
const { result: job } = await requestClient.get<Job>(
jobUrl!.href,
authConfig?.access_token
)
const endLogLine = job.logStatistics?.lineCount ?? 1000000
const { saveLog } = isNode() ? require('./saveLog') : { saveLog: null }
if (saveLog) {
await saveLog(
postedJob,
requestClient,
startLogLine,
endLogLine,
logStream,
authConfig?.access_token
)
}
startLogLine += endLogLine
}
if (debug && printedState !== state) {
logger.info('Polling job status...')
logger.info(`Current job state: ${state}`)
printedState = state
}
if (state !== 'unavailable' && errorCount > 0) {
errorCount = 0
}
await delay(pollInterval)
}
return { state, pollCount }
}
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
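
As a hedged illustration (not part of the commits above) of how the two-phase polling is driven, assuming a configured RequestClient, a posted Job and a valid AuthConfig; the option values and the wrapper name are placeholders.

import { AuthConfig } from '@sasjs/utils/types'
import { Job, PollOptions } from '../..'
import { RequestClient } from '../../request/RequestClient'
import { pollJobState } from './pollJobState'

// Hypothetical driver: polls quickly at first, then falls back to the
// one-minute interval for long-running jobs, streaming the log to disk.
async function waitForJob(
  requestClient: RequestClient,
  postedJob: Job,
  authConfig: AuthConfig
) {
  const pollOptions: PollOptions = {
    maxPollCount: 500, // overrides the default of 1000
    pollInterval: 1000, // milliseconds between state checks
    streamLog: true, // only takes effect in Node.js environments
    logFolderPath: './logs' // folder or file path for the streamed log
  }
  const finalState = await pollJobState(
    requestClient,
    postedJob,
    false, // debug
    authConfig,
    pollOptions
  )
  return finalState // e.g. 'completed', 'failed' or 'error'
}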

src/api/viya/saveLog.ts (new file, 55 lines)

@@ -0,0 +1,55 @@
import { Job } from '../..'
import { RequestClient } from '../../request/RequestClient'
import { fetchLog } from '../../utils'
import { WriteStream } from '../../types'
import { writeStream } from './writeStream'
/**
* Appends logs to a supplied write stream.
* This is useful for getting quick feedback on longer-running jobs.
* @param job - the job to fetch logs for
* @param requestClient - the pre-configured HTTP request client
* @param startLine - the line at which to start fetching the log
* @param endLine - the line at which to stop fetching the log
* @param logFileStream - the write stream to which the log is appended
* @param accessToken - an optional access token for authentication/authorization.
* The access token is not required when fetching logs from the browser.
*/
export async function saveLog(
job: Job,
requestClient: RequestClient,
startLine: number,
endLine: number,
logFileStream?: WriteStream,
accessToken?: string
) {
if (!accessToken) {
throw new Error(
`Logs for job ${job.id} cannot be fetched without a valid access token.`
)
}
if (!logFileStream) {
throw new Error(
`Logs for job ${job.id} cannot be written without a valid write stream.`
)
}
const logger = process.logger || console
const jobLogUrl = job.links.find((l) => l.rel === 'log')
if (!jobLogUrl) {
throw new Error(`Log URL for job ${job.id} was not found.`)
}
const log = await fetchLog(
requestClient,
accessToken,
`${jobLogUrl.href}/content`,
startLine,
endLine
)
logger.info(`Writing logs to ${logFileStream.path}`)
await writeStream(logFileStream, log || '')
}
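
A brief usage sketch (not part of the commits above), assuming a write stream obtained via getFileStream and an access token that is valid for the job's log URL; the helper name and paths are placeholders.

import { Job } from '../..'
import { RequestClient } from '../../request/RequestClient'
import { getFileStream } from './getFileStream'
import { saveLog } from './saveLog'

// Hypothetical helper: appends the first 100 log lines of a job to a
// timestamped log file under ./logs, then closes the stream.
async function appendFirstLogChunk(
  job: Job,
  requestClient: RequestClient,
  accessToken: string
) {
  const logFileStream = await getFileStream(job, './logs')
  await saveLog(job, requestClient, 0, 100, logFileStream, accessToken)
  logFileStream.end()
}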


@@ -0,0 +1,675 @@
import { RequestClient } from '../../../request/RequestClient'
import { SessionManager } from '../../../SessionManager'
import { executeScript } from '../executeScript'
import { mockSession, mockAuthConfig, mockJob } from './mockResponses'
import * as pollJobStateModule from '../pollJobState'
import * as uploadTablesModule from '../uploadTables'
import * as getTokensModule from '../../../auth/getTokens'
import * as formatDataModule from '../../../utils/formatDataForRequest'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import { PollOptions } from '../../../types'
import { ComputeJobExecutionError, NotFoundError } from '../../../types/errors'
import { Logger, LogLevel } from '@sasjs/utils'
const sessionManager = new (<jest.Mock<SessionManager>>SessionManager)()
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
maxPollCount: 100,
pollInterval: 500,
streamLog: false
}
describe('executeScript', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should not try to get fresh tokens if an authConfig is not provided', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context'
)
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
})
it('should try to get fresh tokens if an authConfig is provided', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context',
mockAuthConfig
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient,
mockAuthConfig
)
})
it('should get a session from the session manager before executing', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context'
)
expect(sessionManager.getSession).toHaveBeenCalledWith(undefined)
})
it('should handle errors while getting a session', async () => {
jest
.spyOn(sessionManager, 'getSession')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context'
).catch((e) => e)
expect(error).toContain('Error while getting session.')
})
it('should fetch the PID when printPid is true', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context',
mockAuthConfig,
null,
false,
false,
false,
defaultPollOptions,
true
)
expect(sessionManager.getVariable).toHaveBeenCalledWith(
mockSession.id,
'SYSJOBID',
mockAuthConfig.access_token
)
})
it('should handle errors while getting the job PID', async () => {
jest
.spyOn(sessionManager, 'getVariable')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context',
mockAuthConfig,
null,
false,
false,
false,
defaultPollOptions,
true
).catch((e) => e)
expect(error).toContain('Error while getting session variable.')
})
it('should use the file upload approach when data contains semicolons', async () => {
jest
.spyOn(uploadTablesModule, 'uploadTables')
.mockImplementation(() =>
Promise.resolve([{ tableName: 'test', file: { id: 1 } }])
)
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context',
mockAuthConfig,
{ foo: 'bar;' },
false,
false,
false,
defaultPollOptions,
true
)
expect(uploadTablesModule.uploadTables).toHaveBeenCalledWith(
requestClient,
{ foo: 'bar;' },
mockAuthConfig.access_token
)
})
it('should format data as CSV when it does not contain semicolons', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put hello'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
false,
defaultPollOptions,
true
)
expect(formatDataModule.formatDataForRequest).toHaveBeenCalledWith({
foo: 'bar'
})
})
it('should submit a job for execution via the compute API', async () => {
jest
.spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
false,
defaultPollOptions,
true
)
expect(requestClient.post).toHaveBeenCalledWith(
`/compute/sessions/${mockSession.id}/jobs`,
{
name: 'exec-test',
description: 'Powered by SASjs',
code: ['%put "hello";'],
variables: {
SYS_JES_JOB_URI: '',
_program: 'test/test',
sasjs_tables: 'foo',
sasjs0data: 'bar'
},
arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: true,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: true
}
},
mockAuthConfig.access_token
)
})
it('should set the correct variables when debug is true', async () => {
jest
.spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'foo', sasjs0data: 'bar' }))
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
false,
defaultPollOptions,
true
)
expect(requestClient.post).toHaveBeenCalledWith(
`/compute/sessions/${mockSession.id}/jobs`,
{
name: 'exec-test',
description: 'Powered by SASjs',
code: ['%put "hello";'],
variables: {
SYS_JES_JOB_URI: '',
_program: 'test/test',
sasjs_tables: 'foo',
sasjs0data: 'bar',
_DEBUG: 131
},
arguments: {
_contextName: 'test context',
_OMITJSONLISTING: true,
_OMITJSONLOG: true,
_OMITSESSIONRESULTS: false,
_OMITTEXTLISTING: true,
_OMITTEXTLOG: false
}
},
mockAuthConfig.access_token
)
})
it('should handle errors during job submission', async () => {
jest
.spyOn(requestClient, 'post')
.mockImplementation(() => Promise.reject('Test Error'))
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
false,
defaultPollOptions,
true
).catch((e) => e)
expect(error).toContain('Error while posting job')
})
it('should immediately return the session when waitForResult is false', async () => {
const result = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
false,
defaultPollOptions,
true
)
expect(result).toEqual(mockSession)
})
it('should poll for job completion when waitForResult is true', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
true,
defaultPollOptions,
true
)
expect(pollJobStateModule.pollJobState).toHaveBeenCalledWith(
requestClient,
mockJob,
false,
mockAuthConfig,
defaultPollOptions
)
})
it('should handle general errors when polling for job status', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.reject('Poll Error'))
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(error).toContain('Error while polling job status.')
})
it('should fetch the log and append it to the error in case of a 5113 error code', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() =>
Promise.reject({ response: { data: 'err=5113,' } })
)
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
requestClient,
mockAuthConfig.access_token,
mockJob.links.find((l) => l.rel === 'up')!.href + '/log',
1000000
)
expect(error.log).toEqual('Test Log')
})
it('should fetch the logs for the job if debug is true and a log URL is available', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
true,
defaultPollOptions,
true
)
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
requestClient,
mockAuthConfig.access_token,
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
mockJob.logStatistics.lineCount
)
})
it('should not fetch the logs for the job if debug is false', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
false,
true,
defaultPollOptions,
true
)
expect(fetchLogsModule.fetchLogByChunks).not.toHaveBeenCalled()
})
it('should throw a ComputeJobExecutionError if the job has failed', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('failed'))
const error: ComputeJobExecutionError = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
requestClient,
mockAuthConfig.access_token,
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
mockJob.logStatistics.lineCount
)
expect(error).toBeInstanceOf(ComputeJobExecutionError)
expect(error.log).toEqual('Test Log')
expect(error.job).toEqual(mockJob)
})
it('should throw a ComputeJobExecutionError if the job has errored out', async () => {
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('error'))
const error: ComputeJobExecutionError = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
true,
false,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
requestClient,
mockAuthConfig.access_token,
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
mockJob.logStatistics.lineCount
)
expect(error).toBeInstanceOf(ComputeJobExecutionError)
expect(error.log).toEqual('Test Log')
expect(error.job).toEqual(mockJob)
})
it('should fetch the result if expectWebout is true', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
true,
true,
defaultPollOptions,
true
)
expect(requestClient.get).toHaveBeenCalledWith(
`/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
mockAuthConfig.access_token,
'text/plain'
)
})
it('should fetch the logs if the webout file was not found', async () => {
jest.spyOn(requestClient, 'get').mockImplementation((url, ...rest) => {
if (url.includes('_webout')) {
return Promise.reject(new NotFoundError(url))
}
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
})
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
true,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(requestClient.get).toHaveBeenCalledWith(
`/compute/sessions/${mockSession.id}/filerefs/_webout/content`,
mockAuthConfig.access_token,
'text/plain'
)
expect(fetchLogsModule.fetchLogByChunks).toHaveBeenCalledWith(
requestClient,
mockAuthConfig.access_token,
mockJob.links.find((l) => l.rel === 'log')!.href + '/content',
mockJob.logStatistics.lineCount
)
expect(error.status).toEqual(500)
expect(error.log).toEqual('Test Log')
})
it('should clear the session after execution is complete', async () => {
await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
true,
true,
defaultPollOptions,
true
)
expect(sessionManager.clearSession).toHaveBeenCalledWith(
mockSession.id,
mockAuthConfig.access_token
)
})
it('should handle errors while clearing a session', async () => {
jest
.spyOn(sessionManager, 'clearSession')
.mockImplementation(() => Promise.reject('Clear Session Error'))
const error = await executeScript(
requestClient,
sessionManager,
'test',
'test',
['%put "hello";'],
'test context',
mockAuthConfig,
{ foo: 'bar' },
false,
true,
true,
defaultPollOptions,
true
).catch((e) => e)
expect(error).toContain('Error while clearing session.')
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../../request/RequestClient')
jest.mock('../../../SessionManager')
jest.mock('../../../auth/getTokens')
jest.mock('../pollJobState')
jest.mock('../uploadTables')
jest.mock('../../../utils/formatDataForRequest')
jest.mock('../../../utils/fetchLogByChunks')
jest
.spyOn(requestClient, 'post')
.mockImplementation(() => Promise.resolve({ result: mockJob, etag: '' }))
jest
.spyOn(requestClient, 'get')
.mockImplementation(() =>
Promise.resolve({ result: mockJob, etag: '', status: 200 })
)
jest
.spyOn(requestClient, 'delete')
.mockImplementation(() => Promise.resolve({ result: {}, etag: '' }))
jest
.spyOn(getTokensModule, 'getTokens')
.mockImplementation(() => Promise.resolve(mockAuthConfig))
jest
.spyOn(pollJobStateModule, 'pollJobState')
.mockImplementation(() => Promise.resolve('completed'))
jest
.spyOn(sessionManager, 'getVariable')
.mockImplementation(() =>
Promise.resolve({ result: { value: 'test' }, etag: 'test', status: 200 })
)
jest
.spyOn(sessionManager, 'getSession')
.mockImplementation(() => Promise.resolve(mockSession))
jest
.spyOn(sessionManager, 'clearSession')
.mockImplementation(() => Promise.resolve())
jest
.spyOn(formatDataModule, 'formatDataForRequest')
.mockImplementation(() => ({ sasjs_tables: 'test', sasjs0data: 'test' }))
jest
.spyOn(fetchLogsModule, 'fetchLogByChunks')
.mockImplementation(() => Promise.resolve('Test Log'))
}


@@ -0,0 +1,41 @@
import { Logger, LogLevel } from '@sasjs/utils/logger'
import * as path from 'path'
import * as fileModule from '@sasjs/utils/file'
import { getFileStream } from '../getFileStream'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'
describe('getFileStream', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should use the given log path if it points to a file', async () => {
const { createWriteStream } = require('@sasjs/utils/file')
await getFileStream(mockJob, path.join(__dirname, 'test.log'))
expect(createWriteStream).toHaveBeenCalledWith(
path.join(__dirname, 'test.log')
)
})
it('should generate a log file path with a timestamp if it points to a folder', async () => {
const { createWriteStream } = require('@sasjs/utils/file')
await getFileStream(mockJob, __dirname)
expect(createWriteStream).not.toHaveBeenCalledWith(__dirname)
expect(createWriteStream).toHaveBeenCalledWith(
expect.stringContaining(path.join(__dirname, 'test job-20'))
)
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('@sasjs/utils/file/file')
jest
.spyOn(fileModule, 'createWriteStream')
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
}


@@ -0,0 +1,73 @@
import { AuthConfig } from '@sasjs/utils/types'
import { Job, Session } from '../../../types'
export const mockSession: Session = {
id: 's35510n',
state: 'idle',
links: [],
attributes: {
sessionInactiveTimeout: 1
},
creationTimeStamp: new Date().valueOf().toString()
}
export const mockJob: Job = {
id: 'j0b',
name: 'test job',
uri: '/j0b',
createdBy: 'test user',
results: {
'_webout.json': 'test'
},
logStatistics: {
lineCount: 100,
modifiedTimeStamp: new Date().valueOf().toString()
},
links: [
{
rel: 'log',
href: '/log',
method: 'GET',
type: 'log',
uri: 'log'
},
{
rel: 'self',
href: '/job',
method: 'GET',
type: 'job',
uri: 'job'
},
{
rel: 'state',
href: '/state',
method: 'GET',
type: 'state',
uri: 'state'
},
{
rel: 'up',
href: '/job',
method: 'GET',
type: 'up',
uri: 'job'
}
]
}
export const mockAuthConfig: AuthConfig = {
client: 'cl13nt',
secret: '53cr3t',
access_token: 'acc355',
refresh_token: 'r3fr35h'
}
export class MockStream {
_write(chunk: string, _: any, next: Function) {
next()
}
reset() {}
destroy() {}
}


@@ -0,0 +1,346 @@
import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient'
import { mockAuthConfig, mockJob } from './mockResponses'
import { pollJobState } from '../pollJobState'
import * as getTokensModule from '../../../auth/getTokens'
import * as saveLogModule from '../saveLog'
import * as getFileStreamModule from '../getFileStream'
import * as isNodeModule from '../../../utils/isNode'
import { PollOptions } from '../../../types'
import { WriteStream } from 'fs'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const defaultPollOptions: PollOptions = {
maxPollCount: 100,
pollInterval: 500,
streamLog: false
}
describe('pollJobState', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should get valid tokens if the authConfig has been provided', async () => {
await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledWith(
requestClient,
mockAuthConfig
)
})
it('should not attempt to get tokens if the authConfig has not been provided', async () => {
await pollJobState(
requestClient,
mockJob,
false,
undefined,
defaultPollOptions
)
expect(getTokensModule.getTokens).not.toHaveBeenCalled()
})
it('should throw an error if the job does not have a state link', async () => {
const error = await pollJobState(
requestClient,
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'state') },
false,
undefined,
defaultPollOptions
).catch((e) => e)
expect((error as Error).message).toContain('Job state link was not found.')
})
it('should attempt to refresh tokens before each poll', async () => {
mockSimplePoll()
await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
defaultPollOptions
)
expect(getTokensModule.getTokens).toHaveBeenCalledTimes(3)
})
it('should attempt to fetch and save the log after each poll when streamLog is true', async () => {
mockSimplePoll()
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})
expect(saveLog).toHaveBeenCalledTimes(2)
})
it('should create a write stream in Node.js environment when streamLog is true', async () => {
mockSimplePoll()
const { getFileStream } = require('../getFileStream')
const { saveLog } = require('../saveLog')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})
expect(getFileStream).toHaveBeenCalled()
expect(saveLog).toHaveBeenCalledTimes(2)
})
it('should not create a write stream in a non-Node.js environment', async () => {
mockSimplePoll()
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
const { saveLog } = require('../saveLog')
const { getFileStream } = require('../getFileStream')
await pollJobState(requestClient, mockJob, false, mockAuthConfig, {
...defaultPollOptions,
streamLog: true
})
expect(getFileStream).not.toHaveBeenCalled()
expect(saveLog).not.toHaveBeenCalled()
})
it('should not attempt to fetch and save the log after each poll when streamLog is false', async () => {
mockSimplePoll()
await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
defaultPollOptions
)
expect(saveLogModule.saveLog).not.toHaveBeenCalled()
})
it('should return the current status when the max poll count is reached', async () => {
mockRunningPoll()
const state = await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
{
...defaultPollOptions,
maxPollCount: 1
}
)
expect(state).toEqual('running')
})
it('should poll with a larger interval for longer running jobs', async () => {
mockLongPoll()
const state = await pollJobState(
requestClient,
mockJob,
false,
mockAuthConfig,
{
...defaultPollOptions,
maxPollCount: 200,
pollInterval: 10
}
)
expect(state).toEqual('completed')
}, 200000)
it('should continue polling until the job completes or errors', async () => {
mockSimplePoll(1)
const state = await pollJobState(
requestClient,
mockJob,
false,
undefined,
defaultPollOptions
)
expect(requestClient.get).toHaveBeenCalledTimes(2)
expect(state).toEqual('completed')
})
it('should print the state to the console when debug is on', async () => {
jest.spyOn((process as any).logger, 'info')
mockSimplePoll()
await pollJobState(
requestClient,
mockJob,
true,
undefined,
defaultPollOptions
)
expect((process as any).logger.info).toHaveBeenCalledTimes(4)
expect((process as any).logger.info).toHaveBeenNthCalledWith(
1,
'Polling job status...'
)
expect((process as any).logger.info).toHaveBeenNthCalledWith(
2,
'Current job state: running'
)
expect((process as any).logger.info).toHaveBeenNthCalledWith(
3,
'Polling job status...'
)
expect((process as any).logger.info).toHaveBeenNthCalledWith(
4,
'Current job state: completed'
)
})
it('should continue polling when there is a single error in between', async () => {
mockPollWithSingleError()
const state = await pollJobState(
requestClient,
mockJob,
false,
undefined,
defaultPollOptions
)
expect(requestClient.get).toHaveBeenCalledTimes(2)
expect(state).toEqual('completed')
})
it('should throw an error when the error count exceeds the set value of 5', async () => {
mockErroredPoll()
const error = await pollJobState(
requestClient,
mockJob,
false,
undefined,
defaultPollOptions
).catch((e) => e)
expect(error.message).toEqual(
'Error while polling job state for job j0b: Status Error'
)
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../../request/RequestClient')
jest.mock('../../../auth/getTokens')
jest.mock('../saveLog')
jest.mock('../getFileStream')
jest.mock('../../../utils/isNode')
jest
.spyOn(requestClient, 'get')
.mockImplementation(() =>
Promise.resolve({ result: 'completed', etag: '', status: 200 })
)
jest
.spyOn(getTokensModule, 'getTokens')
.mockImplementation(() => Promise.resolve(mockAuthConfig))
jest
.spyOn(saveLogModule, 'saveLog')
.mockImplementation(() => Promise.resolve())
jest
.spyOn(getFileStreamModule, 'getFileStream')
.mockImplementation(() => Promise.resolve({} as unknown as WriteStream))
jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)
}
const mockSimplePoll = (runningCount = 2) => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result:
count === 0
? 'pending'
: count <= runningCount
? 'running'
: 'completed',
etag: '',
status: 200
})
})
}
const mockRunningPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'running',
etag: '',
status: 200
})
})
}
const mockLongPoll = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.resolve({
result: count <= 102 ? 'running' : 'completed',
etag: '',
status: 200
})
})
}
const mockPollWithSingleError = () => {
let count = 0
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
count++
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
if (count === 1) {
return Promise.reject('Status Error')
}
return Promise.resolve({
result: count === 0 ? 'pending' : 'completed',
etag: '',
status: 200
})
})
}
const mockErroredPoll = () => {
jest.spyOn(requestClient, 'get').mockImplementation((url) => {
if (url.includes('job')) {
return Promise.resolve({ result: mockJob, etag: '', status: 200 })
}
return Promise.reject('Status Error')
})
}


@@ -0,0 +1,73 @@
import { Logger, LogLevel } from '@sasjs/utils'
import { RequestClient } from '../../../request/RequestClient'
import * as fetchLogsModule from '../../../utils/fetchLogByChunks'
import * as writeStreamModule from '../writeStream'
import { saveLog } from '../saveLog'
import { mockJob } from './mockResponses'
import { WriteStream } from '../../../types'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
const stream = {} as unknown as WriteStream
describe('saveLog', () => {
beforeEach(() => {
;(process as any).logger = new Logger(LogLevel.Off)
setupMocks()
})
it('should throw an error when a valid access token is not provided', async () => {
const error = await saveLog(mockJob, requestClient, 0, 100, stream).catch(
(e) => e
)
expect(error.message).toContain(
`Logs for job ${mockJob.id} cannot be fetched without a valid access token.`
)
})
it('should throw an error when the log URL is not available', async () => {
const error = await saveLog(
{ ...mockJob, links: mockJob.links.filter((l) => l.rel !== 'log') },
requestClient,
0,
100,
stream,
't0k3n'
).catch((e) => e)
expect(error.message).toContain(
`Log URL for job ${mockJob.id} was not found.`
)
})
it('should fetch and save logs to the given path', async () => {
await saveLog(mockJob, requestClient, 0, 100, stream, 't0k3n')
expect(fetchLogsModule.fetchLog).toHaveBeenCalledWith(
requestClient,
't0k3n',
'/log/content',
0,
100
)
expect(writeStreamModule.writeStream).toHaveBeenCalledWith(
stream,
'Test Log'
)
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../../request/RequestClient')
jest.mock('../../../utils/fetchLogByChunks')
jest.mock('@sasjs/utils')
jest.mock('../writeStream')
jest
.spyOn(fetchLogsModule, 'fetchLog')
.mockImplementation(() => Promise.resolve('Test Log'))
jest
.spyOn(writeStreamModule, 'writeStream')
.mockImplementation(() => Promise.resolve())
}


@@ -0,0 +1,67 @@
import { RequestClient } from '../../../request/RequestClient'
import * as convertToCsvModule from '../../../utils/convertToCsv'
import { uploadTables } from '../uploadTables'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
describe('uploadTables', () => {
beforeEach(() => {
setupMocks()
})
it('should return a list of uploaded files', async () => {
const data = { foo: 'bar' }
const files = await uploadTables(requestClient, data, 't0k3n')
expect(files).toEqual([{ tableName: 'foo', file: 'test-file' }])
expect(requestClient.uploadFile).toHaveBeenCalledTimes(1)
expect(requestClient.uploadFile).toHaveBeenCalledWith(
'/files/files#rawUpload',
'Test CSV',
't0k3n'
)
})
it('should throw an error when the CSV exceeds the maximum length', async () => {
const data = { foo: 'bar' }
jest
.spyOn(convertToCsvModule, 'convertToCSV')
.mockImplementation(() => 'ERROR: LARGE STRING LENGTH')
const error = await uploadTables(requestClient, data, 't0k3n').catch(
(e) => e
)
expect(requestClient.uploadFile).not.toHaveBeenCalled()
expect(error.message).toEqual(
'The max length of a string value in SASjs is 32765 characters.'
)
})
it('should throw an error when the file upload fails', async () => {
const data = { foo: 'bar' }
jest
.spyOn(requestClient, 'uploadFile')
.mockImplementation(() => Promise.reject('Upload Error'))
const error = await uploadTables(requestClient, data, 't0k3n').catch(
(e) => e
)
expect(error).toContain('Error while uploading file.')
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../../utils/convertToCsv')
jest
.spyOn(convertToCsvModule, 'convertToCSV')
.mockImplementation(() => 'Test CSV')
jest
.spyOn(requestClient, 'uploadFile')
.mockImplementation(() =>
Promise.resolve({ result: 'test-file', etag: '' })
)
}


@@ -0,0 +1,25 @@
import { WriteStream } from '../../../types'
import { writeStream } from '../writeStream'
import 'jest-extended'
describe('writeStream', () => {
const stream: WriteStream = {
write: jest.fn(),
path: 'test'
}
it('should resolve when the stream is written successfully', async () => {
expect(writeStream(stream, 'test')).toResolve()
expect(stream.write).toHaveBeenCalledWith('test\n', expect.anything())
})
it('should reject when the write errors out', async () => {
jest
.spyOn(stream, 'write')
.mockImplementation((_, callback) => callback(new Error('Test Error')))
const error = await writeStream(stream, 'test').catch((e) => e)
expect(error.message).toEqual('Test Error')
})
})


@@ -0,0 +1,37 @@
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient } from '../../request/RequestClient'
import { convertToCSV } from '../../utils/convertToCsv'
/**
* Uploads tables to SAS as specially formatted CSVs.
* This is more compact than JSON, and easier to read within SAS.
* @param requestClient - the pre-configured HTTP request client
* @param data - the JSON representation of the data to be uploaded
* @param accessToken - an optional access token for authentication/authorization
* The access token is not required when uploading tables from the browser.
*/
export async function uploadTables(
requestClient: RequestClient,
data: any,
accessToken?: string
) {
const uploadedFiles = []
for (const tableName in data) {
const csv = convertToCSV(data[tableName])
if (csv === 'ERROR: LARGE STRING LENGTH') {
throw new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
}
const uploadResponse = await requestClient
.uploadFile(`/files/files#rawUpload`, csv, accessToken)
.catch((err) => {
throw prefixMessage(err, 'Error while uploading file. ')
})
uploadedFiles.push({ tableName, file: uploadResponse.result })
}
return uploadedFiles
}
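
A hedged usage sketch (not part of the commits above), assuming a configured RequestClient and an access token, showing the shape of the data object and of the returned file references; the table names and values are placeholders.

import { RequestClient } from '../../request/RequestClient'
import { uploadTables } from './uploadTables'

// Hypothetical usage: each key becomes a table name, each value an array
// of row objects that is converted to CSV before upload.
async function uploadSampleTables(
  requestClient: RequestClient,
  accessToken?: string
) {
  const data = {
    areas: [{ area: 'Adak' }, { area: 'Eyam' }],
    prices: [{ item: 'pie', price: 1.99 }]
  }
  // Resolves to e.g. [{ tableName: 'areas', file: {...} }, { tableName: 'prices', file: {...} }]
  return await uploadTables(requestClient, data, accessToken)
}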


@@ -0,0 +1,15 @@
import { WriteStream } from '../../types'
export const writeStream = async (
stream: WriteStream,
content: string
): Promise<void> => {
return new Promise((resolve, reject) => {
stream.write(content + '\n', (e) => {
if (e) {
return reject(e)
}
return resolve()
})
})
}
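
Because WriteStream.write is callback-based, the helper above promisifies it; a minimal sketch (not part of the commits above) of awaiting it, with a placeholder wrapper name.

import { WriteStream } from '../../types'
import { writeStream } from './writeStream'

// Hypothetical wrapper: resolves to true on success, false if the
// underlying write passes an error to its callback.
async function appendLine(stream: WriteStream, line: string): Promise<boolean> {
  try {
    await writeStream(stream, line) // a trailing '\n' is appended by the helper
    return true
  } catch (e) {
    return false
  }
}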


@@ -1,5 +1,4 @@
import { ServerType } from '@sasjs/utils/types'
import { isAuthorizeFormRequired } from '.'
import { RequestClient } from '../request/RequestClient'
import { serialize } from '../utils'
@@ -35,6 +34,7 @@ export class AuthManager {
this.userName = loginParams.username
const { isLoggedIn, loginForm } = await this.checkSession()
if (isLoggedIn) {
await this.loginCallback()
@@ -44,6 +44,44 @@ export class AuthManager {
}
}
let loginResponse = await this.sendLoginRequest(loginForm, loginParams)
let loggedIn = isLogInSuccess(loginResponse)
if (!loggedIn) {
if (isCredentialsVerifyError(loginResponse)) {
const newLoginForm = await this.getLoginForm(loginResponse)
loginResponse = await this.sendLoginRequest(newLoginForm, loginParams)
}
const currentSession = await this.checkSession()
loggedIn = currentSession.isLoggedIn
}
if (loggedIn) {
if (this.serverType === ServerType.Sas9) {
const casAuthenticationUrl = `${this.serverUrl}/SASStoredProcess/j_spring_cas_security_check`
await this.requestClient.get<string>(
`/SASLogon/login?service=${casAuthenticationUrl}`,
undefined
)
}
this.loginCallback()
}
return {
isLoggedIn: !!loggedIn,
userName: this.userName
}
}
private async sendLoginRequest(
loginForm: { [key: string]: any },
loginParams: { [key: string]: any }
) {
for (const key in loginForm) {
loginParams[key] = loginForm[key]
}
@@ -60,21 +98,7 @@ export class AuthManager {
}
)
let loggedIn = isLogInSuccess(loginResponse)
if (!loggedIn) {
const currentSession = await this.checkSession()
loggedIn = currentSession.isLoggedIn
}
if (loggedIn) {
this.loginCallback()
}
return {
isLoggedIn: !!loggedIn,
userName: this.userName
}
return loginResponse
}
/**
@@ -168,5 +192,10 @@ export class AuthManager {
}
}
const isCredentialsVerifyError = (response: string): boolean =>
/An error occurred while the system was verifying your credentials. Please enter your credentials again./gm.test(
response
)
const isLogInSuccess = (response: string): boolean =>
/You have signed in/gm.test(response)
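
To make the retry decision above concrete, a small illustration (not part of the commits above) of how the two response checks behave on sample SASLogon response bodies; the sample string is a placeholder.

// Hypothetical illustration of the response checks used by logIn above.
const sampleResponse = 'You have signed in as some.user'

// True when the returned page confirms a successful sign-in
const loggedIn = /You have signed in/gm.test(sampleResponse)

// True when SASLogon asks for the credentials to be entered again,
// in which case the login form is re-fetched and resubmitted once
const mustResubmitForm =
  /An error occurred while the system was verifying your credentials. Please enter your credentials again./gm.test(
    sampleResponse
  )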


@@ -0,0 +1,53 @@
import { SasAuthResponse } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import * as NodeFormData from 'form-data'
import { RequestClient } from '../request/RequestClient'
/**
* Exchanges the auth code for an access token for the given client.
* @param requestClient - the pre-configured HTTP request client
* @param clientId - the client ID to authenticate with.
* @param clientSecret - the client secret to authenticate with.
* @param authCode - the auth code received from the server.
*/
export async function getAccessToken(
requestClient: RequestClient,
clientId: string,
clientSecret: string,
authCode: string
): Promise<SasAuthResponse> {
const url = '/SASLogon/oauth/token'
let token
if (typeof Buffer === 'undefined') {
token = btoa(clientId + ':' + clientSecret)
} else {
token = Buffer.from(clientId + ':' + clientSecret).toString('base64')
}
const headers = {
Authorization: 'Basic ' + token
}
let formData
if (typeof FormData === 'undefined') {
formData = new NodeFormData()
} else {
formData = new FormData()
}
formData.append('grant_type', 'authorization_code')
formData.append('code', authCode)
const authResponse = await requestClient
.post(
url,
formData,
undefined,
'multipart/form-data; boundary=' + (formData as any)._boundary,
headers
)
.then((res) => res.result as SasAuthResponse)
.catch((err) => {
throw prefixMessage(err, 'Error while getting access token')
})
return authResponse
}
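
A hedged usage sketch (not part of the commits above) of exchanging an authorisation code for tokens; the client ID, secret and server URL are placeholders.

import { RequestClient } from '../request/RequestClient'
import { getAccessToken } from './getAccessToken'

// Hypothetical exchange of an auth code obtained from /SASLogon/oauth/authorize.
async function exchangeAuthCode(serverUrl: string, authCode: string) {
  const requestClient = new RequestClient(serverUrl)
  const { access_token, refresh_token } = await getAccessToken(
    requestClient,
    'myClientId',
    'myClientSecret',
    authCode
  )
  return { access_token, refresh_token }
}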

src/auth/getTokens.ts (new file, 40 lines)

@@ -0,0 +1,40 @@
import {
isAccessTokenExpiring,
isRefreshTokenExpiring,
hasTokenExpired
} from '@sasjs/utils/auth'
import { AuthConfig } from '@sasjs/utils/types'
import { RequestClient } from '../request/RequestClient'
import { refreshTokens } from './refreshTokens'
/**
* Returns the auth configuration, refreshing the tokens if necessary.
* @param requestClient - the pre-configured HTTP request client
* @param authConfig - an object containing a client ID, secret, access token and refresh token
*/
export async function getTokens(
requestClient: RequestClient,
authConfig: AuthConfig
): Promise<AuthConfig> {
const logger = process.logger || console
let { access_token, refresh_token, client, secret } = authConfig
if (
isAccessTokenExpiring(access_token) ||
isRefreshTokenExpiring(refresh_token)
) {
if (hasTokenExpired(refresh_token)) {
const error =
'Unable to obtain new access token. Your refresh token has expired.'
logger.error(error)
throw new Error(error)
}
logger.info('Refreshing access and refresh tokens.')
;({ access_token, refresh_token } = await refreshTokens(
requestClient,
client,
secret,
refresh_token
))
}
return { access_token, refresh_token, client, secret }
}
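
For illustration (not part of the commits above), a caller might wrap getTokens to keep an AuthConfig fresh before issuing requests; the wrapper name is a placeholder.

import { AuthConfig } from '@sasjs/utils/types'
import { RequestClient } from '../request/RequestClient'
import { getTokens } from './getTokens'

// Hypothetical wrapper: tokens are refreshed only when the access or refresh
// token is close to expiry; otherwise the config is returned unchanged.
// Throws if the refresh token itself has already expired.
async function getFreshAccessToken(
  requestClient: RequestClient,
  authConfig: AuthConfig
): Promise<string> {
  const { access_token } = await getTokens(requestClient, authConfig)
  return access_token
}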

src/auth/refreshTokens.ts (new file, 49 lines)

@@ -0,0 +1,49 @@
import { SasAuthResponse } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import * as NodeFormData from 'form-data'
import { RequestClient } from '../request/RequestClient'
/**
* Exchanges the refresh token for an access token for the given client.
* @param requestClient - the pre-configured HTTP request client
* @param clientId - the client ID to authenticate with.
* @param clientSecret - the client secret to authenticate with.
* @param refreshToken - the refresh token received from the server.
*/
export async function refreshTokens(
requestClient: RequestClient,
clientId: string,
clientSecret: string,
refreshToken: string
) {
const url = '/SASLogon/oauth/token'
let token
token =
typeof Buffer === 'undefined'
? btoa(clientId + ':' + clientSecret)
: Buffer.from(clientId + ':' + clientSecret).toString('base64')
const headers = {
Authorization: 'Basic ' + token
}
const formData =
typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
formData.append('grant_type', 'refresh_token')
formData.append('refresh_token', refreshToken)
const authResponse = await requestClient
.post<SasAuthResponse>(
url,
formData,
undefined,
'multipart/form-data; boundary=' + (formData as any)._boundary,
headers
)
.then((res) => res.result)
.catch((err) => {
throw prefixMessage(err, 'Error while refreshing tokens')
})
return authResponse
}
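
A hedged sketch (not part of the commits above) of refreshing tokens directly, bypassing the expiry checks in getTokens; the client credentials and server URL are placeholders.

import { RequestClient } from '../request/RequestClient'
import { refreshTokens } from './refreshTokens'

// Hypothetical direct refresh: posts the refresh_token grant and returns
// the new access token from the SasAuthResponse.
async function forceRefresh(serverUrl: string, refreshToken: string) {
  const requestClient = new RequestClient(serverUrl)
  const authResponse = await refreshTokens(
    requestClient,
    'myClientId',
    'myClientSecret',
    refreshToken
  )
  return authResponse.access_token
}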


@@ -57,7 +57,7 @@ describe('AuthManager', () => {
expect((authManager as any).logoutUrl).toEqual('/SASLogon/logout?')
})
it('should call the auth callback and return when already logged in', async (done) => {
it('should call the auth callback and return when already logged in', async () => {
const authManager = new AuthManager(
serverUrl,
serverType,
@@ -77,10 +77,9 @@ describe('AuthManager', () => {
expect(loginResponse.isLoggedIn).toBeTruthy()
expect(loginResponse.userName).toEqual(userName)
expect(authCallback).toHaveBeenCalledTimes(1)
done()
})
it('should post a login request to the server if not logged in', async (done) => {
it('should post a login request to the server if not logged in', async () => {
const authManager = new AuthManager(
serverUrl,
serverType,
@@ -121,10 +120,9 @@ describe('AuthManager', () => {
}
)
expect(authCallback).toHaveBeenCalledTimes(1)
done()
})
it('should parse and submit the authorisation form when necessary', async (done) => {
it('should parse and submit the authorisation form when necessary', async () => {
const authManager = new AuthManager(
serverUrl,
serverType,
@@ -160,10 +158,9 @@ describe('AuthManager', () => {
expect(requestClient.authorize).toHaveBeenCalledWith(
mockLoginAuthoriseRequiredResponse
)
done()
})
it('should check and return session information if logged in', async (done) => {
it('should check and return session information if logged in', async () => {
const authManager = new AuthManager(
serverUrl,
serverType,
@@ -189,7 +186,5 @@ describe('AuthManager', () => {
}
}
)
done()
})
})


@@ -0,0 +1,75 @@
import { AuthConfig } from '@sasjs/utils'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { getAccessToken } from '../getAccessToken'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
describe('getAccessToken', () => {
it('should attempt to refresh tokens', async () => {
setupMocks()
const access_token = generateToken(30)
const refresh_token = generateToken(30)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
jest
.spyOn(requestClient, 'post')
.mockImplementation(() =>
Promise.resolve({ result: mockAuthResponse, etag: '' })
)
const token = Buffer.from(
authConfig.client + ':' + authConfig.secret
).toString('base64')
await getAccessToken(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
)
expect(requestClient.post).toHaveBeenCalledWith(
'/SASLogon/oauth/token',
expect.any(NodeFormData),
undefined,
expect.stringContaining('multipart/form-data; boundary='),
{
Authorization: 'Basic ' + token
}
)
})
it('should handle errors while refreshing tokens', async () => {
setupMocks()
const access_token = generateToken(30)
const refresh_token = generateToken(30)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
jest
.spyOn(requestClient, 'post')
.mockImplementation(() => Promise.reject('Token Error'))
const error = await getAccessToken(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
).catch((e) => e)
expect(error).toContain('Error while getting access token')
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../request/RequestClient')
}


@@ -0,0 +1,79 @@
import { AuthConfig } from '@sasjs/utils'
import * as refreshTokensModule from '../refreshTokens'
import { generateToken, mockAuthResponse } from './mockResponses'
import { getTokens } from '../getTokens'
import { RequestClient } from '../../request/RequestClient'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
describe('getTokens', () => {
it('should attempt to refresh tokens if the access token is expiring', async () => {
setupMocks()
const access_token = generateToken(30)
const refresh_token = generateToken(86400000)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
await getTokens(requestClient, authConfig)
expect(refreshTokensModule.refreshTokens).toHaveBeenCalledWith(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
)
})
it('should attempt to refresh tokens if the refresh token is expiring', async () => {
setupMocks()
const access_token = generateToken(86400000)
const refresh_token = generateToken(30)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
await getTokens(requestClient, authConfig)
expect(refreshTokensModule.refreshTokens).toHaveBeenCalledWith(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
)
})
it('should throw an error if the refresh token has already expired', async () => {
setupMocks()
const access_token = generateToken(86400000)
const refresh_token = generateToken(-36000)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
const expectedError =
'Unable to obtain new access token. Your refresh token has expired.'
const error = await getTokens(requestClient, authConfig).catch((e) => e)
expect(error.message).toEqual(expectedError)
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../request/RequestClient')
jest.mock('../refreshTokens')
jest
.spyOn(refreshTokensModule, 'refreshTokens')
.mockImplementation(() => Promise.resolve(mockAuthResponse))
}


@@ -1,2 +1,24 @@
import { SasAuthResponse } from '@sasjs/utils/types'
export const mockLoginAuthoriseRequiredResponse = `<form id="application_authorization" action="/SASLogon/oauth/authorize" method="POST"><input type="hidden" name="X-Uaa-Csrf" value="2nfuxIn6WaOURWL7tzTXCe"/>`
export const mockLoginSuccessResponse = `You have signed in`
export const mockAuthResponse: SasAuthResponse = {
access_token: 'acc355',
refresh_token: 'r3fr35h',
id_token: 'id',
token_type: 'bearer',
expires_in: new Date().valueOf(),
scope: 'default',
jti: 'test'
}
export const generateToken = (timeToLiveSeconds: number): string => {
const exp =
new Date(new Date().getTime() + timeToLiveSeconds * 1000).getTime() / 1000
const header = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9'
const payload = Buffer.from(JSON.stringify({ exp })).toString('base64')
const signature = '4-iaDojEVl0pJQMjrbM1EzUIfAZgsbK_kgnVyVxFSVo'
const token = `${header}.${payload}.${signature}`
return token
}
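
For clarity, a small sketch (not part of the commits above) showing that the generated token carries its expiry in the standard JWT payload; it uses only Node.js built-ins.

import { generateToken } from './mockResponses'

// Hypothetical check: decode the base64 payload segment and read `exp`.
const token = generateToken(30) // expires roughly 30 seconds from now
const payload = JSON.parse(
  Buffer.from(token.split('.')[1], 'base64').toString()
)
const secondsUntilExpiry = payload.exp - Date.now() / 1000 // approximately 30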


@@ -0,0 +1,75 @@
import { AuthConfig } from '@sasjs/utils'
import * as NodeFormData from 'form-data'
import { generateToken, mockAuthResponse } from './mockResponses'
import { RequestClient } from '../../request/RequestClient'
import { refreshTokens } from '../refreshTokens'
const requestClient = new (<jest.Mock<RequestClient>>RequestClient)()
describe('refreshTokens', () => {
it('should attempt to refresh tokens', async () => {
setupMocks()
const access_token = generateToken(30)
const refresh_token = generateToken(30)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
jest
.spyOn(requestClient, 'post')
.mockImplementation(() =>
Promise.resolve({ result: mockAuthResponse, etag: '' })
)
const token = Buffer.from(
authConfig.client + ':' + authConfig.secret
).toString('base64')
await refreshTokens(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
)
expect(requestClient.post).toHaveBeenCalledWith(
'/SASLogon/oauth/token',
expect.any(NodeFormData),
undefined,
expect.stringContaining('multipart/form-data; boundary='),
{
Authorization: 'Basic ' + token
}
)
})
it('should handle errors while refreshing tokens', async () => {
setupMocks()
const access_token = generateToken(30)
const refresh_token = generateToken(30)
const authConfig: AuthConfig = {
access_token,
refresh_token,
client: 'cl13nt',
secret: 's3cr3t'
}
jest
.spyOn(requestClient, 'post')
.mockImplementation(() => Promise.reject('Token Error'))
const error = await refreshTokens(
requestClient,
authConfig.client,
authConfig.secret,
authConfig.refresh_token
).catch((e) => e)
expect(error).toContain('Error while refreshing tokens')
})
})
const setupMocks = () => {
jest.restoreAllMocks()
jest.mock('../../request/RequestClient')
}


@@ -1,4 +1,4 @@
import { ServerType } from '@sasjs/utils/types'
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import { SASViyaApiClient } from '../SASViyaApiClient'
import {
ErrorResponse,
@@ -17,7 +17,7 @@ export class ComputeJobExecutor extends BaseJobExecutor {
data: any,
config: any,
loginRequiredCallback?: any,
accessToken?: string
authConfig?: AuthConfig
) {
const loginCallback = loginRequiredCallback || (() => Promise.resolve())
const waitForResult = true
@@ -30,7 +30,7 @@ export class ComputeJobExecutor extends BaseJobExecutor {
config.contextName,
config.debug,
data,
accessToken,
authConfig,
waitForResult,
expectWebout
)


@@ -1,10 +1,11 @@
import { ServerType } from '@sasjs/utils/types'
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import { SASViyaApiClient } from '../SASViyaApiClient'
import {
ErrorResponse,
JobExecutionError,
LoginRequiredError
} from '../types/errors'
import { ExtraResponseAttributes } from '@sasjs/utils/types'
import { BaseJobExecutor } from './JobExecutor'
export class JesJobExecutor extends BaseJobExecutor {
@@ -17,23 +18,34 @@ export class JesJobExecutor extends BaseJobExecutor {
data: any,
config: any,
loginRequiredCallback?: any,
accessToken?: string
authConfig?: AuthConfig,
extraResponseAttributes: ExtraResponseAttributes[] = []
) {
const loginCallback = loginRequiredCallback || (() => Promise.resolve())
const requestPromise = new Promise((resolve, reject) => {
this.sasViyaApiClient
?.executeJob(
sasJob,
config.contextName,
config.debug,
data,
accessToken
)
.then((response) => {
?.executeJob(sasJob, config.contextName, config.debug, data, authConfig)
.then((response: any) => {
this.appendRequest(response, sasJob, config.debug)
resolve(response)
let responseObject = {}
if (extraResponseAttributes && extraResponseAttributes.length > 0) {
const extraAttributes = extraResponseAttributes.reduce(
(map: any, obj: any) => ((map[obj] = response[obj]), map),
{}
)
responseObject = {
result: response.result,
...extraAttributes
}
} else {
responseObject = response.result
}
resolve(responseObject)
})
.catch(async (e: Error) => {
if (e instanceof JobExecutionError) {
@@ -50,7 +62,9 @@ export class JesJobExecutor extends BaseJobExecutor {
sasJob,
data,
config,
loginRequiredCallback
loginRequiredCallback,
authConfig,
extraResponseAttributes
).then(
(res: any) => {
resolve(res)


@@ -1,5 +1,6 @@
import { ServerType } from '@sasjs/utils/types'
import { AuthConfig, ServerType } from '@sasjs/utils/types'
import { SASjsRequest } from '../types'
import { ExtraResponseAttributes } from '@sasjs/utils/types'
import { asyncForEach, parseGeneratedCode, parseSourceCode } from '../utils'
export type ExecuteFunction = () => Promise<any>
@@ -10,7 +11,8 @@ export interface JobExecutor {
data: any,
config: any,
loginRequiredCallback?: any,
accessToken?: string
authConfig?: AuthConfig,
extraResponseAttributes?: ExtraResponseAttributes[]
) => Promise<any>
resendWaitingRequests: () => Promise<void>
getRequests: () => SASjsRequest[]
@@ -28,7 +30,8 @@ export abstract class BaseJobExecutor implements JobExecutor {
data: any,
config: any,
loginRequiredCallback?: any,
accessToken?: string | undefined
authConfig?: AuthConfig | undefined,
extraResponseAttributes?: ExtraResponseAttributes[]
): Promise<any>
resendWaitingRequests = async () => {
@@ -59,14 +62,14 @@ export abstract class BaseJobExecutor implements JobExecutor {
let sasWork = null
if (debug) {
if (response?.result && response?.log) {
if (response?.log) {
sourceCode = parseSourceCode(response.log)
generatedCode = parseGeneratedCode(response.log)
if (response.log) {
sasWork = response.log
} else {
if (response?.result) {
sasWork = response.result.WORK
} else {
sasWork = response.log
}
} else if (response?.result) {
sourceCode = parseSourceCode(response.result)


@@ -0,0 +1,110 @@
import { ServerType } from '@sasjs/utils/types'
import * as NodeFormData from 'form-data'
import { ErrorResponse } from '../types/errors'
import { convertToCSV, isRelativePath } from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { Sas9RequestClient } from '../request/Sas9RequestClient'
/**
* Job executor for SAS9 servers for use in Node.js environments.
* Initiates login with the provided username and password from the config.
* The cookies are stored in the request client and used in subsequent
* job execution requests.
*/
export class Sas9JobExecutor extends BaseJobExecutor {
private requestClient: Sas9RequestClient
constructor(
serverUrl: string,
serverType: ServerType,
private jobsPath: string
) {
super(serverUrl, serverType)
this.requestClient = new Sas9RequestClient(serverUrl, false)
}
async execute(sasJob: string, data: any, config: any) {
const program = isRelativePath(sasJob)
? config.appLoc
? config.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
: sasJob
: sasJob
let apiUrl = `${config.serverUrl}${this.jobsPath}?${'_program=' + program}`
apiUrl = `${apiUrl}${
config.username && config.password
? '&_username=' + config.username + '&_password=' + config.password
: ''
}`
let requestParams = {
...this.getRequestParams(config)
}
let formData = new NodeFormData()
if (data) {
try {
formData = generateFileUploadForm(formData, data)
} catch (e) {
return Promise.reject(new ErrorResponse(e?.message, e))
}
}
for (const key in requestParams) {
if (requestParams.hasOwnProperty(key)) {
formData.append(key, requestParams[key])
}
}
await this.requestClient.login(
config.username,
config.password,
this.jobsPath
)
const contentType =
data && Object.keys(data).length
? 'multipart/form-data; boundary=' + (formData as any)._boundary
: 'text/plain'
return await this.requestClient!.post(
apiUrl,
formData,
undefined,
contentType,
{
Accept: '*/*',
Connection: 'Keep-Alive'
}
)
}
private getRequestParams(config: any): any {
const requestParams: any = {}
if (config.debug) {
requestParams['_debug'] = 131
}
return requestParams
}
}
const generateFileUploadForm = (
formData: NodeFormData,
data: any
): NodeFormData => {
for (const tableName in data) {
const name = tableName
const csv = convertToCSV(data[tableName])
if (csv === 'ERROR: LARGE STRING LENGTH') {
throw new Error(
'The max length of a string value in SASjs is 32765 characters.'
)
}
formData.append(name, csv, {
filename: `${name}.csv`,
contentType: 'application/csv'
})
}
return formData
}
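
A hedged usage sketch (not part of the commits above) of the executor, assuming a SAS9 server; the server URL, jobs path, credentials and job name are placeholders.

import { ServerType } from '@sasjs/utils/types'
import { Sas9JobExecutor } from './Sas9JobExecutor'

// Hypothetical Node.js usage: the username/password in the config are used
// both for the initial login and as _username/_password on the job request.
async function runSas9Job() {
  const executor = new Sas9JobExecutor(
    'https://sas9.example.com',
    ServerType.Sas9,
    '/SASStoredProcess/do' // placeholder jobs path
  )
  const config = {
    serverUrl: 'https://sas9.example.com',
    appLoc: '/Public/app',
    username: 'someuser',
    password: 'somepassword',
    debug: false
  }
  const data = { areas: [{ area: 'Adak' }] }
  return await executor.execute('services/common/appinit', data, config)
}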


@@ -8,8 +8,13 @@ import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { generateTableUploadForm } from '../file/generateTableUploadForm'
import { RequestClient } from '../request/RequestClient'
import { SASViyaApiClient } from '../SASViyaApiClient'
import { isRelativePath } from '../utils'
import {
isRelativePath,
getValidJson,
parseSasViyaDebugResponse
} from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
export interface WaitingRequstPromise {
promise: Promise<any> | null
@@ -39,15 +44,18 @@ export class WebJobExecutor extends BaseJobExecutor {
? config.appLoc.replace(/\/?$/, '/') + sasJob.replace(/^\//, '')
: sasJob
: sasJob
const jobUri =
config.serverType === ServerType.SasViya
? await this.getJobUri(sasJob)
: ''
const apiUrl = `${config.serverUrl}${this.jobsPath}/?${
jobUri.length > 0
? '__program=' + program + '&_job=' + jobUri
: '_program=' + program
}`
let apiUrl = `${config.serverUrl}${this.jobsPath}/?${'_program=' + program}`
if (config.serverType === ServerType.SasViya) {
const jobUri =
config.serverType === ServerType.SasViya
? await this.getJobUri(sasJob)
: ''
apiUrl += jobUri.length > 0 ? '&_job=' + jobUri : ''
apiUrl += config.contextName ? `&_contextname=${config.contextName}` : ''
}
let requestParams = {
...this.getRequestParams(config)
@@ -91,12 +99,27 @@ export class WebJobExecutor extends BaseJobExecutor {
this.requestClient!.post(apiUrl, formData, undefined)
.then(async (res) => {
if (this.serverType === ServerType.SasViya && config.debug) {
const jsonResponse = await this.parseSasViyaDebugResponse(
res.result as string
const jsonResponse = await parseSasViyaDebugResponse(
res.result as string,
this.requestClient,
this.serverUrl
)
this.appendRequest(res, sasJob, config.debug)
resolve(jsonResponse)
}
if (this.serverType === ServerType.Sas9 && config.debug) {
const jsonResponse = parseWeboutResponse(res.result as string)
if (jsonResponse === '') {
throw new Error(
'Valid JSON could not be extracted from response.'
)
}
getValidJson(jsonResponse)
this.appendRequest(res, sasJob, config.debug)
resolve(res.result)
}
getValidJson(res.result as string)
this.appendRequest(res, sasJob, config.debug)
resolve(res.result)
})
@@ -134,20 +157,6 @@ export class WebJobExecutor extends BaseJobExecutor {
return requestPromise
}
private parseSasViyaDebugResponse = async (response: string) => {
const iframeStart = response.split(
'<iframe style="width: 99%; height: 500px" src="'
)[1]
const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
if (!jsonUrl) {
throw new Error('Unable to find webout file URL.')
}
return this.requestClient
.get(this.serverUrl + jsonUrl, undefined)
.then((res) => res.result)
}
private async getJobUri(sasJob: string) {
if (!this.sasViyaApiClient) return ''
let uri = ''
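
To make the URL-building change above concrete, a sketch of the request URLs it produces (server URL, jobs path, job URI and context name are placeholders):

const serverUrl = 'https://sas.example.com'
const jobsPath = '/SASJobExecution'
const program = '/Public/app/services/common/appinit'

// Base form, used as-is for SAS9:
let apiUrl = `${serverUrl}${jobsPath}/?_program=${program}`

// On Viya, the job URI and (optionally) the compute context name are appended:
const jobUri = '/jobDefinitions/definitions/abc123'
const contextName = 'SAS Job Execution compute context'
apiUrl += jobUri.length > 0 ? '&_job=' + jobUri : ''
apiUrl += contextName ? `&_contextname=${contextName}` : ''
// => https://sas.example.com/SASJobExecution/?_program=...&_job=...&_contextname=...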


@@ -1,4 +1,5 @@
export * from './ComputeJobExecutor'
export * from './JesJobExecutor'
export * from './JobExecutor'
export * from './Sas9JobExecutor'
export * from './WebJobExecutor'


@@ -10,6 +10,8 @@ import {
} from '../types/errors'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError'
import { getValidJson } from '../utils'
export interface HttpClient {
get<T>(
@@ -41,14 +43,15 @@ export interface HttpClient {
getCsrfToken(type: 'general' | 'file'): CsrfToken | undefined
clearCsrfTokens(): void
getBaseUrl(): string
}
export class RequestClient implements HttpClient {
private csrfToken: CsrfToken = { headerName: '', value: '' }
private fileUploadCsrfToken: CsrfToken | undefined
private httpClient: AxiosInstance
protected csrfToken: CsrfToken = { headerName: '', value: '' }
protected fileUploadCsrfToken: CsrfToken | undefined
protected httpClient: AxiosInstance
constructor(private baseUrl: string, allowInsecure = false) {
constructor(protected baseUrl: string, allowInsecure = false) {
const https = require('https')
if (allowInsecure && https.Agent) {
this.httpClient = axios.create({
@@ -62,6 +65,9 @@ export class RequestClient implements HttpClient {
baseURL: baseUrl
})
}
this.httpClient.defaults.validateStatus = (status) =>
status >= 200 && status < 305
}
public getCsrfToken(type: 'general' | 'file' = 'general') {
@@ -73,13 +79,17 @@ export class RequestClient implements HttpClient {
this.fileUploadCsrfToken = { headerName: '', value: '' }
}
public getBaseUrl() {
return this.httpClient.defaults.baseURL || ''
}
public async get<T>(
url: string,
accessToken: string | undefined,
contentType: string = 'application/json',
overrideHeaders: { [key: string]: string | number } = {},
debug: boolean = false
): Promise<{ result: T; etag: string }> {
): Promise<{ result: T; etag: string; status: number }> {
const headers = {
...this.getHeaders(accessToken, contentType),
...overrideHeaders
@@ -286,11 +296,12 @@ export class RequestClient implements HttpClient {
})
.then((res) => res.data)
.catch((error) => {
console.log(error)
const logger = process.logger || console
logger.error(error)
})
}
private getHeaders = (
protected getHeaders = (
accessToken: string | undefined,
contentType: string
) => {
@@ -315,7 +326,7 @@ export class RequestClient implements HttpClient {
return headers
}
private parseAndSetFileUploadCsrfToken = (response: AxiosResponse) => {
protected parseAndSetFileUploadCsrfToken = (response: AxiosResponse) => {
const token = this.parseCsrfToken(response)
if (token) {
@@ -323,7 +334,7 @@ export class RequestClient implements HttpClient {
}
}
private parseAndSetCsrfToken = (response: AxiosResponse) => {
protected parseAndSetCsrfToken = (response: AxiosResponse) => {
const token = this.parseCsrfToken(response)
if (token) {
@@ -347,7 +358,7 @@ export class RequestClient implements HttpClient {
}
}
private handleError = async (
protected handleError = async (
e: any,
callback: any,
debug: boolean = false
@@ -405,7 +416,7 @@ export class RequestClient implements HttpClient {
throw e
}
private parseResponse<T>(response: AxiosResponse<any>) {
protected parseResponse<T>(response: AxiosResponse<any>) {
const etag = response?.headers ? response.headers['etag'] : ''
let parsedResponse
let includeSAS9Log: boolean = false
@@ -418,7 +429,13 @@ export class RequestClient implements HttpClient {
}
} catch {
try {
parsedResponse = JSON.parse(parseWeboutResponse(response.data))
const weboutResponse = parseWeboutResponse(response.data)
if (weboutResponse === '') {
throw new Error('Valid JSON could not be extracted from response.')
}
const jsonResponse = getValidJson(weboutResponse)
parsedResponse = jsonResponse
} catch {
parsedResponse = response.data
}
@@ -426,9 +443,15 @@ export class RequestClient implements HttpClient {
includeSAS9Log = true
}
let responseToReturn: { result: T; etag: any; log?: string } = {
let responseToReturn: {
result: T
etag: any
log?: string
status: number
} = {
result: parsedResponse as T,
etag
etag,
status: response.status
}
if (includeSAS9Log) {
@@ -439,7 +462,7 @@ export class RequestClient implements HttpClient {
}
}
const throwIfError = (response: AxiosResponse) => {
export const throwIfError = (response: AxiosResponse) => {
if (response.status === 401) {
throw new LoginRequiredError()
}
@@ -470,6 +493,10 @@ const throwIfError = (response: AxiosResponse) => {
throw new AuthorizeError(response.data.message, authorizeRequestUrl)
}
if (response.config?.url?.includes('sasAuthError')) {
throw new SAS9AuthError()
}
const error = parseError(response.data as string)
if (error) {


@@ -0,0 +1,121 @@
import { AxiosRequestConfig } from 'axios'
import axiosCookieJarSupport from 'axios-cookiejar-support'
import * as tough from 'tough-cookie'
import { prefixMessage } from '@sasjs/utils/error'
import { RequestClient, throwIfError } from './RequestClient'
/**
* Specific request client for SAS9 in Node.js environments.
* Handles redirects and cookie management.
*/
export class Sas9RequestClient extends RequestClient {
constructor(baseUrl: string, allowInsecure = false) {
super(baseUrl, allowInsecure)
this.httpClient.defaults.maxRedirects = 0
this.httpClient.defaults.validateStatus = (status) =>
status >= 200 && status < 303
if (axiosCookieJarSupport) {
axiosCookieJarSupport(this.httpClient)
this.httpClient.defaults.jar = new tough.CookieJar()
}
}
public async login(username: string, password: string, jobsPath: string) {
const codeInjectorPath = `/User Folders/${username}/My Folder/sasjs/runner`
if (this.httpClient.defaults.jar) {
;(this.httpClient.defaults.jar as tough.CookieJar).removeAllCookies()
await this.get(
`${jobsPath}?_program=${codeInjectorPath}&_username=${username}&_password=${password}`,
undefined,
'text/plain'
)
}
}
public async get<T>(
url: string,
accessToken: string | undefined,
contentType: string = 'application/json',
overrideHeaders: { [key: string]: string | number } = {},
debug: boolean = false
): Promise<{ result: T; etag: string; status: number }> {
const headers = {
...this.getHeaders(accessToken, contentType),
...overrideHeaders
}
const requestConfig: AxiosRequestConfig = {
headers,
responseType: contentType === 'text/plain' ? 'text' : 'json',
withCredentials: true
}
if (contentType === 'text/plain') {
requestConfig.transformResponse = undefined
}
return this.httpClient
.get<T>(url, requestConfig)
.then((response) => {
if (response.status === 302) {
return this.get(
response.headers['location'],
accessToken,
contentType
)
}
throwIfError(response)
return this.parseResponse<T>(response)
})
.catch(async (e) => {
return await this.handleError(
e,
() =>
this.get<T>(url, accessToken, contentType, overrideHeaders).catch(
(err) => {
throw prefixMessage(
err,
'Error while executing handle error callback. '
)
}
),
debug
).catch((err) => {
throw prefixMessage(err, 'Error while handling error. ')
})
})
}
public post<T>(
url: string,
data: any,
accessToken: string | undefined,
contentType = 'application/json',
overrideHeaders: { [key: string]: string | number } = {}
): Promise<{ result: T; etag: string }> {
const headers = {
...this.getHeaders(accessToken, contentType),
...overrideHeaders
}
return this.httpClient
.post<T>(url, data, { headers, withCredentials: true })
.then(async (response) => {
if (response.status === 302) {
return await this.get(
response.headers['location'],
undefined,
contentType,
overrideHeaders
)
}
throwIfError(response)
return this.parseResponse<T>(response)
})
.catch(async (e) => {
return await this.handleError(e, () =>
this.post<T>(url, data, accessToken, contentType, overrideHeaders)
)
})
}
}
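
A hedged usage sketch for the new client (run inside an async function; the server URL, credentials and jobs path are placeholders):

import { Sas9RequestClient } from './Sas9RequestClient'

const client = new Sas9RequestClient('https://sas9.example.com', false)

// login() clears the cookie jar and primes it by requesting the
// code-injector runner under the user's home folder, as defined above.
await client.login('myuser', 'mypassword', '/SASStoredProcess/do')

// Subsequent requests reuse the cookies and follow 302 redirects manually.
const { result, status } = await client.get<string>(
  '/SASStoredProcess/do?_program=/Public/app/services/common/appinit',
  undefined,
  'text/plain'
)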


@@ -1,5 +1,9 @@
/**
* @jest-environment jsdom
*/
import { FileUploader } from '../FileUploader'
import { UploadFile } from '../types'
import { SASjsConfig, UploadFile } from '../types'
import { RequestClient } from '../request/RequestClient'
import axios from 'axios'
jest.mock('axios')
@@ -28,48 +32,51 @@ const prepareFilesAndParams = () => {
}
describe('FileUploader', () => {
const config: SASjsConfig = {
...new SASjsConfig(),
appLoc: '/sample/apploc'
}
const fileUploader = new FileUploader(
'/sample/apploc',
'https://sample.server.com',
config,
'/jobs/path',
new RequestClient('https://sample.server.com')
)
it('should upload successfully', async (done) => {
it('should upload successfully', async () => {
const sasJob = 'test/upload'
const { files, params } = prepareFilesAndParams()
mockedAxios.post.mockImplementation(() =>
Promise.resolve({ data: sampleResponse })
)
fileUploader.uploadFile(sasJob, files, params).then((res: any) => {
expect(res).toEqual(JSON.parse(sampleResponse))
done()
})
const res = await fileUploader.uploadFile(sasJob, files, params)
expect(res).toEqual(JSON.parse(sampleResponse))
})
it('should throw an error when no files are provided', async (done) => {
it('should throw an error when no files are provided', async () => {
const sasJob = 'test/upload'
const files: UploadFile[] = []
const params = { table: 'libtable' }
fileUploader.uploadFile(sasJob, files, params).catch((err: any) => {
expect(err.error.message).toEqual('At least one file must be provided.')
done()
})
const err = await fileUploader
.uploadFile(sasJob, files, params)
.catch((err: any) => err)
expect(err.error.message).toEqual('At least one file must be provided.')
})
it('should throw an error when no sasJob is provided', async (done) => {
it('should throw an error when no sasJob is provided', async () => {
const sasJob = ''
const { files, params } = prepareFilesAndParams()
fileUploader.uploadFile(sasJob, files, params).catch((err: any) => {
expect(err.error.message).toEqual('sasJob must be provided.')
done()
})
const err = await fileUploader
.uploadFile(sasJob, files, params)
.catch((err: any) => err)
expect(err.error.message).toEqual('sasJob must be provided.')
})
it('should throw an error when login is required', async (done) => {
it('should throw an error when login is required', async () => {
mockedAxios.post.mockImplementation(() =>
Promise.resolve({ data: '<form action="Logon">' })
)
@@ -77,15 +84,13 @@ describe('FileUploader', () => {
const sasJob = 'test'
const { files, params } = prepareFilesAndParams()
fileUploader.uploadFile(sasJob, files, params).catch((err: any) => {
expect(err.error.message).toEqual(
'You must be logged in to upload a file.'
)
done()
})
const err = await fileUploader
.uploadFile(sasJob, files, params)
.catch((err: any) => err)
expect(err.error.message).toEqual('You must be logged in to upload a file.')
})
it('should throw an error when invalid JSON is returned by the server', async (done) => {
it('should throw an error when invalid JSON is returned by the server', async () => {
mockedAxios.post.mockImplementation(() =>
Promise.resolve({ data: '{invalid: "json"' })
)
@@ -93,13 +98,13 @@ describe('FileUploader', () => {
const sasJob = 'test'
const { files, params } = prepareFilesAndParams()
fileUploader.uploadFile(sasJob, files, params).catch((err: any) => {
expect(err.error.message).toEqual('File upload request failed.')
done()
})
const err = await fileUploader
.uploadFile(sasJob, files, params)
.catch((err: any) => err)
expect(err.error.message).toEqual('File upload request failed.')
})
it('should throw an error when the server request fails', async (done) => {
it('should throw an error when the server request fails', async () => {
mockedAxios.post.mockImplementation(() =>
Promise.reject({ data: '{message: "Server error"}' })
)
@@ -107,10 +112,9 @@ describe('FileUploader', () => {
const sasJob = 'test'
const { files, params } = prepareFilesAndParams()
fileUploader.uploadFile(sasJob, files, params).catch((err: any) => {
expect(err.error.message).toEqual('File upload request failed.')
done()
})
const err = await fileUploader
.uploadFile(sasJob, files, params)
.catch((err: any) => err)
expect(err.error.message).toEqual('File upload request failed.')
})
})


@@ -14,7 +14,7 @@ describe('FolderOperations', () => {
beforeEach(() => {})
it('should move and rename folder', async (done) => {
it('should move and rename folder', async () => {
mockFetchResponse(false)
let res: any = await sasViyaApiClient.moveFolder(
@@ -26,11 +26,9 @@ describe('FolderOperations', () => {
expect(res.folder.name).toEqual('newName')
expect(res.folder.parentFolderUri.split('=')[1]).toEqual('/Test/toFolder')
done()
})
it('should move and keep the name of folder', async (done) => {
it('should move and keep the name of folder', async () => {
mockFetchResponse(true)
let res: any = await sasViyaApiClient.moveFolder(
@@ -42,11 +40,9 @@ describe('FolderOperations', () => {
expect(res.folder.name).toEqual('oldName')
expect(res.folder.parentFolderUri.split('=')[1]).toEqual('/Test/toFolder')
done()
})
it('should only rename folder', async (done) => {
it('should only rename folder', async () => {
mockFetchResponse(false)
let res: any = await sasViyaApiClient.moveFolder(
@@ -58,8 +54,6 @@ describe('FolderOperations', () => {
expect(res.folder.name).toEqual('newName')
expect(res.folder.parentFolderUri.split('=')[1]).toEqual('/Test/toFolder')
done()
})
})


@@ -1,7 +1,9 @@
import { SessionManager } from '../SessionManager'
import * as dotenv from 'dotenv'
import { RequestClient } from '../request/RequestClient'
import { NoSessionStateError } from '../types/errors'
import * as dotenv from 'dotenv'
import axios from 'axios'
jest.mock('axios')
const mockedAxios = axios as jest.Mocked<typeof axios>
@@ -43,4 +45,38 @@ describe('SessionManager', () => {
).resolves.toEqual(expectedResponse)
})
})
describe('waitForSession', () => {
it('should reject with NoSessionStateError if SAS server did not provide session state', async () => {
const responseStatus = 304
mockedAxios.get.mockImplementation(() =>
Promise.resolve({ data: '', status: responseStatus })
)
await expect(
sessionManager['waitForSession'](
{
id: 'id',
state: '',
links: [
{ rel: 'state', href: '', uri: '', type: '', method: 'GET' }
],
attributes: {
sessionInactiveTimeout: 0
},
creationTimeStamp: ''
},
null,
'access_token'
)
).rejects.toEqual(
new NoSessionStateError(
responseStatus,
process.env.SERVER_URL as string,
'logUrl'
)
)
})
})
})


@@ -0,0 +1,41 @@
import { getValidJson } from '../../utils'
describe('jsonValidator', () => {
it('should not throw an error with a valid json', () => {
const json = {
test: 'test'
}
expect(getValidJson(json)).toBe(json)
})
it('should not throw an error with a valid json string', () => {
const json = {
test: 'test'
}
expect(getValidJson(JSON.stringify(json))).toStrictEqual(json)
})
it('should throw an error with an invalid json', () => {
const json = `{\"test\":\"test\"\"test2\":\"test\"}`
let errorThrown = false
try {
getValidJson(json)
} catch (error) {
errorThrown = true
}
expect(errorThrown).toBe(true)
})
it('should throw an error when an array is passed', () => {
const array = ['hello', 'world']
let errorThrown = false
try {
getValidJson(array)
} catch (error) {
errorThrown = true
}
expect(errorThrown).toBe(true)
})
})


@@ -1,6 +1,6 @@
import { parseGeneratedCode } from '../../utils/index'
it('should parse generated code', async (done) => {
it('should parse generated code', () => {
expect(sampleResponse).toBeTruthy()
const parsedGeneratedCode = parseGeneratedCode(sampleResponse)
@@ -15,8 +15,6 @@ it('should parse generated code', async (done) => {
expect(generatedCodeLines[2].startsWith('MPRINT(MM_WEBOUT)')).toBeTruthy()
expect(generatedCodeLines[3].startsWith('MPRINT(MM_WEBRIGHT)')).toBeTruthy()
expect(generatedCodeLines[4].startsWith('MPRINT(MM_WEBOUT)')).toBeTruthy()
done()
})
/* tslint:disable */


@@ -1,6 +1,6 @@
import { parseSourceCode } from '../../utils/index'
it('should parse SAS9 source code', async (done) => {
it('should parse SAS9 source code', async () => {
expect(sampleResponse).toBeTruthy()
const parsedSourceCode = parseSourceCode(sampleResponse)
@@ -15,8 +15,6 @@ it('should parse SAS9 source code', async (done) => {
expect(sourceCodeLines[2].startsWith('8')).toBeTruthy()
expect(sourceCodeLines[3].startsWith('9')).toBeTruthy()
expect(sourceCodeLines[4].startsWith('10')).toBeTruthy()
done()
})
/* tslint:disable */

src/types/File.ts (new file, 8 lines)

@@ -0,0 +1,8 @@
import { Link } from './Link'
export interface File {
id: string
name: string
parentUri: string
links: Link[]
}


@@ -1,4 +1,6 @@
export interface PollOptions {
MAX_POLL_COUNT?: number
POLL_INTERVAL?: number
maxPollCount: number
pollInterval: number
streamLog: boolean
logFolderPath?: string
}
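
With the optional upper-case fields replaced by required camelCase ones, a fully specified object looks like this (the values are illustrative, not the adapter's built-in defaults):

import { PollOptions } from './PollOptions'

const pollOptions: PollOptions = {
  maxPollCount: 1000,              // give up after this many state checks
  pollInterval: 1000,              // milliseconds between checks
  streamLog: true,                 // stream the job log while polling
  logFolderPath: '/tmp/sasjs-logs' // optional; where streamed logs are written
}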

src/types/Process.d.ts (new vendored file, 5 lines)

@@ -0,0 +1,5 @@
declare namespace NodeJS {
export interface Process {
logger?: import('@sasjs/utils/logger').Logger
}
}
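
This declaration merge is what lets library code such as the RequestClient change above fall back from an attached logger to the console. A minimal sketch of the pattern (the Logger construction details are assumed from @sasjs/utils and may differ):

import { Logger, LogLevel } from '@sasjs/utils/logger'

// Optionally attach a logger once, e.g. at application startup (illustrative).
process.logger = new Logger(LogLevel.Info)

// Library code then prefers the attached logger and falls back to console.
const logger = process.logger || console
logger.error('Something went wrong')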


@@ -40,23 +40,19 @@ export class SASjsConfig {
*/
debug: boolean = true
/**
* The name of the compute context to use when calling the Viya APIs directly.
* The name of the compute context to use when calling the Viya services directly.
* Example value: 'SAS Job Execution compute context'
* If set to missing or empty, and useComputeApi is true, the adapter will use
* the JES APIs. If provided, the Job Code will be executed in pooled
* compute sessions on this named context.
*/
contextName: string = ''
/**
* Set to `false` to use the Job Execution Web Service. To enhance VIYA
* If it is `false`, the adapter will use the JES API as the connection approach. To enhance VIYA
* performance, set to `true` and provide a `contextName` on which to run
* the code. When running on a named context, the code executes under the
* user identity. When running as a Job Execution service, the code runs
* under the identity in the JES context. If no `contextName` is provided,
* and `useComputeApi` is `true`, then the service will run as a Job, except
* under the identity in the JES context. If `useComputeApi` is `null` or `undefined`, the service will run as a Job, except
* triggered using the APIs instead of the Job Execution Web Service broker.
*/
useComputeApi = false
useComputeApi: boolean | null = null
/**
* Defaults to `false`.
* When set to `true`, the adapter will allow requests to SAS servers that use a self-signed SSL certificate.
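Taken together, a hedged configuration sketch for the compute-context path described above (server URL and appLoc are placeholders; unspecified fields keep their defaults):

import SASjs from '@sasjs/adapter'

const sasjs = new SASjs({
  serverUrl: 'https://viya.example.com',
  appLoc: '/Public/app',
  // Run job code in pooled compute sessions under the user's identity:
  useComputeApi: true,
  contextName: 'SAS Job Execution compute context',
  // Leaving useComputeApi as null (the new default) keeps the JES/web behaviour.
  debug: false
})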

src/types/WriteStream.ts (new file, 4 lines)

@@ -0,0 +1,4 @@
export interface WriteStream {
write: (content: string, callback: (err?: Error) => any) => void
path: string
}
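
The interface mirrors only the slice of Node's fs.WriteStream that the adapter needs, so browser builds compile without importing fs. A minimal in-memory stand-in that satisfies it (for tests or non-Node fallbacks):

import { WriteStream } from './WriteStream'

class MemoryWriteStream implements WriteStream {
  public path = 'in-memory.log'
  private contents = ''

  write(content: string, callback: (err?: Error) => any) {
    this.contents += content
    callback()
  }
}

// In Node.js, the stream returned by fs.createWriteStream() exposes matching
// write and path members, so it can generally be used wherever a WriteStream is expected.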


@@ -0,0 +1,11 @@
export class JobStatePollError extends Error {
constructor(id: string, public originalError: Error) {
super(
`Error while polling job state for job ${id}: ${
originalError.message || originalError
}`
)
this.name = 'JobStatePollError'
Object.setPrototypeOf(this, JobStatePollError.prototype)
}
}


@@ -0,0 +1,15 @@
export class NoSessionStateError extends Error {
constructor(
public serverResponseStatus: number,
public sessionStateUrl: string,
public logUrl: string
) {
super(
`Could not get session state. Server responded with ${serverResponseStatus} whilst checking state: ${sessionStateUrl}`
)
this.name = 'NoSessionStateError'
Object.setPrototypeOf(this, NoSessionStateError.prototype)
}
}


@@ -0,0 +1,40 @@
import { RootFolderNotFoundError } from './RootFolderNotFoundError'
describe('RootFolderNotFoundError', () => {
it('when access token is provided, error message should contain the scopes in the token', () => {
const token =
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzY29wZSI6WyJzY29wZS0xIiwic2NvcGUtMiJdfQ.ktqPL2ulln-8Asa2jSV9QCfDYmQuNk4tNKopxJR5xZs'
const error = new RootFolderNotFoundError(
'/myProject',
'https://analytium.co.uk',
token
)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
expect(error.message).toContain('scope-1')
expect(error.message).toContain('scope-2')
})
it('when access token is not provided, error message should not contain scopes', () => {
const error = new RootFolderNotFoundError(
'/myProject',
'https://analytium.co.uk'
)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
expect(error.message).not.toContain(
'Your access token contains the following scopes'
)
})
it('should include the folder path and SASDrive URL in the message', () => {
const folderPath = '/myProject'
const serverUrl = 'https://analytium.co.uk'
const error = new RootFolderNotFoundError(folderPath, serverUrl)
expect(error).toBeInstanceOf(RootFolderNotFoundError)
expect(error.message).toContain(folderPath)
expect(error.message).toContain(`${serverUrl}/SASDrive`)
})
})


@@ -0,0 +1,24 @@
import { decodeToken } from '@sasjs/utils/auth'
export class RootFolderNotFoundError extends Error {
constructor(
parentFolderPath: string,
serverUrl: string,
accessToken?: string
) {
let message: string =
`Root folder ${parentFolderPath} was not found.` +
`\nPlease check ${serverUrl}/SASDrive.` +
`\nIf the folder DOES exist then it is likely a permission problem.\n`
if (accessToken) {
const decodedToken = decodeToken(accessToken)
let scope = decodedToken.scope
scope = scope.map((element) => '* ' + element)
message +=
`Your access token contains the following scopes:\n` + scope.join('\n')
}
super(message)
this.name = 'RootFolderNotFoundError'
Object.setPrototypeOf(this, RootFolderNotFoundError.prototype)
}
}
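
A usage sketch (folder path, server URL and token are placeholders; the scope listing in the message comes from decoding the token, as exercised by the spec above):

import { RootFolderNotFoundError } from './RootFolderNotFoundError'

const accessToken: string | undefined = process.env.ACCESS_TOKEN
const rootFolderExists = false // hypothetical result of looking up the root folder

if (!rootFolderExists) {
  // With a token, the message also lists the token's scopes to help diagnose
  // permission problems; without one, the scope hint is omitted.
  throw new RootFolderNotFoundError(
    '/myProject',
    'https://viya.example.com',
    accessToken
  )
}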


@@ -0,0 +1,9 @@
export class SAS9AuthError extends Error {
constructor() {
super(
'The credentials you provided cannot be authenticated. Please provide a valid set of credentials.'
)
this.name = 'AuthorizeError'
Object.setPrototypeOf(this, SAS9AuthError.prototype)
}
}


@@ -2,6 +2,9 @@ export * from './AuthorizeError'
export * from './ComputeJobExecutionError'
export * from './InternalServerError'
export * from './JobExecutionError'
export * from './JobStatePollError'
export * from './LoginRequiredError'
export * from './NotFoundError'
export * from './ErrorResponse'
export * from './NoSessionStateError'
export * from './RootFolderNotFoundError'


@@ -1,6 +1,7 @@
export * from './Context'
export * from './CsrfToken'
export * from './Folder'
export * from './File'
export * from './Job'
export * from './JobDefinition'
export * from './JobResult'
@@ -10,3 +11,4 @@ export * from './SASjsRequest'
export * from './Session'
export * from './UploadFile'
export * from './PollOptions'
export * from './WriteStream'


@@ -15,15 +15,35 @@ export const fetchLogByChunks = async (
logUrl: string,
logCount: number
): Promise<string> => {
return await fetchLog(requestClient, accessToken, logUrl, 0, logCount)
}
/**
* Fetches a section of the log file delineated by start and end lines
* @param {object} requestClient - the pre-configured request client.
* @param {string} accessToken - an access token for an authorized user.
* @param {string} logUrl - url of the log file.
* @param {number} start - the line at which to start fetching the log.
* @param {number} end - the line at which to stop fetching the log.
* @returns a string containing the requested log lines.
*/
export const fetchLog = async (
requestClient: RequestClient,
accessToken: string,
logUrl: string,
start: number,
end: number
): Promise<string> => {
const logger = process.logger || console
let log: string = ''
const loglimit = logCount < 10000 ? logCount : 10000
let start = 0
const loglimit = end < 10000 ? end : 10000
do {
console.log(
logger.info(
`Fetching logs from line no: ${start + 1} to ${
start + loglimit
} of ${logCount}.`
} of ${end}.`
)
const logChunkJson = await requestClient!
.get<any>(`${logUrl}?start=${start}&limit=${loglimit}`, accessToken)
@@ -38,6 +58,6 @@ export const fetchLogByChunks = async (
log += logChunk
start += loglimit
} while (start < logCount)
} while (start < end)
return log
}
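
A sketch of both entry points (run inside an async function; the log URL, token and line counts are placeholders):

import { RequestClient } from '../request/RequestClient'
import { fetchLog, fetchLogByChunks } from './fetchLogByChunks'

const requestClient = new RequestClient('https://viya.example.com')
const logUrl = '/compute/sessions/abc123/jobs/def456/log/content'

// Fetch the whole log; internally requested in chunks of at most 10000 lines.
const fullLog = await fetchLogByChunks(requestClient, 'access_token', logUrl, 25000)

// Or fetch only lines 10000 to 20000.
const slice = await fetchLog(requestClient, 'access_token', logUrl, 10000, 20000)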

src/utils/getValidJson.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
/**
* If a string is passed, parse it to JSON; return objects as-is, and throw an error for arrays or anything that is not valid JSON.
* @param str - string to check.
*/
export const getValidJson = (str: string | object) => {
try {
if (Array.isArray(str)) {
throw new Error('Can not parse array object to json.')
}
if (typeof str === 'object') return str
return JSON.parse(str)
} catch (e) {
throw new Error('Invalid JSON response.')
}
}
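
The behaviour, matching the spec above:

import { getValidJson } from './getValidJson'

getValidJson({ test: 'test' })                  // returns the object unchanged
getValidJson('{"test":"test"}')                 // returns { test: 'test' }
getValidJson(['hello', 'world'])                // throws 'Invalid JSON response.'
getValidJson('{"test":"test""test2":"test"}')   // throws 'Invalid JSON response.'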


@@ -1,6 +1,7 @@
export * from './asyncForEach'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './isNode'
export * from './isRelativePath'
export * from './isUri'
export * from './isUrl'
@@ -12,3 +13,5 @@ export * from './serialize'
export * from './splitChunks'
export * from './parseWeboutResponse'
export * from './fetchLogByChunks'
export * from './getValidJson'
export * from './parseViyaDebugResponse'

src/utils/isNode.ts (new file, 4 lines)

@@ -0,0 +1,4 @@
export const isNode = () =>
typeof process !== 'undefined' &&
process.versions != null &&
process.versions.node != null
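
The guard keeps file-system work out of browser bundles (where the webpack config below stubs fs to false). A minimal sketch of the pattern:

import { isNode } from './isNode'

let logStream
if (isNode()) {
  // Lazily load fs so browser bundles never evaluate it.
  const fs = require('fs')
  logStream = fs.createWriteStream('job.log')
}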


@@ -0,0 +1,29 @@
import { RequestClient } from '../request/RequestClient'
/**
* When querying a Viya job using the Web approach (as opposed to using the APIs) with _DEBUG enabled,
* the first response contains the log with the content in an iframe. Therefore when debug is enabled,
* and the serverType is VIYA, and useComputeApi is null (WEB), we call this function to extract the
* (_webout) content from the iframe.
* @param response - first response from viya job
* @param requestClient
* @param serverUrl
* @returns
*/
export const parseSasViyaDebugResponse = async (
response: string,
requestClient: RequestClient,
serverUrl: string
) => {
const iframeStart = response.split(
'<iframe style="width: 99%; height: 500px" src="'
)[1]
const jsonUrl = iframeStart ? iframeStart.split('"></iframe>')[0] : null
if (!jsonUrl) {
throw new Error('Unable to find webout file URL.')
}
return requestClient
.get(serverUrl + jsonUrl, undefined)
.then((res) => res.result)
}
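
For reference, a sketch of the input this parser expects and how it is called (run inside an async function; the iframe URL and server URL are placeholders):

import { RequestClient } from '../request/RequestClient'
import { parseSasViyaDebugResponse } from './parseViyaDebugResponse'

const requestClient = new RequestClient('https://viya.example.com')

// The debug response wraps the webout content in an iframe:
const debugHtml =
  '...<iframe style="width: 99%; height: 500px" src="/files/files/abc123/content"></iframe>...'

// The iframe src is extracted and fetched from `${serverUrl}${jsonUrl}`,
// resolving with the _webout payload.
const webout = await parseSasViyaDebugResponse(
  debugHtml,
  requestClient,
  'https://viya.example.com'
)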


@@ -9,5 +9,5 @@
"sourceMap": true
},
"include": ["src"],
"exclude": ["node_modules", "**/*.spec.ts"]
"exclude": ["node_modules"]
}


@@ -1,21 +1,32 @@
const path = require('path')
const webpack = require('webpack')
const terserPlugin = require('terser-webpack-plugin')
const nodePolyfillPlugin = require('node-polyfill-webpack-plugin')
const defaultPlugins = [
new webpack.ContextReplacementPlugin(/moment[\/\\]locale$/, /en/),
new webpack.SourceMapDevToolPlugin({
filename: null,
exclude: [/node_modules/],
test: /\.ts($|\?)/i
})
]
const optimization = {
minimize: true,
minimizer: [
new terserPlugin({
parallel: true,
terserOptions: {}
})
]
}
const browserConfig = {
entry: './src/index.ts',
devtool: 'inline-source-map',
mode: 'production',
optimization: {
minimizer: [
new terserPlugin({
cache: true,
parallel: true,
sourceMap: true,
terserOptions: {}
})
]
},
optimization: optimization,
module: {
rules: [
{
@@ -27,7 +38,7 @@ const browserConfig = {
},
resolve: {
extensions: ['.ts', '.js'],
fallback: { https: false }
fallback: { https: false, fs: false, readline: false }
},
output: {
filename: 'index.js',
@@ -36,17 +47,27 @@ const browserConfig = {
library: 'SASjs'
},
plugins: [
new webpack.ContextReplacementPlugin(/moment[\/\\]locale$/, /en/),
new webpack.SourceMapDevToolPlugin({
filename: null,
exclude: [/node_modules/],
test: /\.ts($|\?)/i
})
...defaultPlugins,
new webpack.ProvidePlugin({
process: 'process/browser'
}),
new nodePolyfillPlugin()
]
}
const browserConfigWithoutProcessPlugin = {
entry: browserConfig.entry,
devtool: browserConfig.devtool,
mode: browserConfig.mode,
optimization: optimization,
module: browserConfig.module,
resolve: browserConfig.resolve,
output: browserConfig.output,
plugins: defaultPlugins
}
const nodeConfig = {
...browserConfig,
...browserConfigWithoutProcessPlugin,
target: 'node',
entry: './node/index.ts',
output: {