Mirror of https://github.com/sasjs/adapter.git, synced 2026-01-02 10:10:06 +00:00
Compare commits
17 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 76487b00e9 | |
| | 2d0515e25b | |
| | b132b99586 | |
| | 5a7b4a1de4 | |
| | 6cac008b61 | |
| | 5a35237de5 | |
| | 5d77bbba8b | |
| | eda021b6a5 | |
| | 259c479ef0 | |
| | a962b8e7cf | |
| | eb0e7247a6 | |
| | ccc77cb9d1 | |
| | 5cb5bbdb55 | |
| | ac6cd7be82 | |
| | 63f5f4d03d | |
| | d8d4da9c9a | |
| | 93c9a34591 | |
package-lock.json (generated, 2299 lines changed)
File diff suppressed because it is too large
package.json (11 lines changed)
@@ -3,8 +3,8 @@
"description": "JavaScript adapter for SAS",
"homepage": "https://adapter.sasjs.io",
"scripts": {
"build": "rimraf build && rimraf node && mkdir node && cp -r src/* node && webpack && rimraf build/src && rimraf node",
"package:lib": "npm run build && cp ./package.json build && cd build && npm version \"5.0.0\" && npm pack",
"build": "rimraf build && rimraf node && mkdir node && copyfiles -u 1 \"./src/**/*\" ./node && webpack && rimraf build/src && rimraf node",
"package:lib": "npm run build && copyfiles ./package.json build && cd build && npm version \"5.0.0\" && npm pack",
"publish:lib": "npm run build && cd build && npm publish",
"lint:fix": "npx prettier --write \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --write \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
"lint": "npx prettier --check \"src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\" && npx prettier --check \"sasjs-tests/src/**/*.{ts,tsx,js,jsx,html,css,sass,less,json,yml,md,graphql}\"",
@@ -13,7 +13,7 @@
"postpublish": "git clean -fd",
"semantic-release": "semantic-release",
"typedoc": "typedoc",
"postinstall": "[ -d .git ] && git config core.hooksPath ./.git-hooks || true"
"prepare": "git rev-parse --git-dir && git config core.hooksPath ./.git-hooks && git config core.autocrlf false || true"
},
"publishConfig": {
"access": "public"
@@ -41,11 +41,13 @@
"@types/jest": "^26.0.23",
"@types/mime": "^2.0.3",
"@types/tough-cookie": "^4.0.0",
"copyfiles": "^2.4.1",
"cp": "^0.2.0",
"dotenv": "^10.0.0",
"jest": "^27.0.6",
"jest-extended": "^0.11.5",
"mime": "^2.5.2",
"node-polyfill-webpack-plugin": "^1.1.4",
"path": "^0.12.7",
"process": "^0.11.10",
"rimraf": "^3.0.2",
@@ -64,12 +66,11 @@
},
"main": "index.js",
"dependencies": {
"@sasjs/utils": "^2.21.0",
"@sasjs/utils": "^2.23.2",
"axios": "^0.21.1",
"axios-cookiejar-support": "^1.0.1",
"form-data": "^4.0.0",
"https": "^1.0.0",
"jwt-decode": "^3.1.2",
"tough-cookie": "^4.0.0",
"url": "^0.11.0"
}
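Not part of the diff: the build and packaging scripts swap the Unix-only `cp` command for the cross-platform `copyfiles` package, so the same npm scripts also run on Windows shells. A minimal sketch of what that copy step amounts to, using only Node's built-in API (directory names mirror the script above; this is illustrative, not the project's tooling):

```ts
// Roughly what `copyfiles -u 1 "./src/**/*" ./node` does: copy everything
// under src/ into node/, dropping the leading "src" path segment.
// Requires Node 16.7+ for fs.cpSync; illustrative sketch only.
import { cpSync } from 'fs'

cpSync('src', 'node', { recursive: true })
```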
@@ -3,9 +3,7 @@ import {
isRelativePath,
isUri,
isUrl,
fetchLogByChunks,
isAccessTokenExpiring,
isRefreshTokenExpiring
fetchLogByChunks
} from './utils'
import * as NodeFormData from 'form-data'
import {
@@ -27,11 +25,18 @@ import {
import { formatDataForRequest } from './utils/formatDataForRequest'
import { SessionManager } from './SessionManager'
import { ContextManager } from './ContextManager'
import { timestampToYYYYMMDDHHMMSS } from '@sasjs/utils/time'
import { Logger, LogLevel } from '@sasjs/utils/logger'
import {
timestampToYYYYMMDDHHMMSS,
isAccessTokenExpiring,
isRefreshTokenExpiring,
Logger,
LogLevel,
SasAuthResponse,
MacroVar,
AuthConfig
} from '@sasjs/utils'
import { isAuthorizeFormRequired } from './auth/isAuthorizeFormRequired'
import { RequestClient } from './request/RequestClient'
import { SasAuthResponse, MacroVar, AuthConfig } from '@sasjs/utils/types'
import { prefixMessage } from '@sasjs/utils/error'
import * as mime from 'mime'
@@ -290,16 +295,14 @@ export class SASViyaApiClient {
printPid = false,
variables?: MacroVar
): Promise<any> {
const { access_token } = authConfig || {}
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}

const logger = process.logger || console

try {
const headers: any = {
'Content-Type': 'application/json'
}

if (access_token) headers.Authorization = `Bearer ${access_token}`

let executionSessionId: string

const session = await this.sessionManager
@@ -443,6 +446,10 @@ export class SASViyaApiClient {
throw prefixMessage(err, 'Error while polling job status. ')
})

if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}

const { result: currentJob } = await this.requestClient
.get<Job>(
`/compute/sessions/${executionSessionId}/jobs/${postedJob.id}`,
@@ -886,7 +893,10 @@ export class SASViyaApiClient {
printPid = false,
variables?: MacroVar
) {
let { access_token } = authConfig || {}
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}

if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
@@ -913,12 +923,6 @@ export class SASViyaApiClient {
)
}

const headers: any = { 'Content-Type': 'application/json' }

if (!!access_token) {
headers.Authorization = `Bearer ${access_token}`
}

const jobToExecute = jobFolder?.find((item) => item.name === jobName)

if (!jobToExecute) {
@@ -985,7 +989,10 @@ export class SASViyaApiClient {
data?: any,
authConfig?: AuthConfig
) {
let { access_token } = authConfig || {}
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}
if (isRelativePath(sasJob) && !this.rootFolderName) {
throw new Error(
'Relative paths cannot be used without specifying a root folder name.'
@@ -1145,21 +1152,9 @@ export class SASViyaApiClient {
let POLL_INTERVAL = 300
let MAX_POLL_COUNT = 1000
let MAX_ERROR_COUNT = 5
let { access_token, refresh_token, client, secret } = authConfig || {}
if (access_token && refresh_token) {
if (
client &&
secret &&
refresh_token &&
(isAccessTokenExpiring(access_token) ||
isRefreshTokenExpiring(refresh_token))
) {
;({ access_token, refresh_token } = await this.refreshTokens(
client,
secret,
refresh_token
))
}
let access_token = (authConfig || {}).access_token
if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}

if (pollOptions) {
@@ -1213,20 +1208,8 @@ export class SASViyaApiClient {
postedJobState === 'pending' ||
postedJobState === 'unavailable'
) {
if (access_token && refresh_token) {
if (
client &&
secret &&
refresh_token &&
(isAccessTokenExpiring(access_token) ||
isRefreshTokenExpiring(refresh_token))
) {
;({ access_token, refresh_token } = await this.refreshTokens(
client,
secret,
refresh_token
))
}
if (authConfig) {
;({ access_token } = await this.getTokens(authConfig))
}

if (stateLink) {
@@ -1510,4 +1493,21 @@ export class SASViyaApiClient {

return movedFolder
}

private async getTokens(authConfig: AuthConfig): Promise<AuthConfig> {
const logger = process.logger || console
let { access_token, refresh_token, client, secret } = authConfig
if (
isAccessTokenExpiring(access_token) ||
isRefreshTokenExpiring(refresh_token)
) {
logger.info('Refreshing access and refresh tokens.')
;({ access_token, refresh_token } = await this.refreshTokens(
client,
secret,
refresh_token
))
}
return { access_token, refresh_token, client, secret }
}
}
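Not part of the diff: the new private getTokens method above replaces the repeated inline expiry checks being deleted earlier in this file. A minimal standalone sketch of the same pattern, with refreshTokens as a stand-in for the client's existing refresh call; AuthConfig and the expiry helpers come from @sasjs/utils as in the import hunk:

```ts
// Illustrative sketch of the consolidated token handling, not the adapter's
// actual method: refresh the token pair only when either token is near expiry.
import { AuthConfig, isAccessTokenExpiring, isRefreshTokenExpiring } from '@sasjs/utils'

type RefreshFn = (
  client: string,
  secret: string,
  refreshToken: string
) => Promise<Pick<AuthConfig, 'access_token' | 'refresh_token'>>

async function withFreshTokens(authConfig: AuthConfig, refreshTokens: RefreshFn): Promise<AuthConfig> {
  let { access_token, refresh_token, client, secret } = authConfig

  if (isAccessTokenExpiring(access_token) || isRefreshTokenExpiring(refresh_token)) {
    // Same shape as the diff: reassign both tokens from the refresh response.
    ;({ access_token, refresh_token } = await refreshTokens(client, secret, refresh_token))
  }

  return { access_token, refresh_token, client, secret }
}
```

Each execute method in the diff now calls this once up front (and again after long polling loops) instead of repeating the client/secret/expiry checks inline.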
@@ -6,10 +6,6 @@ import { RequestClient } from './request/RequestClient'
const MAX_SESSION_COUNT = 1
const RETRY_LIMIT: number = 3
let RETRY_COUNT: number = 0
const INTERNAL_SAS_ERROR = {
status: 304,
message: 'Not Modified'
}

export class SessionManager {
constructor(
@@ -164,7 +160,7 @@ export class SessionManager {

const stateLink = session.links.find((l: any) => l.rel === 'state')

return new Promise(async (resolve, _) => {
return new Promise(async (resolve, reject) => {
if (
sessionState === 'pending' ||
sessionState === 'running' ||
@@ -182,7 +178,7 @@ export class SessionManager {
etag!,
accessToken
).catch((err) => {
throw err
throw prefixMessage(err, 'Error while getting session state.')
})

sessionState = state.trim()
@@ -196,13 +192,14 @@ export class SessionManager {

// There is an internal error present in SAS Viya 3.5
// Retry to wait for a session status in such case of SAS internal error
if (
sessionState === INTERNAL_SAS_ERROR.message &&
RETRY_COUNT < RETRY_LIMIT
) {
RETRY_COUNT++
if (!sessionState) {
if (RETRY_COUNT < RETRY_LIMIT) {
RETRY_COUNT++

resolve(this.waitForSession(session, etag, accessToken))
resolve(this.waitForSession(session, etag, accessToken))
} else {
reject('Could not get session state.')
}
}

resolve(sessionState)
@@ -222,9 +219,6 @@ export class SessionManager {
.get(url, accessToken, 'text/plain', { 'If-None-Match': etag })
.then((res) => res.result as string)
.catch((err) => {
if (err.status === INTERNAL_SAS_ERROR.status)
return INTERNAL_SAS_ERROR.message

throw err
})
}
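Not part of the diff: the waitForSession change above drops the SAS Viya 3.5 304-specific workaround and instead retries whenever the session state comes back empty, rejecting once the retry limit is reached. A minimal sketch of that control flow (names here are stand-ins, not the adapter's API):

```ts
// Bounded retry on an empty state, then fail; otherwise return the state.
// fetchState stands in for getSessionState in the diff.
async function waitForState(
  fetchState: () => Promise<string>,
  retryLimit = 3,
  retryCount = 0
): Promise<string> {
  const state = (await fetchState()).trim()

  if (!state) {
    if (retryCount < retryLimit) {
      return waitForState(fetchState, retryLimit, retryCount + 1)
    }
    throw new Error('Could not get session state.')
  }

  return state
}
```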
@@ -8,8 +8,9 @@ import { generateFileUploadForm } from '../file/generateFileUploadForm'
import { generateTableUploadForm } from '../file/generateTableUploadForm'
import { RequestClient } from '../request/RequestClient'
import { SASViyaApiClient } from '../SASViyaApiClient'
import { isRelativePath } from '../utils'
import { isRelativePath, isValidJson } from '../utils'
import { BaseJobExecutor } from './JobExecutor'
import { parseWeboutResponse } from '../utils/parseWeboutResponse'

export interface WaitingRequstPromise {
promise: Promise<any> | null
@@ -100,6 +101,19 @@ export class WebJobExecutor extends BaseJobExecutor {
this.appendRequest(res, sasJob, config.debug)
resolve(jsonResponse)
}
if (this.serverType === ServerType.Sas9 && config.debug) {
const jsonResponse = parseWeboutResponse(res.result as string)
if (jsonResponse === '') {
throw new Error(
'Valid JSON could not be extracted from response.'
)
}

isValidJson(jsonResponse)
this.appendRequest(res, sasJob, config.debug)
resolve(res.result)
}
isValidJson(res.result as string)
this.appendRequest(res, sasJob, config.debug)
resolve(res.result)
})
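Not part of the diff: the new SAS9 debug branch above extracts the webout section first and only then validates it, so an empty or non-JSON extraction fails fast instead of being resolved. A consolidated sketch of that flow (standalone rewrite, not the executor's actual method):

```ts
// parseWeboutResponse and isValidJson are the helpers referenced in the diff:
// the first pulls the webout section out of a debug (HTML-wrapped) response,
// the second throws 'Invalid JSON response.' if the string does not parse.
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { isValidJson } from '../utils'

function checkSas9DebugResponse(rawResult: string): string {
  const jsonResponse = parseWeboutResponse(rawResult)

  if (jsonResponse === '') {
    throw new Error('Valid JSON could not be extracted from response.')
  }

  isValidJson(jsonResponse)

  // The executor resolves with the raw result once the extraction validates.
  return rawResult
}
```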
@@ -11,6 +11,7 @@ import {
import { parseWeboutResponse } from '../utils/parseWeboutResponse'
import { prefixMessage } from '@sasjs/utils/error'
import { SAS9AuthError } from '../types/errors/SAS9AuthError'
import { isValidJson } from '../utils'

export interface HttpClient {
get<T>(
@@ -63,6 +64,9 @@ export class RequestClient implements HttpClient {
baseURL: baseUrl
})
}

this.httpClient.defaults.validateStatus = (status) =>
status >= 200 && status < 305
}

public getCsrfToken(type: 'general' | 'file' = 'general') {
@@ -420,7 +424,13 @@ export class RequestClient implements HttpClient {
}
} catch {
try {
parsedResponse = JSON.parse(parseWeboutResponse(response.data))
const weboutResponse = parseWeboutResponse(response.data)
if (weboutResponse === '') {
throw new Error('Valid JSON could not be extracted from response.')
}

isValidJson(weboutResponse)
parsedResponse = JSON.parse(weboutResponse)
} catch {
parsedResponse = response.data
}
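Not part of the diff: the added validateStatus default above widens axios's definition of a successful response to include 304 Not Modified, the status the removed SessionManager workaround previously special-cased. A minimal sketch with a hypothetical base URL:

```ts
import axios from 'axios'

// Statuses 200 up to 304 resolve normally; 305 and above still reject.
const httpClient = axios.create({ baseURL: 'https://viya.example.com' }) // hypothetical server

httpClient.defaults.validateStatus = (status) => status >= 200 && status < 305
```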
@@ -1,35 +0,0 @@
import jwtDecode from 'jwt-decode'

/**
* Checks if the Access Token is expired or is expiring in 1 hour. A default Access Token
* lasts 12 hours. If the Access Token expires, the Refresh Token is used to fetch a new
* Access Token. In the case that the Refresh Token is expired, 1 hour is enough to let
* most jobs finish.
* @param {string} token- token string that will be evaluated
*/
export function isAccessTokenExpiring(token: string): boolean {
if (!token) {
return true
}
const payload = jwtDecode<{ exp: number }>(token)
const timeToLive = payload.exp - new Date().valueOf() / 1000

return timeToLive <= 60 * 60 // 1 hour
}

/**
* Checks if the Refresh Token is expired or expiring in 30 secs. A default Refresh Token
* lasts 30 days. Once the Refresh Token expires, the user must re-authenticate (provide
* credentials in a browser to obtain an authorisation code). 30 seconds is enough time
* to make a request for a final Access Token.
* @param {string} token- token string that will be evaluated
*/
export function isRefreshTokenExpiring(token?: string): boolean {
if (!token) {
return true
}
const payload = jwtDecode<{ exp: number }>(token)
const timeToLive = payload.exp - new Date().valueOf() / 1000

return timeToLive <= 30 // 30 seconds
}
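Not part of the diff: this local module is removed outright; per the SASViyaApiClient import hunk above, the same helpers are now consumed from the @sasjs/utils package. A minimal sketch of the replacement usage:

```ts
import { isAccessTokenExpiring, isRefreshTokenExpiring } from '@sasjs/utils'

declare const accessToken: string // placeholder JWT obtained elsewhere
declare const refreshToken: string // placeholder JWT obtained elsewhere

if (isAccessTokenExpiring(accessToken) || isRefreshTokenExpiring(refreshToken)) {
  // trigger a token refresh here, e.g. via the client's refreshTokens call
}
```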
@@ -1,5 +1,4 @@
export * from './asyncForEach'
export * from './auth'
export * from './compareTimestamps'
export * from './convertToCsv'
export * from './isRelativePath'
@@ -13,3 +12,4 @@ export * from './serialize'
export * from './splitChunks'
export * from './parseWeboutResponse'
export * from './fetchLogByChunks'
export * from './isValidJson'
src/utils/isValidJson.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
/**
* Checks if string is in valid JSON format else throw error.
* @param str - string to check.
*/
export const isValidJson = (str: string) => {
try {
JSON.parse(str)
} catch (e) {
throw new Error('Invalid JSON response.')
}
}
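Not part of the diff: a quick usage sketch. Note the helper throws rather than returning false, so callers wrap it in try/catch or let the error propagate.

```ts
import { isValidJson } from './isValidJson'

try {
  isValidJson('{"status": "ok"}') // passes silently
  isValidJson('not json') // throws Error('Invalid JSON response.')
} catch (e) {
  console.error(e)
}
```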
@@ -1,6 +1,7 @@
const path = require('path')
const webpack = require('webpack')
const terserPlugin = require('terser-webpack-plugin')
const nodePolyfillPlugin = require('node-polyfill-webpack-plugin')

const defaultPlugins = [
new webpack.ContextReplacementPlugin(/moment[\/\\]locale$/, /en/),
@@ -37,7 +38,7 @@ const browserConfig = {
},
resolve: {
extensions: ['.ts', '.js'],
fallback: { https: false }
fallback: { https: false, fs: false, readline: false }
},
output: {
filename: 'index.js',
@@ -49,7 +50,8 @@ const browserConfig = {
...defaultPlugins,
new webpack.ProvidePlugin({
process: 'process/browser'
})
}),
new nodePolyfillPlugin()
]
}
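Not part of the diff: webpack 5 no longer ships automatic polyfills for Node core modules, so the browser build stubs the unusable ones via resolve.fallback and lets node-polyfill-webpack-plugin cover the rest. A minimal sketch of that shape in a typed config (assumed setup, not the project's actual webpack.config.js):

```ts
// Assumes esModuleInterop; NodePolyfillPlugin is the plugin added in the diff.
import type { Configuration } from 'webpack'
import NodePolyfillPlugin from 'node-polyfill-webpack-plugin'

const browserConfig: Configuration = {
  resolve: {
    extensions: ['.ts', '.js'],
    // Core modules the browser bundle cannot use are disabled explicitly.
    fallback: { https: false, fs: false, readline: false }
  },
  plugins: [new NodePolyfillPlugin()]
}

export default browserConfig
```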