Mirror of https://github.com/sasjs/adapter.git, synced 2025-12-11 09:24:35 +00:00
Compare commits
25 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f1e1b33571 | |
| | ccb8599f00 | |
| | 5bcd17096b | |
| | d744ee12a3 | |
| | 5f15226cd9 | |
| | f31ea28b9c | |
| | e315e4a619 | |
| | 76bf5b88e9 | |
| | a97ac4eaa6 | |
| | 37cfea6ca7 | |
| | f74c8aca57 | |
| | 77baaabfcd | |
| | 510ba771f0 | |
| | 6fce65f4c8 | |
| | fe03faa59f | |
| | 6272eeda23 | |
| | 104d1b88b3 | |
| | 0d9ba36de8 | |
| | 4e7a845d99 | |
| | 716cc513ff | |
| | d166231c12 | |
| | 4cb150e951 | |
| | fc8598473f | |
| | 367e0ae25a | |
| | 85dde61baf | |
.github/workflows/generateDocs.yml (vendored, 2 changed lines)
@@ -11,7 +11,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]

     steps:
       - name: Checkout
.github/workflows/npmpublish.yml (vendored, 2 changed lines)
@@ -14,7 +14,7 @@ jobs:
     strategy:
      matrix:
-        node-version: [lts/fermium]
+        node-version: [lts/hydrogen]

     steps:
       - uses: actions/checkout@v2
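Both workflows move their Node build matrix from lts/fermium (Node 14) to lts/hydrogen (Node 18).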
package-lock.json (generated, 84 changed lines)
@@ -13,7 +13,7 @@
       "axios-cookiejar-support": "1.0.1",
       "form-data": "4.0.0",
       "https": "1.0.0",
-      "tough-cookie": "4.0.0"
+      "tough-cookie": "4.1.3"
     },
     "devDependencies": {
       "@cypress/webpack-preprocessor": "5.9.1",
@@ -21,7 +21,7 @@
       "@types/jest": "27.4.0",
       "@types/mime": "2.0.3",
       "@types/pem": "1.9.6",
-      "@types/tough-cookie": "4.0.1",
+      "@types/tough-cookie": "4.0.2",
       "copyfiles": "2.4.1",
       "cp": "0.2.0",
       "cypress": "7.7.0",
@@ -3440,9 +3440,9 @@
       "dev": true
     },
     "node_modules/@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "node_modules/@types/yargs": {
       "version": "16.0.5",
@@ -14110,6 +14110,11 @@
         "node": ">=0.4.x"
       }
     },
+    "node_modules/querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -14457,6 +14462,11 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "node_modules/resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -15702,22 +15712,23 @@
       }
     },
     "node_modules/tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "dependencies": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "engines": {
         "node": ">=6"
       }
     },
     "node_modules/tough-cookie/node_modules/universalify": {
-      "version": "0.1.2",
-      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-      "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "engines": {
         "node": ">= 4.0.0"
       }
@@ -16351,6 +16362,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "node_modules/url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "dependencies": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "node_modules/url/node_modules/punycode": {
       "version": "1.3.2",
       "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz",
@@ -19536,9 +19556,9 @@
       "dev": true
     },
     "@types/tough-cookie": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.1.tgz",
-      "integrity": "sha512-Y0K95ThC3esLEYD6ZuqNek29lNX2EM1qxV8y2FTLUB0ff5wWrk7az+mLrnNFUnaXcgKye22+sFBRXOgpPILZNg=="
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.2.tgz",
+      "integrity": "sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw=="
     },
     "@types/yargs": {
       "version": "16.0.5",
@@ -27552,6 +27572,11 @@
       "integrity": "sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==",
       "dev": true
     },
+    "querystringify": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
+    },
     "queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -27833,6 +27858,11 @@
       "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true
     },
+    "requires-port": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="
+    },
     "resolve": {
       "version": "1.22.1",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
@@ -28799,19 +28829,20 @@
       "dev": true
     },
     "tough-cookie": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
-      "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
+      "integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
       "requires": {
         "psl": "^1.1.33",
         "punycode": "^2.1.1",
-        "universalify": "^0.1.2"
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
       },
       "dependencies": {
         "universalify": {
-          "version": "0.1.2",
-          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
-          "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
+          "version": "0.2.0",
+          "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+          "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg=="
        }
      }
    },
@@ -29269,6 +29300,15 @@
       "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==",
       "dev": true
     },
+    "url-parse": {
+      "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
+      "requires": {
+        "querystringify": "^2.1.1",
+        "requires-port": "^1.0.0"
+      }
+    },
     "util": {
       "version": "0.12.5",
       "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz",
package.json

@@ -49,7 +49,7 @@
     "@types/jest": "27.4.0",
     "@types/mime": "2.0.3",
     "@types/pem": "1.9.6",
-    "@types/tough-cookie": "4.0.1",
+    "@types/tough-cookie": "4.0.2",
     "copyfiles": "2.4.1",
     "cp": "0.2.0",
     "cypress": "7.7.0",
@@ -82,6 +82,6 @@
     "axios-cookiejar-support": "1.0.1",
     "form-data": "4.0.0",
     "https": "1.0.0",
-    "tough-cookie": "4.0.0"
+    "tough-cookie": "4.1.3"
   }
 }
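tough-cookie 4.1.3 depends on universalify 0.2.0 and url-parse, which is why the lock file above also gains new entries for querystringify, requires-port and url-parse.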
sasjs-tests/package-lock.json (generated, 14136 changed lines)
File diff suppressed because it is too large
@@ -854,6 +854,7 @@ export default class SASjs {
    * @param pollOptions - an object that represents poll interval(milliseconds) and maximum amount of attempts. Object example: { maxPollCount: 24 * 60 * 60, pollInterval: 1000 }. More information available at src/api/viya/pollJobState.ts.
    * @param printPid - a boolean that indicates whether the function should print (PID) of the started job.
    * @param variables - an object that represents macro variables.
+   * @param verboseMode - boolean to enable verbose mode (log every HTTP response).
    */
   public async startComputeJob(
     sasJob: string,
@@ -863,7 +864,8 @@ export default class SASjs {
     waitForResult?: boolean,
     pollOptions?: PollOptions,
     printPid = false,
-    variables?: MacroVar
+    variables?: MacroVar,
+    verboseMode?: boolean
   ) {
     config = {
       ...this.sasjsConfig,
@@ -877,6 +879,9 @@ export default class SASjs {
       )
     }

+    if (verboseMode) this.requestClient?.enableVerboseMode()
+    else this.requestClient?.disableVerboseMode()
+
     return this.sasViyaApiClient?.executeComputeJob(
       sasJob,
       config.contextName,
@@ -1,5 +1,6 @@
 import * as NodeFormData from 'form-data'
 import { convertToCSV } from '../utils/convertToCsv'
+import { isNode } from '../utils'

 /**
  * One of the approaches SASjs takes to send tables-formatted JSON (see README)
@@ -26,12 +27,15 @@ export const generateFileUploadForm = (
     )
   }

-  if (typeof FormData === 'undefined' && formData instanceof NodeFormData) {
-    formData.append(name, csv, {
+  // INFO: unfortunately it is not possible to check if formData is instance of NodeFormData or FormData because it will return true for both
+  if (isNode()) {
+    // INFO: environment is Node and formData is instance of NodeFormData
+    ;(formData as NodeFormData).append(name, csv, {
       filename: `${name}.csv`,
       contentType: 'application/csv'
     })
   } else {
+    // INFO: environment is Browser and formData is instance of FormData
     const file = new Blob([csv], {
       type: 'application/csv'
     })
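The branch above keys off isNode() rather than an instanceof check. For orientation only, an environment probe along these lines would satisfy that contract; this is a generic sketch, not necessarily the adapter's own src/utils/isNode implementation:

```ts
// Generic sketch of an isNode() check (assumption: the adapter's real helper may differ).
export const isNode = (): boolean =>
  typeof process !== 'undefined' &&
  process.versions != null &&
  process.versions.node != null
```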
@@ -1,4 +1,7 @@
 import { generateFileUploadForm } from '../generateFileUploadForm'
+import { convertToCSV } from '../../utils/convertToCsv'
+import * as NodeFormData from 'form-data'
+import * as isNodeModule from '../../utils/isNode'

 describe('generateFileUploadForm', () => {
   beforeAll(() => {
@@ -11,44 +14,94 @@ describe('generateFileUploadForm', () => {
     ;(global as any).Blob = BlobMock
   })

-  it('should generate file upload form from data', () => {
-    const formData = new FormData()
-    const testTable = 'sometable'
-    const testTableWithNullVars: { [key: string]: any } = {
-      [testTable]: [
-        { var1: 'string', var2: 232, nullvar: 'A' },
-        { var1: 'string', var2: 232, nullvar: 'B' },
-        { var1: 'string', var2: 232, nullvar: '_' },
-        { var1: 'string', var2: 232, nullvar: 0 },
-        { var1: 'string', var2: 232, nullvar: 'z' },
-        { var1: 'string', var2: 232, nullvar: null }
-      ],
-      [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
-    }
-    const tableName = Object.keys(testTableWithNullVars).filter((key: string) =>
-      Array.isArray(testTableWithNullVars[key])
-    )[0]
-
-    jest.spyOn(formData, 'append').mockImplementation(() => {})
-
-    generateFileUploadForm(formData, testTableWithNullVars)
-
-    expect(formData.append).toHaveBeenCalledOnce()
-    expect(formData.append).toHaveBeenCalledWith(
-      tableName,
-      {},
-      `${tableName}.csv`
-    )
-  })
-
-  it('should throw an error if too large string was provided', () => {
-    const formData = new FormData()
-    const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
-
-    expect(() => generateFileUploadForm(formData, data)).toThrow(
-      new Error(
-        'The max length of a string value in SASjs is 32765 characters.'
-      )
-    )
-  })
+  describe('browser', () => {
+    afterAll(() => {
+      jest.restoreAllMocks()
+    })
+
+    it('should generate file upload form from data', () => {
+      const formData = new FormData()
+      const testTable = 'sometable'
+      const testTableWithNullVars: { [key: string]: any } = {
+        [testTable]: [
+          { var1: 'string', var2: 232, nullvar: 'A' },
+          { var1: 'string', var2: 232, nullvar: 'B' },
+          { var1: 'string', var2: 232, nullvar: '_' },
+          { var1: 'string', var2: 232, nullvar: 0 },
+          { var1: 'string', var2: 232, nullvar: 'z' },
+          { var1: 'string', var2: 232, nullvar: null }
+        ],
+        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+      }
+      const tableName = Object.keys(testTableWithNullVars).filter(
+        (key: string) => Array.isArray(testTableWithNullVars[key])
+      )[0]
+
+      jest.spyOn(formData, 'append').mockImplementation(() => {})
+      jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)
+
+      generateFileUploadForm(formData, testTableWithNullVars)
+
+      expect(formData.append).toHaveBeenCalledOnce()
+      expect(formData.append).toHaveBeenCalledWith(
+        tableName,
+        {},
+        `${tableName}.csv`
+      )
+    })
+
+    it('should throw an error if too large string was provided', () => {
+      const formData = new FormData()
+      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
+
+      expect(() => generateFileUploadForm(formData, data)).toThrow(
+        new Error(
+          'The max length of a string value in SASjs is 32765 characters.'
+        )
+      )
+    })
+  })
+
+  describe('node', () => {
+    it('should generate file upload form from data', () => {
+      const formData = new NodeFormData()
+      const testTable = 'sometable'
+      const testTableWithNullVars: { [key: string]: any } = {
+        [testTable]: [
+          { var1: 'string', var2: 232, nullvar: 'A' },
+          { var1: 'string', var2: 232, nullvar: 'B' },
+          { var1: 'string', var2: 232, nullvar: '_' },
+          { var1: 'string', var2: 232, nullvar: 0 },
+          { var1: 'string', var2: 232, nullvar: 'z' },
+          { var1: 'string', var2: 232, nullvar: null }
+        ],
+        [`$${testTable}`]: { formats: { var1: '$char12.', nullvar: 'best.' } }
+      }
+      const tableName = Object.keys(testTableWithNullVars).filter(
+        (key: string) => Array.isArray(testTableWithNullVars[key])
+      )[0]
+      const csv = convertToCSV(testTableWithNullVars, tableName)
+
+      jest.spyOn(formData, 'append').mockImplementation(() => {})
+
+      generateFileUploadForm(formData, testTableWithNullVars)
+
+      expect(formData.append).toHaveBeenCalledOnce()
+      expect(formData.append).toHaveBeenCalledWith(tableName, csv, {
+        contentType: 'application/csv',
+        filename: `${tableName}.csv`
+      })
+    })
+
+    it('should throw an error if too large string was provided', () => {
+      const formData = new NodeFormData()
+      const data = { testTable: [{ var1: 'z'.repeat(32765 + 1) }] }
+
+      expect(() => generateFileUploadForm(formData, data)).toThrow(
+        new Error(
+          'The max length of a string value in SASjs is 32765 characters.'
+        )
+      )
+    })
+  })
 })
@@ -1,8 +1,7 @@
 import {
   getValidJson,
   parseSasViyaDebugResponse,
-  parseWeboutResponse,
-  SASJS_LOGS_SEPARATOR
+  parseWeboutResponse
 } from '../utils'
 import { UploadFile } from '../types/UploadFile'
 import {
@@ -10,8 +10,8 @@ import {
   LoginRequiredError
 } from '../types/errors'
 import { generateFileUploadForm } from '../file/generateFileUploadForm'
-
 import { RequestClient } from '../request/RequestClient'
+import { getFormData } from '../utils'

 import {
   isRelativePath,
@@ -53,8 +53,7 @@ export class SasjsJobExecutor extends BaseJobExecutor {
      * Use the available form data object (FormData in Browser, NodeFormData in
      * Node)
      */
-    let formData =
-      typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+    let formData = getFormData()

     if (data) {
       // file upload approach
@@ -16,10 +16,11 @@ import { SASViyaApiClient } from '../SASViyaApiClient'
 import {
   isRelativePath,
   parseSasViyaDebugResponse,
-  appendExtraResponseAttributes
+  appendExtraResponseAttributes,
+  parseWeboutResponse,
+  getFormData
 } from '../utils'
 import { BaseJobExecutor } from './JobExecutor'
-import { parseWeboutResponse } from '../utils/parseWeboutResponse'

 export interface WaitingRequstPromise {
   promise: Promise<any> | null
@@ -112,8 +113,7 @@ export class WebJobExecutor extends BaseJobExecutor {
      * Use the available form data object (FormData in Browser, NodeFormData in
      * Node)
      */
-    let formData =
-      typeof FormData === 'undefined' ? new NodeFormData() : new FormData()
+    let formData = getFormData()

     if (data) {
       const stringifiedData = JSON.stringify(data)
@@ -20,6 +20,7 @@
   createAxiosInstance
 } from '../utils'
 import { InvalidSASjsCsrfError } from '../types/errors/InvalidSASjsCsrfError'
+import { inspect } from 'util'

 export interface HttpClient {
   get<T>(
@@ -59,6 +60,7 @@
 export class RequestClient implements HttpClient {
   private requests: SASjsRequest[] = []
   private requestsLimit: number = 10
+  private httpInterceptor?: number

   protected csrfToken: CsrfToken = { headerName: '', value: '' }
   protected fileUploadCsrfToken: CsrfToken | undefined
@@ -70,6 +72,7 @@
     requestsLimit?: number
   ) {
     this.createHttpClient(baseUrl, httpsAgentOptions)
+
     if (requestsLimit) this.requestsLimit = requestsLimit
   }

@@ -180,6 +183,7 @@
       responseType: contentType === 'text/plain' ? 'text' : 'json',
       withCredentials: true
     }
+
     if (contentType === 'text/plain') {
       requestConfig.transformResponse = undefined
     }
@@ -389,6 +393,105 @@
       })
   }

+  /**
+   * Adds colors to the string.
+   * @param str - string to be prettified.
+   * @returns - prettified string
+   */
+  private prettifyString = (str: any) => inspect(str, { colors: true })
+
+  /**
+   * Formats HTTP request/response body.
+   * @param body - HTTP request/response body.
+   * @returns - formatted string.
+   */
+  private parseInterceptedBody = (body: any) => {
+    if (!body) return ''
+
+    let parsedBody
+
+    // Tries to parse body into JSON object.
+    if (typeof body === 'string') {
+      try {
+        parsedBody = JSON.parse(body)
+      } catch (error) {
+        parsedBody = body
+      }
+    } else {
+      parsedBody = body
+    }
+
+    const bodyLines = this.prettifyString(parsedBody).split('\n')
+
+    // Leaves first 50 lines
+    if (bodyLines.length > 51) {
+      bodyLines.splice(50)
+      bodyLines.push('...')
+    }
+
+    return bodyLines.join('\n')
+  }
+
+  private defaultInterceptionCallBack = (response: AxiosResponse) => {
+    const { status, config, request, data: resData } = response
+    const { data: reqData } = config
+    const { _header: reqHeaders, res } = request
+    const { rawHeaders } = res
+
+    // Converts an array of strings into a single string with the following format:
+    // <headerName>: <headerValue>
+    const resHeaders = rawHeaders.reduce(
+      (acc: string, value: string, i: number) => {
+        if (i % 2 === 0) {
+          acc += `${i === 0 ? '' : '\n'}${value}`
+        } else {
+          acc += `: ${value}`
+        }
+
+        return acc
+      },
+      ''
+    )
+
+    const parsedResBody = this.parseInterceptedBody(resData)
+
+    // HTTP response summary.
+    process.logger?.info(`HTTP Request (first 50 lines):
+${reqHeaders}${this.parseInterceptedBody(reqData)}
+
+HTTP Response Code: ${this.prettifyString(status)}
+
+HTTP Response (first 50 lines):
+${resHeaders}${parsedResBody ? `\n\n${parsedResBody}` : ''}
+`)
+
+    return response
+  }
+
+  /**
+   * Turns on verbose mode to log every HTTP response.
+   * @param successCallBack - function that should be triggered on every HTTP response with the status 2**.
+   * @param errorCallBack - function that should be triggered on every HTTP response with the status different from 2**.
+   */
+  public enableVerboseMode = (
+    successCallBack = this.defaultInterceptionCallBack,
+    errorCallBack = this.defaultInterceptionCallBack
+  ) => {
+    this.httpInterceptor = this.httpClient.interceptors.response.use(
+      successCallBack,
+      errorCallBack
+    )
+  }
+
+  /**
+   * Turns off verbose mode to log every HTTP response.
+   */
+  public disableVerboseMode = () => {
+    if (this.httpInterceptor) {
+      this.httpClient.interceptors.response.eject(this.httpInterceptor)
+    }
+  }
+
   protected getHeaders = (
     accessToken: string | undefined,
     contentType: string
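A minimal usage sketch of the new verbose mode, assuming RequestClient is imported from its source path as in the specs below (the server URL and the custom callback are illustrative):

```ts
import { AxiosResponse } from 'axios'
// Path is relative to the adapter source tree, as used by the spec files in this compare.
import { RequestClient } from './src/request/RequestClient'

const client = new RequestClient('https://sas.server.io') // hypothetical server URL

// A custom callback that logs only the status line instead of the default 50-line dump.
const logStatus = (response: AxiosResponse) => {
  console.log(`HTTP ${response.status} ${response.config.url}`)
  return response
}

// Both callbacks fall back to defaultInterceptionCallBack when omitted.
client.enableVerboseMode(logStatus, logStatus)

// ...perform requests...

// Ejects the response interceptor registered above.
client.disableVerboseMode()
```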
@@ -1,8 +1,7 @@
 import { RequestClient } from './RequestClient'
 import { AxiosResponse } from 'axios'
-import { SASJS_LOGS_SEPARATOR } from '../utils'

-interface SasjsParsedResponse<T> {
+export interface SasjsParsedResponse<T> {
   result: T
   log: string
   etag: string
@@ -45,13 +44,30 @@ export class SasjsRequestClient extends RequestClient {
         }
       } catch {
         if (response.data.includes(SASJS_LOGS_SEPARATOR)) {
-          const splittedResponse = response.data.split(SASJS_LOGS_SEPARATOR)
+          const { data } = response
+          const splittedResponse: string[] = data.split(SASJS_LOGS_SEPARATOR)

-          webout = splittedResponse[0]
+          webout = splittedResponse.splice(0, 1)[0]
           if (webout !== undefined) parsedResponse = webout

-          log = splittedResponse[1]
-          printOutput = splittedResponse[2]
+          // log can contain nested logs
+          const logs = splittedResponse.splice(0, splittedResponse.length - 1)
+
+          // tests if string ends with SASJS_LOGS_SEPARATOR
+          const endingWithLogSepRegExp = new RegExp(`${SASJS_LOGS_SEPARATOR}$`)
+
+          // at this point splittedResponse can contain only one item
+          const lastChunk = splittedResponse[0]
+
+          if (lastChunk) {
+            // if the last chunk doesn't end with SASJS_LOGS_SEPARATOR, then it is a printOutput
+            // else the last chunk is part of the log and has to be joined
+            if (!endingWithLogSepRegExp.test(data)) printOutput = lastChunk
+            else if (logs.length > 1) logs.push(lastChunk)
+          }
+
+          // join logs into single log with SASJS_LOGS_SEPARATOR
+          log = logs.join(SASJS_LOGS_SEPARATOR)
         } else {
           parsedResponse = response.data
         }
@@ -59,7 +75,7 @@

     const returnResult: SasjsParsedResponse<T> = {
       result: parsedResponse as T,
-      log,
+      log: log || '',
       etag,
       status: response.status
     }
@@ -69,3 +85,6 @@
     return returnResult
   }
 }
+
+export const SASJS_LOGS_SEPARATOR =
+  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
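For intuition, the payload shape the parser above expects looks roughly like this (illustrative values; the separator constant is the real one exported above):

```ts
const SASJS_LOGS_SEPARATOR =
  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'

// Illustrative response body: webout, then one or more log sections, then optional print output.
const body =
  '{"mytable":[{"x":1}]}' +                                              // webout (returned as `result`)
  SASJS_LOGS_SEPARATOR +
  '...SAS log, possibly containing nested SASJS_LOGS_SEPARATOR blocks...' +
  SASJS_LOGS_SEPARATOR +
  'optional print output'  // only treated as printOutput when the body does not end with the separator
```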
src/request/spec/SasjsRequestClient.spec.ts (new file, 172 lines)

@@ -0,0 +1,172 @@
import {
  SASJS_LOGS_SEPARATOR,
  SasjsRequestClient,
  SasjsParsedResponse
} from '../SasjsRequestClient'
import { AxiosResponse } from 'axios'

describe('SasjsRequestClient', () => {
  const requestClient = new SasjsRequestClient('')
  const etag = 'etag'
  const status = 200

  const webout = `hello`
  const log = `1 The SAS System Tuesday, 25 July 2023 12:51:00

PROC MIGRATE will preserve current SAS file attributes and is
recommended for converting all your SAS libraries from any
SAS 8 release to SAS 9. For details and examples, please see
http://support.sas.com/rnd/migration/index.html

NOTE: SAS initialization used:
real time 0.01 seconds
cpu time 0.02 seconds

`
  const printOutput = 'printOutPut'

  describe('parseResponse', () => {})

  it('should parse response with 1 log', () => {
    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${log}
`,
      etag,
      status
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with 1 log and printOutput', () => {
    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${log}
`,
      etag,
      status,
      printOutput: `
${printOutput}`
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with nested logs', () => {
    const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
root log end`

    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${logWithNestedLog}
`,
      etag,
      status
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })

  it('should parse response with nested logs and printOutput', () => {
    const logWithNestedLog = `root log start
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
log with indentation
${SASJS_LOGS_SEPARATOR}
${log}
${SASJS_LOGS_SEPARATOR}
some SAS code containing ${SASJS_LOGS_SEPARATOR}
root log end`

    const response: AxiosResponse<any> = {
      data: `${webout}
${SASJS_LOGS_SEPARATOR}
${logWithNestedLog}
${SASJS_LOGS_SEPARATOR}
${printOutput}`,
      status,
      statusText: 'ok',
      headers: { etag },
      config: {}
    }

    const expectedParsedResponse: SasjsParsedResponse<string> = {
      result: `${webout}
`,
      log: `
${logWithNestedLog}
`,
      etag,
      status,
      printOutput: `
${printOutput}`
    }

    expect(requestClient['parseResponse'](response)).toEqual(
      expectedParsedResponse
    )
  })
})

describe('SASJS_LOGS_SEPARATOR', () => {
  it('SASJS_LOGS_SEPARATOR should be hardcoded', () => {
    expect(SASJS_LOGS_SEPARATOR).toEqual(
      'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
    )
  })
})
@@ -13,6 +13,8 @@
 } from '../types/errors'
 import { RequestClient } from '../request/RequestClient'
 import { getTokenRequestErrorPrefixResponse } from '../auth/getTokenRequestErrorPrefix'
+import { AxiosResponse } from 'axios'
+import { Logger, LogLevel } from '@sasjs/utils/logger'

 const axiosActual = jest.requireActual('axios')

@@ -25,16 +27,6 @@ jest
 const PORT = 8000
 const SERVER_URL = `https://localhost:${PORT}/`

-const ERROR_MESSAGES = {
-  selfSigned: 'self signed certificate',
-  CCA: 'unable to verify the first certificate'
-}
-
-const incorrectAuthCodeErr = {
-  error: 'unauthorized',
-  error_description: 'Bad credentials'
-}
-
 describe('RequestClient', () => {
   let server: http.Server

@@ -80,6 +72,187 @@
     expect(rejectionErrorMessage).toEqual(expectedError.message)
   })

+  describe('defaultInterceptionCallBack', () => {
+    beforeAll(() => {
+      ;(process as any).logger = new Logger(LogLevel.Off)
+    })
+
+    it('should log parsed response', () => {
+      jest.spyOn((process as any).logger, 'info')
+
+      const status = 200
+      const reqData = `{
+        name: 'test_job',
+        description: 'Powered by SASjs',
+        code: ['test_code'],
+        variables: {
+          SYS_JES_JOB_URI: '',
+          _program: '/Public/sasjs/jobs/jobs/test_job'
+        },
+        arguments: {
+          _contextName: 'SAS Job Execution compute context',
+          _OMITJSONLISTING: true,
+          _OMITJSONLOG: true,
+          _OMITSESSIONRESULTS: true,
+          _OMITTEXTLISTING: true,
+          _OMITTEXTLOG: true
+        }
+      }`
+      const resData = {
+        id: 'id_string',
+        name: 'name_string',
+        uri: 'uri_string',
+        createdBy: 'createdBy_string',
+        code: 'TEST CODE',
+        links: [
+          {
+            method: 'method_string',
+            rel: 'state',
+            href: 'state_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          },
+          {
+            method: 'method_string',
+            rel: 'state',
+            href: 'state_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          },
+          {
+            method: 'method_string',
+            rel: 'state',
+            href: 'state_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          },
+          {
+            method: 'method_string',
+            rel: 'state',
+            href: 'state_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          },
+          {
+            method: 'method_string',
+            rel: 'state',
+            href: 'state_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          },
+          {
+            method: 'method_string',
+            rel: 'self',
+            href: 'self_href_string',
+            uri: 'uri_string',
+            type: 'type_string'
+          }
+        ],
+        results: { '_webout.json': '_webout.json_string' },
+        logStatistics: {
+          lineCount: 1,
+          modifiedTimeStamp: 'modifiedTimeStamp_string'
+        }
+      }
+      const reqHeaders = `POST https://sas.server.com/compute/sessions/session_id/jobs HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+User-Agent: axios/0.27.2
+Content-Length: 334
+host: sas.server.io
+Connection: close
+`
+      const resHeaders = ['content-type', 'application/json']
+      const mockedResponse: AxiosResponse = {
+        data: resData,
+        status,
+        statusText: '',
+        headers: {},
+        config: { data: reqData },
+        request: { _header: reqHeaders, res: { rawHeaders: resHeaders } }
+      }
+
+      const requestClient = new RequestClient('')
+      requestClient['defaultInterceptionCallBack'](mockedResponse)
+
+      const expectedLog = `HTTP Request (first 50 lines):
+${reqHeaders}${requestClient['parseInterceptedBody'](reqData)}
+
+HTTP Response Code: ${requestClient['prettifyString'](status)}
+
+HTTP Response (first 50 lines):
+${resHeaders[0]}: ${resHeaders[1]}${
+        requestClient['parseInterceptedBody'](resData)
+          ? `\n\n${requestClient['parseInterceptedBody'](resData)}`
+          : ''
+      }
+`
+
+      expect((process as any).logger.info).toHaveBeenCalledWith(expectedLog)
+    })
+  })
+
+  describe('enableVerboseMode', () => {
+    it('should add defaultInterceptionCallBack functions to response interceptors', () => {
+      const requestClient = new RequestClient('')
+      const interceptorSpy = jest.spyOn(
+        requestClient['httpClient'].interceptors.response,
+        'use'
+      )
+
+      requestClient.enableVerboseMode()
+
+      expect(interceptorSpy).toHaveBeenCalledWith(
+        requestClient['defaultInterceptionCallBack'],
+        requestClient['defaultInterceptionCallBack']
+      )
+    })
+
+    it('should add callback functions to response interceptors', () => {
+      const requestClient = new RequestClient('')
+      const interceptorSpy = jest.spyOn(
+        requestClient['httpClient'].interceptors.response,
+        'use'
+      )
+
+      const successCallback = (response: AxiosResponse) => {
+        console.log('success')
+
+        return response
+      }
+      const failureCallback = (response: AxiosResponse) => {
+        console.log('failure')
+
+        return response
+      }
+
+      requestClient.enableVerboseMode(successCallback, failureCallback)
+
+      expect(interceptorSpy).toHaveBeenCalledWith(
+        successCallback,
+        failureCallback
+      )
+    })
+  })
+
+  describe('disableVerboseMode', () => {
+    it('should eject interceptor', () => {
+      const requestClient = new RequestClient('')
+
+      const interceptorSpy = jest.spyOn(
+        requestClient['httpClient'].interceptors.response,
+        'eject'
+      )
+
+      const interceptorId = 100
+
+      requestClient['httpInterceptor'] = interceptorId
+      requestClient.disableVerboseMode()
+
+      expect(interceptorSpy).toHaveBeenCalledWith(interceptorId)
+    })
+  })
+
   describe('handleError', () => {
     const requestClient = new RequestClient('https://localhost:8009')
     const randomError = 'some error'
@@ -1,2 +0,0 @@
-export const SASJS_LOGS_SEPARATOR =
-  'SASJS_LOGS_SEPARATOR_163ee17b6ff24f028928972d80a26784'
src/utils/getFormData.ts (new file, 5 lines)

@@ -0,0 +1,5 @@
import { isNode } from './'
import * as NodeFormData from 'form-data'

export const getFormData = () =>
  isNode() ? new NodeFormData() : new FormData()
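A quick usage sketch; the import path is illustrative, and within the adapter the helper is re-exported from src/utils (see the index.ts change below):

```ts
import { getFormData } from './src/utils' // illustrative path, relative to the adapter source tree

// Under Node this returns a 'form-data' instance; in the browser, the native FormData.
const formData = getFormData()
formData.append('sometable', 'VAR1,VAR2\n1,2')
```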
@@ -2,7 +2,6 @@ export * from './appendExtraResponseAttributes'
 export * from './asyncForEach'
 export * from './compareTimestamps'
 export * from './convertToCsv'
-export * from './constants'
 export * from './createAxiosInstance'
 export * from './delay'
 export * from './fetchLogByChunks'
@@ -20,3 +19,4 @@ export * from './parseWeboutResponse'
 export * from './serialize'
 export * from './splitChunks'
 export * from './validateInput'
+export * from './getFormData'
src/utils/spec/getFormData.spec.ts (new file, 20 lines)

@@ -0,0 +1,20 @@
import { getFormData } from '..'
import * as isNodeModule from '../isNode'
import * as NodeFormData from 'form-data'

describe('getFormData', () => {
  it('should return NodeFormData if environment is Node', () => {
    jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => true)

    expect(getFormData() instanceof NodeFormData).toEqual(true)
  })

  it('should return FormData if environment is not Node', () => {
    const formDataMock = () => {}
    ;(global as any).FormData = formDataMock

    jest.spyOn(isNodeModule, 'isNode').mockImplementation(() => false)

    expect(getFormData() instanceof FormData).toEqual(true)
  })
})