Merge branch 'master' of github.com:Budibase/budibase into cheeks-fixes
commit a5e8863281
@@ -1,5 +1,5 @@
 {
-  "version": "2.18.0",
+  "version": "2.19.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -32,7 +32,7 @@
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
     "correlation-id": "4.0.0",
-    "dd-trace": "5.0.0",
+    "dd-trace": "5.2.0",
     "dotenv": "16.0.1",
     "ioredis": "5.3.2",
     "joi": "17.6.0",
@@ -1,4 +1,5 @@
 import { IdentityContext, VM } from "@budibase/types"
+import { ExecutionTimeTracker } from "../timers"
 
 // keep this out of Budibase types, don't want to expose context info
 export type ContextMap = {
@@ -9,5 +10,6 @@ export type ContextMap = {
   isScim?: boolean
   automationId?: string
   isMigrating?: boolean
+  jsExecutionTracker?: ExecutionTimeTracker
   vm?: VM
 }
@@ -20,3 +20,41 @@ export function cleanup() {
   }
   intervals = []
 }
+
+export class ExecutionTimeoutError extends Error {
+  public readonly name = "ExecutionTimeoutError"
+}
+
+export class ExecutionTimeTracker {
+  static withLimit(limitMs: number) {
+    return new ExecutionTimeTracker(limitMs)
+  }
+
+  constructor(readonly limitMs: number) {}
+
+  private totalTimeMs = 0
+
+  track<T>(f: () => T): T {
+    this.checkLimit()
+    const start = process.hrtime.bigint()
+    try {
+      return f()
+    } finally {
+      const end = process.hrtime.bigint()
+      this.totalTimeMs += Number(end - start) / 1e6
+      this.checkLimit()
+    }
+  }
+
+  get elapsedMS() {
+    return this.totalTimeMs
+  }
+
+  checkLimit() {
+    if (this.totalTimeMs > this.limitMs) {
+      throw new ExecutionTimeoutError(
+        `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
+      )
+    }
+  }
+}
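
Note: a minimal usage sketch (TypeScript) of the ExecutionTimeTracker introduced above, using the timers export from "@budibase/backend-core" as shown later in this diff; the runWithBudget helper and the 15000ms figure are illustrative, not part of this change.

import { timers } from "@budibase/backend-core"

// Give a batch of synchronous scripts one shared time budget. track() measures
// each callback with process.hrtime.bigint() and throws ExecutionTimeoutError
// once the accumulated total exceeds limitMs; elapsedMS exposes the running total.
function runWithBudget(scripts: Array<() => unknown>, limitMs = 15000): unknown[] {
  const tracker = timers.ExecutionTimeTracker.withLimit(limitMs)
  return scripts.map(script => {
    const result = tracker.track(script)
    console.log(`used ${tracker.elapsedMS}ms of ${limitMs}ms`)
    return result
  })
}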
@@ -69,7 +69,7 @@
     "cookies": "0.8.0",
     "csvtojson": "2.0.10",
     "curlconverter": "3.21.0",
-    "dd-trace": "5.0.0",
+    "dd-trace": "5.2.0",
     "dotenv": "8.2.0",
     "form-data": "4.0.0",
     "global-agent": "3.0.0",
@@ -113,6 +113,7 @@
     "undici-types": "^6.0.1",
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
+    "vm2": "^3.9.19",
     "worker-farm": "1.7.0",
     "xml2js": "0.5.0"
   },
@@ -72,9 +72,9 @@ const environment = {
   HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
   FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
   JS_PER_INVOCATION_TIMEOUT_MS:
-    parseIntSafe(process.env.JS_PER_INVOCATION_TIMEOUT_MS) || 1000,
+    parseIntSafe(process.env.JS_PER_EXECUTION_TIME_LIMIT_MS) || 1000,
   JS_PER_REQUEST_TIMEOUT_MS: parseIntSafe(
-    process.env.JS_PER_REQUEST_TIMEOUT_MS
+    process.env.JS_PER_REQUEST_TIME_LIMIT_MS
   ),
   // old
   CLIENT_ID: process.env.CLIENT_ID,
@@ -1,42 +1,61 @@
+import vm from "vm"
 import env from "../environment"
-import { setJSRunner, JsErrorTimeout } from "@budibase/string-templates"
+import { setJSRunner } from "@budibase/string-templates"
+import { context, timers } from "@budibase/backend-core"
 import tracer from "dd-trace"
 
-import { IsolatedVM } from "./vm"
-import { context } from "@budibase/backend-core"
+type TrackerFn = <T>(f: () => T) => T
 
 export function init() {
-  setJSRunner((js: string, ctx: Record<string, any>) => {
+  setJSRunner((js: string, ctx: vm.Context) => {
     return tracer.trace("runJS", {}, span => {
-      try {
-        const bbCtx = context.getCurrentContext()!
-
-        let { vm } = bbCtx
-        if (!vm) {
-          // Can't copy the native helpers into the isolate. We just ignore them as they are handled properly from the helpersSource
-          const { helpers, ...ctxToPass } = ctx
-
-          vm = new IsolatedVM({
-            memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
-            invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
-            isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
-          })
-            .withContext(ctxToPass)
-            .withHelpers()
-
-          bbCtx.vm = vm
-        }
-
-        const result = vm.execute(js)
-
-        return result
-      } catch (error: any) {
-        if (error.message === "Script execution timed out.") {
-          throw new JsErrorTimeout()
-        }
-
-        throw error
-      }
+      const perRequestLimit = env.JS_PER_REQUEST_TIMEOUT_MS
+      let track: TrackerFn = f => f()
+      if (perRequestLimit) {
+        const bbCtx = tracer.trace("runJS.getCurrentContext", {}, span =>
+          context.getCurrentContext()
+        )
+        if (bbCtx) {
+          if (!bbCtx.jsExecutionTracker) {
+            span?.addTags({
+              createdExecutionTracker: true,
+            })
+            bbCtx.jsExecutionTracker = tracer.trace(
+              "runJS.createExecutionTimeTracker",
+              {},
+              span => timers.ExecutionTimeTracker.withLimit(perRequestLimit)
+            )
+          }
+          span?.addTags({
+            js: {
+              limitMS: bbCtx.jsExecutionTracker.limitMs,
+              elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
+            },
+          })
+          // We call checkLimit() here to prevent paying the cost of creating
+          // a new VM context below when we don't need to.
+          tracer.trace("runJS.checkLimitAndBind", {}, span => {
+            bbCtx.jsExecutionTracker!.checkLimit()
+            track = bbCtx.jsExecutionTracker!.track.bind(
+              bbCtx.jsExecutionTracker
+            )
+          })
+        }
+      }
+
+      ctx = {
+        ...ctx,
+        alert: undefined,
+        setInterval: undefined,
+        setTimeout: undefined,
+      }
+
+      vm.createContext(ctx)
+      return track(() =>
+        vm.runInNewContext(js, ctx, {
+          timeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
+        })
+      )
     })
   })
 }
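
Note: for orientation, a sketch of how the runner registered above is exercised end to end, assuming the string-templates API used by the tests later in this diff (setJSRunner, encodeJSBinding, processStringSync); the binding and context values are made up.

import { processStringSync, encodeJSBinding } from "@budibase/string-templates"

// init() from the module above must have run once so that setJSRunner has
// installed the vm-based runner. Every JS binding is then evaluated through
// vm.runInNewContext with JS_PER_INVOCATION_TIMEOUT_MS, and its runtime is
// charged against the per-request budget tracked on the current context.
const binding = encodeJSBinding(`return $("user.name").toUpperCase()`)
const output = processStringSync(binding, { user: { name: "Ada" } })
// output === "ADA"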
@@ -1,4 +1,4 @@
-import { validate as isValidUUID } from "uuid"
+// import { validate as isValidUUID } from "uuid"
 
 jest.mock("@budibase/handlebars-helpers/lib/math", () => {
   const actual = jest.requireActual("@budibase/handlebars-helpers/lib/math")
@@ -47,29 +47,37 @@ describe("jsRunner", () => {
     expect(output).toBe(3)
   })
 
+  // TODO This should be reenabled when running on isolated-vm
+  it.skip("should prevent sandbox escape", async () => {
+    const output = await processJS(
+      `return this.constructor.constructor("return process")()`
+    )
+    expect(output).toBe("Error while executing JS")
+  })
+
   describe("helpers", () => {
     runJsHelpersTests({
       funcWrap: (func: any) => config.doInContext(config.getAppId(), func),
-      testsToSkip: ["random", "uuid"],
+      // testsToSkip: ["random", "uuid"],
     })
 
-    describe("uuid", () => {
-      it("uuid helper returns a valid uuid", async () => {
-        const result = await processJS("return helpers.uuid()")
-        expect(result).toBeDefined()
-        expect(isValidUUID(result)).toBe(true)
-      })
-    })
+    // describe("uuid", () => {
+    //   it("uuid helper returns a valid uuid", async () => {
+    //     const result = await processJS("return helpers.uuid()")
+    //     expect(result).toBeDefined()
+    //     expect(isValidUUID(result)).toBe(true)
+    //   })
+    // })
 
-    describe("random", () => {
-      it("random helper returns a valid number", async () => {
-        const min = 1
-        const max = 8
-        const result = await processJS(`return helpers.random(${min}, ${max})`)
-        expect(result).toBeDefined()
-        expect(result).toBeGreaterThanOrEqual(min)
-        expect(result).toBeLessThanOrEqual(max)
-      })
-    })
+    // describe("random", () => {
+    //   it("random helper returns a valid number", async () => {
+    //     const min = 1
+    //     const max = 8
+    //     const result = await processJS(`return helpers.random(${min}, ${max})`)
+    //     expect(result).toBeDefined()
+    //     expect(result).toBeGreaterThanOrEqual(min)
+    //     expect(result).toBeLessThanOrEqual(max)
+    //   })
+    // })
   })
 })
@@ -14,6 +14,7 @@ import {
   ATTACHMENT_DIRECTORY,
 } from "./constants"
 import fs from "fs"
+import fsp from "fs/promises"
 import { join } from "path"
 import env from "../../../environment"
 import { v4 as uuid } from "uuid"
@@ -117,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
         ObjectStoreBuckets.APPS,
         join(appPath, path)
       )
-      fs.writeFileSync(join(tmpPath, path), contents)
+      await fsp.writeFile(join(tmpPath, path), contents)
     }
   }
   // get all the files
@@ -131,14 +132,14 @@ export async function exportApp(appId: string, config?: ExportOpts) {
 
   const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
-    const allFiles = fs.readdirSync(downloadedPath)
+    const allFiles = await fsp.readdir(downloadedPath)
     for (let file of allFiles) {
       const path = join(downloadedPath, file)
       // move out of app directory, simplify structure
-      fs.renameSync(path, join(downloadedPath, "..", file))
+      await fsp.rename(path, join(downloadedPath, "..", file))
     }
     // remove the old app directory created by object export
-    fs.rmdirSync(downloadedPath)
+    await fsp.rmdir(downloadedPath)
   }
   // enforce an export of app DB to the tmp path
   const dbPath = join(tmpPath, DB_EXPORT_FILE)
@@ -148,7 +149,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   })
 
   if (config?.encryptPassword) {
-    for (let file of fs.readdirSync(tmpPath)) {
+    for (let file of await fsp.readdir(tmpPath)) {
       const path = join(tmpPath, file)
 
       // skip the attachments - too big to encrypt
@@ -157,7 +158,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
           { dir: tmpPath, filename: file },
          config.encryptPassword
        )
-        fs.rmSync(path)
+        await fsp.rm(path)
      }
    }
  }
|
@ -165,9 +166,9 @@ export async function exportApp(appId: string, config?: ExportOpts) {
|
||||||
// if tar requested, return where the tarball is
|
// if tar requested, return where the tarball is
|
||||||
if (config?.tar) {
|
if (config?.tar) {
|
||||||
// now the tmpPath contains both the DB export and attachments, tar this
|
// now the tmpPath contains both the DB export and attachments, tar this
|
||||||
const tarPath = await tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
|
const tarPath = await tarFilesToTmp(tmpPath, await fsp.readdir(tmpPath))
|
||||||
// cleanup the tmp export files as tarball returned
|
// cleanup the tmp export files as tarball returned
|
||||||
fs.rmSync(tmpPath, { recursive: true, force: true })
|
await fsp.rm(tmpPath, { recursive: true, force: true })
|
||||||
|
|
||||||
return tarPath
|
return tarPath
|
||||||
}
|
}
|
||||||
|
|
|
@@ -17,6 +17,7 @@ import { downloadTemplate } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
 import { join } from "path"
 import fs from "fs"
+import fsp from "fs/promises"
 import sdk from "../../"
 import { v4 as uuid } from "uuid"
 import tar from "tar"
@@ -119,7 +120,7 @@ async function getTemplateStream(template: TemplateType) {
 
 export async function untarFile(file: { path: string }) {
   const tmpPath = join(budibaseTempDir(), uuid())
-  fs.mkdirSync(tmpPath)
+  await fsp.mkdir(tmpPath)
   // extract the tarball
   await tar.extract({
     cwd: tmpPath,
@@ -130,12 +131,12 @@ export async function untarFile(file: { path: string }) {
 
 async function decryptFiles(path: string, password: string) {
   try {
-    for (let file of fs.readdirSync(path)) {
+    for (let file of await fsp.readdir(path)) {
       const inputPath = join(path, file)
       if (!inputPath.endsWith(ATTACHMENT_DIRECTORY)) {
         const outputPath = inputPath.replace(/\.enc$/, "")
         await encryption.decryptFile(inputPath, outputPath, password)
-        fs.rmSync(inputPath)
+        await fsp.rm(inputPath)
       }
     }
   } catch (err: any) {
@@ -164,14 +165,14 @@ export async function importApp(
   let dbStream: any
   const isTar = template.file && template?.file?.type?.endsWith("gzip")
   const isDirectory =
-    template.file && fs.lstatSync(template.file.path).isDirectory()
+    template.file && (await fsp.lstat(template.file.path)).isDirectory()
   let tmpPath: string | undefined = undefined
   if (template.file && (isTar || isDirectory)) {
     tmpPath = isTar ? await untarFile(template.file) : template.file.path
     if (isTar && template.file.password) {
       await decryptFiles(tmpPath, template.file.password)
     }
-    const contents = fs.readdirSync(tmpPath)
+    const contents = await fsp.readdir(tmpPath)
     // have to handle object import
     if (contents.length && opts.importObjStoreContents) {
       let promises = []
@@ -182,7 +183,7 @@ export async function importApp(
           continue
         }
         filename = join(prodAppId, filename)
-        if (fs.lstatSync(path).isDirectory()) {
+        if ((await fsp.lstat(path)).isDirectory()) {
          promises.push(
            objectStore.uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
          )
@@ -211,7 +212,7 @@ export async function importApp(
   await updateAutomations(prodAppId, db)
   // clear up afterward
   if (tmpPath) {
-    fs.rmSync(tmpPath, { recursive: true, force: true })
+    await fsp.rm(tmpPath, { recursive: true, force: true })
   }
   return ok
 }
@@ -127,16 +127,10 @@ class QueryRunner {
 
     // transform as required
     if (transformer) {
-      const runner = new ScriptRunner(
-        transformer,
-        {
-          data: rows,
-          params: enrichedParameters,
-        },
-        {
-          parseBson: datasource.source === SourceName.MONGODB,
-        }
-      )
+      const runner = new ScriptRunner(transformer, {
+        data: rows,
+        params: enrichedParameters,
+      })
       rows = runner.execute()
     }
 
@@ -158,11 +152,6 @@ class QueryRunner {
       return this.execute()
     }
 
-    // check for undefined response
-    if (!rows) {
-      rows = []
-    }
-
     // needs to an array for next step
     if (!Array.isArray(rows)) {
       rows = [rows]
@@ -1,27 +1,28 @@
-import env from "../environment"
-import { IsolatedVM } from "../jsRunner/vm"
+import fetch from "node-fetch"
+import { VM, VMScript } from "vm2"
 
 const JS_TIMEOUT_MS = 1000
 
 class ScriptRunner {
-  private code: string
-  private vm: IsolatedVM
+  vm: VM
+  results: { out: string }
+  script: VMScript
 
-  constructor(script: string, context: any, { parseBson = false } = {}) {
-    this.code = `(() => {${script}})();`
-    this.vm = new IsolatedVM({
-      memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
-      invocationTimeout: JS_TIMEOUT_MS,
-    }).withContext(context)
-
-    if (parseBson && context.data) {
-      this.vm = this.vm.withParsingBson(context.data)
-    }
+  constructor(script: string, context: any) {
+    const code = `let fn = () => {\n${script}\n}; results.out = fn();`
+    this.vm = new VM({
+      timeout: JS_TIMEOUT_MS,
+    })
+    this.results = { out: "" }
+    this.vm.setGlobals(context)
+    this.vm.setGlobal("fetch", fetch)
+    this.vm.setGlobal("results", this.results)
+    this.script = new VMScript(code)
   }
 
   execute() {
-    const result = this.vm.execute(this.code)
-    return result
+    this.vm.run(this.script)
+    return this.results.out
   }
 }
 
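
Note: a short usage sketch of the reverted vm2-based ScriptRunner, mirroring how QueryRunner constructs it in the hunk above; the transformer body, data and params are example values, and the import path is illustrative since file paths are not shown in this diff.

// import ScriptRunner from "./ScriptRunner" // illustrative path

// data and params are injected as globals into the vm2 sandbox; the script is
// wrapped as `let fn = () => { ... }; results.out = fn();`, so whatever the
// transformer returns comes back from execute().
const transformer = `return data.map(row => ({ ...row, total: row.qty * params.price }))`
const runner = new ScriptRunner(transformer, {
  data: [{ qty: 2 }, { qty: 5 }],
  params: { price: 10 },
})
const rows = runner.execute() // [{ qty: 2, total: 20 }, { qty: 5, total: 50 }]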
@@ -2,13 +2,13 @@
   "name": "@budibase/string-templates",
   "version": "0.0.0",
   "description": "Handlebars wrapper for Budibase templating.",
-  "main": "src/index.cjs",
+  "main": "src/index.js",
   "module": "dist/bundle.mjs",
   "license": "MPL-2.0",
   "types": "dist/index.d.ts",
   "exports": {
     ".": {
-      "require": "./src/index.cjs",
+      "require": "./src/index.js",
       "import": "./dist/bundle.mjs"
     },
     "./package.json": "./package.json",
@@ -29,8 +29,7 @@
     "@budibase/handlebars-helpers": "^0.13.1",
     "dayjs": "^1.10.8",
     "handlebars": "^4.7.6",
-    "lodash.clonedeep": "^4.5.0",
-    "vm2": "^3.9.19"
+    "lodash.clonedeep": "^4.5.0"
   },
   "devDependencies": {
     "@rollup/plugin-commonjs": "^17.1.0",
@@ -4,7 +4,7 @@ const { LITERAL_MARKER } = require("../helpers/constants")
 const { getJsHelperList } = require("./list")
 
 // The method of executing JS scripts depends on the bundle being built.
-// This setter is used in the entrypoint (either index.cjs or index.mjs).
+// This setter is used in the entrypoint (either index.js or index.mjs).
 let runJS
 module.exports.setJSRunner = runner => (runJS = runner)
 
@@ -1,43 +0,0 @@
-const templates = require("./index.js")
-
-/**
- * CJS entrypoint for rollup
- */
-module.exports.isValid = templates.isValid
-module.exports.makePropSafe = templates.makePropSafe
-module.exports.getManifest = templates.getManifest
-module.exports.isJSBinding = templates.isJSBinding
-module.exports.encodeJSBinding = templates.encodeJSBinding
-module.exports.decodeJSBinding = templates.decodeJSBinding
-module.exports.processStringSync = templates.processStringSync
-module.exports.processObjectSync = templates.processObjectSync
-module.exports.processString = templates.processString
-module.exports.processObject = templates.processObject
-module.exports.doesContainStrings = templates.doesContainStrings
-module.exports.doesContainString = templates.doesContainString
-module.exports.disableEscaping = templates.disableEscaping
-module.exports.findHBSBlocks = templates.findHBSBlocks
-module.exports.convertToJS = templates.convertToJS
-module.exports.setJSRunner = templates.setJSRunner
-module.exports.FIND_ANY_HBS_REGEX = templates.FIND_ANY_HBS_REGEX
-module.exports.helpersToRemoveForJs = templates.helpersToRemoveForJs
-
-if (!process.env.NO_JS) {
-  const { VM } = require("vm2")
-  const { setJSRunner } = require("./helpers/javascript")
-  /**
-   * Use vm2 to run JS scripts in a node env
-   */
-  setJSRunner((js, context) => {
-    const vm = new VM({
-      sandbox: context,
-      timeout: 1000,
-    })
-    return vm.run(js)
-  })
-}
-
-const errors = require("./errors")
-for (const error in errors) {
-  module.exports[error] = errors[error]
-}
@@ -8,7 +8,7 @@ const {
   doesContainString,
   disableEscaping,
   findHBSBlocks,
-} = require("../src/index.cjs")
+} = require("../src/index.js")
 
 describe("Test that the string processing works correctly", () => {
   it("should process a basic template string", async () => {
@@ -1,4 +1,4 @@
-const { processString } = require("../src/index.cjs")
+const { processString } = require("../src/index.js")
 
 describe("Handling context properties with spaces in their name", () => {
   it("should allow through literal specifiers", async () => {
@@ -1,4 +1,4 @@
-const { convertToJS } = require("../src/index.cjs")
+const { convertToJS } = require("../src/index.js")
 
 function checkLines(response, lines) {
   const toCheck = response.split("\n")
@@ -1,4 +1,4 @@
-const { processString, processObject, isValid } = require("../src/index.cjs")
+const { processString, processObject, isValid } = require("../src/index.js")
 const tableJson = require("./examples/table.json")
 const dayjs = require("dayjs")
 const { UUID_REGEX } = require("./constants")
@@ -1,149 +1,156 @@
-const { processStringSync, encodeJSBinding } = require("../src/index.cjs")
+const vm = require("vm")
+
+const {
+  processStringSync,
+  encodeJSBinding,
+  setJSRunner,
+} = require("../src/index.js")
 const { UUID_REGEX } = require("./constants")
 
 const processJS = (js, context) => {
   return processStringSync(encodeJSBinding(js), context)
 }
 
-describe("Test the JavaScript helper", () => {
-  it("should execute a simple expression", () => {
-    const output = processJS(`return 1 + 2`)
-    expect(output).toBe(3)
-  })
-
-  it("should be able to use primitive bindings", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: "bar",
-    })
-    expect(output).toBe("bar")
-  })
-
-  it("should be able to use an object binding", () => {
-    const output = processJS(`return $("foo").bar`, {
-      foo: {
-        bar: "baz",
-      },
-    })
-    expect(output).toBe("baz")
-  })
-
-  it("should be able to use a complex object binding", () => {
-    const output = processJS(`return $("foo").bar[0].baz`, {
-      foo: {
-        bar: [
-          {
-            baz: "shazbat",
-          },
-        ],
-      },
-    })
-    expect(output).toBe("shazbat")
-  })
-
-  it("should be able to use a deep binding", () => {
-    const output = processJS(`return $("foo.bar.baz")`, {
-      foo: {
-        bar: {
-          baz: "shazbat",
-        },
-      },
-    })
-    expect(output).toBe("shazbat")
-  })
-
-  it("should be able to return an object", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: {
-        bar: {
-          baz: "shazbat",
-        },
-      },
-    })
-    expect(output.bar.baz).toBe("shazbat")
-  })
-
-  it("should be able to return an array", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: ["a", "b", "c"],
-    })
-    expect(output[2]).toBe("c")
-  })
-
-  it("should be able to return null", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: null,
-    })
-    expect(output).toBe(null)
-  })
-
-  it("should be able to return undefined", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: undefined,
-    })
-    expect(output).toBe(undefined)
-  })
-
-  it("should be able to return 0", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: 0,
-    })
-    expect(output).toBe(0)
-  })
-
-  it("should be able to return an empty string", () => {
-    const output = processJS(`return $("foo")`, {
-      foo: "",
-    })
-    expect(output).toBe("")
-  })
-
-  it("should be able to use a deep array binding", () => {
-    const output = processJS(`return $("foo.0.bar")`, {
-      foo: [
-        {
-          bar: "baz",
-        },
-      ],
-    })
-    expect(output).toBe("baz")
-  })
-
-  it("should handle errors", () => {
-    const output = processJS(`throw "Error"`)
-    expect(output).toBe("Error while executing JS")
-  })
-
-  it("should timeout after one second", () => {
-    const output = processJS(`while (true) {}`)
-    expect(output).toBe("Timed out while executing JS")
-  })
-
-  it("should prevent access to the process global", () => {
-    const output = processJS(`return process`)
-    expect(output).toBe("Error while executing JS")
-  })
-
-  it("should prevent sandbox escape", () => {
-    const output = processJS(
-      `return this.constructor.constructor("return process")()`
-    )
-    expect(output).toBe("Error while executing JS")
-  })
-})
-
-describe("check JS helpers", () => {
-  it("should error if using the format helper. not helpers.", () => {
-    const output = processJS(`return helper.toInt(4.3)`)
-    expect(output).toBe("Error while executing JS")
-  })
-
-  it("should be able to use toInt", () => {
-    const output = processJS(`return helpers.toInt(4.3)`)
-    expect(output).toBe(4)
-  })
-
-  it("should be able to use uuid", () => {
-    const output = processJS(`return helpers.uuid()`)
-    expect(output).toMatch(UUID_REGEX)
-  })
-})
+describe("Javascript", () => {
+  beforeAll(() => {
+    setJSRunner((js, context) => {
+      return vm.runInNewContext(js, context, { timeout: 1000 })
+    })
+  })
+
+  describe("Test the JavaScript helper", () => {
+    it("should execute a simple expression", () => {
+      const output = processJS(`return 1 + 2`)
+      expect(output).toBe(3)
+    })
+
+    it("should be able to use primitive bindings", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: "bar",
+      })
+      expect(output).toBe("bar")
+    })
+
+    it("should be able to use an object binding", () => {
+      const output = processJS(`return $("foo").bar`, {
+        foo: {
+          bar: "baz",
+        },
+      })
+      expect(output).toBe("baz")
+    })
+
+    it("should be able to use a complex object binding", () => {
+      const output = processJS(`return $("foo").bar[0].baz`, {
+        foo: {
+          bar: [
+            {
+              baz: "shazbat",
+            },
+          ],
+        },
+      })
+      expect(output).toBe("shazbat")
+    })
+
+    it("should be able to use a deep binding", () => {
+      const output = processJS(`return $("foo.bar.baz")`, {
+        foo: {
+          bar: {
+            baz: "shazbat",
+          },
+        },
+      })
+      expect(output).toBe("shazbat")
+    })
+
+    it("should be able to return an object", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: {
+          bar: {
+            baz: "shazbat",
+          },
+        },
+      })
+      expect(output.bar.baz).toBe("shazbat")
+    })
+
+    it("should be able to return an array", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: ["a", "b", "c"],
+      })
+      expect(output[2]).toBe("c")
+    })
+
+    it("should be able to return null", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: null,
+      })
+      expect(output).toBe(null)
+    })
+
+    it("should be able to return undefined", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: undefined,
+      })
+      expect(output).toBe(undefined)
+    })
+
+    it("should be able to return 0", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: 0,
+      })
+      expect(output).toBe(0)
+    })
+
+    it("should be able to return an empty string", () => {
+      const output = processJS(`return $("foo")`, {
+        foo: "",
+      })
+      expect(output).toBe("")
+    })
+
+    it("should be able to use a deep array binding", () => {
+      const output = processJS(`return $("foo.0.bar")`, {
+        foo: [
+          {
+            bar: "baz",
+          },
+        ],
+      })
+      expect(output).toBe("baz")
+    })
+
+    it("should handle errors", () => {
+      const output = processJS(`throw "Error"`)
+      expect(output).toBe("Error while executing JS")
+    })
+
+    it("should timeout after one second", () => {
+      const output = processJS(`while (true) {}`)
+      expect(output).toBe("Timed out while executing JS")
+    })
+
+    it("should prevent access to the process global", () => {
+      const output = processJS(`return process`)
+      expect(output).toBe("Error while executing JS")
+    })
+  })
+
+  describe("check JS helpers", () => {
+    it("should error if using the format helper. not helpers.", () => {
+      const output = processJS(`return helper.toInt(4.3)`)
+      expect(output).toBe("Error while executing JS")
+    })
+
+    it("should be able to use toInt", () => {
+      const output = processJS(`return helpers.toInt(4.3)`)
+      expect(output).toBe(4)
+    })
+
+    it("should be able to use uuid", () => {
+      const output = processJS(`return helpers.uuid()`)
+      expect(output).toMatch(UUID_REGEX)
+    })
+  })
+})
@@ -1,3 +1,5 @@
+const vm = require("vm")
+
 jest.mock("@budibase/handlebars-helpers/lib/math", () => {
   const actual = jest.requireActual("@budibase/handlebars-helpers/lib/math")
 
@@ -15,7 +17,7 @@ jest.mock("@budibase/handlebars-helpers/lib/uuid", () => {
   }
 })
 
-const { processString } = require("../src/index.cjs")
+const { processString, setJSRunner } = require("../src/index.js")
 
 const tk = require("timekeeper")
 const { getParsedManifest, runJsHelpersTests } = require("./utils")
@@ -29,6 +31,12 @@ function escapeRegExp(string) {
 describe("manifest", () => {
   const manifest = getParsedManifest()
 
+  beforeAll(() => {
+    setJSRunner((js, context) => {
+      return vm.runInNewContext(js, context, { timeout: 1000 })
+    })
+  })
+
   describe("examples are valid", () => {
     describe.each(Object.keys(manifest))("%s", collection => {
       it.each(manifest[collection])("%s", async (_, { hbs, js }) => {
@@ -1,4 +1,4 @@
-const { processString } = require("../src/index.cjs")
+const { processString } = require("../src/index.js")
 
 describe("specific test case for whether or not full app template can still be rendered", () => {
   it("should be able to render the app template", async () => {
@@ -5,7 +5,7 @@ const {
   convertToJS,
   processStringSync,
   encodeJSBinding,
-} = require("../src/index.cjs")
+} = require("../src/index.js")
 
 function tryParseJson(str) {
   if (typeof str !== "string") {
@@ -48,7 +48,7 @@
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
-    "dd-trace": "5.0.0",
+    "dd-trace": "5.2.0",
     "dotenv": "8.6.0",
     "global-agent": "3.0.0",
     "ical-generator": "4.1.0",
yarn.lock (52 changed lines)
@@ -2227,6 +2227,13 @@
   dependencies:
     node-gyp-build "^3.9.0"
 
+"@datadog/native-appsec@7.0.0":
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-7.0.0.tgz#a380174dd49aef2d9bb613a0ec8ead6dc7822095"
+  integrity sha512-bywstWFW2hWxzPuS0+mFMVHHL0geulx5yQFtsjfszaH2LTAgk2D+Rt40MKbAoZ8q3tRw2dy6aYQ7svO3ca8jpA==
+  dependencies:
+    node-gyp-build "^3.9.0"
+
 "@datadog/native-iast-rewriter@2.2.2":
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.2.tgz#3f7feaf6be1af4c83ad063065b8ed509bbaf11cb"
@@ -8832,6 +8839,43 @@ dd-trace@5.0.0:
     semver "^7.5.4"
     tlhunter-sorted-set "^0.1.0"
 
+dd-trace@5.2.0:
+  version "5.2.0"
+  resolved "https://registry.yarnpkg.com/dd-trace/-/dd-trace-5.2.0.tgz#6ca2d76ece95f08d98468d7782c22f24192afa53"
+  integrity sha512-Z5ql3ZKzVW3DPstHPkTPcIPvKljHNtzTYY/WuZRlgT4XK7rMaN0j5nA8LlUh7m+tOPWs05IiKngbYVZjsqhRgA==
+  dependencies:
+    "@datadog/native-appsec" "7.0.0"
+    "@datadog/native-iast-rewriter" "2.2.2"
+    "@datadog/native-iast-taint-tracking" "1.6.4"
+    "@datadog/native-metrics" "^2.0.0"
+    "@datadog/pprof" "5.0.0"
+    "@datadog/sketches-js" "^2.1.0"
+    "@opentelemetry/api" "^1.0.0"
+    "@opentelemetry/core" "^1.14.0"
+    crypto-randomuuid "^1.0.0"
+    dc-polyfill "^0.1.2"
+    ignore "^5.2.4"
+    import-in-the-middle "^1.7.3"
+    int64-buffer "^0.1.9"
+    ipaddr.js "^2.1.0"
+    istanbul-lib-coverage "3.2.0"
+    jest-docblock "^29.7.0"
+    koalas "^1.0.2"
+    limiter "1.1.5"
+    lodash.sortby "^4.7.0"
+    lru-cache "^7.14.0"
+    methods "^1.1.2"
+    module-details-from-path "^1.0.3"
+    msgpack-lite "^0.1.26"
+    node-abort-controller "^3.1.1"
+    opentracing ">=0.12.1"
+    path-to-regexp "^0.1.2"
+    pprof-format "^2.0.7"
+    protobufjs "^7.2.5"
+    retry "^0.13.1"
+    semver "^7.5.4"
+    tlhunter-sorted-set "^0.1.0"
+
 debug@4, debug@4.3.4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@~4.3.1, debug@~4.3.2:
   version "4.3.4"
   resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
@@ -12082,10 +12126,10 @@ import-from@^3.0.0:
   dependencies:
     resolve-from "^5.0.0"
 
-import-in-the-middle@^1.7.1:
-  version "1.7.2"
-  resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.2.tgz#31c44088271b50ecb9cacbdfb1e5732c802e0658"
-  integrity sha512-coz7AjRnPyKW36J6JX5Bjz1mcX7MX1H2XsEGseVcnXMdzsAbbAu0HBZhiAem+3SAmuZdi+p8OwoB2qUpTRgjOQ==
+import-in-the-middle@^1.7.1, import-in-the-middle@^1.7.3:
+  version "1.7.3"
+  resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.3.tgz#ffa784cdd57a47d2b68d2e7dd33070ff06baee43"
+  integrity sha512-R2I11NRi0lI3jD2+qjqyVlVEahsejw7LDnYEbGb47QEFjczE3bZYsmWheCTQA+LFs2DzOQxR7Pms7naHW1V4bQ==
   dependencies:
     acorn "^8.8.2"
     acorn-import-assertions "^1.9.0"