Merge branch 'master' into BUDI-8441/row-action-run-api

commit b12641e467
Adria Navarro, 2024-07-25 16:09:23 +02:00 (committed by GitHub)
32 changed files with 233 additions and 100 deletions

View File

@@ -108,7 +108,7 @@ jobs:
      - name: Pull testcontainers images
        run: |
          docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sqs &
+          docker pull budibase/couchdb:v3.3.3 &
          docker pull redis &
          wait $(jobs -p)
@@ -162,17 +162,23 @@ jobs:
          node-version: 20.x
          cache: yarn
+      - name: Load dotenv
+        id: dotenv
+        uses: falti/dotenv-action@v1.1.3
+        with:
+          path: ./packages/server/datasource-sha.env
      - name: Pull testcontainers images
        run: |
-          docker pull mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04 &
-          docker pull mysql:8.3 &
-          docker pull postgres:16.1-bullseye &
-          docker pull mongo:7.0-jammy &
-          docker pull mariadb:lts &
-          docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sqs &
+          docker pull mcr.microsoft.com/mssql/server@${{ steps.dotenv.outputs.MSSQL_SHA }} &
+          docker pull mysql@${{ steps.dotenv.outputs.MYSQL_SHA }} &
+          docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
+          docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
+          docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
          docker pull minio/minio &
          docker pull redis &
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb:v3.3.3 &
          wait $(jobs -p)

View File

@@ -46,7 +46,7 @@ export default async function setup() {
   await killContainers(containers)
   try {
-    const couchdb = new GenericContainer("budibase/couchdb:v3.2.1-sqs")
+    const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
       .withExposedPorts(5984, 4984)
       .withEnvironment({
         COUCHDB_PASSWORD: "budibase",

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.24", "version": "2.29.25",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

View File

@@ -56,24 +56,24 @@ class CouchDBError extends Error implements DBError {
   constructor(
     message: string,
     info: {
-      status: number | undefined
-      statusCode: number | undefined
+      status?: number
+      statusCode?: number
       name: string
-      errid: string
-      description: string
-      reason: string
-      error: string
+      errid?: string
+      description?: string
+      reason?: string
+      error?: string
     }
   ) {
     super(message)
     const statusCode = info.status || info.statusCode || 500
     this.status = statusCode
     this.statusCode = statusCode
-    this.reason = info.reason
+    this.reason = info.reason || "Unknown"
     this.name = info.name
-    this.errid = info.errid
-    this.description = info.description
-    this.error = info.error
+    this.errid = info.errid || "Unknown"
+    this.description = info.description || "Unknown"
+    this.error = info.error || "Not found"
   }
 }
@@ -246,6 +246,35 @@
     })
   }

+  async bulkRemove(documents: Document[], opts?: { silenceErrors?: boolean }) {
+    const response: Nano.DocumentBulkResponse[] = await this.performCall(db => {
+      return () =>
+        db.bulk({
+          docs: documents.map(doc => ({
+            ...doc,
+            _deleted: true,
+          })),
+        })
+    })
+    if (opts?.silenceErrors) {
+      return
+    }
+    let errorFound = false
+    let errorMessage: string = "Unable to bulk remove documents: "
+    for (let res of response) {
+      if (res.error) {
+        errorFound = true
+        errorMessage += res.error
+      }
+    }
+    if (errorFound) {
+      throw new CouchDBError(errorMessage, {
+        name: this.name,
+        status: 400,
+      })
+    }
+  }
+
   async post(document: AnyDocument, opts?: DatabasePutOpts) {
     if (!document._id) {
       document._id = newid()
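
The new bulkRemove method centralises the fetch-then-mark-deleted pattern that callers previously hand-rolled with bulkDocs. A minimal sketch of the call shape, assuming a Database instance `db` and a `keys` array as in the removeUser change further down (names illustrative, not part of the diff):

    // Fetch the docs to delete, then let bulkRemove resubmit them with _deleted: true.
    const docs = await db.allDocs<Document>({ keys, include_docs: true })
    await db.bulkRemove(
      docs.rows.map(row => row.doc!),
      // silenceErrors swallows per-document failures instead of throwing
      // the aggregated CouchDBError built above
      { silenceErrors: true }
    )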

View File

@@ -71,6 +71,16 @@ export class DDInstrumentedDatabase implements Database {
     })
   }

+  bulkRemove(
+    documents: Document[],
+    opts?: { silenceErrors?: boolean }
+  ): Promise<void> {
+    return tracer.trace("db.bulkRemove", span => {
+      span?.addTags({ db_name: this.name, num_docs: documents.length })
+      return this.db.bulkRemove(documents, opts)
+    })
+  }
+
   put(
     document: AnyDocument,
     opts?: DatabasePutOpts | undefined

View File

@@ -113,15 +113,12 @@ export async function addUser(
 export async function removeUser(user: User) {
   const db = getPlatformDB()
   const keys = [user._id!, user.email]
-  const userDocs = await db.allDocs({
+  const userDocs = await db.allDocs<User>({
     keys,
     include_docs: true,
   })
-  const toDelete = userDocs.rows.map((row: any) => {
-    return {
-      ...row.doc,
-      _deleted: true,
-    }
-  })
-  await db.bulkDocs(toDelete)
+  await db.bulkRemove(
+    userDocs.rows.map(row => row.doc!),
+    { silenceErrors: true }
+  )
 }

View File

@@ -58,6 +58,7 @@
   AutomationEventType,
   AutomationStepType,
   AutomationActionStepId,
+  AutomationCustomIOType,
 } from "@budibase/types"
 import { FIELDS } from "constants/backend"
 import PropField from "./PropField.svelte"
@@ -394,7 +395,9 @@
    */
   const onRowTriggerUpdate = async update => {
     if (
-      ["tableId", "filters", "meta"].some(key => Object.hasOwn(update, key))
+      ["tableId", AutomationCustomIOType.FILTERS, "meta"].some(key =>
+        Object.hasOwn(update, key)
+      )
     ) {
       try {
         let updatedAutomation
@@ -744,7 +747,11 @@
     for (let [key, field] of properties) {
       // need to look for the builder definition (keyed separately, see saveFilters)
       const defKey = `${key}-def`
-      if (field.customType === "filters" && inputs?.[defKey]) {
+      if (
+        (field.customType === AutomationCustomIOType.FILTERS ||
+          field.customType === AutomationCustomIOType.TRIGGER_FILTER) &&
+        inputs?.[defKey]
+      ) {
         filters = inputs[defKey]
         break
       }
@@ -846,7 +853,7 @@
   <Label>
     {label}
   </Label>
-  {#if value.customType === "trigger_filter"}
+  {#if value.customType === AutomationCustomIOType.TRIGGER_FILTER}
     <Icon
       hoverable
       on:click={() =>
@@ -982,7 +989,7 @@
       {/if}
     </div>
   </div>
-{:else if value.customType === "filters" || value.customType === "trigger_filter"}
+{:else if value.customType === AutomationCustomIOType.FILTERS || value.customType === AutomationCustomIOType.TRIGGER_FILTER}
   <ActionButton fullWidth on:click={drawer.show}
     >{filters.length > 0
       ? "Update Filter"

View File

@@ -115,6 +115,7 @@
   })
   $: fields = bindings
     .filter(x => arrayTypes.includes(x.fieldSchema?.type))
+    .filter(x => x.fieldSchema?.tableId != null)
     .map(binding => {
       const { providerId, readableBinding, runtimeBinding } = binding
       const { name, type, tableId } = binding.fieldSchema

View File

@@ -0,0 +1,5 @@
+MSSQL_SHA=sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
+MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
+POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
+MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
+MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8

View File

@@ -16,7 +16,8 @@
     "build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets",
     "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
     "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
-    "build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets",
+    "build:isolated-vm-lib:buffer": "esbuild --minify --bundle src/jsRunner/bundles/buffer.ts --outfile=src/jsRunner/bundles/buffer.ivm.bundle.js --platform=node --format=iife --global-name=buffer",
+    "build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson && yarn build:isolated-vm-lib:snippets && yarn build:isolated-vm-lib:buffer",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
     "jest": "NODE_OPTIONS=\"--no-node-snapshot $NODE_OPTIONS\" jest",
@@ -68,6 +69,7 @@
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
+    "buffer": "6.0.3",
     "bull": "4.10.1",
     "chokidar": "3.5.3",
     "content-disposition": "^0.5.4",

View File

@@ -25,7 +25,7 @@ export const definition: AutomationTriggerSchema = {
     },
     filters: {
       type: AutomationIOType.OBJECT,
-      customType: AutomationCustomIOType.FILTERS,
+      customType: AutomationCustomIOType.TRIGGER_FILTER,
       title: "Filtering",
     },
   },
}, },

View File

@@ -6,7 +6,6 @@ import {
   Database,
   FieldSchema,
   FieldType,
-  LinkDocumentValue,
   RelationshipFieldMetadata,
   RelationshipType,
   Row,
@@ -213,11 +212,10 @@ class LinkController {
           linkedSchema?.relationshipType === RelationshipType.ONE_TO_MANY
         ) {
           let links = (
-            (await getLinkDocuments({
+            await getLinkDocuments({
               tableId: field.tableId,
               rowId: linkId,
-              includeDocs: IncludeDocs.EXCLUDE,
-            })) as LinkDocumentValue[]
+            })
           ).filter(
             link =>
               link.id !== row._id && link.fieldName === linkedSchema.name
@@ -295,13 +293,7 @@ class LinkController {
     if (linkDocs.length === 0) {
       return null
     }
-    const toDelete = linkDocs.map(doc => {
-      return {
-        ...doc,
-        _deleted: true,
-      }
-    })
-    await this._db.bulkDocs(toDelete)
+    await this._db.bulkRemove(linkDocs, { silenceErrors: true })
     return row
   }
@@ -321,14 +313,8 @@ class LinkController {
           : linkDoc.doc2.fieldName
         return correctFieldName === fieldName
       })
-      await this._db.bulkDocs(
-        toDelete.map(doc => {
-          return {
-            ...doc,
-            _deleted: true,
-          }
-        })
-      )
+      await this._db.bulkRemove(toDelete, { silenceErrors: true })
       try {
         // remove schema from other table, if it exists
         let linkedTable = await this._db.get<Table>(field.tableId)
@@ -453,13 +439,7 @@ class LinkController {
       return null
     }
     // get link docs for this table and configure for deletion
-    const toDelete = linkDocs.map(doc => {
-      return {
-        ...doc,
-        _deleted: true,
-      }
-    })
-    await this._db.bulkDocs(toDelete)
+    await this._db.bulkRemove(linkDocs, { silenceErrors: true })
     return table
   }
 }
} }

View File

@@ -1,6 +1,5 @@
 import LinkController from "./LinkController"
 import {
-  IncludeDocs,
   getLinkDocuments,
   getUniqueByProp,
   getRelatedTableForField,
@@ -56,12 +55,9 @@ async function getLinksForRows(rows: Row[]): Promise<LinkDocumentValue[]> {
   const promises = tableIds.map(tableId =>
     getLinkDocuments({
       tableId: tableId,
-      includeDocs: IncludeDocs.EXCLUDE,
     })
   )
-  const responses = flatten(
-    (await Promise.all(promises)) as LinkDocumentValue[][]
-  )
+  const responses = flatten(await Promise.all(promises))
   // have to get unique as the previous table query can
   // return duplicates, could be querying for both tables in a relation
   return getUniqueByProp(

View File

@@ -34,6 +34,17 @@ export const IncludeDocs = {
  * @returns This will return an array of the linking documents that were found
  * (if any).
  */
+export function getLinkDocuments(args: {
+  tableId?: string
+  rowId?: string
+  fieldName?: string
+  includeDocs: boolean
+}): Promise<LinkDocument[]>
+export function getLinkDocuments(args: {
+  tableId?: string
+  rowId?: string
+  fieldName?: string
+}): Promise<LinkDocumentValue[]>
 export async function getLinkDocuments(args: {
   tableId?: string
   rowId?: string
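
The two overload signatures added above let the compiler pick the return type from whether `includeDocs` is supplied, which is what allowed the `as LinkDocumentValue[]` casts in LinkController and getLinksForRows to be dropped. A standalone sketch of the same pattern, with simplified illustrative types rather than the real Budibase ones:

    interface LinkDocumentValue { id: string; fieldName: string }
    interface LinkDocument extends LinkDocumentValue { doc1: unknown; doc2: unknown }

    // Overloads: the presence of `includeDocs` selects the richer return type.
    function getLinks(args: { tableId?: string; includeDocs: boolean }): Promise<LinkDocument[]>
    function getLinks(args: { tableId?: string }): Promise<LinkDocumentValue[]>
    async function getLinks(args: {
      tableId?: string
      includeDocs?: boolean
    }): Promise<LinkDocument[] | LinkDocumentValue[]> {
      return [] // the real implementation queries the linking documents in CouchDB
    }

    // Callers get precise types with no casts:
    const values = getLinks({ tableId: "tbl" })                  // Promise<LinkDocumentValue[]>
    const docs = getLinks({ tableId: "tbl", includeDocs: true }) // Promise<LinkDocument[]>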

View File

@@ -0,0 +1,13 @@
+import dotenv from "dotenv"
+import { join } from "path"
+
+const path = join(__dirname, "..", "..", "..", "..", "datasource-sha.env")
+dotenv.config({
+  path,
+})
+
+export const MSSQL_IMAGE = `mcr.microsoft.com/mssql/server@${process.env.MSSQL_SHA}`
+export const MYSQL_IMAGE = `mysql@${process.env.MYSQL_SHA}`
+export const POSTGRES_IMAGE = `postgres@${process.env.POSTGRES_SHA}`
+export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
+export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`

View File

@@ -1,3 +1,4 @@
+import "./images"
 import { Datasource, SourceName } from "@budibase/types"
 import * as postgres from "./postgres"
 import * as mongodb from "./mongodb"
@@ -67,7 +68,11 @@ export async function knexClient(ds: Datasource) {
 export async function startContainer(container: GenericContainer) {
   const imageName = (container as any).imageName.string as string
-  const key = imageName.replaceAll("/", "-").replaceAll(":", "-")
+  let key: string = imageName
+  if (imageName.includes("@sha256")) {
+    key = imageName.split("@")[0]
+  }
+  key = key.replaceAll("/", "-").replaceAll(":", "-")

   container = container
     .withReuse()
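
Digest-pinned images contain `@sha256:` in their names, which the old reuse-key derivation would have copied into the key verbatim; the new branch strips the digest first. A standalone illustration of the transformation (the digest is the MSSQL one from datasource-sha.env above):

    function reuseKey(imageName: string): string {
      let key: string = imageName
      if (imageName.includes("@sha256")) {
        key = imageName.split("@")[0] // drop the digest portion
      }
      return key.replaceAll("/", "-").replaceAll(":", "-")
    }

    reuseKey("mariadb:lts")
    // -> "mariadb-lts"
    reuseKey("mcr.microsoft.com/mssql/server@sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8")
    // -> "mcr.microsoft.com-mssql-server"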

View File

@@ -4,6 +4,7 @@ import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
 import { knexClient } from "./mysql"
+import { MARIADB_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

@@ -27,7 +28,7 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer("mariadb:lts")
+      new GenericContainer(MARIADB_IMAGE)
         .withExposedPorts(3306)
         .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
         .withWaitStrategy(new MariaDBWaitStrategy())

View File

@@ -2,13 +2,14 @@ import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { Datasource, SourceName } from "@budibase/types"
 import { GenericContainer, Wait } from "testcontainers"
 import { startContainer } from "."
+import { MONGODB_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer("mongo:7.0-jammy")
+      new GenericContainer(MONGODB_IMAGE)
         .withExposedPorts(27017)
         .withEnvironment({
           MONGO_INITDB_ROOT_USERNAME: "mongo",

View File

@@ -3,15 +3,14 @@ import { GenericContainer, Wait } from "testcontainers"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
 import knex from "knex"
+import { MSSQL_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer(
-        "mcr.microsoft.com/mssql/server:2022-CU13-ubuntu-22.04"
-      )
+      new GenericContainer(MSSQL_IMAGE)
         .withExposedPorts(1433)
         .withEnvironment({
           ACCEPT_EULA: "Y",

View File

@@ -4,6 +4,7 @@ import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
 import knex from "knex"
+import { MYSQL_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

@@ -30,7 +31,7 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer("mysql:8.3")
+      new GenericContainer(MYSQL_IMAGE)
         .withExposedPorts(3306)
         .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
         .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))

View File

@@ -3,13 +3,14 @@ import { GenericContainer, Wait } from "testcontainers"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
 import knex from "knex"
+import { POSTGRES_IMAGE } from "./images"

 let ports: Promise<testContainerUtils.Port[]>

 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
-      new GenericContainer("postgres:16.1-bullseye")
+      new GenericContainer(POSTGRES_IMAGE)
         .withExposedPorts(5432)
         .withEnvironment({ POSTGRES_PASSWORD: "password" })
         .withWaitStrategy(

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
+const Buffer = require("buffer/").Buffer
+
+export default Buffer
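
The trailing slash in `require("buffer/")` is deliberate: a trailing slash makes Node's module resolution skip core modules and load the installed `buffer` npm package (added to the server package.json above), so esbuild can bundle a self-contained implementation for the isolate. A quick standalone check of the distinction:

    // With a trailing slash, resolution goes to node_modules instead of the core module.
    const nodeBuffer = require("buffer")  // Node's built-in module
    const npmBuffer = require("buffer/")  // the "buffer" package from node_modules
    console.log(nodeBuffer === npmBuffer) // false: two different modules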

View File

@@ -4,12 +4,14 @@ export const enum BundleType {
   HELPERS = "helpers",
   BSON = "bson",
   SNIPPETS = "snippets",
+  BUFFER = "buffer",
 }

 const bundleSourceFile: Record<BundleType, string> = {
   [BundleType.HELPERS]: "./index-helpers.ivm.bundle.js",
   [BundleType.BSON]: "./bson.ivm.bundle.js",
   [BundleType.SNIPPETS]: "./snippets.ivm.bundle.js",
+  [BundleType.BUFFER]: "./buffer.ivm.bundle.js",
 }

 const bundleSourceCode: Partial<Record<BundleType, string>> = {}

View File

@@ -23,6 +23,7 @@ export function init() {
         isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
       })
         .withHelpers()
+        .withBuffer()
         .withSnippets(bbCtx?.snippets)

     // Persist isolate in context so we can reuse it

View File

@@ -71,6 +71,17 @@ describe("jsRunner (using isolated-vm)", () => {
       expect(result).toBeLessThanOrEqual(max)
     })
   })

+  describe("buffer", () => {
+    it("handle a buffer", async () => {
+      const base64 = Buffer.from("hello").toString("base64")
+      const result = await processJS(
+        `return Buffer.from("${base64}", "base64").toString("utf8")`
+      )
+      expect(result).toBeDefined()
+      expect(result).toEqual("hello")
+    })
+  })
 })

 // the test cases here were extracted from templates/real world examples of JS in Budibase

View File

@@ -86,6 +86,7 @@ export class IsolatedVM implements VM {
       }
     }`
     const helpersSource = loadBundle(BundleType.HELPERS)
+
     const script = this.isolate.compileScriptSync(
       `${injectedRequire};${helpersSource};helpers=helpers.default`
     )
@@ -118,6 +119,19 @@
     return this
   }

+  withBuffer() {
+    const bufferSource = loadBundle(BundleType.BUFFER)
+    const script = this.isolate.compileScriptSync(`
+      ${bufferSource};
+      const Buffer = buffer.default;
+    `)
+    script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
+    new Promise(() => {
+      script.release()
+    })
+    return this
+  }
+
   withContext<T>(context: Record<string, any>, executeWithContext: () => T) {
     this.addToContext(context)

View File

@@ -80,18 +80,31 @@ export async function startup(
     const address = server.address() as AddressInfo
     env._set("PORT", address.port)
   }
+
+  console.log("Emitting port event")
   eventEmitter.emitPort(env.PORT)
+
+  console.log("Initialising file system")
   fileSystem.init()
+
+  console.log("Initialising redis")
   await redis.init()
+
+  console.log("Initialising writethrough cache")
   cache.docWritethrough.init()
+
+  console.log("Initialising events")
   eventInit()
+
   if (app && server) {
+    console.log("Initialising websockets")
     initialiseWebsockets(app, server)
   }

   // run migrations on startup if not done via http
   // not recommended in a clustered environment
   if (!env.HTTP_MIGRATIONS && !env.isTest()) {
+    console.log("Running migrations")
     try {
       await migrations.migrate()
     } catch (e) {
@@ -107,12 +120,15 @@ export async function startup(
     env.PLUGINS_DIR &&
     fs.existsSync(env.PLUGINS_DIR)
   ) {
+    console.log("Monitoring plugin directory")
     watch()
   }

   // check for version updates
+  console.log("Checking for version updates")
   await installation.checkInstallVersion()

+  console.log("Initialising queues")
   // get the references to the queue promises, don't await as
   // they will never end, unless the processing stops
   let queuePromises = []
@@ -126,6 +142,7 @@ export async function startup(
   }
   queuePromises.push(initPro())
   if (app) {
+    console.log("Initialising routes")
     // bring routes online as final step once everything ready
     await initRoutes(app)
   }
@@ -141,6 +158,7 @@ export async function startup(
     bbAdminEmail &&
     bbAdminPassword
   ) {
+    console.log("Initialising admin user")
     const tenantId = tenancy.getTenantId()
     await tenancy.doInTenant(tenantId, async () => {
       const exists = await users.doesUserExist(bbAdminEmail)
@@ -171,5 +189,6 @@ export async function startup(
     })
   }

+  console.log("Initialising JS runner")
   jsRunner.init()
 }

View File

@@ -555,7 +555,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     if (Array.isArray(docValue)) {
       return docValue.length === 0
     }
-    if (typeof docValue === "object") {
+    if (docValue && typeof docValue === "object") {
       return Object.keys(docValue).length === 0
     }
     return docValue == null
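
The added truthiness guard matters because of a JavaScript quirk: `typeof null === "object"`, so the old check would route null column values into Object.keys and throw. A standalone demonstration:

    const docValue: unknown = null

    console.log(typeof docValue) // "object": null reports as an object
    if (docValue && typeof docValue === "object") {
      // without the truthiness guard, Object.keys(null) here would throw a TypeError
      console.log(Object.keys(docValue).length === 0)
    } else {
      console.log(docValue == null) // true: null now falls through to the final check
    }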

View File

@@ -137,6 +137,10 @@ export interface Database {
   ): Promise<T[]>
   remove(idOrDoc: Document): Promise<Nano.DocumentDestroyResponse>
   remove(idOrDoc: string, rev?: string): Promise<Nano.DocumentDestroyResponse>
+  bulkRemove(
+    documents: Document[],
+    opts?: { silenceErrors?: boolean }
+  ): Promise<void>
   put(
     document: AnyDocument,
     opts?: DatabasePutOpts

View File

@@ -2038,7 +2038,7 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@2.29.22":
+"@budibase/backend-core@2.29.24":
   version "0.0.0"
   dependencies:
     "@budibase/nano" "10.1.5"
@@ -2119,14 +2119,14 @@
     through2 "^2.0.0"

 "@budibase/pro@npm:@budibase/pro@latest":
-  version "2.29.22"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.29.22.tgz#2608b2a76be0426879068e5a61100d6b8dde6f3a"
-  integrity sha512-flMVIpWQb9w3f4aiBSM73aLcYmfoPIf+kP8JXgRWO0k3nGrUGaMQNBKzXwC7soTkTrJCZjBh8uaY75AxTP2RdA==
+  version "2.29.24"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.29.24.tgz#2dbd4c6c0f757aab7e17c413c6d6e4520086f9ac"
+  integrity sha512-m1v24UD6O21Vbrfsuo5kC5oeg7FzjWO2w8TQMw1VvPKmdIqqclaKDPTPytxwllTMkapMDRNzM5cQzqnQ3yHf6A==
   dependencies:
-    "@budibase/backend-core" "2.29.22"
-    "@budibase/shared-core" "2.29.22"
-    "@budibase/string-templates" "2.29.22"
-    "@budibase/types" "2.29.22"
+    "@budibase/backend-core" "2.29.24"
+    "@budibase/shared-core" "2.29.24"
+    "@budibase/string-templates" "2.29.24"
+    "@budibase/types" "2.29.24"
     "@koa/router" "8.0.8"
     bull "4.10.1"
     joi "17.6.0"
@@ -2137,13 +2137,13 @@
     scim-patch "^0.8.1"
     scim2-parse-filter "^0.2.8"

-"@budibase/shared-core@2.29.22":
+"@budibase/shared-core@2.29.24":
   version "0.0.0"
   dependencies:
     "@budibase/types" "0.0.0"
     cron-validate "1.4.5"

-"@budibase/string-templates@2.29.22":
+"@budibase/string-templates@2.29.24":
   version "0.0.0"
   dependencies:
     "@budibase/handlebars-helpers" "^0.13.2"
@@ -2151,7 +2151,7 @@
     handlebars "^4.7.8"
     lodash.clonedeep "^4.5.0"

-"@budibase/types@2.29.22":
+"@budibase/types@2.29.24":
   version "0.0.0"
   dependencies:
     scim-patch "^0.8.1"
@@ -7770,6 +7770,14 @@ buffer@4.9.2:
     ieee754 "^1.1.4"
     isarray "^1.0.0"

+buffer@6.0.3, buffer@^6.0.3:
+  version "6.0.3"
+  resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6"
+  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
+  dependencies:
+    base64-js "^1.3.1"
+    ieee754 "^1.2.1"
+
 buffer@^5.1.0, buffer@^5.2.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0:
   version "5.7.1"
   resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0"
@@ -7778,14 +7786,6 @@ buffer@^5.1.0, buffer@^5.2.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0:
     base64-js "^1.3.1"
     ieee754 "^1.1.13"

-buffer@^6.0.3:
-  version "6.0.3"
-  resolved "https://registry.yarnpkg.com/buffer/-/buffer-6.0.3.tgz#2ace578459cc8fbe2a70aaa8f52ee63b6a74c6c6"
-  integrity sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==
-  dependencies:
-    base64-js "^1.3.1"
-    ieee754 "^1.2.1"
-
 bufferutil@^4.0.1:
   version "4.0.7"
   resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.7.tgz#60c0d19ba2c992dd8273d3f73772ffc894c153ad"