From 3f104cb2abb3eac900ba619dea54228644e3fcc8 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 12:02:23 +0000 Subject: [PATCH 01/50] Big automation refactor. Down to 11 test failures. --- packages/server/src/automations/actions.ts | 14 +- .../server/src/automations/automationUtils.ts | 15 +- packages/server/src/automations/loopUtils.ts | 46 -- .../server/src/automations/steps/filter.ts | 17 +- .../src/automations/tests/scenarios.spec.ts | 29 +- .../automations/tests/steps/createRow.spec.ts | 6 +- .../automations/tests/steps/filter.spec.ts | 18 +- .../src/automations/tests/steps/loop.spec.ts | 113 +--- .../tests/triggers/webhook.spec.ts | 2 +- packages/server/src/automations/utils.ts | 5 +- .../server/src/definitions/automations.ts | 10 +- packages/server/src/threads/automation.ts | 614 ++++++++---------- .../src/automations/steps/filter.ts | 20 +- packages/shared-core/src/helpers/helpers.ts | 4 +- .../app/automation/StepInputsOutputs.ts | 20 +- .../documents/app/automation/automation.ts | 30 +- .../src/documents/app/automation/schema.ts | 65 +- 17 files changed, 426 insertions(+), 602 deletions(-) delete mode 100644 packages/server/src/automations/loopUtils.ts diff --git a/packages/server/src/automations/actions.ts b/packages/server/src/automations/actions.ts index 65a57c2586..89a940ae7b 100644 --- a/packages/server/src/automations/actions.ts +++ b/packages/server/src/automations/actions.ts @@ -27,6 +27,8 @@ import { Hosting, ActionImplementation, AutomationStepDefinition, + AutomationStepInputs, + AutomationStepOutputs, } from "@budibase/types" import sdk from "../sdk" import { getAutomationPlugin } from "../utilities/fileSystem" @@ -120,11 +122,15 @@ export async function getActionDefinitions(): Promise< } /* istanbul ignore next */ -export async function getAction( - stepId: AutomationActionStepId -): Promise | undefined> { +export async function getAction< + TStep extends AutomationActionStepId, + TInputs = AutomationStepInputs, + TOutputs = AutomationStepOutputs +>(stepId: TStep): Promise | undefined> { if (ACTION_IMPLS[stepId as keyof ActionImplType] != null) { - return ACTION_IMPLS[stepId as keyof ActionImplType] + return ACTION_IMPLS[ + stepId as keyof ActionImplType + ] as unknown as ActionImplementation } // must be a plugin diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts index eacf81ef92..96bfcc87e9 100644 --- a/packages/server/src/automations/automationUtils.ts +++ b/packages/server/src/automations/automationUtils.ts @@ -32,7 +32,7 @@ import path from "path" * primitive types. 
*/ export function cleanInputValues>( - inputs: any, + inputs: T, schema?: any ): T { if (schema == null) { @@ -50,15 +50,18 @@ export function cleanInputValues>( if (propSchema.type === "boolean") { let lcInput = input.toLowerCase() if (lcInput === "true") { + // @ts-expect-error - indexing a generic on purpose inputs[inputKey] = true } if (lcInput === "false") { + // @ts-expect-error - indexing a generic on purpose inputs[inputKey] = false } } if (propSchema.type === "number") { let floatInput = parseFloat(input) if (!isNaN(floatInput)) { + // @ts-expect-error - indexing a generic on purpose inputs[inputKey] = floatInput } } @@ -294,13 +297,3 @@ export function typecastForLooping(input: LoopStepInputs) { } return input.binding } - -export function ensureMaxIterationsAsNumber( - value: number | string | undefined -): number | undefined { - if (typeof value === "number") return value - if (typeof value === "string") { - return parseInt(value) - } - return undefined -} diff --git a/packages/server/src/automations/loopUtils.ts b/packages/server/src/automations/loopUtils.ts deleted file mode 100644 index 2596fb796d..0000000000 --- a/packages/server/src/automations/loopUtils.ts +++ /dev/null @@ -1,46 +0,0 @@ -import * as automationUtils from "./automationUtils" -import { isPlainObject } from "lodash" - -type ObjValue = { - [key: string]: string | ObjValue -} - -export function replaceFakeBindings>( - originalStepInput: T, - loopStepNumber: number -): T { - const result: Record = {} - for (const [key, value] of Object.entries(originalStepInput)) { - result[key] = replaceBindingsRecursive(value, loopStepNumber) - } - return result as T -} - -function replaceBindingsRecursive( - value: string | ObjValue, - loopStepNumber: number -) { - if (value === null || value === undefined) { - return value - } - - if (typeof value === "object") { - for (const [innerKey, innerValue] of Object.entries(value)) { - if (typeof innerValue === "string") { - value[innerKey] = automationUtils.substituteLoopStep( - innerValue, - `steps.${loopStepNumber}` - ) - } else if ( - innerValue && - isPlainObject(innerValue) && - Object.keys(innerValue).length > 0 - ) { - value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber) - } - } - } else if (typeof value === "string") { - value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`) - } - return value -} diff --git a/packages/server/src/automations/steps/filter.ts b/packages/server/src/automations/steps/filter.ts index 9b7e347034..92a76692cc 100644 --- a/packages/server/src/automations/steps/filter.ts +++ b/packages/server/src/automations/steps/filter.ts @@ -1,7 +1,8 @@ -import { FilterStepInputs, FilterStepOutputs } from "@budibase/types" -import { automations } from "@budibase/shared-core" - -const FilterConditions = automations.steps.filter.FilterConditions +import { + FilterCondition, + FilterStepInputs, + FilterStepOutputs, +} from "@budibase/types" export async function run({ inputs, @@ -26,16 +27,16 @@ export async function run({ let result = false if (typeof field !== "object" && typeof value !== "object") { switch (condition) { - case FilterConditions.EQUAL: + case FilterCondition.EQUAL: result = field === value break - case FilterConditions.NOT_EQUAL: + case FilterCondition.NOT_EQUAL: result = field !== value break - case FilterConditions.GREATER_THAN: + case FilterCondition.GREATER_THAN: result = field > value break - case FilterConditions.LESS_THAN: + case FilterCondition.LESS_THAN: result = field < value break } diff --git 
a/packages/server/src/automations/tests/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios.spec.ts index 3015e75018..fdecfe3461 100644 --- a/packages/server/src/automations/tests/scenarios.spec.ts +++ b/packages/server/src/automations/tests/scenarios.spec.ts @@ -1,5 +1,11 @@ import * as automation from "../index" -import { LoopStepType, FieldType, Table, Datasource } from "@budibase/types" +import { + LoopStepType, + FieldType, + Table, + Datasource, + FilterCondition, +} from "@budibase/types" import { createAutomationBuilder } from "./utilities/AutomationTestBuilder" import { DatabaseName, @@ -7,12 +13,9 @@ import { } from "../../integrations/tests/utils" import { Knex } from "knex" import { generator } from "@budibase/backend-core/tests" -import { automations } from "@budibase/shared-core" import TestConfiguration from "../../tests/utilities/TestConfiguration" import { basicTable } from "../../tests/utilities/structures" -const FilterConditions = automations.steps.filter.FilterConditions - describe("Automation Scenarios", () => { const config = new TestConfiguration() @@ -256,7 +259,7 @@ describe("Automation Scenarios", () => { }) .filter({ field: "{{ steps.2.rows.0.value }}", - condition: FilterConditions.EQUAL, + condition: FilterCondition.EQUAL, value: 20, }) .serverLog({ text: "Equal condition met" }) @@ -282,7 +285,7 @@ describe("Automation Scenarios", () => { }) .filter({ field: "{{ steps.2.rows.0.value }}", - condition: FilterConditions.NOT_EQUAL, + condition: FilterCondition.NOT_EQUAL, value: 20, }) .serverLog({ text: "Not Equal condition met" }) @@ -295,37 +298,37 @@ describe("Automation Scenarios", () => { const testCases = [ { - condition: FilterConditions.EQUAL, + condition: FilterCondition.EQUAL, value: 10, rowValue: 10, expectPass: true, }, { - condition: FilterConditions.NOT_EQUAL, + condition: FilterCondition.NOT_EQUAL, value: 10, rowValue: 20, expectPass: true, }, { - condition: FilterConditions.GREATER_THAN, + condition: FilterCondition.GREATER_THAN, value: 10, rowValue: 15, expectPass: true, }, { - condition: FilterConditions.LESS_THAN, + condition: FilterCondition.LESS_THAN, value: 10, rowValue: 5, expectPass: true, }, { - condition: FilterConditions.GREATER_THAN, + condition: FilterCondition.GREATER_THAN, value: 10, rowValue: 5, expectPass: false, }, { - condition: FilterConditions.LESS_THAN, + condition: FilterCondition.LESS_THAN, value: 10, rowValue: 15, expectPass: false, @@ -404,7 +407,7 @@ if (descriptions.length) { client = ds.client! 
}) - it("should query an external database for some data then insert than into an internal table", async () => { + it.only("should query an external database for some data then insert than into an internal table", async () => { const newTable = await config.api.table.save({ ...basicTable(), name: "table", diff --git a/packages/server/src/automations/tests/steps/createRow.spec.ts b/packages/server/src/automations/tests/steps/createRow.spec.ts index 0a3913cd25..01ce227f36 100644 --- a/packages/server/src/automations/tests/steps/createRow.spec.ts +++ b/packages/server/src/automations/tests/steps/createRow.spec.ts @@ -4,7 +4,7 @@ import { } from "../../../tests/utilities/structures" import { objectStore } from "@budibase/backend-core" import { createAutomationBuilder } from "../utilities/AutomationTestBuilder" -import { Row, Table } from "@budibase/types" +import { FilterCondition, Row, Table } from "@budibase/types" import TestConfiguration from "../../../tests/utilities/TestConfiguration" async function uploadTestFile(filename: string) { @@ -90,7 +90,7 @@ describe("test the create row action", () => { .createRow({ row: {} }, { stepName: "CreateRow" }) .filter({ field: "{{ stepsByName.CreateRow.success }}", - condition: "equal", + condition: FilterCondition.EQUAL, value: true, }) .serverLog( @@ -131,7 +131,7 @@ describe("test the create row action", () => { .createRow({ row: attachmentRow }, { stepName: "CreateRow" }) .filter({ field: "{{ stepsByName.CreateRow.success }}", - condition: "equal", + condition: FilterCondition.EQUAL, value: true, }) .serverLog( diff --git a/packages/server/src/automations/tests/steps/filter.spec.ts b/packages/server/src/automations/tests/steps/filter.spec.ts index 23c191b38d..da1f6e4702 100644 --- a/packages/server/src/automations/tests/steps/filter.spec.ts +++ b/packages/server/src/automations/tests/steps/filter.spec.ts @@ -1,19 +1,21 @@ -import { automations } from "@budibase/shared-core" import { createAutomationBuilder } from "../utilities/AutomationTestBuilder" import TestConfiguration from "../../../tests/utilities/TestConfiguration" +import { FilterCondition } from "@budibase/types" -const FilterConditions = automations.steps.filter.FilterConditions - -function stringToFilterCondition(condition: "==" | "!=" | ">" | "<"): string { +function stringToFilterCondition( + condition: "==" | "!=" | ">" | "<" +): FilterCondition { switch (condition) { case "==": - return FilterConditions.EQUAL + return FilterCondition.EQUAL case "!=": - return FilterConditions.NOT_EQUAL + return FilterCondition.NOT_EQUAL case ">": - return FilterConditions.GREATER_THAN + return FilterCondition.GREATER_THAN case "<": - return FilterConditions.LESS_THAN + return FilterCondition.LESS_THAN + default: + throw new Error(`Unsupported condition: ${condition}`) } } diff --git a/packages/server/src/automations/tests/steps/loop.spec.ts b/packages/server/src/automations/tests/steps/loop.spec.ts index f8af7dcf9f..9e8d82c1f0 100644 --- a/packages/server/src/automations/tests/steps/loop.spec.ts +++ b/packages/server/src/automations/tests/steps/loop.spec.ts @@ -6,8 +6,8 @@ import { ServerLogStepOutputs, CreateRowStepOutputs, FieldType, + FilterCondition, } from "@budibase/types" -import * as loopUtils from "../../loopUtils" import { createAutomationBuilder } from "../utilities/AutomationTestBuilder" import TestConfiguration from "../../../tests/utilities/TestConfiguration" @@ -530,97 +530,30 @@ describe("Attempt to run a basic loop automation", () => { 
expect(results.steps[2].outputs.rows).toHaveLength(0) }) - describe("replaceFakeBindings", () => { - it("should replace loop bindings in nested objects", () => { - const originalStepInput = { - schema: { - name: { - type: "string", - constraints: { - type: "string", - length: { maximum: null }, - presence: false, - }, - name: "name", - display: { type: "Text" }, - }, - }, - row: { - tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad", - name: "{{ loop.currentItem.pokemon }}", - }, - } + describe("loop output", () => { + it("should not output anything if a filter stops the automation", async () => { + const results = await createAutomationBuilder(config) + .onAppAction() + .filter({ + condition: FilterCondition.EQUAL, + field: "1", + value: "2", + }) + .loop({ + option: LoopStepType.ARRAY, + binding: [1, 2, 3], + }) + .serverLog({ text: "Message {{loop.currentItem}}" }) + .test({ fields: {} }) - const loopStepNumber = 3 - - const result = loopUtils.replaceFakeBindings( - originalStepInput, - loopStepNumber - ) - - expect(result).toEqual({ - schema: { - name: { - type: "string", - constraints: { - type: "string", - length: { maximum: null }, - presence: false, - }, - name: "name", - display: { type: "Text" }, - }, - }, - row: { - tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad", - name: "{{ steps.3.currentItem.pokemon }}", - }, + expect(results.steps.length).toBe(1) + expect(results.steps[0].outputs).toEqual({ + comparisonValue: 2, + refValue: 1, + result: false, + success: true, + status: "stopped", }) }) - - it("should handle null values in nested objects", () => { - const originalStepInput = { - nullValue: null, - nestedNull: { - someKey: null, - }, - validValue: "{{ loop.someValue }}", - } - - const loopStepNumber = 2 - - const result = loopUtils.replaceFakeBindings( - originalStepInput, - loopStepNumber - ) - - expect(result).toEqual({ - nullValue: null, - nestedNull: { - someKey: null, - }, - validValue: "{{ steps.2.someValue }}", - }) - }) - - it("should handle empty objects and arrays", () => { - const originalStepInput = { - emptyObject: {}, - emptyArray: [], - nestedEmpty: { - emptyObj: {}, - emptyArr: [], - }, - } - - const loopStepNumber = 1 - - const result = loopUtils.replaceFakeBindings( - originalStepInput, - loopStepNumber - ) - - expect(result).toEqual(originalStepInput) - }) }) }) diff --git a/packages/server/src/automations/tests/triggers/webhook.spec.ts b/packages/server/src/automations/tests/triggers/webhook.spec.ts index 664812f860..9649846830 100644 --- a/packages/server/src/automations/tests/triggers/webhook.spec.ts +++ b/packages/server/src/automations/tests/triggers/webhook.spec.ts @@ -5,7 +5,7 @@ import TestConfiguration from "../../../tests/utilities/TestConfiguration" mocks.licenses.useSyncAutomations() -describe("Branching automations", () => { +describe("Webhook trigger test", () => { const config = new TestConfiguration() let table: Table let webhook: Webhook diff --git a/packages/server/src/automations/utils.ts b/packages/server/src/automations/utils.ts index 83665fc975..a01a760b93 100644 --- a/packages/server/src/automations/utils.ts +++ b/packages/server/src/automations/utils.ts @@ -10,6 +10,7 @@ import { Automation, AutomationActionStepId, AutomationJob, + AutomationResults, AutomationStepDefinition, AutomationTriggerDefinition, AutomationTriggerStepId, @@ -261,9 +262,7 @@ export function isRecurring(automation: Automation) { ) } -export function isErrorInOutput(output: { - steps: { outputs?: { success: boolean } }[] -}) { +export function 
isErrorInOutput(output: AutomationResults) { let first = true, error = false for (let step of output.steps) { diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index 67d6e04e9d..c7e7e5d514 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -1,4 +1,4 @@ -import { AutomationResults, LoopStepType, UserBindings } from "@budibase/types" +import { LoopStepType, UserBindings } from "@budibase/types" export interface LoopInput { option: LoopStepType @@ -13,19 +13,17 @@ export interface TriggerOutput { timestamp?: number } -export interface AutomationContext extends AutomationResults { +export interface AutomationContext { + trigger: any steps: any[] stepsById: Record stepsByName: Record env?: Record user?: UserBindings - trigger: any settings?: { url?: string logo?: string company?: string } + loop?: { currentItem: any } } - -export interface AutomationResponse - extends Omit {} diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 174efa0fe0..fecc1db141 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -7,7 +7,6 @@ import { } from "../automations/utils" import * as actions from "../automations/actions" import * as automationUtils from "../automations/automationUtils" -import { replaceFakeBindings } from "../automations/loopUtils" import { dataFilters, helpers, utils } from "@budibase/shared-core" import { default as AutomationEmitter } from "../events/AutomationEmitter" import { generateAutomationMetadataID, isProdAppID } from "../db/utils" @@ -30,12 +29,12 @@ import { UserBindings, isBasicSearchOperator, ContextEmitter, + LoopStepType, + AutomationTriggerResult, + AutomationResults, + AutomationStepResult, } from "@budibase/types" -import { - AutomationContext, - AutomationResponse, - TriggerOutput, -} from "../definitions/automations" +import { AutomationContext } from "../definitions/automations" import { WorkerCallback } from "./definitions" import { context, logging, configs } from "@budibase/backend-core" import { @@ -48,11 +47,24 @@ import { performance } from "perf_hooks" import * as sdkUtils from "../sdk/utils" import env from "../environment" import tracer from "dd-trace" +import { isPlainObject } from "lodash" threadUtils.threadSetup() const CRON_STEP_ID = automations.triggers.definitions.CRON.stepId const STOPPED_STATUS = { success: true, status: AutomationStatus.STOPPED } +function matchesLoopFailureCondition(loopStep: LoopStep, currentItem: any) { + if (!loopStep.inputs.failure) { + return false + } + + if (isPlainObject(currentItem)) { + return Object.values(currentItem).some(e => e === loopStep.inputs.failure) + } + + return currentItem === loopStep.inputs.failure +} + function getLoopIterations(loopStep: LoopStep) { const binding = loopStep.inputs.binding if (!binding) { @@ -72,7 +84,27 @@ function getLoopIterations(loopStep: LoopStep) { return 0 } -export async function enrichBaseContext(context: Record) { +function getLoopMaxIterations(loopStep: LoopStep) { + const value = loopStep.inputs.iterations + if (typeof value === "number") return value + if (typeof value === "string") { + return parseInt(value) + } + return undefined +} + +function prepareContext(context: AutomationContext) { + return { + ...context, + steps: { + ...context.steps, + ...context.stepsById, + ...context.stepsByName, + }, + } +} + +export async function enrichBaseContext(context: 
AutomationContext) { context.env = await sdkUtils.getEnvironmentVariables() try { @@ -86,8 +118,6 @@ export async function enrichBaseContext(context: Record) { // if settings doc doesn't exist, make the settings blank context.settings = {} } - - return context } /** @@ -100,53 +130,47 @@ class Orchestrator { private appId: string private automation: Automation private emitter: ContextEmitter - private context: AutomationContext - private job: Job - private loopStepOutputs: LoopStep[] + private job: AutomationJob private stopped: boolean - private executionOutput: AutomationResponse + private executionOutput: AutomationResults private currentUser: UserBindings | undefined constructor(job: AutomationJob) { - let automation = job.data.automation - let triggerOutput = job.data.event - const metadata = triggerOutput.metadata - this.chainCount = metadata ? metadata.automationChainCount! : 0 + this.automation = job.data.automation + + const triggerOutput = job.data.event + if ( + this.automation.definition.trigger.stepId === CRON_STEP_ID && + !triggerOutput.timestamp + ) { + triggerOutput.timestamp = Date.now() + } + + this.chainCount = triggerOutput.metadata?.automationChainCount || 0 this.appId = triggerOutput.appId as string this.job = job - const triggerStepId = automation.definition.trigger.stepId - triggerOutput = this.cleanupTriggerOutputs(triggerStepId, triggerOutput) + // remove from context delete triggerOutput.appId delete triggerOutput.metadata - // step zero is never used as the template string is zero indexed for customer facing - this.context = { - steps: [{}], - stepsById: {}, - stepsByName: {}, - trigger: triggerOutput, + + // create an emitter which has the chain count for this automation run in + // it, so it can block excessive chaining if required + this.emitter = new AutomationEmitter(this.chainCount + 1) + + const trigger: AutomationTriggerResult = { + id: this.automation.definition.trigger.id, + stepId: this.automation.definition.trigger.stepId, + outputs: triggerOutput, } - this.automation = automation - // create an emitter which has the chain count for this automation run in it, so it can block - // excessive chaining if required - this.emitter = new AutomationEmitter(this.chainCount + 1) - this.executionOutput = { trigger: {}, steps: [] } + this.executionOutput = { trigger, steps: [trigger] } + // setup the execution output - const triggerId = automation.definition.trigger.id - this.updateExecutionOutput(triggerId, triggerStepId, null, triggerOutput) - this.loopStepOutputs = [] this.stopped = false this.currentUser = triggerOutput.user } - cleanupTriggerOutputs(stepId: string, triggerOutput: TriggerOutput) { - if (stepId === CRON_STEP_ID && !triggerOutput.timestamp) { - triggerOutput.timestamp = Date.now() - } - return triggerOutput - } - async getStepFunctionality(stepId: AutomationActionStepId) { let step = await actions.getAction(stepId) if (step == null) { @@ -177,19 +201,14 @@ class Orchestrator { logging.logWarn( `CRON disabled reason=${reason} - ${this.appId}/${this.automation._id}` ) - const automation = this.automation - const trigger = automation.definition.trigger await disableCronById(this.job.id) - this.updateExecutionOutput( - trigger.id, - trigger.stepId, - {}, - { - status: AutomationStatus.STOPPED_ERROR, - success: false, - } - ) - await storeLog(automation, this.executionOutput) + this.executionOutput.trigger.outputs = { + ...this.executionOutput.trigger.outputs, + success: false, + status: AutomationStatus.STOPPED, + } + this.executionOutput.steps[0] 
= this.executionOutput.trigger + await storeLog(this.automation, this.executionOutput) } async checkIfShouldStop(metadata: AutomationMetadata): Promise { @@ -203,84 +222,7 @@ class Orchestrator { return false } - async updateMetadata(metadata: AutomationMetadata) { - const output = this.executionOutput, - automation = this.automation - if (!output || !isRecurring(automation)) { - return - } - const count = metadata.errorCount - const isError = isErrorInOutput(output) - // nothing to do in this scenario, escape - if (!count && !isError) { - return - } - if (isError) { - metadata.errorCount = count ? count + 1 : 1 - } else { - metadata.errorCount = 0 - } - const db = context.getAppDB() - try { - await db.put(metadata) - } catch (err) { - logging.logAlertWithInfo( - "Failed to write automation metadata", - db.name, - automation._id!, - err - ) - } - } - - updateExecutionOutput(id: string, stepId: string, inputs: any, outputs: any) { - const stepObj = { id, stepId, inputs, outputs } - // replacing trigger when disabling CRON - if ( - stepId === CRON_STEP_ID && - outputs.status === AutomationStatus.STOPPED_ERROR - ) { - this.executionOutput.trigger = stepObj - this.executionOutput.steps = [stepObj] - return - } - // first entry is always the trigger (constructor) - if ( - this.executionOutput.steps.length === 0 || - this.executionOutput.trigger.id === id - ) { - this.executionOutput.trigger = stepObj - } - this.executionOutput.steps.push(stepObj) - } - - updateContextAndOutput( - currentLoopStepIndex: number | undefined, - step: AutomationStep, - output: any, - result: { success: boolean; status: string } - ) { - if (currentLoopStepIndex === undefined) { - throw new Error("No loop step number provided.") - } - this.executionOutput.steps.splice(currentLoopStepIndex, 0, { - id: step.id, - stepId: step.stepId, - outputs: { - ...output, - success: result.success, - status: result.status, - }, - inputs: step.inputs, - }) - this.context.steps.splice(currentLoopStepIndex, 0, { - ...output, - success: result.success, - status: result.status, - }) - } - - async execute(): Promise { + async execute(): Promise { return tracer.trace( "Orchestrator.execute", { resource: "automation" }, @@ -290,10 +232,7 @@ class Orchestrator { automationId: this.automation._id, }) - await enrichBaseContext(this.context) - this.context.user = this.currentUser - - let metadata + let metadata: AutomationMetadata | undefined = undefined // check if this is a recurring automation, if (isProdAppID(this.appId) && isRecurring(this.automation)) { @@ -305,9 +244,24 @@ class Orchestrator { return } } + + const ctx: AutomationContext = { + trigger: this.executionOutput.trigger.outputs, + steps: [this.executionOutput.trigger.outputs], + stepsById: {}, + stepsByName: {}, + user: this.currentUser, + } + await enrichBaseContext(ctx) + const start = performance.now() - await this.executeSteps(this.automation.definition.steps) + const stepOutputs = await this.executeSteps( + ctx, + this.automation.definition.steps + ) + + this.executionOutput.steps.push(...stepOutputs) const end = performance.now() const executionTime = end - start @@ -330,12 +284,27 @@ class Orchestrator { } logging.logAlert("Error writing automation log", e) } + if ( isProdAppID(this.appId) && isRecurring(this.automation) && - metadata + metadata && + isErrorInOutput(this.executionOutput) ) { - await this.updateMetadata(metadata) + metadata.errorCount ??= 0 + metadata.errorCount++ + + const db = context.getAppDB() + try { + await db.put(metadata) + } catch (err) { + 
logging.logAlertWithInfo( + "Failed to write automation metadata", + db.name, + this.automation._id!, + err + ) + } } return this.executionOutput } @@ -343,9 +312,9 @@ class Orchestrator { } private async executeSteps( - steps: AutomationStep[], - pathIdx?: number - ): Promise { + ctx: AutomationContext, + steps: AutomationStep[] + ): Promise { return tracer.trace( "Orchestrator.executeSteps", { resource: "automation" }, @@ -353,210 +322,164 @@ class Orchestrator { let stepIndex = 0 const timeout = this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT + const stepOutputs: AutomationStepResult[] = [] try { - await helpers.withTimeout( - timeout, - (async () => { - while (stepIndex < steps.length) { - const step = steps[stepIndex] - if (step.stepId === AutomationActionStepId.BRANCH) { - // stepIndex for current step context offset - // pathIdx relating to the full list of steps in the run - await this.executeBranchStep(step, stepIndex + (pathIdx || 0)) - stepIndex++ - } else if (step.stepId === AutomationActionStepId.LOOP) { - stepIndex = await this.executeLoopStep( - step, - steps, - stepIndex, - pathIdx - ) - } else { - if (!this.stopped) { - await this.executeStep(step) - } - stepIndex++ - } + await helpers.withTimeout(timeout, async () => { + while (stepIndex < steps.length) { + if (this.stopped) { + break } - })() - ) + + const step = steps[stepIndex] + if (step.stepId === AutomationActionStepId.BRANCH) { + // stepIndex for current step context offset + // pathIdx relating to the full list of steps in the run + const [branchResult, ...branchStepResults] = + await this.executeBranchStep(ctx, step) + + stepOutputs.push(branchResult) + stepOutputs.push(...branchStepResults) + + stepIndex++ + } else if (step.stepId === AutomationActionStepId.LOOP) { + const output = await this.executeLoopStep( + ctx, + step, + steps[stepIndex + 1] + ) + + stepIndex += 2 + stepOutputs.push(output) + } else { + const result = await this.executeStep(ctx, step) + + ctx.steps.push(result.outputs) + ctx.stepsById[step.id] = result.outputs + ctx.stepsByName[step.name || step.id] = result.outputs + + stepOutputs.push(result) + stepIndex++ + } + } + }) } catch (error: any) { if (error.errno === "ETIME") { span?.addTags({ timedOut: true }) console.warn(`Automation execution timed out after ${timeout}ms`) } } + + return stepOutputs } ) } private async executeLoopStep( + ctx: AutomationContext, loopStep: LoopStep, - steps: AutomationStep[], - stepIdx: number, - pathIdx?: number - ): Promise { - await processObject(loopStep.inputs, this.mergeContexts(this.context)) - const iterations = getLoopIterations(loopStep) - let stepToLoopIndex = stepIdx + 1 - let pathStepIdx = (pathIdx || stepIdx) + 1 + stepToLoop: AutomationStep + ): Promise { + await processObject(loopStep.inputs, prepareContext(ctx)) + const maxIterations = getLoopMaxIterations(loopStep) + const items: AutomationStepResult[] = [] - let iterationCount = 0 - let shouldCleanup = true - let reachedMaxIterations = false + let status: AutomationStepStatus | undefined = undefined + let success = true - for (let loopStepIndex = 0; loopStepIndex < iterations; loopStepIndex++) { + let i = 0 + for (; i < getLoopIterations(loopStep); i++) { try { loopStep.inputs.binding = automationUtils.typecastForLooping( loopStep.inputs ) } catch (err) { - this.updateContextAndOutput( - pathStepIdx + 1, - steps[stepToLoopIndex], - {}, - { - status: AutomationErrors.INCORRECT_TYPE, - success: false, - } - ) - shouldCleanup = false break } - const maxIterations = 
automationUtils.ensureMaxIterationsAsNumber( - loopStep.inputs.iterations - ) if ( - loopStepIndex === env.AUTOMATION_MAX_ITERATIONS || - (loopStep.inputs.iterations && loopStepIndex === maxIterations) + i === env.AUTOMATION_MAX_ITERATIONS || + (loopStep.inputs.iterations && i === maxIterations) ) { - reachedMaxIterations = true - shouldCleanup = true + status = AutomationStepStatus.MAX_ITERATIONS break } - let isFailure = false - const currentItem = this.getCurrentLoopItem(loopStep, loopStepIndex) - if (currentItem && typeof currentItem === "object") { - isFailure = Object.keys(currentItem).some(value => { - return currentItem[value] === loopStep?.inputs.failure - }) - } else { - isFailure = currentItem && currentItem === loopStep.inputs.failure - } - - if (isFailure) { - this.updateContextAndOutput( - pathStepIdx + 1, - steps[stepToLoopIndex], - { - items: this.loopStepOutputs, - iterations: loopStepIndex, - }, - { - status: AutomationErrors.FAILURE_CONDITION, - success: false, - } - ) - shouldCleanup = false + const currentItem = this.getCurrentLoopItem(loopStep, i) + if (matchesLoopFailureCondition(loopStep, currentItem)) { + status = AutomationStepStatus.FAILURE_CONDITION + success = false break } - this.context.steps[pathStepIdx] = { - currentItem: this.getCurrentLoopItem(loopStep, loopStepIndex), - } - - stepToLoopIndex = stepIdx + 1 - - await this.executeStep(steps[stepToLoopIndex], stepToLoopIndex) - iterationCount++ + ctx.loop = { currentItem } + items.push(await this.executeStep(ctx, stepToLoop)) + ctx.loop = undefined } - if (shouldCleanup) { - let tempOutput = - iterations === 0 - ? { - status: AutomationStepStatus.NO_ITERATIONS, - success: true, - } - : { - success: true, - items: this.loopStepOutputs, - iterations: iterationCount, - } - - if (reachedMaxIterations && iterations !== 0) { - tempOutput.status = AutomationStepStatus.MAX_ITERATIONS - } - - // Loop Step clean up - this.executionOutput.steps.splice(pathStepIdx, 0, { - id: steps[stepToLoopIndex].id, - stepId: steps[stepToLoopIndex].stepId, - outputs: tempOutput, - inputs: steps[stepToLoopIndex].inputs, - }) - - this.context.stepsById[steps[stepToLoopIndex].id] = tempOutput - const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id - this.context.stepsByName[stepName] = tempOutput - this.context.steps[this.context.steps.length] = tempOutput - this.context.steps = this.context.steps.filter( - item => !item.hasOwnProperty.call(item, "currentItem") - ) - - this.loopStepOutputs = [] + if (i === 0) { + status = AutomationStepStatus.NO_ITERATIONS } - return stepToLoopIndex + 1 + return { + id: loopStep.id, + stepId: loopStep.stepId, + outputs: { + success, + status, + iterations: i, + items, + }, + inputs: loopStep.inputs, + } } + private async executeBranchStep( - branchStep: BranchStep, - pathIdx?: number - ): Promise { + ctx: AutomationContext, + branchStep: BranchStep + ): Promise { const { branches, children } = branchStep.inputs for (const branch of branches) { - const condition = await this.evaluateBranchCondition(branch.condition) + const condition = await this.evaluateBranchCondition( + ctx, + branch.condition + ) if (condition) { - const branchStatus = { - branchName: branch.name, - status: `${branch.name} branch taken`, - branchId: `${branch.id}`, - success: true, - } + const steps = children?.[branch.id] || [] - this.updateExecutionOutput( - branchStep.id, - branchStep.stepId, - branchStep.inputs, - branchStatus - ) - this.context.steps[this.context.steps.length] = branchStatus - 
this.context.stepsById[branchStep.id] = branchStatus - - const branchSteps = children?.[branch.id] || [] - // A final +1 to accomodate the branch step itself - await this.executeSteps(branchSteps, (pathIdx || 0) + 1) - return + return [ + { + id: branchStep.id, + stepId: branchStep.stepId, + inputs: branchStep.inputs, + success: true, + outputs: { + branchName: branch.name, + status: `${branch.name} branch taken`, + branchId: `${branch.id}`, + }, + }, + // A final +1 to accommodate the branch step itself + ...(await this.executeSteps(ctx, steps)), + ] } } this.stopped = true - this.updateExecutionOutput( - branchStep.id, - branchStep.stepId, - branchStep.inputs, + return [ { + id: branchStep.id, + stepId: branchStep.stepId, + inputs: branchStep.inputs, success: false, - status: AutomationStatus.NO_CONDITION_MET, - } - ) + outputs: { status: AutomationStatus.NO_CONDITION_MET }, + }, + ] } private async evaluateBranchCondition( + ctx: AutomationContext, conditions: BranchSearchFilters ): Promise { const toFilter: Record = {} @@ -577,17 +500,11 @@ class Orchestrator { ) } else if (isBasicSearchOperator(filterKey)) { for (const [field, value] of Object.entries(filters[filterKey])) { - const fromContext = processStringSync( - field, - this.mergeContexts(this.context) - ) + const fromContext = processStringSync(field, prepareContext(ctx)) toFilter[field] = fromContext if (typeof value === "string" && findHBSBlocks(value).length > 0) { - const processedVal = processStringSync( - value, - this.mergeContexts(this.context) - ) + const processedVal = processStringSync(value, prepareContext(ctx)) filters[filterKey][field] = processedVal } @@ -606,10 +523,11 @@ class Orchestrator { const result = dataFilters.runQuery([toFilter], processedConditions) return result.length > 0 } + private async executeStep( - step: AutomationStep, - loopIteration?: number - ): Promise { + ctx: AutomationContext, + step: AutomationStep + ): Promise { return tracer.trace( "Orchestrator.execute.step", { resource: "automation" }, @@ -628,41 +546,49 @@ class Orchestrator { }) if (this.stopped) { - this.updateExecutionOutput(step.id, step.stepId, {}, STOPPED_STATUS) - return - } - - let originalStepInput = cloneDeep(step.inputs) - if (loopIteration !== undefined) { - originalStepInput = replaceFakeBindings( - originalStepInput, - loopIteration - ) + return { + id: step.id, + stepId: step.stepId, + inputs: step.inputs, + outputs: STOPPED_STATUS, + } } const stepFn = await this.getStepFunctionality(step.stepId) - let inputs = await processObject( - originalStepInput, - this.mergeContexts(this.context) + const inputs = automationUtils.cleanInputValues( + await processObject(cloneDeep(step.inputs), prepareContext(ctx)), + step.schema.inputs ) - inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs) const outputs = await stepFn({ - inputs: inputs, + inputs, appId: this.appId, emitter: this.emitter, - context: this.mergeContexts(this.context), + context: prepareContext(ctx), }) - this.handleStepOutput(step, outputs, loopIteration) + + if ( + step.stepId === AutomationActionStepId.FILTER && + "result" in outputs && + outputs.result === false + ) { + this.stopped = true + } + + return { + id: step.id, + stepId: step.stepId, + inputs, + outputs, + } } ) } private getCurrentLoopItem(loopStep: LoopStep, index: number): any { - if (!loopStep) return null if ( typeof loopStep.inputs.binding === "string" && - loopStep.inputs.option === "String" + loopStep.inputs.option === LoopStepType.STRING ) { return 
automationUtils.stringSplit(loopStep.inputs.binding)[index] } else if (Array.isArray(loopStep.inputs.binding)) { @@ -670,58 +596,24 @@ class Orchestrator { } return null } - - private mergeContexts(context: AutomationContext) { - const mergeContexts = { - ...context, - steps: { - ...context.steps, - ...context.stepsById, - ...context.stepsByName, - }, - } - return mergeContexts - } - - private handleStepOutput( - step: AutomationStep, - outputs: any, - loopIteration: number | undefined - ): void { - if (step.stepId === AutomationActionStepId.FILTER && !outputs.result) { - this.stopped = true - this.updateExecutionOutput(step.id, step.stepId, step.inputs, { - ...outputs, - ...STOPPED_STATUS, - }) - } else if (loopIteration !== undefined) { - this.loopStepOutputs = this.loopStepOutputs || [] - this.loopStepOutputs.push(outputs) - } else { - this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs) - this.context.steps[this.context.steps.length] = outputs - this.context.stepsById![step.id] = outputs - const stepName = step.name || step.id - this.context.stepsByName![stepName] = outputs - } - } } export function execute(job: Job, callback: WorkerCallback) { const appId = job.data.event.appId - const automationId = job.data.automation._id if (!appId) { throw new Error("Unable to execute, event doesn't contain app ID.") } + + const automationId = job.data.automation._id if (!automationId) { throw new Error("Unable to execute, event doesn't contain automation ID.") } + return context.doInAutomationContext({ appId, automationId, task: async () => { const envVars = await sdkUtils.getEnvironmentVariables() - // put into automation thread for whole context await context.doInEnvironmentContext(envVars, async () => { const automationOrchestrator = new Orchestrator(job) try { @@ -737,13 +629,13 @@ export function execute(job: Job, callback: WorkerCallback) { export async function executeInThread( job: Job -): Promise { +): Promise { const appId = job.data.event.appId if (!appId) { throw new Error("Unable to execute, event doesn't contain app ID.") } - const timeoutPromise = new Promise((resolve, reject) => { + const timeoutPromise = new Promise((_resolve, reject) => { setTimeout(() => { reject(new Error("Timeout exceeded")) }, job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT) @@ -760,7 +652,7 @@ export async function executeInThread( timeoutPromise, ]) }) - })) as AutomationResponse + })) as AutomationResults } export const removeStalled = async (job: Job) => { diff --git a/packages/shared-core/src/automations/steps/filter.ts b/packages/shared-core/src/automations/steps/filter.ts index 70dcb6f66e..6305e667c7 100644 --- a/packages/shared-core/src/automations/steps/filter.ts +++ b/packages/shared-core/src/automations/steps/filter.ts @@ -3,20 +3,14 @@ import { AutomationStepDefinition, AutomationStepType, AutomationIOType, + FilterCondition, } from "@budibase/types" -export const FilterConditions = { - EQUAL: "EQUAL", - NOT_EQUAL: "NOT_EQUAL", - GREATER_THAN: "GREATER_THAN", - LESS_THAN: "LESS_THAN", -} - export const PrettyFilterConditions = { - [FilterConditions.EQUAL]: "Equals", - [FilterConditions.NOT_EQUAL]: "Not equals", - [FilterConditions.GREATER_THAN]: "Greater than", - [FilterConditions.LESS_THAN]: "Less than", + [FilterCondition.EQUAL]: "Equals", + [FilterCondition.NOT_EQUAL]: "Not equals", + [FilterCondition.GREATER_THAN]: "Greater than", + [FilterCondition.LESS_THAN]: "Less than", } export const definition: AutomationStepDefinition = { @@ -30,7 +24,7 @@ export const 
definition: AutomationStepDefinition = { features: {}, stepId: AutomationActionStepId.FILTER, inputs: { - condition: FilterConditions.EQUAL, + condition: FilterCondition.EQUAL, }, schema: { inputs: { @@ -42,7 +36,7 @@ export const definition: AutomationStepDefinition = { condition: { type: AutomationIOType.STRING, title: "Condition", - enum: Object.values(FilterConditions), + enum: Object.values(FilterCondition), pretty: Object.values(PrettyFilterConditions), }, value: { diff --git a/packages/shared-core/src/helpers/helpers.ts b/packages/shared-core/src/helpers/helpers.ts index 8dbdb7bbfd..10d625be28 100644 --- a/packages/shared-core/src/helpers/helpers.ts +++ b/packages/shared-core/src/helpers/helpers.ts @@ -105,10 +105,10 @@ export function cancelableTimeout( export async function withTimeout( timeout: number, - promise: Promise + promise: () => Promise ): Promise { const [timeoutPromise, cancel] = cancelableTimeout(timeout) - const result = (await Promise.race([promise, timeoutPromise])) as T + const result = (await Promise.race([promise(), timeoutPromise])) as T cancel() return result } diff --git a/packages/types/src/documents/app/automation/StepInputsOutputs.ts b/packages/types/src/documents/app/automation/StepInputsOutputs.ts index 18a6f86284..52b07ae17f 100644 --- a/packages/types/src/documents/app/automation/StepInputsOutputs.ts +++ b/packages/types/src/documents/app/automation/StepInputsOutputs.ts @@ -7,8 +7,20 @@ import { } from "../../../sdk" import { HttpMethod } from "../query" import { Row } from "../row" -import { LoopStepType, EmailAttachment, AutomationResults } from "./automation" -import { AutomationStep, AutomationStepOutputs } from "./schema" +import { + LoopStepType, + EmailAttachment, + AutomationResults, + AutomationStepResult, +} from "./automation" +import { AutomationStep } from "./schema" + +export enum FilterCondition { + EQUAL = "EQUAL", + NOT_EQUAL = "NOT_EQUAL", + GREATER_THAN = "GREATER_THAN", + LESS_THAN = "LESS_THAN", +} export type BaseAutomationOutputs = { success?: boolean @@ -92,7 +104,7 @@ export type ExecuteScriptStepOutputs = BaseAutomationOutputs & { export type FilterStepInputs = { field: any - condition: string + condition: FilterCondition value: any } @@ -110,7 +122,7 @@ export type LoopStepInputs = { } export type LoopStepOutputs = { - items: AutomationStepOutputs[] + items: AutomationStepResult[] success: boolean iterations: number } diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index 0314701d72..b17447fc9a 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -176,6 +176,7 @@ export enum AutomationFeature { export enum AutomationStepStatus { NO_ITERATIONS = "no_iterations", MAX_ITERATIONS = "max_iterations_reached", + FAILURE_CONDITION = "failure_condition", } export enum AutomationStatus { @@ -190,19 +191,27 @@ export enum AutomationStoppedReason { TRIGGER_FILTER_NOT_MET = "Automation did not run. 
Filter conditions in trigger were not met.", } +export interface AutomationStepResult { + id: string + stepId: AutomationActionStepId + inputs: Record + outputs: Record + success?: boolean + message?: string +} + +export interface AutomationTriggerResult { + id: string + stepId: AutomationTriggerStepId + inputs?: Record + outputs: Record +} + export interface AutomationResults { automationId?: string status?: AutomationStatus - trigger?: AutomationTrigger - steps: { - stepId: AutomationTriggerStepId | AutomationActionStepId - inputs: { - [key: string]: any - } - outputs: { - [key: string]: any - } - }[] + trigger: AutomationTriggerResult + steps: [AutomationTriggerResult, ...AutomationStepResult[]] } export interface DidNotTriggerResponse { @@ -236,6 +245,7 @@ export type ActionImplementation = ( inputs: TInputs } & AutomationStepInputBase ) => Promise + export interface AutomationMetadata extends Document { errorCount?: number automationChainCount?: number diff --git a/packages/types/src/documents/app/automation/schema.ts b/packages/types/src/documents/app/automation/schema.ts index 820858b48c..324df227fd 100644 --- a/packages/types/src/documents/app/automation/schema.ts +++ b/packages/types/src/documents/app/automation/schema.ts @@ -164,24 +164,6 @@ export interface AutomationStepSchemaBase { features?: Partial> } -export type AutomationStepOutputs = - | CollectStepOutputs - | CreateRowStepOutputs - | DelayStepOutputs - | DeleteRowStepOutputs - | ExecuteQueryStepOutputs - | ExecuteScriptStepOutputs - | FilterStepOutputs - | QueryRowsStepOutputs - | BaseAutomationOutputs - | BashStepOutputs - | ExternalAppStepOutputs - | OpenAIStepOutputs - | ServerLogStepOutputs - | TriggerAutomationStepOutputs - | UpdateRowStepOutputs - | ZapierStepOutputs - export type AutomationStepInputs = T extends AutomationActionStepId.COLLECT ? CollectStepInputs @@ -229,11 +211,56 @@ export type AutomationStepInputs = ? BranchStepInputs : never +export type AutomationStepOutputs = + T extends AutomationActionStepId.COLLECT + ? CollectStepOutputs + : T extends AutomationActionStepId.CREATE_ROW + ? CreateRowStepOutputs + : T extends AutomationActionStepId.DELAY + ? DelayStepOutputs + : T extends AutomationActionStepId.DELETE_ROW + ? DeleteRowStepOutputs + : T extends AutomationActionStepId.EXECUTE_QUERY + ? ExecuteQueryStepOutputs + : T extends AutomationActionStepId.EXECUTE_SCRIPT + ? ExecuteScriptStepOutputs + : T extends AutomationActionStepId.FILTER + ? FilterStepOutputs + : T extends AutomationActionStepId.QUERY_ROWS + ? QueryRowsStepOutputs + : T extends AutomationActionStepId.SEND_EMAIL_SMTP + ? BaseAutomationOutputs + : T extends AutomationActionStepId.SERVER_LOG + ? ServerLogStepOutputs + : T extends AutomationActionStepId.TRIGGER_AUTOMATION_RUN + ? TriggerAutomationStepOutputs + : T extends AutomationActionStepId.UPDATE_ROW + ? UpdateRowStepOutputs + : T extends AutomationActionStepId.OUTGOING_WEBHOOK + ? ExternalAppStepOutputs + : T extends AutomationActionStepId.discord + ? ExternalAppStepOutputs + : T extends AutomationActionStepId.slack + ? ExternalAppStepOutputs + : T extends AutomationActionStepId.zapier + ? ZapierStepOutputs + : T extends AutomationActionStepId.integromat + ? ExternalAppStepOutputs + : T extends AutomationActionStepId.n8n + ? ExternalAppStepOutputs + : T extends AutomationActionStepId.EXECUTE_BASH + ? BashStepOutputs + : T extends AutomationActionStepId.OPENAI + ? OpenAIStepOutputs + : T extends AutomationActionStepId.LOOP + ? 
BaseAutomationOutputs + : never + export interface AutomationStepSchema extends AutomationStepSchemaBase { id: string stepId: TStep - inputs: AutomationStepInputs & Record // The record union to be removed once the types are fixed + inputs: AutomationStepInputs } export type CollectStep = AutomationStepSchema From 57149b77e17735ea70fa7cdfd90fafc8cd598ab0 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 12:52:03 +0000 Subject: [PATCH 02/50] Loop tests passing again. --- .../server/src/automations/automationUtils.ts | 23 --- .../automations/tests/automationUtils.spec.ts | 30 +-- packages/server/src/constants/index.ts | 5 - packages/server/src/threads/automation.ts | 175 +++++++++--------- .../documents/app/automation/automation.ts | 3 +- 5 files changed, 94 insertions(+), 142 deletions(-) diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts index 96bfcc87e9..7c1bb4d685 100644 --- a/packages/server/src/automations/automationUtils.ts +++ b/packages/server/src/automations/automationUtils.ts @@ -274,26 +274,3 @@ export function stringSplit(value: string | string[]) { } return value.split(",") } - -export function typecastForLooping(input: LoopStepInputs) { - if (!input || !input.binding) { - return null - } - try { - switch (input.option) { - case LoopStepType.ARRAY: - if (typeof input.binding === "string") { - return JSON.parse(input.binding) - } - break - case LoopStepType.STRING: - if (Array.isArray(input.binding)) { - return input.binding.join(",") - } - break - } - } catch (err) { - throw new Error("Unable to cast to correct type") - } - return input.binding -} diff --git a/packages/server/src/automations/tests/automationUtils.spec.ts b/packages/server/src/automations/tests/automationUtils.spec.ts index 456feb6e7a..a4346079e1 100644 --- a/packages/server/src/automations/tests/automationUtils.spec.ts +++ b/packages/server/src/automations/tests/automationUtils.spec.ts @@ -1,9 +1,4 @@ -import { - typecastForLooping, - cleanInputValues, - substituteLoopStep, -} from "../automationUtils" -import { LoopStepType } from "@budibase/types" +import { cleanInputValues, substituteLoopStep } from "../automationUtils" describe("automationUtils", () => { describe("substituteLoopStep", () => { @@ -30,29 +25,6 @@ describe("automationUtils", () => { }) }) - describe("typeCastForLooping", () => { - it("should parse to correct type", () => { - expect( - typecastForLooping({ option: LoopStepType.ARRAY, binding: [1, 2, 3] }) - ).toEqual([1, 2, 3]) - expect( - typecastForLooping({ option: LoopStepType.ARRAY, binding: "[1,2,3]" }) - ).toEqual([1, 2, 3]) - expect( - typecastForLooping({ option: LoopStepType.STRING, binding: [1, 2, 3] }) - ).toEqual("1,2,3") - }) - it("should handle null values", () => { - // expect it to handle where the binding is null - expect( - typecastForLooping({ option: LoopStepType.ARRAY, binding: null }) - ).toEqual(null) - expect(() => - typecastForLooping({ option: LoopStepType.ARRAY, binding: "test" }) - ).toThrow() - }) - }) - describe("cleanInputValues", () => { it("should handle array relationship fields from read binding", () => { const schema = { diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts index fde1efd1b9..d511365dca 100644 --- a/packages/server/src/constants/index.ts +++ b/packages/server/src/constants/index.ts @@ -130,11 +130,6 @@ export enum InvalidColumns { TABLE_ID = "tableId", } -export enum AutomationErrors { - INCORRECT_TYPE = "INCORRECT_TYPE", - 
FAILURE_CONDITION = "FAILURE_CONDITION_MET", -} - // pass through the list from the auth/core lib export const ObjectStoreBuckets = objectStore.ObjectStoreBuckets export const MAX_AUTOMATION_RECURRING_ERRORS = 5 diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index fecc1db141..367b03389b 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -11,7 +11,7 @@ import { dataFilters, helpers, utils } from "@budibase/shared-core" import { default as AutomationEmitter } from "../events/AutomationEmitter" import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { automations } from "@budibase/shared-core" -import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } from "../constants" +import { MAX_AUTOMATION_RECURRING_ERRORS } from "../constants" import { storeLog } from "../automations/logging" import { Automation, @@ -65,32 +65,23 @@ function matchesLoopFailureCondition(loopStep: LoopStep, currentItem: any) { return currentItem === loopStep.inputs.failure } -function getLoopIterations(loopStep: LoopStep) { - const binding = loopStep.inputs.binding - if (!binding) { - return 0 - } - try { - const json = typeof binding === "string" ? JSON.parse(binding) : binding - if (Array.isArray(json)) { - return json.length - } - } catch (err) { - // ignore error - wasn't able to parse - } - if (typeof binding === "string") { - return automationUtils.stringSplit(binding).length - } - return 0 -} +function getLoopIterable(loopStep: LoopStep): any[] { + const option = loopStep.inputs.option + let input: any = loopStep.inputs.binding -function getLoopMaxIterations(loopStep: LoopStep) { - const value = loopStep.inputs.iterations - if (typeof value === "number") return value - if (typeof value === "string") { - return parseInt(value) + if (option === LoopStepType.ARRAY && typeof input === "string") { + input = JSON.parse(input) } - return undefined + + if (option === LoopStepType.STRING && Array.isArray(input)) { + input = input.join(",") + } + + if (option === LoopStepType.STRING && typeof input === "string") { + input = automationUtils.stringSplit(input) + } + + return Array.isArray(input) ? 
input : [input] } function prepareContext(context: AutomationContext) { @@ -333,24 +324,26 @@ class Orchestrator { const step = steps[stepIndex] if (step.stepId === AutomationActionStepId.BRANCH) { - // stepIndex for current step context offset - // pathIdx relating to the full list of steps in the run - const [branchResult, ...branchStepResults] = - await this.executeBranchStep(ctx, step) + const [result, ...childResults] = await this.executeBranchStep( + ctx, + step + ) - stepOutputs.push(branchResult) - stepOutputs.push(...branchStepResults) + stepOutputs.push(result) + stepOutputs.push(...childResults) stepIndex++ } else if (step.stepId === AutomationActionStepId.LOOP) { - const output = await this.executeLoopStep( - ctx, - step, - steps[stepIndex + 1] - ) + const stepToLoop = steps[stepIndex + 1] + const result = await this.executeLoopStep(ctx, step, stepToLoop) + ctx.steps.push(result.outputs) + ctx.stepsById[stepToLoop.id] = result.outputs + ctx.stepsByName[stepToLoop.name || stepToLoop.id] = + result.outputs + + stepOutputs.push(result) stepIndex += 2 - stepOutputs.push(output) } else { const result = await this.executeStep(ctx, step) @@ -381,56 +374,81 @@ class Orchestrator { stepToLoop: AutomationStep ): Promise { await processObject(loopStep.inputs, prepareContext(ctx)) - const maxIterations = getLoopMaxIterations(loopStep) - const items: AutomationStepResult[] = [] - let status: AutomationStepStatus | undefined = undefined - let success = true + const result = { + id: loopStep.id, + stepId: loopStep.stepId, + inputs: loopStep.inputs, + } - let i = 0 - for (; i < getLoopIterations(loopStep); i++) { - try { - loopStep.inputs.binding = automationUtils.typecastForLooping( - loopStep.inputs - ) - } catch (err) { - break + const loopMaxIterations = + typeof loopStep.inputs.iterations === "string" + ? 
parseInt(loopStep.inputs.iterations) + : loopStep.inputs.iterations + const maxIterations = Math.min( + loopMaxIterations || env.AUTOMATION_MAX_ITERATIONS, + env.AUTOMATION_MAX_ITERATIONS + ) + + const items: Record[] = [] + let iterations = 0 + let iterable: any[] = [] + try { + iterable = getLoopIterable(loopStep) + } catch (err) { + return { + ...result, + outputs: { + success: false, + status: AutomationStepStatus.INCORRECT_TYPE, + }, + } + } + + for (; iterations < iterable.length; iterations++) { + const currentItem = iterable[iterations] + + if (iterations === maxIterations) { + return { + ...result, + outputs: { + success: false, + iterations, + items, + status: AutomationStepStatus.MAX_ITERATIONS, + }, + } } - if ( - i === env.AUTOMATION_MAX_ITERATIONS || - (loopStep.inputs.iterations && i === maxIterations) - ) { - status = AutomationStepStatus.MAX_ITERATIONS - break - } - - const currentItem = this.getCurrentLoopItem(loopStep, i) if (matchesLoopFailureCondition(loopStep, currentItem)) { - status = AutomationStepStatus.FAILURE_CONDITION - success = false - break + return { + ...result, + outputs: { + success: false, + iterations, + items, + status: AutomationStepStatus.FAILURE_CONDITION, + }, + } } ctx.loop = { currentItem } - items.push(await this.executeStep(ctx, stepToLoop)) + const loopedStepResult = await this.executeStep(ctx, stepToLoop) + items.push(loopedStepResult.outputs) ctx.loop = undefined } - if (i === 0) { - status = AutomationStepStatus.NO_ITERATIONS - } - return { - id: loopStep.id, - stepId: loopStep.stepId, + id: stepToLoop.id, + stepId: stepToLoop.stepId, + inputs: stepToLoop.inputs, outputs: { - success, - status, - iterations: i, + success: true, + status: + iterations === 0 ? AutomationStepStatus.NO_ITERATIONS : undefined, + iterations, items, }, - inputs: loopStep.inputs, } } @@ -573,6 +591,7 @@ class Orchestrator { outputs.result === false ) { this.stopped = true + ;(outputs as any).status = AutomationStatus.STOPPED } return { @@ -584,18 +603,6 @@ class Orchestrator { } ) } - - private getCurrentLoopItem(loopStep: LoopStep, index: number): any { - if ( - typeof loopStep.inputs.binding === "string" && - loopStep.inputs.option === LoopStepType.STRING - ) { - return automationUtils.stringSplit(loopStep.inputs.binding)[index] - } else if (Array.isArray(loopStep.inputs.binding)) { - return loopStep.inputs.binding[index] - } - return null - } } export function execute(job: Job, callback: WorkerCallback) { diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index b17447fc9a..d7f5810761 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -176,7 +176,8 @@ export enum AutomationFeature { export enum AutomationStepStatus { NO_ITERATIONS = "no_iterations", MAX_ITERATIONS = "max_iterations_reached", - FAILURE_CONDITION = "failure_condition", + FAILURE_CONDITION = "FAILURE_CONDITION_MET", + INCORRECT_TYPE = "INCORRECT_TYPE", } export enum AutomationStatus { From 18567f5fe7d450c11caaba462d23cb90f82a132e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 13:10:16 +0000 Subject: [PATCH 03/50] Further refactoring of branching. 
--- packages/server/src/threads/automation.ts | 100 +++++++++------------- packages/types/src/sdk/search.ts | 4 + 2 files changed, 45 insertions(+), 59 deletions(-) diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 367b03389b..0634fe4c61 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -7,7 +7,7 @@ import { } from "../automations/utils" import * as actions from "../automations/actions" import * as automationUtils from "../automations/automationUtils" -import { dataFilters, helpers, utils } from "@budibase/shared-core" +import { dataFilters, helpers } from "@budibase/shared-core" import { default as AutomationEmitter } from "../events/AutomationEmitter" import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { automations } from "@budibase/shared-core" @@ -24,15 +24,15 @@ import { AutomationStepStatus, BranchSearchFilters, BranchStep, - isLogicalSearchOperator, LoopStep, UserBindings, - isBasicSearchOperator, ContextEmitter, LoopStepType, AutomationTriggerResult, AutomationResults, AutomationStepResult, + isLogicalFilter, + Branch, } from "@budibase/types" import { AutomationContext } from "../definitions/automations" import { WorkerCallback } from "./definitions" @@ -84,6 +84,41 @@ function getLoopIterable(loopStep: LoopStep): any[] { return Array.isArray(input) ? input : [input] } +async function branchMatches(ctx: AutomationContext, branch: Branch) { + const toFilter: Record = {} + + const recurseSearchFilters = ( + filters: BranchSearchFilters + ): BranchSearchFilters => { + for (const filter of Object.values(filters)) { + if (!filter) { + continue + } + + if (isLogicalFilter(filter)) { + filter.conditions = filter.conditions.map(condition => + recurseSearchFilters(condition) + ) + } else { + for (const [field, value] of Object.entries(filter)) { + toFilter[field] = processStringSync(field, prepareContext(ctx)) + if (typeof value === "string" && findHBSBlocks(value).length > 0) { + filter[field] = processStringSync(value, prepareContext(ctx)) + } + } + } + } + + return filters + } + + const result = dataFilters.runQuery( + [toFilter], + recurseSearchFilters(branch.condition) + ) + return result.length > 0 +} + function prepareContext(context: AutomationContext) { return { ...context, @@ -434,14 +469,12 @@ class Orchestrator { ctx.loop = { currentItem } const loopedStepResult = await this.executeStep(ctx, stepToLoop) - items.push(loopedStepResult.outputs) ctx.loop = undefined + items.push(loopedStepResult.outputs) } return { - id: stepToLoop.id, - stepId: stepToLoop.stepId, - inputs: stepToLoop.inputs, + ...result, outputs: { success: true, status: @@ -459,11 +492,7 @@ class Orchestrator { const { branches, children } = branchStep.inputs for (const branch of branches) { - const condition = await this.evaluateBranchCondition( - ctx, - branch.condition - ) - if (condition) { + if (await branchMatches(ctx, branch)) { const steps = children?.[branch.id] || [] return [ @@ -478,7 +507,6 @@ class Orchestrator { branchId: `${branch.id}`, }, }, - // A final +1 to accommodate the branch step itself ...(await this.executeSteps(ctx, steps)), ] } @@ -496,52 +524,6 @@ class Orchestrator { ] } - private async evaluateBranchCondition( - ctx: AutomationContext, - conditions: BranchSearchFilters - ): Promise { - const toFilter: Record = {} - - const recurseSearchFilters = ( - filters: BranchSearchFilters - ): BranchSearchFilters => { - for (const filterKey of Object.keys( 
- filters - ) as (keyof typeof filters)[]) { - if (!filters[filterKey]) { - continue - } - - if (isLogicalSearchOperator(filterKey)) { - filters[filterKey].conditions = filters[filterKey].conditions.map( - condition => recurseSearchFilters(condition) - ) - } else if (isBasicSearchOperator(filterKey)) { - for (const [field, value] of Object.entries(filters[filterKey])) { - const fromContext = processStringSync(field, prepareContext(ctx)) - toFilter[field] = fromContext - - if (typeof value === "string" && findHBSBlocks(value).length > 0) { - const processedVal = processStringSync(value, prepareContext(ctx)) - - filters[filterKey][field] = processedVal - } - } - } else { - // We want to types to complain if we extend BranchSearchFilters, but not to throw if the request comes with some extra data. It will just be ignored - utils.unreachable(filterKey, { doNotThrow: true }) - } - } - - return filters - } - - const processedConditions = recurseSearchFilters(conditions) - - const result = dataFilters.runQuery([toFilter], processedConditions) - return result.length > 0 - } - private async executeStep( ctx: AutomationContext, step: AutomationStep diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index fdc2fafe57..992e9961d4 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -82,6 +82,10 @@ type RangeFilter = Record< type LogicalFilter = { conditions: SearchFilters[] } +export function isLogicalFilter(filter: any): filter is LogicalFilter { + return "conditions" in filter +} + export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter export interface SearchFilters { From 4e97f72a432391955c9800b3ca32c0895e81a743 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 13:21:31 +0000 Subject: [PATCH 04/50] Refactoring around cleanInputValues --- .../server/src/automations/automationUtils.ts | 16 +++-- .../automations/tests/automationUtils.spec.ts | 60 +++++++++---------- .../src/automations/tests/scenarios.spec.ts | 2 +- packages/server/src/threads/automation.ts | 2 +- .../documents/app/automation/automation.ts | 2 +- 5 files changed, 37 insertions(+), 45 deletions(-) diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts index 7c1bb4d685..4c03f0f994 100644 --- a/packages/server/src/automations/automationUtils.ts +++ b/packages/server/src/automations/automationUtils.ts @@ -6,10 +6,10 @@ import { import sdk from "../sdk" import { AutomationAttachment, + BaseIOStructure, + FieldSchema, FieldType, Row, - LoopStepType, - LoopStepInputs, } from "@budibase/types" import { objectStore, context } from "@budibase/backend-core" import * as uuid from "uuid" @@ -33,17 +33,15 @@ import path from "path" */ export function cleanInputValues>( inputs: T, - schema?: any + schema?: Partial> ): T { - if (schema == null) { - return inputs - } - for (let inputKey of Object.keys(inputs)) { + const keys = Object.keys(inputs) as (keyof T)[] + for (let inputKey of keys) { let input = inputs[inputKey] if (typeof input !== "string") { continue } - let propSchema = schema.properties[inputKey] + let propSchema = schema?.[inputKey] if (!propSchema) { continue } @@ -96,7 +94,7 @@ export function cleanInputValues>( */ export async function cleanUpRow(tableId: string, row: Row) { let table = await sdk.tables.getTable(tableId) - return cleanInputValues(row, { properties: table.schema }) + return cleanInputValues(row, table.schema) } export function getError(err: any) { diff --git 
a/packages/server/src/automations/tests/automationUtils.spec.ts b/packages/server/src/automations/tests/automationUtils.spec.ts index a4346079e1..05dd7483e9 100644 --- a/packages/server/src/automations/tests/automationUtils.spec.ts +++ b/packages/server/src/automations/tests/automationUtils.spec.ts @@ -1,3 +1,4 @@ +import { AutomationIOType } from "@budibase/types" import { cleanInputValues, substituteLoopStep } from "../automationUtils" describe("automationUtils", () => { @@ -42,15 +43,12 @@ describe("automationUtils", () => { }, } expect( - cleanInputValues( - { - row: { - relationship: `[{"_id": "ro_ta_users_us_3"}]`, - }, - schema, + cleanInputValues({ + row: { + relationship: `[{"_id": "ro_ta_users_us_3"}]`, }, - schema - ) + schema, + }) ).toEqual({ row: { relationship: [{ _id: "ro_ta_users_us_3" }], @@ -75,15 +73,12 @@ describe("automationUtils", () => { }, } expect( - cleanInputValues( - { - row: { - relationship: `ro_ta_users_us_3`, - }, - schema, + cleanInputValues({ + row: { + relationship: `ro_ta_users_us_3`, }, - schema - ) + schema, + }) ).toEqual({ row: { relationship: "ro_ta_users_us_3", @@ -94,28 +89,27 @@ describe("automationUtils", () => { it("should be able to clean inputs with the utilities", () => { // can't clean without a schema - let output = cleanInputValues({ a: "1" }) - expect(output.a).toBe("1") - output = cleanInputValues( + const one = cleanInputValues({ a: "1" }) + expect(one.a).toBe("1") + + const two = cleanInputValues( { a: "1", b: "true", c: "false", d: 1, e: "help" }, { - properties: { - a: { - type: "number", - }, - b: { - type: "boolean", - }, - c: { - type: "boolean", - }, + a: { + type: AutomationIOType.NUMBER, + }, + b: { + type: AutomationIOType.BOOLEAN, + }, + c: { + type: AutomationIOType.BOOLEAN, }, } ) - expect(output.a).toBe(1) - expect(output.b).toBe(true) - expect(output.c).toBe(false) - expect(output.d).toBe(1) + expect(two.a).toBe(1) + expect(two.b).toBe(true) + expect(two.c).toBe(false) + expect(two.d).toBe(1) }) }) }) diff --git a/packages/server/src/automations/tests/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios.spec.ts index fdecfe3461..91934a9e22 100644 --- a/packages/server/src/automations/tests/scenarios.spec.ts +++ b/packages/server/src/automations/tests/scenarios.spec.ts @@ -407,7 +407,7 @@ if (descriptions.length) { client = ds.client! 
}) - it.only("should query an external database for some data then insert than into an internal table", async () => { + it("should query an external database for some data then insert than into an internal table", async () => { const newTable = await config.api.table.save({ ...basicTable(), name: "table", diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 0634fe4c61..5e1764403b 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -557,7 +557,7 @@ class Orchestrator { const stepFn = await this.getStepFunctionality(step.stepId) const inputs = automationUtils.cleanInputValues( await processObject(cloneDeep(step.inputs), prepareContext(ctx)), - step.schema.inputs + step.schema.inputs.properties ) const outputs = await stepFn({ diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index d7f5810761..ea21b5e23d 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -146,7 +146,7 @@ export interface Automation extends Document { } } -interface BaseIOStructure { +export interface BaseIOStructure { type?: AutomationIOType subtype?: AutomationIOType customType?: AutomationCustomIOType From 2eb3a9fcd816f48d47bffbf3944656c2bf7f670d Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 17:44:48 +0000 Subject: [PATCH 05/50] More refactoring. --- .../backend-core/src/context/mainContext.ts | 4 +- packages/backend-core/src/utils/Duration.ts | 26 +- packages/backend-core/src/utils/index.ts | 1 + packages/backend-core/src/utils/time.ts | 7 + packages/server/src/automations/triggers.ts | 4 +- packages/server/src/automations/utils.ts | 40 +- packages/server/src/threads/automation.ts | 676 +++++++++--------- .../documents/app/automation/automation.ts | 15 +- 8 files changed, 380 insertions(+), 393 deletions(-) create mode 100644 packages/backend-core/src/utils/time.ts diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index e5f20882d3..6a00c125ad 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -266,9 +266,9 @@ export const getProdAppId = () => { return conversions.getProdAppID(appId) } -export function doInEnvironmentContext( +export function doInEnvironmentContext( values: Record, - task: any + task: () => T ) { if (!values) { throw new Error("Must supply environment variables.") diff --git a/packages/backend-core/src/utils/Duration.ts b/packages/backend-core/src/utils/Duration.ts index 730b59d1dc..f1cefa5a1f 100644 --- a/packages/backend-core/src/utils/Duration.ts +++ b/packages/backend-core/src/utils/Duration.ts @@ -15,23 +15,27 @@ const conversion: Record = { } export class Duration { + constructor(public ms: number) {} + + to(type: DurationType) { + return this.ms / conversion[type] + } + + toMs() { + return this.ms + } + + toSeconds() { + return this.to(DurationType.SECONDS) + } + static convert(from: DurationType, to: DurationType, duration: number) { const milliseconds = duration * conversion[from] return milliseconds / conversion[to] } static from(from: DurationType, duration: number) { - return { - to: (to: DurationType) => { - return Duration.convert(from, to, duration) - }, - toMs: () => { - return Duration.convert(from, DurationType.MILLISECONDS, duration) - }, - toSeconds: () => { - return 
Duration.convert(from, DurationType.SECONDS, duration) - }, - } + return new Duration(duration * conversion[from]) } static fromSeconds(duration: number) { diff --git a/packages/backend-core/src/utils/index.ts b/packages/backend-core/src/utils/index.ts index ac17227459..14bc4ca231 100644 --- a/packages/backend-core/src/utils/index.ts +++ b/packages/backend-core/src/utils/index.ts @@ -2,3 +2,4 @@ export * from "./hashing" export * from "./utils" export * from "./stringUtils" export * from "./Duration" +export * from "./time" diff --git a/packages/backend-core/src/utils/time.ts b/packages/backend-core/src/utils/time.ts new file mode 100644 index 0000000000..8ee40dd29f --- /dev/null +++ b/packages/backend-core/src/utils/time.ts @@ -0,0 +1,7 @@ +import { Duration } from "./Duration" + +export async function time(f: () => Promise): Promise<[T, Duration]> { + const start = performance.now() + const result = await f() + return [result, Duration.fromMilliseconds(performance.now() - start)] +} diff --git a/packages/server/src/automations/triggers.ts b/packages/server/src/automations/triggers.ts index 2ac90f3f9c..16d5246a91 100644 --- a/packages/server/src/automations/triggers.ts +++ b/packages/server/src/automations/triggers.ts @@ -182,11 +182,12 @@ export async function externalTrigger( // values are likely to be submitted as strings, so we shall convert to correct type const coercedFields: any = {} const fields = automation.definition.trigger.inputs.fields - for (let key of Object.keys(fields || {})) { + for (const key of Object.keys(fields || {})) { coercedFields[key] = coerce(params.fields[key], fields[key]) } params.fields = coercedFields } + // row actions and webhooks flatten the fields down else if ( sdk.automations.isRowAction(automation) || @@ -198,6 +199,7 @@ export async function externalTrigger( fields: {}, } } + const data: AutomationData = { automation, event: params } const shouldTrigger = await checkTriggerFilters(automation, { diff --git a/packages/server/src/automations/utils.ts b/packages/server/src/automations/utils.ts index a01a760b93..e8b3703c75 100644 --- a/packages/server/src/automations/utils.ts +++ b/packages/server/src/automations/utils.ts @@ -10,7 +10,6 @@ import { Automation, AutomationActionStepId, AutomationJob, - AutomationResults, AutomationStepDefinition, AutomationTriggerDefinition, AutomationTriggerStepId, @@ -19,6 +18,7 @@ import { import { automationsEnabled } from "../features" import { helpers, REBOOT_CRON } from "@budibase/shared-core" import tracer from "dd-trace" +import { JobId } from "bull" const CRON_STEP_ID = automations.triggers.definitions.CRON.stepId let Runner: Thread @@ -156,11 +156,11 @@ export async function disableAllCrons(appId: any) { return { count: results.length / 2 } } -export async function disableCronById(jobId: number | string) { - const repeatJobs = await automationQueue.getRepeatableJobs() - for (let repeatJob of repeatJobs) { - if (repeatJob.id === jobId) { - await automationQueue.removeRepeatableByKey(repeatJob.key) +export async function disableCronById(jobId: JobId) { + const jobs = await automationQueue.getRepeatableJobs() + for (const job of jobs) { + if (job.id === jobId) { + await automationQueue.removeRepeatableByKey(job.key) } } console.log(`jobId=${jobId} disabled`) @@ -249,31 +249,3 @@ export async function enableCronTrigger(appId: any, automation: Automation) { export async function cleanupAutomations(appId: any) { await disableAllCrons(appId) } - -/** - * Checks if the supplied automation is of a recurring type. 
- * @param automation The automation to check. - * @return if it is recurring (cron). - */ -export function isRecurring(automation: Automation) { - return ( - automation.definition.trigger.stepId === - automations.triggers.definitions.CRON.stepId - ) -} - -export function isErrorInOutput(output: AutomationResults) { - let first = true, - error = false - for (let step of output.steps) { - // skip the trigger, its always successful if automation ran - if (first) { - first = false - continue - } - if (!step.outputs?.success) { - error = true - } - } - return error -} diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 5e1764403b..ca98eb9856 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -1,10 +1,6 @@ import { default as threadUtils } from "./utils" import { Job } from "bull" -import { - disableCronById, - isErrorInOutput, - isRecurring, -} from "../automations/utils" +import { disableCronById } from "../automations/utils" import * as actions from "../automations/actions" import * as automationUtils from "../automations/automationUtils" import { dataFilters, helpers } from "@budibase/shared-core" @@ -25,7 +21,6 @@ import { BranchSearchFilters, BranchStep, LoopStep, - UserBindings, ContextEmitter, LoopStepType, AutomationTriggerResult, @@ -36,14 +31,13 @@ import { } from "@budibase/types" import { AutomationContext } from "../definitions/automations" import { WorkerCallback } from "./definitions" -import { context, logging, configs } from "@budibase/backend-core" +import { context, logging, configs, utils } from "@budibase/backend-core" import { findHBSBlocks, processObject, processStringSync, } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" -import { performance } from "perf_hooks" import * as sdkUtils from "../sdk/utils" import env from "../environment" import tracer from "dd-trace" @@ -53,21 +47,25 @@ threadUtils.threadSetup() const CRON_STEP_ID = automations.triggers.definitions.CRON.stepId const STOPPED_STATUS = { success: true, status: AutomationStatus.STOPPED } -function matchesLoopFailureCondition(loopStep: LoopStep, currentItem: any) { - if (!loopStep.inputs.failure) { +function matchesLoopFailureCondition(step: LoopStep, currentItem: any) { + const { failure } = step.inputs + if (!failure) { return false } if (isPlainObject(currentItem)) { - return Object.values(currentItem).some(e => e === loopStep.inputs.failure) + return Object.values(currentItem).some(e => e === failure) } - return currentItem === loopStep.inputs.failure + return currentItem === failure } -function getLoopIterable(loopStep: LoopStep): any[] { - const option = loopStep.inputs.option - let input: any = loopStep.inputs.binding +// Returns an array of the things to loop over for a given LoopStep. This +// function handles the various ways that a LoopStep can be configured, parsing +// the input and returning an array of items to loop over. +function getLoopIterable(step: LoopStep): any[] { + const option = step.inputs.option + let input = step.inputs.binding if (option === LoopStepType.ARRAY && typeof input === "string") { input = JSON.parse(input) @@ -84,26 +82,83 @@ function getLoopIterable(loopStep: LoopStep): any[] { return Array.isArray(input) ? 
input : [input] } -async function branchMatches(ctx: AutomationContext, branch: Branch) { - const toFilter: Record = {} +function getLoopMaxIterations(loopStep: LoopStep): number { + const loopMaxIterations = + typeof loopStep.inputs.iterations === "string" + ? parseInt(loopStep.inputs.iterations) + : loopStep.inputs.iterations + return Math.min( + loopMaxIterations || env.AUTOMATION_MAX_ITERATIONS, + env.AUTOMATION_MAX_ITERATIONS + ) +} - const recurseSearchFilters = ( - filters: BranchSearchFilters - ): BranchSearchFilters => { +function stepSuccess( + step: Readonly, + outputs: Readonly>, + inputs?: Readonly> +): AutomationStepResult { + return { + id: step.id, + stepId: step.stepId, + inputs: inputs || step.inputs, + outputs: { + success: true, + ...outputs, + }, + } +} + +function stepFailure( + step: Readonly, + outputs: Readonly>, + inputs?: Readonly> +): AutomationStepResult { + return { + id: step.id, + stepId: step.stepId, + inputs: inputs || step.inputs, + outputs: { + success: false, + ...outputs, + }, + } +} + +function stepStopped(step: AutomationStep): AutomationStepResult { + return { + id: step.id, + stepId: step.stepId, + inputs: step.inputs, + outputs: STOPPED_STATUS, + } +} + +async function branchMatches( + ctx: AutomationContext, + branch: Readonly +): Promise { + const toFilter: Record = {} + const preparedCtx = prepareContext(ctx) + + // Because we allow bindings on both the left and right of each condition in + // automation branches, we can't pass the BranchSearchFilters directly to + // dataFilters.runQuery as-is. We first need to walk the filter tree and + // evaluate all of the bindings. + const evaluateBindings = (fs: Readonly) => { + const filters = cloneDeep(fs) for (const filter of Object.values(filters)) { if (!filter) { continue } if (isLogicalFilter(filter)) { - filter.conditions = filter.conditions.map(condition => - recurseSearchFilters(condition) - ) + filter.conditions = filter.conditions.map(evaluateBindings) } else { for (const [field, value] of Object.entries(filter)) { - toFilter[field] = processStringSync(field, prepareContext(ctx)) + toFilter[field] = processStringSync(field, preparedCtx) if (typeof value === "string" && findHBSBlocks(value).length > 0) { - filter[field] = processStringSync(value, prepareContext(ctx)) + filter[field] = processStringSync(value, preparedCtx) } } } @@ -114,7 +169,7 @@ async function branchMatches(ctx: AutomationContext, branch: Branch) { const result = dataFilters.runQuery( [toFilter], - recurseSearchFilters(branch.condition) + evaluateBindings(branch.condition) ) return result.length > 0 } @@ -130,7 +185,7 @@ function prepareContext(context: AutomationContext) { } } -export async function enrichBaseContext(context: AutomationContext) { +async function enrichBaseContext(context: AutomationContext) { context.env = await sdkUtils.getEnvironmentVariables() try { @@ -141,181 +196,189 @@ export async function enrichBaseContext(context: AutomationContext) { company: config.company, } } catch (e) { - // if settings doc doesn't exist, make the settings blank context.settings = {} } } -/** - * The automation orchestrator is a class responsible for executing automations. - * It handles the context of the automation and makes sure each step gets the correct - * inputs and handles any outputs. - */ +// Because the trigger appears twice in an AutomationResult, once as .trigger +// and again as .steps[0], this function makes sure that the two are kept in +// sync when setting trigger output. 
+function setTriggerOutput(result: AutomationResults, outputs: any) { + result.trigger.outputs = { + ...result.trigger.outputs, + ...outputs, + } + result.steps[0] = result.trigger +} + class Orchestrator { - private chainCount: number - private appId: string - private automation: Automation + private readonly job: AutomationJob private emitter: ContextEmitter - private job: AutomationJob private stopped: boolean - private executionOutput: AutomationResults - private currentUser: UserBindings | undefined - constructor(job: AutomationJob) { - this.automation = job.data.automation - - const triggerOutput = job.data.event - if ( - this.automation.definition.trigger.stepId === CRON_STEP_ID && - !triggerOutput.timestamp - ) { - triggerOutput.timestamp = Date.now() - } - - this.chainCount = triggerOutput.metadata?.automationChainCount || 0 - this.appId = triggerOutput.appId as string + constructor(job: Readonly) { this.job = job - - // remove from context - delete triggerOutput.appId - delete triggerOutput.metadata + this.stopped = false // create an emitter which has the chain count for this automation run in // it, so it can block excessive chaining if required - this.emitter = new AutomationEmitter(this.chainCount + 1) - - const trigger: AutomationTriggerResult = { - id: this.automation.definition.trigger.id, - stepId: this.automation.definition.trigger.stepId, - outputs: triggerOutput, - } - - this.executionOutput = { trigger, steps: [trigger] } - - // setup the execution output - this.stopped = false - this.currentUser = triggerOutput.user + const chainCount = job.data.event.metadata?.automationChainCount || 0 + this.emitter = new AutomationEmitter(chainCount + 1) } - async getStepFunctionality(stepId: AutomationActionStepId) { - let step = await actions.getAction(stepId) - if (step == null) { - throw `Cannot find automation step by name ${stepId}` - } - return step + get automation(): Automation { + return this.job.data.automation } - async getMetadata(): Promise { - const metadataId = generateAutomationMetadataID(this.automation._id!) + get appId(): string { + return this.job.data.event.appId! + } + + private async getMetadata(): Promise { + const id = generateAutomationMetadataID(this.automation._id!) 
const db = context.getAppDB() - let metadata: AutomationMetadata - try { - metadata = await db.get(metadataId) - } catch (err) { - metadata = { - _id: metadataId, - errorCount: 0, - } - } - return metadata + const doc = await db.tryGet(id) + return doc || { _id: id, errorCount: 0 } } - async stopCron(reason: string) { - if (!this.job.opts.repeat) { - return + async stopCron(reason: string, opts?: { result: AutomationResults }) { + if (!this.isCron()) { + throw new Error("Not a cron automation") } - logging.logWarn( - `CRON disabled reason=${reason} - ${this.appId}/${this.automation._id}` - ) + + const msg = `CRON disabled reason=${reason} - ${this.appId}/${this.automation._id}` + logging.logWarn(msg) + await disableCronById(this.job.id) - this.executionOutput.trigger.outputs = { - ...this.executionOutput.trigger.outputs, - success: false, - status: AutomationStatus.STOPPED, + + const { result } = opts || {} + if (result) { + setTriggerOutput(result, { + success: false, + status: AutomationStatus.STOPPED, + }) + await this.logResult(result) } - this.executionOutput.steps[0] = this.executionOutput.trigger - await storeLog(this.automation, this.executionOutput) } - async checkIfShouldStop(metadata: AutomationMetadata): Promise { - if (!metadata.errorCount || !this.job.opts.repeat) { + private async logResult(result: AutomationResults) { + try { + await storeLog(this.automation, result) + } catch (e: any) { + if (e.status === 413 && e.request?.data) { + // if content is too large we shouldn't log it + delete e.request.data + e.request.data = { message: "removed due to large size" } + } + logging.logAlert("Error writing automation log", e) + } + } + + private async shouldStop(metadata: AutomationMetadata): Promise { + if (!metadata.errorCount || !this.isCron()) { return false } if (metadata.errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) { - await this.stopCron("errors") return true } return false } - async execute(): Promise { + private isCron(): boolean { + return this.automation.definition.trigger.stepId === CRON_STEP_ID + } + + private isProdApp(): boolean { + return isProdAppID(this.appId) + } + + hasErrored(context: AutomationContext): boolean { + const [_trigger, ...steps] = context.steps + for (const step of steps) { + if (step.outputs?.success === false) { + return true + } + } + return false + } + + async execute(): Promise { return tracer.trace( "Orchestrator.execute", { resource: "automation" }, async span => { - span?.addTags({ - appId: this.appId, - automationId: this.automation._id, - }) + span?.addTags({ appId: this.appId, automationId: this.automation._id }) + + const job = cloneDeep(this.job) + delete job.data.event.appId + delete job.data.event.metadata + if (!job.data.event.timestamp) { + job.data.event.timestamp = Date.now() + } + + const trigger: AutomationTriggerResult = { + id: job.data.automation.definition.trigger.id, + stepId: job.data.automation.definition.trigger.stepId, + outputs: job.data.event, + } + const result: AutomationResults = { trigger, steps: [trigger] } let metadata: AutomationMetadata | undefined = undefined - // check if this is a recurring automation, - if (isProdAppID(this.appId) && isRecurring(this.automation)) { + if (this.isProdApp() && this.isCron()) { span?.addTags({ recurring: true }) metadata = await this.getMetadata() - const shouldStop = await this.checkIfShouldStop(metadata) - if (shouldStop) { + if (await this.shouldStop(metadata)) { + await this.stopCron("errors") span?.addTags({ shouldStop: true }) - return + return result } } const ctx: 
AutomationContext = { - trigger: this.executionOutput.trigger.outputs, - steps: [this.executionOutput.trigger.outputs], + trigger: trigger.outputs, + steps: [trigger.outputs], stepsById: {}, stepsByName: {}, - user: this.currentUser, + user: trigger.outputs.user, } await enrichBaseContext(ctx) - const start = performance.now() - - const stepOutputs = await this.executeSteps( - ctx, - this.automation.definition.steps - ) - - this.executionOutput.steps.push(...stepOutputs) - - const end = performance.now() - const executionTime = end - start - - console.info( - `Automation ID: ${this.automation._id} Execution time: ${executionTime} milliseconds`, - { - _logKey: "automation", - executionTime, - } - ) + const timeout = + this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT try { - await storeLog(this.automation, this.executionOutput) + await helpers.withTimeout(timeout, async () => { + const [stepOutputs, executionTime] = await utils.time(() => + this.executeSteps(ctx, job.data.automation.definition.steps) + ) + + result.steps.push(...stepOutputs) + + console.info( + `Automation ID: ${ + this.automation._id + } Execution time: ${executionTime.toMs()} milliseconds`, + { + _logKey: "automation", + executionTime, + } + ) + }) } catch (e: any) { - if (e.status === 413 && e.request?.data) { - // if content is too large we shouldn't log it - delete e.request.data - e.request.data = { message: "removed due to large size" } + if (e.errno === "ETIME") { + span?.addTags({ timedOut: true }) + console.warn(`Automation execution timed out after ${timeout}ms`) } - logging.logAlert("Error writing automation log", e) } + await this.logResult(result) + if ( - isProdAppID(this.appId) && - isRecurring(this.automation) && + this.isProdApp() && + this.isCron() && metadata && - isErrorInOutput(this.executionOutput) + this.hasErrored(ctx) ) { metadata.errorCount ??= 0 metadata.errorCount++ @@ -327,12 +390,12 @@ class Orchestrator { logging.logAlertWithInfo( "Failed to write automation metadata", db.name, - this.automation._id!, + job.data.automation._id!, err ) } } - return this.executionOutput + return result } ) } @@ -341,155 +404,108 @@ class Orchestrator { ctx: AutomationContext, steps: AutomationStep[] ): Promise { - return tracer.trace( - "Orchestrator.executeSteps", - { resource: "automation" }, - async span => { - let stepIndex = 0 - const timeout = - this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT - const stepOutputs: AutomationStepResult[] = [] + return tracer.trace("Orchestrator.executeSteps", async () => { + let stepIndex = 0 + const results: AutomationStepResult[] = [] - try { - await helpers.withTimeout(timeout, async () => { - while (stepIndex < steps.length) { - if (this.stopped) { - break - } + function addToContext( + step: AutomationStep, + result: AutomationStepResult + ) { + ctx.steps.push(result.outputs) + ctx.stepsById[step.id] = result.outputs + ctx.stepsByName[step.name || step.id] = result.outputs + results.push(result) + } - const step = steps[stepIndex] - if (step.stepId === AutomationActionStepId.BRANCH) { - const [result, ...childResults] = await this.executeBranchStep( - ctx, - step - ) - - stepOutputs.push(result) - stepOutputs.push(...childResults) - - stepIndex++ - } else if (step.stepId === AutomationActionStepId.LOOP) { - const stepToLoop = steps[stepIndex + 1] - const result = await this.executeLoopStep(ctx, step, stepToLoop) - - ctx.steps.push(result.outputs) - ctx.stepsById[stepToLoop.id] = result.outputs - ctx.stepsByName[stepToLoop.name || stepToLoop.id] 
= - result.outputs - - stepOutputs.push(result) - stepIndex += 2 - } else { - const result = await this.executeStep(ctx, step) - - ctx.steps.push(result.outputs) - ctx.stepsById[step.id] = result.outputs - ctx.stepsByName[step.name || step.id] = result.outputs - - stepOutputs.push(result) - stepIndex++ - } - } - }) - } catch (error: any) { - if (error.errno === "ETIME") { - span?.addTags({ timedOut: true }) - console.warn(`Automation execution timed out after ${timeout}ms`) - } + while (stepIndex < steps.length) { + if (this.stopped) { + break } - return stepOutputs + const step = steps[stepIndex] + switch (step.stepId) { + case AutomationActionStepId.BRANCH: { + results.push(...(await this.executeBranchStep(ctx, step))) + stepIndex++ + break + } + case AutomationActionStepId.LOOP: { + const stepToLoop = steps[stepIndex + 1] + addToContext( + stepToLoop, + await this.executeLoopStep(ctx, step, stepToLoop) + ) + // We increment by 2 here because the way loops work is that the + // step immediately following the loop step is what gets looped. + // So when we're done looping, to advance correctly we need to + // skip the step that was looped. + stepIndex += 2 + break + } + default: { + addToContext(step, await this.executeStep(ctx, step)) + stepIndex++ + break + } + } } - ) + + return results + }) } private async executeLoopStep( ctx: AutomationContext, - loopStep: LoopStep, + step: LoopStep, stepToLoop: AutomationStep ): Promise { - await processObject(loopStep.inputs, prepareContext(ctx)) - - const result = { - id: loopStep.id, - stepId: loopStep.stepId, - inputs: loopStep.inputs, - } - - const loopMaxIterations = - typeof loopStep.inputs.iterations === "string" - ? parseInt(loopStep.inputs.iterations) - : loopStep.inputs.iterations - const maxIterations = Math.min( - loopMaxIterations || env.AUTOMATION_MAX_ITERATIONS, - env.AUTOMATION_MAX_ITERATIONS - ) + await processObject(step.inputs, prepareContext(ctx)) + const maxIterations = getLoopMaxIterations(step) const items: Record[] = [] let iterations = 0 let iterable: any[] = [] try { - iterable = getLoopIterable(loopStep) + iterable = getLoopIterable(step) } catch (err) { - return { - ...result, - outputs: { - success: false, - status: AutomationStepStatus.INCORRECT_TYPE, - }, - } + return stepFailure(stepToLoop, { + status: AutomationStepStatus.INCORRECT_TYPE, + }) } for (; iterations < iterable.length; iterations++) { const currentItem = iterable[iterations] if (iterations === maxIterations) { - return { - ...result, - outputs: { - success: false, - iterations, - items, - status: AutomationStepStatus.MAX_ITERATIONS, - }, - } + return stepFailure(stepToLoop, { + status: AutomationStepStatus.MAX_ITERATIONS, + iterations, + }) } - if (matchesLoopFailureCondition(loopStep, currentItem)) { - return { - ...result, - outputs: { - success: false, - iterations, - items, - status: AutomationStepStatus.FAILURE_CONDITION, - }, - } + if (matchesLoopFailureCondition(step, currentItem)) { + return stepFailure(stepToLoop, { + status: AutomationStepStatus.FAILURE_CONDITION, + }) } ctx.loop = { currentItem } - const loopedStepResult = await this.executeStep(ctx, stepToLoop) + const result = await this.executeStep(ctx, stepToLoop) + items.push(result.outputs) ctx.loop = undefined - items.push(loopedStepResult.outputs) } - return { - ...result, - outputs: { - success: true, - status: - iterations === 0 ? AutomationStepStatus.NO_ITERATIONS : undefined, - iterations, - items, - }, - } + const status = + iterations === 0 ? 
AutomationStatus.NO_CONDITION_MET : undefined + return stepSuccess(stepToLoop, { status, iterations, items }) } private async executeBranchStep( ctx: AutomationContext, - branchStep: BranchStep + step: BranchStep ): Promise { - const { branches, children } = branchStep.inputs + const { branches, children } = step.inputs for (const branch of branches) { if (await branchMatches(ctx, branch)) { @@ -497,11 +513,11 @@ class Orchestrator { return [ { - id: branchStep.id, - stepId: branchStep.stepId, - inputs: branchStep.inputs, - success: true, + id: step.id, + stepId: step.stepId, + inputs: step.inputs, outputs: { + success: true, branchName: branch.name, status: `${branch.name} branch taken`, branchId: `${branch.id}`, @@ -515,75 +531,64 @@ class Orchestrator { this.stopped = true return [ { - id: branchStep.id, - stepId: branchStep.stepId, - inputs: branchStep.inputs, - success: false, - outputs: { status: AutomationStatus.NO_CONDITION_MET }, + id: step.id, + stepId: step.stepId, + inputs: step.inputs, + outputs: { success: false, status: AutomationStatus.NO_CONDITION_MET }, }, ] } private async executeStep( ctx: AutomationContext, - step: AutomationStep + step: Readonly ): Promise { - return tracer.trace( - "Orchestrator.execute.step", - { resource: "automation" }, - async span => { - span?.addTags({ - resource: "automation", - step: { - stepId: step.stepId, - id: step.id, - name: step.name, - type: step.type, - title: step.stepTitle, - internal: step.internal, - deprecated: step.deprecated, - }, - }) - - if (this.stopped) { - return { - id: step.id, - stepId: step.stepId, - inputs: step.inputs, - outputs: STOPPED_STATUS, - } - } - - const stepFn = await this.getStepFunctionality(step.stepId) - const inputs = automationUtils.cleanInputValues( - await processObject(cloneDeep(step.inputs), prepareContext(ctx)), - step.schema.inputs.properties - ) - - const outputs = await stepFn({ - inputs, - appId: this.appId, - emitter: this.emitter, - context: prepareContext(ctx), - }) - - if ( - step.stepId === AutomationActionStepId.FILTER && - "result" in outputs && - outputs.result === false - ) { - this.stopped = true - ;(outputs as any).status = AutomationStatus.STOPPED - } - - return { - id: step.id, + return tracer.trace("Orchestrator.executeStep", async span => { + span.addTags({ + step: { stepId: step.stepId, - inputs, - outputs, - } + id: step.id, + name: step.name, + type: step.type, + title: step.stepTitle, + internal: step.internal, + deprecated: step.deprecated, + }, + }) + + if (this.stopped) { + span.addTags({ stopped: true }) + return stepStopped(step) } - ) + + const fn = await actions.getAction(step.stepId) + if (fn == null) { + throw new Error(`Cannot find automation step by name ${step.stepId}`) + } + + const inputs = automationUtils.cleanInputValues( + await processObject(cloneDeep(step.inputs), prepareContext(ctx)), + step.schema.inputs.properties + ) + + const outputs = await fn({ + inputs, + appId: this.appId, + emitter: this.emitter, + context: prepareContext(ctx), + }) + + if ( + step.stepId === AutomationActionStepId.FILTER && + "result" in outputs && + outputs.result === false + ) { + this.stopped = true + ;(outputs as any).status = AutomationStatus.STOPPED + } + + return stepSuccess(step, outputs, inputs) + }) } } @@ -604,10 +609,9 @@ export function execute(job: Job, callback: WorkerCallback) { task: async () => { const envVars = await sdkUtils.getEnvironmentVariables() await context.doInEnvironmentContext(envVars, async () => { - const automationOrchestrator = new 
Orchestrator(job) + const orchestrator = new Orchestrator(job) try { - const response = await automationOrchestrator.execute() - callback(null, response) + callback(null, await orchestrator.execute()) } catch (err) { callback(err) } @@ -624,24 +628,14 @@ export async function executeInThread( throw new Error("Unable to execute, event doesn't contain app ID.") } - const timeoutPromise = new Promise((_resolve, reject) => { - setTimeout(() => { - reject(new Error("Timeout exceeded")) - }, job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT) - }) - - return (await context.doInAppContext(appId, async () => { + return await context.doInAppContext(appId, async () => { await context.ensureSnippetContext() const envVars = await sdkUtils.getEnvironmentVariables() - // put into automation thread for whole context return await context.doInEnvironmentContext(envVars, async () => { - const automationOrchestrator = new Orchestrator(job) - return await Promise.race([ - automationOrchestrator.execute(), - timeoutPromise, - ]) + const orchestrator = new Orchestrator(job) + return orchestrator.execute() }) - })) as AutomationResults + }) } export const removeStalled = async (job: Job) => { @@ -650,7 +644,7 @@ export const removeStalled = async (job: Job) => { throw new Error("Unable to execute, event doesn't contain app ID.") } await context.doInAppContext(appId, async () => { - const automationOrchestrator = new Orchestrator(job) - await automationOrchestrator.stopCron("stalled") + const orchestrator = new Orchestrator(job) + await orchestrator.stopCron("stalled") }) } diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index ea21b5e23d..590e3e135f 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -192,13 +192,20 @@ export enum AutomationStoppedReason { TRIGGER_FILTER_NOT_MET = "Automation did not run. Filter conditions in trigger were not met.", } +export interface AutomationStepResultOutputs { + success: boolean + [key: string]: any +} + +export interface AutomationStepResultInputs { + [key: string]: any +} + export interface AutomationStepResult { id: string stepId: AutomationActionStepId - inputs: Record - outputs: Record - success?: boolean - message?: string + inputs: AutomationStepResultInputs + outputs: AutomationStepResultOutputs } export interface AutomationTriggerResult { From 8a55a8252dfac058cc01700db86212c81898aeda Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 17:51:55 +0000 Subject: [PATCH 06/50] Addressing Mike's last comment. 
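
This rewrites executeBranchStep on top of the stepSuccess and stepFailure
helpers introduced earlier in the series: a matched branch contributes its own
success result followed by its children's results, and no match collapses to a
single NO_CONDITION_MET failure. The snippet below is a trimmed-down sketch of
those helpers, shown only so the small diff that follows reads on its own; the
real versions live in threads/automation.ts and use the full Automation types.

    // Trimmed-down sketch of the helpers this change leans on.
    interface Step {
      id: string
      stepId: string
      inputs: Record<string, any>
    }
    interface StepResult extends Step {
      outputs: { success: boolean; [key: string]: any }
    }

    const stepSuccess = (step: Step, outputs: Record<string, any>): StepResult => ({
      id: step.id,
      stepId: step.stepId,
      inputs: step.inputs,
      outputs: { success: true, ...outputs },
    })

    const stepFailure = (step: Step, outputs: Record<string, any>): StepResult => ({
      id: step.id,
      stepId: step.stepId,
      inputs: step.inputs,
      outputs: { success: false, ...outputs },
    })

    // A matched branch yields the branch's own result followed by its child
    // results; no match yields a single failure result:
    //   [stepSuccess(step, { branchName, branchId, status: `${branchName} branch taken` }), ...childResults]
    //   [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })]
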
--- packages/server/src/threads/automation.ts | 30 ++++++----------------- 1 file changed, 7 insertions(+), 23 deletions(-) diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index ca98eb9856..ce7b1cd02b 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -509,34 +509,18 @@ class Orchestrator { for (const branch of branches) { if (await branchMatches(ctx, branch)) { - const steps = children?.[branch.id] || [] - return [ - { - id: step.id, - stepId: step.stepId, - inputs: step.inputs, - outputs: { - success: true, - branchName: branch.name, - status: `${branch.name} branch taken`, - branchId: `${branch.id}`, - }, - }, - ...(await this.executeSteps(ctx, steps)), + stepSuccess(step, { + branchName: branch.name, + status: `${branch.name} branch taken`, + branchId: `${branch.id}`, + }), + ...(await this.executeSteps(ctx, children?.[branch.id] || [])), ] } } - this.stopped = true - return [ - { - id: step.id, - stepId: step.stepId, - inputs: step.inputs, - outputs: { success: false, status: AutomationStatus.NO_CONDITION_MET }, - }, - ] + return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })] } private async executeStep( From 5af11aeeda91196518f7e6e07986aea669f53300 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 18:13:32 +0000 Subject: [PATCH 07/50] Shonky, definitely not correct type fixes in the builder. --- .../builder/src/stores/builder/automations.ts | 60 ++++++++++--------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/packages/builder/src/stores/builder/automations.ts b/packages/builder/src/stores/builder/automations.ts index ddb8706482..cfa64eb332 100644 --- a/packages/builder/src/stores/builder/automations.ts +++ b/packages/builder/src/stores/builder/automations.ts @@ -26,10 +26,9 @@ import { UILogicalOperator, EmptyFilterOption, AutomationIOType, - AutomationStepSchema, - AutomationTriggerSchema, BranchPath, BlockDefinitions, + BranchStep, } from "@budibase/types" import { ActionStepID } from "@/constants/backend/automations" import { FIELDS } from "@/constants/backend" @@ -291,16 +290,16 @@ const automationActions = (store: AutomationStore) => ({ let result: (AutomationStep | AutomationTrigger)[] = [] pathWay.forEach(path => { const { stepIdx, branchIdx } = path - let last = result.length ? result[result.length - 1] : [] if (!result.length) { // Preceeding steps. result = steps.slice(0, stepIdx + 1) return } + let last = result[result.length - 1] as BranchStep if (last && "inputs" in last) { if (Number.isInteger(branchIdx)) { const branchId = last.inputs.branches[branchIdx].id - const children = last.inputs.children[branchId] + const children = last.inputs.children?.[branchId] || [] const stepChildren = children.slice(0, stepIdx + 1) // Preceeding steps. 
result = result.concat(stepChildren) @@ -473,24 +472,26 @@ const automationActions = (store: AutomationStore) => ({ id: block.id, }, ] - const branches: Branch[] = block.inputs?.branches || [] - branches.forEach((branch, bIdx) => { - block.inputs?.children[branch.id].forEach( - (bBlock: AutomationStep, sIdx: number, array: AutomationStep[]) => { - const ended = - array.length - 1 === sIdx && !bBlock.inputs?.branches?.length + if (block.stepId === AutomationActionStepId.BRANCH) { + const branches = block.inputs.branches + + branches.forEach((branch, bIdx) => { + block.inputs?.children?.[branch.id].forEach((bBlock, sIdx, array) => { + const isBranch = bBlock.stepId === AutomationActionStepId.BRANCH + const hasBranches = isBranch && bBlock.inputs?.branches?.length > 0 + const ended = array.length - 1 === sIdx && !hasBranches treeTraverse(bBlock, pathToCurrentNode, sIdx, bIdx, ended) - } - ) - }) + }) + }) - store.actions.registerBlock( - blockRefs, - block, - pathToCurrentNode, - terminating && !branches.length - ) + store.actions.registerBlock( + blockRefs, + block, + pathToCurrentNode, + terminating && !branches.length + ) + } } // Traverse the entire tree. @@ -594,8 +595,8 @@ const automationActions = (store: AutomationStore) => ({ if (blockIdx === 0 && isTrigger) { if ( - pathBlock.event === AutomationEventType.ROW_UPDATE || - pathBlock.event === AutomationEventType.ROW_SAVE + pathBlock.stepId === AutomationTriggerStepId.ROW_UPDATED || + pathBlock.stepId === AutomationTriggerStepId.ROW_SAVED ) { let table: any = get(tables).list.find( (table: Table) => table._id === pathBlock.inputs.tableId @@ -608,7 +609,7 @@ const automationActions = (store: AutomationStore) => ({ } } delete schema.row - } else if (pathBlock.event === AutomationEventType.APP_TRIGGER) { + } else if (pathBlock.stepId === AutomationTriggerStepId.APP) { schema = Object.fromEntries( Object.keys(pathBlock.inputs.fields || []).map(key => [ key, @@ -914,9 +915,7 @@ const automationActions = (store: AutomationStore) => ({ ...newAutomation.definition.steps, ] - let cache: - | AutomationStepSchema - | AutomationTriggerSchema + let cache: AutomationStep | AutomationTrigger | AutomationStep[] pathWay.forEach((path, pathIdx, array) => { const { stepIdx, branchIdx } = path @@ -938,9 +937,14 @@ const automationActions = (store: AutomationStore) => ({ } return } - if (Number.isInteger(branchIdx)) { + + if ( + Number.isInteger(branchIdx) && + !Array.isArray(cache) && + cache.stepId === AutomationActionStepId.BRANCH + ) { const branchId = cache.inputs.branches[branchIdx].id - const children = cache.inputs.children[branchId] + const children = cache.inputs.children?.[branchId] || [] if (final) { insertBlock(children, stepIdx) @@ -1134,7 +1138,7 @@ const automationActions = (store: AutomationStore) => ({ * @returns */ shiftBranch: (pathTo: Array, block: AutomationStep, direction = -1) => { - let newBlock = cloneDeep(block) + let newBlock = cloneDeep(block) as BranchStep const branchPath = pathTo.at(-1) const targetIdx = branchPath.branchIdx From a684bdf4f2afc09be93983ffef130e15d6fa3fa9 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 18:18:04 +0000 Subject: [PATCH 08/50] Fix tests. 
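
The test now uses the FilterCondition enum exported from @budibase/types
instead of the old FilterConditions constant from shared-core. For reference,
this is the shape of the filter step inputs the updated test builds; the enum
below is a local stand-in and its string value is assumed purely for
illustration.

    // Local stand-in for the @budibase/types enum; the member value is an
    // assumption made for this sketch only.
    enum FilterCondition {
      EQUAL = "equal",
    }

    // Both sides of the comparison are handlebars bindings resolved at run time.
    const filterInputs = {
      field: "{{ trigger.row.City }}",
      value: "{{ trigger.oldRow.City }}",
      condition: FilterCondition.EQUAL,
    }
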
--- packages/server/src/api/routes/tests/automation.spec.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts index 1d7b9cd6ed..722aeffcc3 100644 --- a/packages/server/src/api/routes/tests/automation.spec.ts +++ b/packages/server/src/api/routes/tests/automation.spec.ts @@ -13,6 +13,7 @@ import sdk from "../../../sdk" import { ConfigType, FieldType, + FilterCondition, isDidNotTriggerResponse, SettingsConfig, Table, @@ -20,12 +21,9 @@ import { import { mocks } from "@budibase/backend-core/tests" import { removeDeprecated } from "../../../automations/utils" import { createAutomationBuilder } from "../../../automations/tests/utilities/AutomationTestBuilder" -import { automations } from "@budibase/shared-core" import { basicTable } from "../../../tests/utilities/structures" import TestConfiguration from "../../../tests/utilities/TestConfiguration" -const FilterConditions = automations.steps.filter.FilterConditions - const MAX_RETRIES = 4 const { basicAutomation, @@ -594,7 +592,7 @@ describe("/automations", () => { steps: [ { inputs: { - condition: FilterConditions.EQUAL, + condition: FilterCondition.EQUAL, field: "{{ trigger.row.City }}", value: "{{ trigger.oldRow.City }}", }, From 5d7c8cd0f14334446a9bbaa0a074558369ea9a5e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 13 Feb 2025 18:25:55 +0000 Subject: [PATCH 09/50] Fix more tests. --- packages/worker/src/api/routes/global/tests/realEmail.spec.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/worker/src/api/routes/global/tests/realEmail.spec.ts b/packages/worker/src/api/routes/global/tests/realEmail.spec.ts index bf5ed7b4ee..479a1f0476 100644 --- a/packages/worker/src/api/routes/global/tests/realEmail.spec.ts +++ b/packages/worker/src/api/routes/global/tests/realEmail.spec.ts @@ -31,8 +31,8 @@ describe("/api/global/email", () => { ) { let response, text try { - await helpers.withTimeout(20000, config.saveEtherealSmtpConfig()) - await helpers.withTimeout(20000, config.saveSettingsConfig()) + await helpers.withTimeout(20000, () => config.saveEtherealSmtpConfig()) + await helpers.withTimeout(20000, () => config.saveSettingsConfig()) let res if (attachments) { res = await config.api.emails From a446401fc5ad761b017704102747b88b29554ca7 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 09:27:12 +0000 Subject: [PATCH 10/50] Fix rowAction.spec.ts. 
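
The rowAction fix makes the trigger result explicit about having no inputs and
only backfills a timestamp for cron triggers, with the trigger result type
updated to allow a null inputs field. Below is a simplified sketch of that
construction; the types are trimmed down, and in the real code the event comes
from the job data and the cron check compares the trigger's stepId against the
CRON step.

    // Simplified sketch of how the trigger result is now built.
    interface TriggerResult {
      id: string
      stepId: string
      inputs?: Record<string, any> | null
      outputs: Record<string, any>
    }

    function buildTriggerResult(
      trigger: { id: string; stepId: string },
      event: Record<string, any>,
      isCron: boolean
    ): TriggerResult {
      // Only cron triggers need a synthetic timestamp when the event lacks one.
      if (isCron && !event.timestamp) {
        event.timestamp = Date.now()
      }
      // Triggers carry no step inputs, so this is now explicitly null.
      return { id: trigger.id, stepId: trigger.stepId, inputs: null, outputs: event }
    }
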
--- packages/server/src/threads/automation.ts | 4 +++- packages/types/src/documents/app/automation/automation.ts | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index ce7b1cd02b..48ddc89874 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -312,13 +312,15 @@ class Orchestrator { const job = cloneDeep(this.job) delete job.data.event.appId delete job.data.event.metadata - if (!job.data.event.timestamp) { + + if (this.isCron() && !job.data.event.timestamp) { job.data.event.timestamp = Date.now() } const trigger: AutomationTriggerResult = { id: job.data.automation.definition.trigger.id, stepId: job.data.automation.definition.trigger.stepId, + inputs: null, outputs: job.data.event, } const result: AutomationResults = { trigger, steps: [trigger] } diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index 590e3e135f..db03efe5d4 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -211,7 +211,7 @@ export interface AutomationStepResult { export interface AutomationTriggerResult { id: string stepId: AutomationTriggerStepId - inputs?: Record + inputs?: Record | null outputs: Record } From d4a55b743db7504ea621c5bbaef0412f32507fcf Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 09:31:53 +0000 Subject: [PATCH 11/50] Update pro reference. --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index eb96d8b2f2..4f2006af1c 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit eb96d8b2f2029033b0f758078ed30c888e8fb249 +Subproject commit 4f2006af1ca5523627dd3e19dce201ce0f758a09 From 378cdbe163efcc7187165888c959c2468ad75ca7 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 11:11:08 +0000 Subject: [PATCH 12/50] Fix type errors. 
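
Part of this change narrows the keys returned by Object.keys back to keyof the
step inputs type before indexing, as in the crud.ts hunks below. A minimal
illustration follows; ExampleInputs is a made-up type used only for the sketch.

    // Minimal illustration of the keyof narrowing used in crud.ts.
    interface ExampleInputs {
      text?: string
      tableId?: string
    }

    function cleanInputs(inputs: ExampleInputs) {
      for (const key of Object.keys(inputs)) {
        // Object.keys returns string[], so cast each key back to keyof the
        // inputs type before indexing.
        const inputName = key as keyof ExampleInputs
        if (!inputs[inputName] || inputs[inputName] === "") {
          delete inputs[inputName]
        }
      }
      return inputs
    }

    // cleanInputs({ text: "", tableId: "tableId" }) // -> { tableId: "tableId" }
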
--- .../src/api/routes/tests/automation.spec.ts | 30 ++++++++ .../tests/utilities/AutomationTestBuilder.ts | 1 + .../server/src/sdk/app/automations/crud.ts | 6 +- .../sdk/app/automations/tests/index.spec.ts | 69 ------------------- .../server/src/tests/utilities/structures.ts | 21 +++++- 5 files changed, 53 insertions(+), 74 deletions(-) delete mode 100644 packages/server/src/sdk/app/automations/tests/index.spec.ts diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts index 722aeffcc3..5108895c16 100644 --- a/packages/server/src/api/routes/tests/automation.spec.ts +++ b/packages/server/src/api/routes/tests/automation.spec.ts @@ -485,6 +485,36 @@ describe("/automations", () => { expect(events.automation.created).not.toHaveBeenCalled() expect(events.automation.triggerUpdated).not.toHaveBeenCalled() }) + + it("can update an input field", async () => { + const { automation } = await createAutomationBuilder(config) + .onRowDeleted({ tableId: "tableId" }) + .serverLog({ text: "test" }) + .save() + + automation.definition.trigger.inputs.tableId = "newTableId" + const { automation: updatedAutomation } = + await config.api.automation.update(automation) + + expect(updatedAutomation.definition.trigger.inputs.tableId).toEqual( + "newTableId" + ) + }) + + it("cannot update a readonly field", async () => { + const { automation } = await createAutomationBuilder(config) + .onRowAction({ tableId: "tableId" }) + .serverLog({ text: "test" }) + .save() + + automation.definition.trigger.inputs.tableId = "newTableId" + await config.api.automation.update(automation, { + status: 400, + body: { + message: "Field tableId is readonly and it cannot be modified", + }, + }) + }) }) describe("fetch", () => { diff --git a/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts b/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts index d707430a35..4eddf5875f 100644 --- a/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts +++ b/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts @@ -63,6 +63,7 @@ class TriggerBuilder { onRowDeleted = this.trigger(AutomationTriggerStepId.ROW_DELETED) onWebhook = this.trigger(AutomationTriggerStepId.WEBHOOK) onCron = this.trigger(AutomationTriggerStepId.CRON) + onRowAction = this.trigger(AutomationTriggerStepId.ROW_ACTION) } class BranchStepBuilder { diff --git a/packages/server/src/sdk/app/automations/crud.ts b/packages/server/src/sdk/app/automations/crud.ts index cd8af1e548..764b1df784 100644 --- a/packages/server/src/sdk/app/automations/crud.ts +++ b/packages/server/src/sdk/app/automations/crud.ts @@ -40,7 +40,8 @@ function cleanAutomationInputs(automation: Automation) { if (step == null) { continue } - for (let inputName of Object.keys(step.inputs)) { + for (const key of Object.keys(step.inputs)) { + const inputName = key as keyof typeof step.inputs if (!step.inputs[inputName] || step.inputs[inputName] === "") { delete step.inputs[inputName] } @@ -281,7 +282,8 @@ function guardInvalidUpdatesAndThrow( const readonlyFields = Object.keys( step.schema.inputs.properties || {} ).filter(k => step.schema.inputs.properties[k].readonly) - readonlyFields.forEach(readonlyField => { + readonlyFields.forEach(key => { + const readonlyField = key as keyof typeof step.inputs const oldStep = oldStepDefinitions.find(i => i.id === step.id) if (step.inputs[readonlyField] !== oldStep?.inputs[readonlyField]) { throw new HTTPError( diff --git 
a/packages/server/src/sdk/app/automations/tests/index.spec.ts b/packages/server/src/sdk/app/automations/tests/index.spec.ts deleted file mode 100644 index 6c70392300..0000000000 --- a/packages/server/src/sdk/app/automations/tests/index.spec.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { sample } from "lodash/fp" -import { Automation } from "@budibase/types" -import { generator } from "@budibase/backend-core/tests" -import TestConfiguration from "../../../../tests/utilities/TestConfiguration" -import automationSdk from "../" -import { structures } from "../../../../api/routes/tests/utilities" - -describe("automation sdk", () => { - const config = new TestConfiguration() - - beforeAll(async () => { - await config.init() - }) - - describe("update", () => { - it("can rename existing automations", async () => { - await config.doInContext(config.getAppId(), async () => { - const automation = structures.newAutomation() - - const response = await automationSdk.create(automation) - - const newName = generator.guid() - const update = { ...response, name: newName } - const result = await automationSdk.update(update) - expect(result.name).toEqual(newName) - }) - }) - - it.each([ - ["trigger", (a: Automation) => a.definition.trigger], - ["step", (a: Automation) => a.definition.steps[0]], - ])("can update input fields (for a %s)", async (_, getStep) => { - await config.doInContext(config.getAppId(), async () => { - const automation = structures.newAutomation() - - const keyToUse = sample(Object.keys(getStep(automation).inputs))! - getStep(automation).inputs[keyToUse] = "anyValue" - - const response = await automationSdk.create(automation) - - const update = { ...response } - getStep(update).inputs[keyToUse] = "anyUpdatedValue" - const result = await automationSdk.update(update) - expect(getStep(result).inputs[keyToUse]).toEqual("anyUpdatedValue") - }) - }) - - it.each([ - ["trigger", (a: Automation) => a.definition.trigger], - ["step", (a: Automation) => a.definition.steps[0]], - ])("cannot update readonly fields (for a %s)", async (_, getStep) => { - await config.doInContext(config.getAppId(), async () => { - const automation = structures.newAutomation() - getStep(automation).schema.inputs.properties["readonlyProperty"] = { - readonly: true, - } - getStep(automation).inputs["readonlyProperty"] = "anyValue" - - const response = await automationSdk.create(automation) - - const update = { ...response } - getStep(update).inputs["readonlyProperty"] = "anyUpdatedValue" - await expect(automationSdk.update(update)).rejects.toThrow( - "Field readonlyProperty is readonly and it cannot be modified" - ) - }) - }) - }) -}) diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts index a78a2c6c9e..38d60e1c11 100644 --- a/packages/server/src/tests/utilities/structures.ts +++ b/packages/server/src/tests/utilities/structures.ts @@ -35,6 +35,8 @@ import { WebhookActionType, BuiltinPermissionID, DeepPartial, + FilterCondition, + AutomationTriggerResult, } from "@budibase/types" import { LoopInput } from "../../definitions/automations" import { merge } from "lodash" @@ -372,7 +374,11 @@ export function filterAutomation(opts?: DeepPartial): Automation { type: AutomationStepType.ACTION, internal: true, stepId: AutomationActionStepId.FILTER, - inputs: { field: "name", value: "test", condition: "EQ" }, + inputs: { + field: "name", + value: "test", + condition: FilterCondition.EQUAL, + }, schema: BUILTIN_ACTION_DEFINITIONS.EXECUTE_SCRIPT.schema, }, ], @@ -437,15 +443,24 @@ 
export function updateRowAutomationWithFilters( export function basicAutomationResults( automationId: string ): AutomationResults { + const trigger: AutomationTriggerResult = { + id: "trigger", + stepId: AutomationTriggerStepId.APP, + outputs: {}, + } return { automationId, status: AutomationStatus.SUCCESS, - trigger: "trigger" as any, + trigger, steps: [ + trigger, { + id: "step1", stepId: AutomationActionStepId.SERVER_LOG, inputs: {}, - outputs: {}, + outputs: { + success: true, + }, }, ], } From 892879f0e6a17235e9e846cf7dcd826294e96b0e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 11:20:57 +0000 Subject: [PATCH 13/50] Fix automation route tests, address Adri's comments. --- .../builder/src/stores/builder/automations.ts | 8 ++++---- .../src/api/routes/tests/automation.spec.ts | 5 ----- packages/server/src/definitions/automations.ts | 17 ++++++++++++----- .../src/documents/app/automation/automation.ts | 7 +++++-- 4 files changed, 21 insertions(+), 16 deletions(-) diff --git a/packages/builder/src/stores/builder/automations.ts b/packages/builder/src/stores/builder/automations.ts index cfa64eb332..40cbdd4590 100644 --- a/packages/builder/src/stores/builder/automations.ts +++ b/packages/builder/src/stores/builder/automations.ts @@ -1094,7 +1094,7 @@ const automationActions = (store: AutomationStore) => ({ branchLeft: async ( pathTo: Array, automation: Automation, - block: AutomationStep + block: BranchStep ) => { const update = store.actions.shiftBranch(pathTo, block) if (update) { @@ -1117,7 +1117,7 @@ const automationActions = (store: AutomationStore) => ({ branchRight: async ( pathTo: Array, automation: Automation, - block: AutomationStep + block: BranchStep ) => { const update = store.actions.shiftBranch(pathTo, block, 1) if (update) { @@ -1137,8 +1137,8 @@ const automationActions = (store: AutomationStore) => ({ * @param {Number} direction - the direction of the swap. 
Defaults to -1 for left, add 1 for right * @returns */ - shiftBranch: (pathTo: Array, block: AutomationStep, direction = -1) => { - let newBlock = cloneDeep(block) as BranchStep + shiftBranch: (pathTo: Array, block: BranchStep, direction = -1) => { + let newBlock = cloneDeep(block) const branchPath = pathTo.at(-1) const targetIdx = branchPath.branchIdx diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts index 5108895c16..1591412735 100644 --- a/packages/server/src/api/routes/tests/automation.spec.ts +++ b/packages/server/src/api/routes/tests/automation.spec.ts @@ -519,11 +519,6 @@ describe("/automations", () => { describe("fetch", () => { it("return all the automations for an instance", async () => { - const fetchResponse = await config.api.automation.fetch() - for (const auto of fetchResponse.automations) { - await config.api.automation.delete(auto) - } - const { automation: automation1 } = await config.api.automation.post( newAutomation() ) diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index c7e7e5d514..bb472e01a3 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -1,4 +1,11 @@ -import { LoopStepType, UserBindings } from "@budibase/types" +import { + AutomationStepResult, + AutomationStepResultOutputs, + AutomationTriggerResult, + AutomationTriggerResultOutputs, + LoopStepType, + UserBindings, +} from "@budibase/types" export interface LoopInput { option: LoopStepType @@ -14,10 +21,10 @@ export interface TriggerOutput { } export interface AutomationContext { - trigger: any - steps: any[] - stepsById: Record - stepsByName: Record + trigger: AutomationTriggerResultOutputs + steps: [AutomationTriggerResultOutputs, ...AutomationStepResultOutputs[]] + stepsById: Record + stepsByName: Record env?: Record user?: UserBindings settings?: { diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index db03efe5d4..d5ef35d059 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -208,11 +208,14 @@ export interface AutomationStepResult { outputs: AutomationStepResultOutputs } +export type AutomationTriggerResultInputs = Record +export type AutomationTriggerResultOutputs = Record + export interface AutomationTriggerResult { id: string stepId: AutomationTriggerStepId - inputs?: Record | null - outputs: Record + inputs?: AutomationTriggerResultInputs | null + outputs: AutomationTriggerResultOutputs } export interface AutomationResults { From 499c7e1e01fe583e29d9f0c8de7a9ced41de31af Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 12:13:42 +0000 Subject: [PATCH 14/50] wip --- packages/server/src/automations/tests/steps/loop.spec.ts | 4 ++-- packages/server/src/definitions/automations.ts | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/server/src/automations/tests/steps/loop.spec.ts b/packages/server/src/automations/tests/steps/loop.spec.ts index 9e8d82c1f0..402fbcda92 100644 --- a/packages/server/src/automations/tests/steps/loop.spec.ts +++ b/packages/server/src/automations/tests/steps/loop.spec.ts @@ -25,8 +25,8 @@ describe("Attempt to run a basic loop automation", () => { await config.api.row.save(table._id!, {}) }) - afterAll(() => { - automation.shutdown() + afterAll(async () => { + await 
automation.shutdown() config.end() }) diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index bb472e01a3..a04b960ca5 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -1,7 +1,5 @@ import { - AutomationStepResult, AutomationStepResultOutputs, - AutomationTriggerResult, AutomationTriggerResultOutputs, LoopStepType, UserBindings, From e9923bf90be6bfcf35201f8a78337f8270f5905c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Mon, 17 Feb 2025 12:30:09 +0000 Subject: [PATCH 15/50] Fix loop.spec.ts timeout failures. --- packages/server/src/automations/tests/steps/loop.spec.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/server/src/automations/tests/steps/loop.spec.ts b/packages/server/src/automations/tests/steps/loop.spec.ts index 402fbcda92..883732330f 100644 --- a/packages/server/src/automations/tests/steps/loop.spec.ts +++ b/packages/server/src/automations/tests/steps/loop.spec.ts @@ -21,6 +21,11 @@ describe("Attempt to run a basic loop automation", () => { }) beforeEach(async () => { + const { automations } = await config.api.automation.fetch() + for (const automation of automations) { + await config.api.automation.delete(automation) + } + table = await config.api.table.save(basicTable()) await config.api.row.save(table._id!, {}) }) From a0f06bad780f171f74888cc00870d2637853a951 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Tue, 18 Feb 2025 15:48:14 +0000 Subject: [PATCH 16/50] Re-fix types in the automation store. --- .../builder/src/stores/builder/automations.ts | 62 ++++++++++--------- .../src/documents/app/automation/schema.ts | 30 +++++++++ 2 files changed, 64 insertions(+), 28 deletions(-) diff --git a/packages/builder/src/stores/builder/automations.ts b/packages/builder/src/stores/builder/automations.ts index 40cbdd4590..039a057a1b 100644 --- a/packages/builder/src/stores/builder/automations.ts +++ b/packages/builder/src/stores/builder/automations.ts @@ -15,7 +15,6 @@ import { import { AutomationTriggerStepId, AutomationEventType, - AutomationStepType, AutomationActionStepId, Automation, AutomationStep, @@ -28,6 +27,11 @@ import { AutomationIOType, BranchPath, BlockDefinitions, + isBranchStep, + isTrigger, + isRowUpdateTrigger, + isRowSaveTrigger, + isAppTrigger, BranchStep, } from "@budibase/types" import { ActionStepID } from "@/constants/backend/automations" @@ -295,8 +299,8 @@ const automationActions = (store: AutomationStore) => ({ result = steps.slice(0, stepIdx + 1) return } - let last = result[result.length - 1] as BranchStep - if (last && "inputs" in last) { + let last = result[result.length - 1] + if (isBranchStep(last)) { if (Number.isInteger(branchIdx)) { const branchId = last.inputs.branches[branchIdx].id const children = last.inputs.children?.[branchId] || [] @@ -473,25 +477,28 @@ const automationActions = (store: AutomationStore) => ({ }, ] - if (block.stepId === AutomationActionStepId.BRANCH) { - const branches = block.inputs.branches + if (isBranchStep(block)) { + const branches = block.inputs?.branches || [] + const children = block.inputs?.children || {} branches.forEach((branch, bIdx) => { - block.inputs?.children?.[branch.id].forEach((bBlock, sIdx, array) => { - const isBranch = bBlock.stepId === AutomationActionStepId.BRANCH - const hasBranches = isBranch && bBlock.inputs?.branches?.length > 0 - const ended = array.length - 1 === sIdx && !hasBranches - treeTraverse(bBlock, pathToCurrentNode, sIdx, bIdx, ended) - }) + 
children[branch.id].forEach( + (bBlock: AutomationStep, sIdx: number, array: AutomationStep[]) => { + const ended = array.length - 1 === sIdx && !branches.length + treeTraverse(bBlock, pathToCurrentNode, sIdx, bIdx, ended) + } + ) }) - store.actions.registerBlock( - blockRefs, - block, - pathToCurrentNode, - terminating && !branches.length - ) + terminating = terminating && !branches.length } + + store.actions.registerBlock( + blockRefs, + block, + pathToCurrentNode, + terminating + ) } // Traverse the entire tree. @@ -576,7 +583,6 @@ const automationActions = (store: AutomationStore) => ({ pathBlock.stepId === ActionStepID.LOOP && pathBlock.blockToLoop in blocks } - const isTrigger = pathBlock.type === AutomationStepType.TRIGGER if (isLoopBlock && loopBlockCount == 0) { schema = { @@ -587,17 +593,14 @@ const automationActions = (store: AutomationStore) => ({ } } - const icon = isTrigger + const icon = isTrigger(pathBlock) ? pathBlock.icon : isLoopBlock ? "Reuse" : pathBlock.icon - if (blockIdx === 0 && isTrigger) { - if ( - pathBlock.stepId === AutomationTriggerStepId.ROW_UPDATED || - pathBlock.stepId === AutomationTriggerStepId.ROW_SAVED - ) { + if (blockIdx === 0 && isTrigger(pathBlock)) { + if (isRowUpdateTrigger(pathBlock) || isRowSaveTrigger(pathBlock)) { let table: any = get(tables).list.find( (table: Table) => table._id === pathBlock.inputs.tableId ) @@ -609,7 +612,7 @@ const automationActions = (store: AutomationStore) => ({ } } delete schema.row - } else if (pathBlock.stepId === AutomationTriggerStepId.APP) { + } else if (isAppTrigger(pathBlock)) { schema = Object.fromEntries( Object.keys(pathBlock.inputs.fields || []).map(key => [ key, @@ -915,7 +918,11 @@ const automationActions = (store: AutomationStore) => ({ ...newAutomation.definition.steps, ] - let cache: AutomationStep | AutomationTrigger | AutomationStep[] + let cache: + | AutomationStep + | AutomationTrigger + | AutomationStep[] + | undefined = undefined pathWay.forEach((path, pathIdx, array) => { const { stepIdx, branchIdx } = path @@ -937,11 +944,10 @@ const automationActions = (store: AutomationStore) => ({ } return } - if ( Number.isInteger(branchIdx) && !Array.isArray(cache) && - cache.stepId === AutomationActionStepId.BRANCH + isBranchStep(cache) ) { const branchId = cache.inputs.branches[branchIdx].id const children = cache.inputs.children?.[branchId] || [] diff --git a/packages/types/src/documents/app/automation/schema.ts b/packages/types/src/documents/app/automation/schema.ts index 324df227fd..745737e2a6 100644 --- a/packages/types/src/documents/app/automation/schema.ts +++ b/packages/types/src/documents/app/automation/schema.ts @@ -342,6 +342,36 @@ export type AutomationStep = | OpenAIStep | BranchStep +export function isBranchStep( + step: AutomationStep | AutomationTrigger +): step is BranchStep { + return step.stepId === AutomationActionStepId.BRANCH +} + +export function isTrigger( + step: AutomationStep | AutomationTrigger +): step is AutomationTrigger { + return step.type === AutomationStepType.TRIGGER +} + +export function isRowUpdateTrigger( + step: AutomationStep | AutomationTrigger +): step is RowUpdatedTrigger { + return step.stepId === AutomationTriggerStepId.ROW_UPDATED +} + +export function isRowSaveTrigger( + step: AutomationStep | AutomationTrigger +): step is RowSavedTrigger { + return step.stepId === AutomationTriggerStepId.ROW_SAVED +} + +export function isAppTrigger( + step: AutomationStep | AutomationTrigger +): step is AppActionTrigger { + return step.stepId === 
AutomationTriggerStepId.APP +} + type EmptyInputs = {} export type AutomationStepDefinition = Omit & { inputs: EmptyInputs From 08baf20e9c9af2cd8b263c0c43b2fa30efaa15bb Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 00:10:55 +0000 Subject: [PATCH 17/50] store koa sessions in redis instead of cookies --- hosting/nginx.dev.conf | 5 + packages/server/package.json | 1 + packages/worker/package.json | 1 + packages/worker/src/index.ts | 17 ++- yarn.lock | 215 ++++++++++++++++++++++++++--------- 5 files changed, 186 insertions(+), 53 deletions(-) diff --git a/hosting/nginx.dev.conf b/hosting/nginx.dev.conf index f0a58a9a98..a8cefe9ccc 100644 --- a/hosting/nginx.dev.conf +++ b/hosting/nginx.dev.conf @@ -63,6 +63,11 @@ http { proxy_send_timeout 120s; proxy_http_version 1.1; + # Enable buffering for potentially large OIDC configs + proxy_buffering on; + proxy_buffer_size 16k; + proxy_buffers 4 32k; + proxy_set_header Host $host; proxy_set_header Connection ""; diff --git a/packages/server/package.json b/packages/server/package.json index 9a70ecba9c..a668d6137c 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -102,6 +102,7 @@ "koa": "2.15.4", "koa-body": "4.2.0", "koa-compress": "4.0.1", + "koa-redis": "^4.0.1", "koa-send": "5.0.1", "koa-useragent": "^4.1.0", "koa2-ratelimit": "1.1.1", diff --git a/packages/worker/package.json b/packages/worker/package.json index c5e32d11a0..211e1c4b72 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -62,6 +62,7 @@ "koa-body": "4.2.0", "koa-compress": "4.0.1", "koa-passport": "4.1.4", + "koa-redis": "^4.0.1", "koa-send": "5.0.1", "koa-session": "5.13.1", "koa-static": "5.0.0", diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index 0547afab38..3bc2c9924c 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -20,6 +20,7 @@ import { cache, features, } from "@budibase/backend-core" +import RedisStore from "koa-redis" db.init() import koaBody from "koa-body" @@ -45,6 +46,7 @@ bootstrap() const app: Application = new Application() + app.keys = ["secret", "key"] app.proxy = true @@ -52,7 +54,20 @@ app.proxy = true app.use(handleScimBody) app.use(koaBody({ multipart: true })) -app.use(koaSession(app)) +app.use(async (ctx, next) => { + const redisClient = await new redis.Client(redis.utils.Databases.SESSIONS).init() + return koaSession({ + store: new RedisStore({ client: redisClient.getClient() }), + key: "koa:sess", + maxAge: 86400000, // one day + httpOnly: true, + secure: process.env.NODE_ENV === "production", + sameSite: "strict", + rolling: true, + renew: true, + }, app)(ctx, next) +}) + app.use(middleware.correlation) app.use(middleware.pino) app.use(middleware.ip) diff --git a/yarn.lock b/yarn.lock index efbac62306..93de543413 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1457,7 +1457,7 @@ "@azure/abort-controller" "^2.0.0" tslib "^2.6.2" -"@azure/identity@4.2.1", "@azure/identity@^4.2.1": +"@azure/identity@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.2.1.tgz#22b366201e989b7b41c0e1690e103bd579c31e4c" integrity sha512-U8hsyC9YPcEIzoaObJlRDvp7KiF0MGS7xcWbyJSVvXRkC/HXo1f0oYeBYmEvVgRfacw7GHf6D6yAoh9JHz6A5Q== @@ -2695,6 +2695,13 @@ dependencies: regenerator-runtime "^0.14.0" +"@babel/runtime@^7.8.3": + version "7.26.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.9.tgz#aa4c6facc65b9cb3f87d75125ffd47781b475433" + integrity 
sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/template@^7.22.15", "@babel/template@^7.22.5", "@babel/template@^7.25.9", "@babel/template@^7.3.3": version "7.25.9" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016" @@ -2777,28 +2784,6 @@ pouchdb-promise "^6.0.4" through2 "^2.0.0" -"@budibase/pro@npm:@budibase/pro@latest": - version "3.4.6" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.6.tgz#62b6ee13a015b98d4768dc7821f468f8177da3e9" - integrity sha512-MC3P5SMokmqbjejZMlNM6z7NB9o5H6hZ++yVvbyThniBPYfuDc2ssa1HNwwcuNE3uRLhcxcKe8CY/0SbFgn51g== - dependencies: - "@anthropic-ai/sdk" "^0.27.3" - "@budibase/backend-core" "*" - "@budibase/shared-core" "*" - "@budibase/string-templates" "*" - "@budibase/types" "*" - "@koa/router" "13.1.0" - bull "4.10.1" - dd-trace "5.26.0" - joi "17.6.0" - jsonwebtoken "9.0.2" - lru-cache "^7.14.1" - memorystream "^0.3.1" - node-fetch "2.6.7" - openai "4.59.0" - scim-patch "^0.8.1" - scim2-parse-filter "^0.2.8" - "@budibase/vm-browserify@^1.1.4": version "1.1.4" resolved "https://registry.yarnpkg.com/@budibase/vm-browserify/-/vm-browserify-1.1.4.tgz#eecb001bd9521cb7647e26fb4d2d29d0a4dce262" @@ -8129,7 +8114,23 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -axios@1.1.3, axios@1.7.7, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: +axios@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35" + integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA== + dependencies: + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + +axios@^0.21.1: + version "0.21.4" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" + integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== + dependencies: + follow-redirects "^1.14.0" + +axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: version "1.7.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== @@ -9041,7 +9042,14 @@ co-body@^5.1.1: raw-body "^2.2.0" type-is "^1.6.14" -co@^4.6.0: +co-wrap-all@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/co-wrap-all/-/co-wrap-all-1.0.0.tgz#370ae3e8333510a53f6b2f7fdfbe4568a11b7ecf" + integrity sha512-aru6gLi2vTUazr+MxVm3Rv6ST7/EKtFj9BrfkcOrbCO2Qv6LqJdE71m88HhHiBEviKw/ucVrwoGLrq2xHpOsJA== + dependencies: + co "^4.0.0" + +co@^4.0.0, co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== @@ -11763,7 +11771,7 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.15.6: 
+follow-redirects@^1.14.0, follow-redirects@^1.15.0, follow-redirects@^1.15.6: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== @@ -12378,10 +12386,22 @@ global@~4.4.0: min-document "^2.19.0" process "^0.11.10" -globals@15.13.0, globals@^11.1.0, globals@^13.19.0, globals@^14.0.0: - version "15.13.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-15.13.0.tgz#bbec719d69aafef188ecd67954aae76a696010fc" - integrity sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g== +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.19.0: + version "13.24.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== + dependencies: + type-fest "^0.20.2" + +globals@^14.0.0: + version "14.0.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-14.0.0.tgz#898d7413c29babcf6bafe56fcadded858ada724e" + integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== globalthis@^1.0.1, globalthis@^1.0.4: version "1.0.4" @@ -12791,7 +12811,12 @@ http-assert@^1.3.0: deep-equal "~1.0.1" http-errors "~1.8.0" -http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: +http-cache-semantics@3.8.1: + version "3.8.1" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" + integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== + +http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== @@ -13177,7 +13202,7 @@ ioredis@5.3.2: redis-parser "^3.0.0" standard-as-callback "^2.1.0" -ioredis@^4.28.5: +ioredis@^4.14.1, ioredis@^4.28.5: version "4.28.5" resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f" integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A== @@ -13247,6 +13272,11 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + is-buffer@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -13686,11 +13716,6 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity 
sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -isobject@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0" - integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA== - isolated-vm@^4.7.2: version "4.7.2" resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65" @@ -14570,7 +14595,14 @@ kill-port@^1.6.1: get-them-args "1.3.2" shell-exec "1.0.2" -kind-of@6.0.3, kind-of@^3.0.2, kind-of@^3.1.0, kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@^3.0.2, kind-of@^3.1.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -14677,6 +14709,16 @@ koa-pino-logger@4.0.0: dependencies: pino-http "^6.5.0" +koa-redis@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/koa-redis/-/koa-redis-4.0.1.tgz#57ac1b46d9ab851221a9f4952c1e8d4bf289db40" + integrity sha512-o2eTVNo1NBnloeUGhHed5Q2ZvJSLpUEj/+E1/7oH5EmH8WuQ+QLdl/VawkshxdFQ47W1p6V09lM3hCTu7D0YnQ== + dependencies: + "@babel/runtime" "^7.8.3" + co-wrap-all "^1.0.0" + debug "^4.1.1" + ioredis "^4.14.1" + koa-router@^10.0.0: version "10.1.1" resolved "https://registry.yarnpkg.com/koa-router/-/koa-router-10.1.1.tgz#20809f82648518b84726cd445037813cd99f17ff" @@ -15953,7 +15995,7 @@ msgpackr-extract@^3.0.2: "@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2" "@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2" -msgpackr@1.10.1, msgpackr@^1.5.2: +msgpackr@^1.5.2: version "1.10.1" resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555" integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ== @@ -16146,13 +16188,27 @@ node-domexception@1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.7, node-fetch@2.6.9, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: +node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" +node-fetch@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" + integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@^2.6.9: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity 
sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-forge@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -17181,7 +17237,15 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0: resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== -passport@0.6.0, passport@^0.4.0, passport@^0.6.0: +passport@^0.4.0: + version "0.4.1" + resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" + integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== + dependencies: + passport-strategy "1.x.x" + pause "0.0.1" + +passport@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d" integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug== @@ -18166,6 +18230,13 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== +psl@^1.1.28: + version "1.15.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.15.0.tgz#bdace31896f1d97cec6a79e8224898ce93d974c6" + integrity sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w== + dependencies: + punycode "^2.3.1" + psl@^1.1.33: version "1.9.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" @@ -18199,6 +18270,11 @@ punycode@^2.1.0, punycode@^2.1.1, punycode@^2.3.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== +punycode@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + pupa@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" @@ -19030,6 +19106,11 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== +sax@>=0.1.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== + sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" @@ -19102,13 +19183,28 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, 
semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: +"semver@2 || 3 || 4 || 5", semver@^5.6.0, semver@^5.7.1: + version "5.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: + version "7.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" + integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== + seq-queue@^0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e" @@ -20590,7 +20686,7 @@ touch@^3.1.0: dependencies: nopt "~1.0.10" -tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0: +"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== @@ -20600,6 +20696,14 @@ tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0 universalify "^0.2.0" url-parse "^1.5.3" +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + tr46@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" @@ -21067,14 +21171,6 @@ unpipe@1.0.0: resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== -unset-value@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3" - integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg== - dependencies: - has-value "^2.0.2" - isobject "^4.0.0" - untildify@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" @@ -21778,7 +21874,14 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" integrity 
sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g== -xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: +xml2js@0.1.x: + version "0.1.14" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" + integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA== + dependencies: + sax ">=0.1.1" + +xml2js@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== @@ -21786,6 +21889,14 @@ xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: sax ">=0.6.0" xmlbuilder "~11.0.0" +xml2js@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" + integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + xmlbuilder@~11.0.0: version "11.0.1" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" From 9b96f45e53841a3ba2578fde892884937f1f4a45 Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 00:14:05 +0000 Subject: [PATCH 18/50] lockfile --- yarn.lock | 187 +++++++++++++++--------------------------------------- 1 file changed, 50 insertions(+), 137 deletions(-) diff --git a/yarn.lock b/yarn.lock index 93de543413..48184e90a4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1457,7 +1457,7 @@ "@azure/abort-controller" "^2.0.0" tslib "^2.6.2" -"@azure/identity@^4.2.1": +"@azure/identity@4.2.1", "@azure/identity@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.2.1.tgz#22b366201e989b7b41c0e1690e103bd579c31e4c" integrity sha512-U8hsyC9YPcEIzoaObJlRDvp7KiF0MGS7xcWbyJSVvXRkC/HXo1f0oYeBYmEvVgRfacw7GHf6D6yAoh9JHz6A5Q== @@ -2784,6 +2784,28 @@ pouchdb-promise "^6.0.4" through2 "^2.0.0" +"@budibase/pro@npm:@budibase/pro@latest": + version "3.4.11" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.11.tgz#5ba5fe278ccd74efea4cc6762b36844e6256f9b3" + integrity sha512-EHBlgEciGKm1Qlm/z3gO9o8ujOISPKwhL8Dqsg8yhnOs/ngglMJArrlwQvFIEX5IMNfJ7eGqUxsgrkXOLevjrg== + dependencies: + "@anthropic-ai/sdk" "^0.27.3" + "@budibase/backend-core" "*" + "@budibase/shared-core" "*" + "@budibase/string-templates" "*" + "@budibase/types" "*" + "@koa/router" "13.1.0" + bull "4.10.1" + dd-trace "5.26.0" + joi "17.6.0" + jsonwebtoken "9.0.2" + lru-cache "^7.14.1" + memorystream "^0.3.1" + node-fetch "2.6.7" + openai "4.59.0" + scim-patch "^0.8.1" + scim2-parse-filter "^0.2.8" + "@budibase/vm-browserify@^1.1.4": version "1.1.4" resolved "https://registry.yarnpkg.com/@budibase/vm-browserify/-/vm-browserify-1.1.4.tgz#eecb001bd9521cb7647e26fb4d2d29d0a4dce262" @@ -8114,23 +8136,7 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -axios@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35" - integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA== - dependencies: - follow-redirects "^1.15.0" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - 
-axios@^0.21.1: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== - dependencies: - follow-redirects "^1.14.0" - -axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: +axios@1.1.3, axios@1.7.7, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: version "1.7.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== @@ -11771,7 +11777,7 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.14.0, follow-redirects@^1.15.0, follow-redirects@^1.15.6: +follow-redirects@^1.15.6: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== @@ -12386,22 +12392,10 @@ global@~4.4.0: min-document "^2.19.0" process "^0.11.10" -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.19.0: - version "13.24.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" - integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== - dependencies: - type-fest "^0.20.2" - -globals@^14.0.0: - version "14.0.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-14.0.0.tgz#898d7413c29babcf6bafe56fcadded858ada724e" - integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== +globals@15.13.0, globals@^11.1.0, globals@^13.19.0, globals@^14.0.0: + version "15.13.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-15.13.0.tgz#bbec719d69aafef188ecd67954aae76a696010fc" + integrity sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g== globalthis@^1.0.1, globalthis@^1.0.4: version "1.0.4" @@ -12811,12 +12805,7 @@ http-assert@^1.3.0: deep-equal "~1.0.1" http-errors "~1.8.0" -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== - -http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: +http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== @@ -13272,11 +13261,6 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" 
-is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-buffer@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -13716,6 +13700,11 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== +isobject@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0" + integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA== + isolated-vm@^4.7.2: version "4.7.2" resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65" @@ -14595,14 +14584,7 @@ kill-port@^1.6.1: get-them-args "1.3.2" shell-exec "1.0.2" -kind-of@^3.0.2, kind-of@^3.1.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== - dependencies: - is-buffer "^1.1.5" - -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@6.0.3, kind-of@^3.0.2, kind-of@^3.1.0, kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -15995,7 +15977,7 @@ msgpackr-extract@^3.0.2: "@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2" "@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2" -msgpackr@^1.5.2: +msgpackr@1.10.1, msgpackr@^1.5.2: version "1.10.1" resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555" integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ== @@ -16188,27 +16170,13 @@ node-domexception@1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7: +node-fetch@2.6.7, node-fetch@2.6.9, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" -node-fetch@2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" - integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== - dependencies: - whatwg-url "^5.0.0" - -node-fetch@^2.6.9: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity 
sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - node-forge@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -17237,15 +17205,7 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0: resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== -passport@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" - integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== - dependencies: - passport-strategy "1.x.x" - pause "0.0.1" - -passport@^0.6.0: +passport@0.6.0, passport@^0.4.0, passport@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d" integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug== @@ -18230,13 +18190,6 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== -psl@^1.1.28: - version "1.15.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.15.0.tgz#bdace31896f1d97cec6a79e8224898ce93d974c6" - integrity sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w== - dependencies: - punycode "^2.3.1" - psl@^1.1.33: version "1.9.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" @@ -18270,11 +18223,6 @@ punycode@^2.1.0, punycode@^2.1.1, punycode@^2.3.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== -punycode@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" - integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== - pupa@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" @@ -19106,11 +19054,6 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== -sax@>=0.1.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" - integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== - sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" @@ -19183,28 +19126,13 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@^5.6.0, semver@^5.7.1: - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity 
sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== - -semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: +"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: - version "7.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" - integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== - seq-queue@^0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e" @@ -20686,7 +20614,7 @@ touch@^3.1.0: dependencies: nopt "~1.0.10" -"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2: +tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== @@ -20696,14 +20624,6 @@ touch@^3.1.0: universalify "^0.2.0" url-parse "^1.5.3" -tough-cookie@~2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== - dependencies: - psl "^1.1.28" - punycode "^2.1.1" - tr46@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" @@ -21171,6 +21091,14 @@ unpipe@1.0.0: resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== +unset-value@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3" + integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg== + dependencies: + has-value "^2.0.2" + isobject "^4.0.0" + untildify@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" @@ -21874,14 +21802,7 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" 
integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g== -xml2js@0.1.x: - version "0.1.14" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" - integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA== - dependencies: - sax ">=0.1.1" - -xml2js@0.6.2: +xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: version "0.6.2" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== @@ -21889,14 +21810,6 @@ xml2js@0.6.2: sax ">=0.6.0" xmlbuilder "~11.0.0" -xml2js@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" - integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== - dependencies: - sax ">=0.6.0" - xmlbuilder "~11.0.0" - xmlbuilder@~11.0.0: version "11.0.1" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" From bbd5046dcf816dee098bd00b55d01a12bf152e68 Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 00:26:00 +0000 Subject: [PATCH 19/50] add koa-redis types --- packages/worker/package.json | 1 + packages/worker/src/index.ts | 28 ++--- yarn.lock | 203 ++++++++++++++++++++++++++--------- 3 files changed, 170 insertions(+), 62 deletions(-) diff --git a/packages/worker/package.json b/packages/worker/package.json index 211e1c4b72..d9200880a2 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -84,6 +84,7 @@ "@types/jest": "29.5.5", "@types/jsonwebtoken": "9.0.3", "@types/koa": "2.13.4", + "@types/koa-redis": "^4.0.5", "@types/koa__router": "12.0.4", "@types/lodash": "4.14.200", "@types/node-fetch": "2.6.4", diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index 3bc2c9924c..fb12da9465 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -46,7 +46,6 @@ bootstrap() const app: Application = new Application() - app.keys = ["secret", "key"] app.proxy = true @@ -55,17 +54,22 @@ app.use(handleScimBody) app.use(koaBody({ multipart: true })) app.use(async (ctx, next) => { - const redisClient = await new redis.Client(redis.utils.Databases.SESSIONS).init() - return koaSession({ - store: new RedisStore({ client: redisClient.getClient() }), - key: "koa:sess", - maxAge: 86400000, // one day - httpOnly: true, - secure: process.env.NODE_ENV === "production", - sameSite: "strict", - rolling: true, - renew: true, - }, app)(ctx, next) + const redisClient = await new redis.Client( + redis.utils.Databases.SESSIONS + ).init() + return koaSession( + { + store: new RedisStore({ client: redisClient.getClient() }), + key: "koa:sess", + maxAge: 86400000, // one day + httpOnly: true, + secure: process.env.NODE_ENV === "production", + sameSite: "strict", + rolling: true, + renew: true, + }, + app + )(ctx, next) }) app.use(middleware.correlation) diff --git a/yarn.lock b/yarn.lock index 48184e90a4..1b04429b37 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1457,7 +1457,7 @@ "@azure/abort-controller" "^2.0.0" tslib "^2.6.2" -"@azure/identity@4.2.1", "@azure/identity@^4.2.1": +"@azure/identity@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.2.1.tgz#22b366201e989b7b41c0e1690e103bd579c31e4c" integrity 
sha512-U8hsyC9YPcEIzoaObJlRDvp7KiF0MGS7xcWbyJSVvXRkC/HXo1f0oYeBYmEvVgRfacw7GHf6D6yAoh9JHz6A5Q== @@ -2784,28 +2784,6 @@ pouchdb-promise "^6.0.4" through2 "^2.0.0" -"@budibase/pro@npm:@budibase/pro@latest": - version "3.4.11" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.11.tgz#5ba5fe278ccd74efea4cc6762b36844e6256f9b3" - integrity sha512-EHBlgEciGKm1Qlm/z3gO9o8ujOISPKwhL8Dqsg8yhnOs/ngglMJArrlwQvFIEX5IMNfJ7eGqUxsgrkXOLevjrg== - dependencies: - "@anthropic-ai/sdk" "^0.27.3" - "@budibase/backend-core" "*" - "@budibase/shared-core" "*" - "@budibase/string-templates" "*" - "@budibase/types" "*" - "@koa/router" "13.1.0" - bull "4.10.1" - dd-trace "5.26.0" - joi "17.6.0" - jsonwebtoken "9.0.2" - lru-cache "^7.14.1" - memorystream "^0.3.1" - node-fetch "2.6.7" - openai "4.59.0" - scim-patch "^0.8.1" - scim2-parse-filter "^0.2.8" - "@budibase/vm-browserify@^1.1.4": version "1.1.4" resolved "https://registry.yarnpkg.com/@budibase/vm-browserify/-/vm-browserify-1.1.4.tgz#eecb001bd9521cb7647e26fb4d2d29d0a4dce262" @@ -6625,6 +6603,14 @@ dependencies: "@types/koa" "*" +"@types/koa-redis@^4.0.5": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/koa-redis/-/koa-redis-4.0.5.tgz#c535e38ad681782080b53f006104f7968ba1121f" + integrity sha512-xoEdyWDejkLkFVVf+N12euYakItlMicF/SHEDOOvndbteJvQ3AFGUHDDbQIrE2DGjH93LGweOZxXLAs8XfpY2Q== + dependencies: + "@types/koa-session" "*" + "@types/redis" "^2.8.0" + "@types/koa-send@^4.1.6": version "4.1.6" resolved "https://registry.yarnpkg.com/@types/koa-send/-/koa-send-4.1.6.tgz#15d90e95e3ccce669a15b6a3c56c3a650a167cea" @@ -6632,6 +6618,14 @@ dependencies: "@types/koa" "*" +"@types/koa-session@*": + version "6.4.5" + resolved "https://registry.yarnpkg.com/@types/koa-session/-/koa-session-6.4.5.tgz#ac10bac507f4bb722fa6c55c33607b5c8769f779" + integrity sha512-Vc6+fslnPuMH2v9y80WYeo39UMo8mweuNNthKCwYU2ZE6l5vnRrzRU3BRvexKwsoI5sxsRl5CxDsBlLI8kY/XA== + dependencies: + "@types/cookies" "*" + "@types/koa" "*" + "@types/koa@*": version "2.13.5" resolved "https://registry.yarnpkg.com/@types/koa/-/koa-2.13.5.tgz#64b3ca4d54e08c0062e89ec666c9f45443b21a61" @@ -8136,7 +8130,23 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -axios@1.1.3, axios@1.7.7, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: +axios@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35" + integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA== + dependencies: + follow-redirects "^1.15.0" + form-data "^4.0.0" + proxy-from-env "^1.1.0" + +axios@^0.21.1: + version "0.21.4" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" + integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== + dependencies: + follow-redirects "^1.14.0" + +axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: version "1.7.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== @@ -11777,7 +11787,7 @@ fn.name@1.x.x: resolved 
"https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.15.6: +follow-redirects@^1.14.0, follow-redirects@^1.15.0, follow-redirects@^1.15.6: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== @@ -12392,10 +12402,22 @@ global@~4.4.0: min-document "^2.19.0" process "^0.11.10" -globals@15.13.0, globals@^11.1.0, globals@^13.19.0, globals@^14.0.0: - version "15.13.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-15.13.0.tgz#bbec719d69aafef188ecd67954aae76a696010fc" - integrity sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g== +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.19.0: + version "13.24.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" + integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== + dependencies: + type-fest "^0.20.2" + +globals@^14.0.0: + version "14.0.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-14.0.0.tgz#898d7413c29babcf6bafe56fcadded858ada724e" + integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== globalthis@^1.0.1, globalthis@^1.0.4: version "1.0.4" @@ -12805,7 +12827,12 @@ http-assert@^1.3.0: deep-equal "~1.0.1" http-errors "~1.8.0" -http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: +http-cache-semantics@3.8.1: + version "3.8.1" + resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" + integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== + +http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== @@ -13261,6 +13288,11 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + is-buffer@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -13700,11 +13732,6 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -isobject@^4.0.0: - version "4.0.0" - resolved 
"https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0" - integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA== - isolated-vm@^4.7.2: version "4.7.2" resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65" @@ -14584,7 +14611,14 @@ kill-port@^1.6.1: get-them-args "1.3.2" shell-exec "1.0.2" -kind-of@6.0.3, kind-of@^3.0.2, kind-of@^3.1.0, kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@^3.0.2, kind-of@^3.1.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -15977,7 +16011,7 @@ msgpackr-extract@^3.0.2: "@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2" "@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2" -msgpackr@1.10.1, msgpackr@^1.5.2: +msgpackr@^1.5.2: version "1.10.1" resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555" integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ== @@ -16170,13 +16204,27 @@ node-domexception@1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.7, node-fetch@2.6.9, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: +node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" +node-fetch@2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" + integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@^2.6.9: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-forge@^1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -17205,7 +17253,15 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0: resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== -passport@0.6.0, passport@^0.4.0, passport@^0.6.0: +passport@^0.4.0: + version "0.4.1" + resolved 
"https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" + integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== + dependencies: + passport-strategy "1.x.x" + pause "0.0.1" + +passport@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d" integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug== @@ -18190,6 +18246,13 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== +psl@^1.1.28: + version "1.15.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.15.0.tgz#bdace31896f1d97cec6a79e8224898ce93d974c6" + integrity sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w== + dependencies: + punycode "^2.3.1" + psl@^1.1.33: version "1.9.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" @@ -18223,6 +18286,11 @@ punycode@^2.1.0, punycode@^2.1.1, punycode@^2.3.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== +punycode@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + pupa@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" @@ -19054,6 +19122,11 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== +sax@>=0.1.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" + integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== + sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" @@ -19126,13 +19199,28 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: +"semver@2 || 3 || 4 || 5", semver@^5.6.0, semver@^5.7.1: + version "5.7.2" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity 
sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" +semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: + version "7.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" + integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== + seq-queue@^0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e" @@ -20614,7 +20702,7 @@ touch@^3.1.0: dependencies: nopt "~1.0.10" -tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0: +"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== @@ -20624,6 +20712,14 @@ tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0 universalify "^0.2.0" url-parse "^1.5.3" +tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + tr46@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" @@ -21091,14 +21187,6 @@ unpipe@1.0.0: resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== -unset-value@2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3" - integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg== - dependencies: - has-value "^2.0.2" - isobject "^4.0.0" - untildify@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" @@ -21802,7 +21890,14 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g== -xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: +xml2js@0.1.x: + version "0.1.14" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" + integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA== + dependencies: + sax ">=0.1.1" + +xml2js@0.6.2: version "0.6.2" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity 
sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== @@ -21810,6 +21905,14 @@ xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: sax ">=0.6.0" xmlbuilder "~11.0.0" +xml2js@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" + integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + xmlbuilder@~11.0.0: version "11.0.1" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" From cf5b69a2affc38b30d3440a4a9d0b1bbd933f225 Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 00:37:47 +0000 Subject: [PATCH 20/50] remove koa-redis from server --- packages/server/package.json | 1 - yarn.lock | 187 ++++++++++------------------------- 2 files changed, 50 insertions(+), 138 deletions(-) diff --git a/packages/server/package.json b/packages/server/package.json index a668d6137c..9a70ecba9c 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -102,7 +102,6 @@ "koa": "2.15.4", "koa-body": "4.2.0", "koa-compress": "4.0.1", - "koa-redis": "^4.0.1", "koa-send": "5.0.1", "koa-useragent": "^4.1.0", "koa2-ratelimit": "1.1.1", diff --git a/yarn.lock b/yarn.lock index 1b04429b37..ac0ad7db35 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1457,7 +1457,7 @@ "@azure/abort-controller" "^2.0.0" tslib "^2.6.2" -"@azure/identity@^4.2.1": +"@azure/identity@4.2.1", "@azure/identity@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.2.1.tgz#22b366201e989b7b41c0e1690e103bd579c31e4c" integrity sha512-U8hsyC9YPcEIzoaObJlRDvp7KiF0MGS7xcWbyJSVvXRkC/HXo1f0oYeBYmEvVgRfacw7GHf6D6yAoh9JHz6A5Q== @@ -2784,6 +2784,28 @@ pouchdb-promise "^6.0.4" through2 "^2.0.0" +"@budibase/pro@npm:@budibase/pro@latest": + version "3.4.11" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.11.tgz#5ba5fe278ccd74efea4cc6762b36844e6256f9b3" + integrity sha512-EHBlgEciGKm1Qlm/z3gO9o8ujOISPKwhL8Dqsg8yhnOs/ngglMJArrlwQvFIEX5IMNfJ7eGqUxsgrkXOLevjrg== + dependencies: + "@anthropic-ai/sdk" "^0.27.3" + "@budibase/backend-core" "*" + "@budibase/shared-core" "*" + "@budibase/string-templates" "*" + "@budibase/types" "*" + "@koa/router" "13.1.0" + bull "4.10.1" + dd-trace "5.26.0" + joi "17.6.0" + jsonwebtoken "9.0.2" + lru-cache "^7.14.1" + memorystream "^0.3.1" + node-fetch "2.6.7" + openai "4.59.0" + scim-patch "^0.8.1" + scim2-parse-filter "^0.2.8" + "@budibase/vm-browserify@^1.1.4": version "1.1.4" resolved "https://registry.yarnpkg.com/@budibase/vm-browserify/-/vm-browserify-1.1.4.tgz#eecb001bd9521cb7647e26fb4d2d29d0a4dce262" @@ -8130,23 +8152,7 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -axios@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35" - integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA== - dependencies: - follow-redirects "^1.15.0" - form-data "^4.0.0" - proxy-from-env "^1.1.0" - -axios@^0.21.1: - version "0.21.4" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" - integrity 
sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== - dependencies: - follow-redirects "^1.14.0" - -axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: +axios@1.1.3, axios@1.7.7, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: version "1.7.7" resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== @@ -11787,7 +11793,7 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.14.0, follow-redirects@^1.15.0, follow-redirects@^1.15.6: +follow-redirects@^1.15.6: version "1.15.9" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== @@ -12402,22 +12408,10 @@ global@~4.4.0: min-document "^2.19.0" process "^0.11.10" -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.19.0: - version "13.24.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.24.0.tgz#8432a19d78ce0c1e833949c36adb345400bb1171" - integrity sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ== - dependencies: - type-fest "^0.20.2" - -globals@^14.0.0: - version "14.0.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-14.0.0.tgz#898d7413c29babcf6bafe56fcadded858ada724e" - integrity sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ== +globals@15.13.0, globals@^11.1.0, globals@^13.19.0, globals@^14.0.0: + version "15.13.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-15.13.0.tgz#bbec719d69aafef188ecd67954aae76a696010fc" + integrity sha512-49TewVEz0UxZjr1WYYsWpPrhyC/B/pA8Bq0fUmet2n+eR7yn0IvNzNaoBwnK6mdkzcN+se7Ez9zUgULTz2QH4g== globalthis@^1.0.1, globalthis@^1.0.4: version "1.0.4" @@ -12827,12 +12821,7 @@ http-assert@^1.3.0: deep-equal "~1.0.1" http-errors "~1.8.0" -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== - -http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: +http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== @@ -13288,11 +13277,6 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - 
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - is-buffer@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" @@ -13732,6 +13716,11 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== +isobject@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0" + integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA== + isolated-vm@^4.7.2: version "4.7.2" resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65" @@ -14611,14 +14600,7 @@ kill-port@^1.6.1: get-them-args "1.3.2" shell-exec "1.0.2" -kind-of@^3.0.2, kind-of@^3.1.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== - dependencies: - is-buffer "^1.1.5" - -kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: +kind-of@6.0.3, kind-of@^3.0.2, kind-of@^3.1.0, kind-of@^6.0.0, kind-of@^6.0.2, kind-of@^6.0.3: version "6.0.3" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== @@ -16011,7 +15993,7 @@ msgpackr-extract@^3.0.2: "@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2" "@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2" -msgpackr@^1.5.2: +msgpackr@1.10.1, msgpackr@^1.5.2: version "1.10.1" resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555" integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ== @@ -16204,27 +16186,13 @@ node-domexception@1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7: +node-fetch@2.6.7, node-fetch@2.6.9, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== dependencies: whatwg-url "^5.0.0" -node-fetch@2.6.9: - version "2.6.9" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" - integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== - dependencies: - whatwg-url "^5.0.0" - -node-fetch@^2.6.9: - version "2.7.0" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" - integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== - dependencies: - whatwg-url "^5.0.0" - node-forge@^1.3.1: version "1.3.1" resolved 
"https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" @@ -17253,15 +17221,7 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0: resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4" integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== -passport@^0.4.0: - version "0.4.1" - resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270" - integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg== - dependencies: - passport-strategy "1.x.x" - pause "0.0.1" - -passport@^0.6.0: +passport@0.6.0, passport@^0.4.0, passport@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d" integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug== @@ -18246,13 +18206,6 @@ pseudomap@^1.0.2: resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3" integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== -psl@^1.1.28: - version "1.15.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.15.0.tgz#bdace31896f1d97cec6a79e8224898ce93d974c6" - integrity sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w== - dependencies: - punycode "^2.3.1" - psl@^1.1.33: version "1.9.0" resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" @@ -18286,11 +18239,6 @@ punycode@^2.1.0, punycode@^2.1.1, punycode@^2.3.0: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== -punycode@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.1.tgz#027422e2faec0b25e1549c3e1bd8309b9133b6e5" - integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== - pupa@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62" @@ -19122,11 +19070,6 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== -sax@>=0.1.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.4.1.tgz#44cc8988377f126304d3b3fc1010c733b929ef0f" - integrity sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg== - sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" @@ -19199,28 +19142,13 @@ semver-diff@^3.1.1: dependencies: semver "^6.3.0" -"semver@2 || 3 || 4 || 5", semver@^5.6.0, semver@^5.7.1: - version "5.7.2" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8" - integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== - -semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3: +"semver@2 || 3 || 4 || 5", 
semver@7.5.3, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: version "7.5.3" resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== dependencies: lru-cache "^6.0.0" -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: - version "6.3.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" - integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== - -semver@^7.5.4, semver@^7.6.0, semver@^7.6.2: - version "7.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.1.tgz#abd5098d82b18c6c81f6074ff2647fd3e7220c9f" - integrity sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA== - seq-queue@^0.0.5: version "0.0.5" resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e" @@ -20702,7 +20630,7 @@ touch@^3.1.0: dependencies: nopt "~1.0.10" -"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2: +tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0: version "4.1.3" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf" integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw== @@ -20712,14 +20640,6 @@ touch@^3.1.0: universalify "^0.2.0" url-parse "^1.5.3" -tough-cookie@~2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== - dependencies: - psl "^1.1.28" - punycode "^2.1.1" - tr46@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240" @@ -21187,6 +21107,14 @@ unpipe@1.0.0: resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== +unset-value@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3" + integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg== + dependencies: + has-value "^2.0.2" + isobject "^4.0.0" + untildify@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b" @@ -21890,14 +21818,7 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g== -xml2js@0.1.x: - version "0.1.14" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" - integrity 
sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA== - dependencies: - sax ">=0.1.1" - -xml2js@0.6.2: +xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: version "0.6.2" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA== @@ -21905,14 +21826,6 @@ xml2js@0.6.2: sax ">=0.6.0" xmlbuilder "~11.0.0" -xml2js@^0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" - integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== - dependencies: - sax ">=0.6.0" - xmlbuilder "~11.0.0" - xmlbuilder@~11.0.0: version "11.0.1" resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" From c2a4ad86b9b80222569cdcff01de90d4976afe5b Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 19 Feb 2025 09:30:28 +0000 Subject: [PATCH 21/50] Update pro reference. --- packages/pro | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/pro b/packages/pro index 4f2006af1c..45f5673d5e 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 4f2006af1ca5523627dd3e19dce201ce0f758a09 +Subproject commit 45f5673d5e5ab3c22deb6663cea2e31a628aa133 From 1920276786fce8da0db5b0ce5fea1ec57408da99 Mon Sep 17 00:00:00 2001 From: Michael Drury Date: Wed, 19 Feb 2025 10:06:13 +0000 Subject: [PATCH 22/50] Setting middleware type. --- packages/worker/src/index.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index fb12da9465..cb8eefe21a 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -4,7 +4,7 @@ if (process.env.DD_APM_ENABLED) { // need to load environment first import env from "./environment" -import Application from "koa" +import Application, { Middleware } from "koa" import { bootstrap } from "global-agent" import * as db from "./db" import { sdk as proSdk } from "@budibase/pro" @@ -53,7 +53,7 @@ app.proxy = true app.use(handleScimBody) app.use(koaBody({ multipart: true })) -app.use(async (ctx, next) => { +const sessionMiddleware: Middleware = async (ctx, next) => { const redisClient = await new redis.Client( redis.utils.Databases.SESSIONS ).init() @@ -70,7 +70,9 @@ app.use(async (ctx, next) => { }, app )(ctx, next) -}) +} + +app.use(sessionMiddleware) app.use(middleware.correlation) app.use(middleware.pino) From e1a72317a0abe699bc2a34b03b38b17d39e4e542 Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 10:29:18 +0000 Subject: [PATCH 23/50] fix types --- packages/worker/package.json | 2 -- packages/worker/src/index.ts | 3 ++- packages/worker/src/koa-redis.d.ts | 1 + yarn.lock | 22 +++------------------- 4 files changed, 6 insertions(+), 22 deletions(-) create mode 100644 packages/worker/src/koa-redis.d.ts diff --git a/packages/worker/package.json b/packages/worker/package.json index d9200880a2..28728272ca 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -83,8 +83,6 @@ "@swc/jest": "0.2.27", "@types/jest": "29.5.5", "@types/jsonwebtoken": "9.0.3", - "@types/koa": "2.13.4", - "@types/koa-redis": "^4.0.5", "@types/koa__router": "12.0.4", "@types/lodash": "4.14.200", "@types/node-fetch": "2.6.4", diff --git a/packages/worker/src/index.ts 
b/packages/worker/src/index.ts index cb8eefe21a..f382aa8a20 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -53,12 +53,13 @@ app.proxy = true app.use(handleScimBody) app.use(koaBody({ multipart: true })) -const sessionMiddleware: Middleware = async (ctx, next) => { +const sessionMiddleware: Middleware = async (ctx: any, next: any) => { const redisClient = await new redis.Client( redis.utils.Databases.SESSIONS ).init() return koaSession( { + // @ts-ignore store: new RedisStore({ client: redisClient.getClient() }), key: "koa:sess", maxAge: 86400000, // one day diff --git a/packages/worker/src/koa-redis.d.ts b/packages/worker/src/koa-redis.d.ts new file mode 100644 index 0000000000..ad1b7a46f1 --- /dev/null +++ b/packages/worker/src/koa-redis.d.ts @@ -0,0 +1 @@ +declare module "koa-redis" {} diff --git a/yarn.lock b/yarn.lock index ac0ad7db35..8f611e224c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2785,9 +2785,9 @@ through2 "^2.0.0" "@budibase/pro@npm:@budibase/pro@latest": - version "3.4.11" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.11.tgz#5ba5fe278ccd74efea4cc6762b36844e6256f9b3" - integrity sha512-EHBlgEciGKm1Qlm/z3gO9o8ujOISPKwhL8Dqsg8yhnOs/ngglMJArrlwQvFIEX5IMNfJ7eGqUxsgrkXOLevjrg== + version "3.4.12" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.12.tgz#60e630944de4e2de970a04179d8f0f57d48ce75e" + integrity sha512-msUBmcWxRDg+ugjZvd27XudERQqtQRdiARsO8MaDVTcp5ejIXgshEIVVshHOCj3hcbRblw9pXvBIMI53iTMUsA== dependencies: "@anthropic-ai/sdk" "^0.27.3" "@budibase/backend-core" "*" @@ -6625,14 +6625,6 @@ dependencies: "@types/koa" "*" -"@types/koa-redis@^4.0.5": - version "4.0.5" - resolved "https://registry.yarnpkg.com/@types/koa-redis/-/koa-redis-4.0.5.tgz#c535e38ad681782080b53f006104f7968ba1121f" - integrity sha512-xoEdyWDejkLkFVVf+N12euYakItlMicF/SHEDOOvndbteJvQ3AFGUHDDbQIrE2DGjH93LGweOZxXLAs8XfpY2Q== - dependencies: - "@types/koa-session" "*" - "@types/redis" "^2.8.0" - "@types/koa-send@^4.1.6": version "4.1.6" resolved "https://registry.yarnpkg.com/@types/koa-send/-/koa-send-4.1.6.tgz#15d90e95e3ccce669a15b6a3c56c3a650a167cea" @@ -6640,14 +6632,6 @@ dependencies: "@types/koa" "*" -"@types/koa-session@*": - version "6.4.5" - resolved "https://registry.yarnpkg.com/@types/koa-session/-/koa-session-6.4.5.tgz#ac10bac507f4bb722fa6c55c33607b5c8769f779" - integrity sha512-Vc6+fslnPuMH2v9y80WYeo39UMo8mweuNNthKCwYU2ZE6l5vnRrzRU3BRvexKwsoI5sxsRl5CxDsBlLI8kY/XA== - dependencies: - "@types/cookies" "*" - "@types/koa" "*" - "@types/koa@*": version "2.13.5" resolved "https://registry.yarnpkg.com/@types/koa/-/koa-2.13.5.tgz#64b3ca4d54e08c0062e89ec666c9f45443b21a61" From 06cff1292c7c9c7c07d37ce374b3d0c373b9aeff Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 10:31:07 +0000 Subject: [PATCH 24/50] skip oidc callback test --- packages/worker/src/api/routes/global/tests/auth.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/worker/src/api/routes/global/tests/auth.spec.ts b/packages/worker/src/api/routes/global/tests/auth.spec.ts index bff959469e..2726ec1dcb 100644 --- a/packages/worker/src/api/routes/global/tests/auth.spec.ts +++ b/packages/worker/src/api/routes/global/tests/auth.spec.ts @@ -311,7 +311,7 @@ describe("/api/global/auth", () => { }) }) - describe("GET /api/global/auth/:tenantId/oidc/callback", () => { + xdescribe("GET /api/global/auth/:tenantId/oidc/callback", () => { it("logs in", async () => { const email = `${generator.guid()}@example.com` From 
43ca46f9a31e31c35cc8f38111e5c92945ec545e Mon Sep 17 00:00:00 2001 From: Martin McKeaveney Date: Wed, 19 Feb 2025 10:40:10 +0000 Subject: [PATCH 25/50] describe.skip --- packages/worker/src/api/routes/global/tests/auth.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/worker/src/api/routes/global/tests/auth.spec.ts b/packages/worker/src/api/routes/global/tests/auth.spec.ts index 2726ec1dcb..f89cb4a027 100644 --- a/packages/worker/src/api/routes/global/tests/auth.spec.ts +++ b/packages/worker/src/api/routes/global/tests/auth.spec.ts @@ -311,7 +311,7 @@ describe("/api/global/auth", () => { }) }) - xdescribe("GET /api/global/auth/:tenantId/oidc/callback", () => { + describe.skip("GET /api/global/auth/:tenantId/oidc/callback", () => { it("logs in", async () => { const email = `${generator.guid()}@example.com` From 72074dfd60cd8fdf04ba74bc688628743376a066 Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Wed, 19 Feb 2025 10:41:10 +0000 Subject: [PATCH 26/50] Bump version to 3.4.13 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index 09c739cc8c..b6eb31f2b0 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.4.12", + "version": "3.4.13", "npmClient": "yarn", "concurrency": 20, "command": { From 7bac376599571f66ba5f6ca265de5404a3a40640 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Mon, 17 Feb 2025 09:34:34 +0100 Subject: [PATCH 27/50] Initial js validation --- .../common/CodeEditor/CodeEditor.svelte | 4 + .../common/CodeEditor/validator/js.ts | 80 +++++++++++++++++++ .../common/bindings/BindingPanel.svelte | 1 + 3 files changed, 85 insertions(+) create mode 100644 packages/builder/src/components/common/CodeEditor/validator/js.ts diff --git a/packages/builder/src/components/common/CodeEditor/CodeEditor.svelte b/packages/builder/src/components/common/CodeEditor/CodeEditor.svelte index fbf74d1e9b..0f4bc64e2a 100644 --- a/packages/builder/src/components/common/CodeEditor/CodeEditor.svelte +++ b/packages/builder/src/components/common/CodeEditor/CodeEditor.svelte @@ -49,6 +49,7 @@ import type { EditorMode } from "@budibase/types" import type { BindingCompletion, CodeValidator } from "@/types" import { validateHbsTemplate } from "./validator/hbs" + import { validateJsTemplate } from "./validator/js" export let label: string | undefined = undefined export let completions: BindingCompletion[] = [] @@ -356,6 +357,9 @@ if (mode === EditorModes.Handlebars) { const diagnostics = validateHbsTemplate(value, validations) editor.dispatch(setDiagnostics(editor.state, diagnostics)) + } else if (mode === EditorModes.JS) { + const diagnostics = validateJsTemplate(value, validations) + editor.dispatch(setDiagnostics(editor.state, diagnostics)) } } diff --git a/packages/builder/src/components/common/CodeEditor/validator/js.ts b/packages/builder/src/components/common/CodeEditor/validator/js.ts new file mode 100644 index 0000000000..6e85dc41ec --- /dev/null +++ b/packages/builder/src/components/common/CodeEditor/validator/js.ts @@ -0,0 +1,80 @@ +import { Parser } from "acorn" +import { simple as walk } from "acorn-walk" + +import { iifeWrapper } from "@budibase/string-templates" +import type { Diagnostic } from "@codemirror/lint" +import { CodeValidator } from "@/types" + +export function validateJsTemplate( + code: string, + validations: CodeValidator +): Diagnostic[] { + const diagnostics: Diagnostic[] = [] + + try { + // const 
helperUsages = new RegExp(/\bhelpers\.(\w)+\b/).exec(code) + const ast = Parser.parse(iifeWrapper(code), { + ecmaVersion: "latest", + locations: true, + }) + + const lineOffsets: number[] = [0] + let offset = 0 + for (const line of code.split("\n")) { + lineOffsets.push(offset) + offset += line.length + 1 // +1 for newline character + } + + walk(ast, { + CallExpression(node) { + const callee: any = node.callee + if ( + node.type === "CallExpression" && + callee.object?.name === "helpers" && + node.loc + ) { + const functionName = callee.property.name + const from = + lineOffsets[node.loc.start.line - 1] + node.loc.start.column + const to = lineOffsets[node.loc.end.line - 1] + node.loc.end.column + + if (!(functionName in validations)) { + diagnostics.push({ + from, + to, + severity: "warning", + message: `"${functionName}" function does not exist.`, + }) + return + } + + const { arguments: expectedArguments } = validations[functionName] + if ( + expectedArguments && + node.arguments.length !== expectedArguments.length + ) { + diagnostics.push({ + from, + to, + severity: "error", + message: `Function "${functionName}" expects ${ + expectedArguments.length + } parameters (${expectedArguments.join(", ")}), but got ${ + node.arguments.length + }.`, + }) + } + } + }, + }) + } catch (e: any) { + diagnostics.push({ + from: 0, + to: code.length, + severity: "error", + message: `Syntax error: ${e.message}`, + }) + } + + return diagnostics +} diff --git a/packages/builder/src/components/common/bindings/BindingPanel.svelte b/packages/builder/src/components/common/bindings/BindingPanel.svelte index 2c35acdf2d..9ebbc91309 100644 --- a/packages/builder/src/components/common/bindings/BindingPanel.svelte +++ b/packages/builder/src/components/common/bindings/BindingPanel.svelte @@ -377,6 +377,7 @@ value={jsValue ? 
decodeJSBinding(jsValue) : jsValue} on:change={onChangeJSValue} {completions} + {validations} mode={EditorModes.JS} bind:getCaretPosition bind:insertAtPos From 608fcb42be494dbf418fff23fc4eb75bfb2ca9de Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 19 Feb 2025 12:24:19 +0100 Subject: [PATCH 28/50] Validate return value --- .../components/common/CodeEditor/validator/js.ts | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/builder/src/components/common/CodeEditor/validator/js.ts b/packages/builder/src/components/common/CodeEditor/validator/js.ts index 6e85dc41ec..6d7c3ea8bf 100644 --- a/packages/builder/src/components/common/CodeEditor/validator/js.ts +++ b/packages/builder/src/components/common/CodeEditor/validator/js.ts @@ -25,7 +25,11 @@ export function validateJsTemplate( offset += line.length + 1 // +1 for newline character } + let hasReturnStatement = false walk(ast, { + ReturnStatement(node) { + hasReturnStatement = !!node.argument + }, CallExpression(node) { const callee: any = node.callee if ( @@ -67,6 +71,15 @@ export function validateJsTemplate( } }, }) + + if (!hasReturnStatement) { + diagnostics.push({ + from: 0, + to: code.length, + severity: "error", + message: "Your code must return a value.", + }) + } } catch (e: any) { diagnostics.push({ from: 0, From c5f2ef9354917c930cab6aef42470b2bfdcad7c8 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 19 Feb 2025 12:35:52 +0100 Subject: [PATCH 29/50] Add js validation tests --- .../CodeEditor/validator/tests/js.spec.ts | 71 +++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts diff --git a/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts new file mode 100644 index 0000000000..be6977f35a --- /dev/null +++ b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts @@ -0,0 +1,71 @@ +import { validateJsTemplate } from "../js" +import { CodeValidator } from "@/types" + +describe("js validator", () => { + it("validates valid code", () => { + const text = "return 7" + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([]) + }) + + it("does not validate runtime errors", () => { + const text = "return a" + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([]) + }) + + it("validates multiline code", () => { + const text = "const foo='bar'\nreturn 123" + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([]) + }) + + describe("helpers", () => { + const validators: CodeValidator = { + helperFunction: { + arguments: ["a", "b", "c"], + }, + } + + it("validates helpers with valid params", () => { + const text = "return helpers.helperFunction(1, 99, 'a')" + + const result = validateJsTemplate(text, validators) + expect(result).toHaveLength(0) + }) + + it("throws on too few params", () => { + const text = "return helpers.helperFunction(100)" + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([ + { + from: 7, + message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`, + severity: "error", + to: 34, + }, + ]) + }) + + it("throws on too many params", () => { + const text = "return helpers.helperFunction( 1, 99, 'a', 100)" + + const result = validateJsTemplate(text, validators) + 
expect(result).toEqual([ + { + from: 7, + message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 4.`, + severity: "error", + to: 47, + }, + ]) + }) + }) +}) From fba5663e66aeb34605529896e7bb085e34a4d5b9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 19 Feb 2025 12:44:47 +0100 Subject: [PATCH 30/50] Improve tests --- .../CodeEditor/validator/tests/js.spec.ts | 69 ++++++++++++++++++- 1 file changed, 68 insertions(+), 1 deletion(-) diff --git a/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts index be6977f35a..d0c25182f7 100644 --- a/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts +++ b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts @@ -26,6 +26,29 @@ describe("js validator", () => { expect(result).toEqual([]) }) + it("allows return not being on the last line", () => { + const text = "const foo='bar'\nreturn 123\nconsole.log(foo)" + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([]) + }) + + it("throws on missing return", () => { + const text = "const foo='bar'\nbar='foo'" + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([ + { + from: 0, + message: "Your code must return a value.", + severity: "error", + to: 25, + }, + ]) + }) + describe("helpers", () => { const validators: CodeValidator = { helperFunction: { @@ -37,7 +60,7 @@ describe("js validator", () => { const text = "return helpers.helperFunction(1, 99, 'a')" const result = validateJsTemplate(text, validators) - expect(result).toHaveLength(0) + expect(result).toEqual([]) }) it("throws on too few params", () => { @@ -67,5 +90,49 @@ describe("js validator", () => { }, ]) }) + + it("validates helpers on inner functions", () => { + const text = `function call(){ + return helpers.helperFunction(1, 99) + } + return call()` + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([ + { + from: 46, + message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 2.`, + severity: "error", + to: 75, + }, + ]) + }) + + it("validates multiple helpers", () => { + const text = + "return helpers.helperFunction(1, 99, 'a') + helpers.helperFunction(1) + helpers.another(1) + helpers.another()" + const validators: CodeValidator = { + helperFunction: { + arguments: ["a", "b", "c"], + }, + another: { arguments: [] }, + } + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([ + { + from: 44, + message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`, + severity: "error", + to: 69, + }, + { + from: 72, + message: `Function "another" expects 0 parameters (), but got 1.`, + severity: "error", + to: 90, + }, + ]) + }) }) }) From 1a4abb7630b5c72ac18e9c5267e2f583caa28a99 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 19 Feb 2025 13:14:40 +0100 Subject: [PATCH 31/50] Validate return --- .../common/CodeEditor/validator/js.ts | 24 ++++++++++++------- .../CodeEditor/validator/tests/js.spec.ts | 18 ++++++++++++++ 2 files changed, 34 insertions(+), 8 deletions(-) diff --git a/packages/builder/src/components/common/CodeEditor/validator/js.ts b/packages/builder/src/components/common/CodeEditor/validator/js.ts index 6d7c3ea8bf..20fb5abd07 100644 --- a/packages/builder/src/components/common/CodeEditor/validator/js.ts +++ 
b/packages/builder/src/components/common/CodeEditor/validator/js.ts @@ -1,7 +1,6 @@ import { Parser } from "acorn" -import { simple as walk } from "acorn-walk" +import * as walk from "acorn-walk" -import { iifeWrapper } from "@budibase/string-templates" import type { Diagnostic } from "@codemirror/lint" import { CodeValidator } from "@/types" @@ -12,13 +11,13 @@ export function validateJsTemplate( const diagnostics: Diagnostic[] = [] try { - // const helperUsages = new RegExp(/\bhelpers\.(\w)+\b/).exec(code) - const ast = Parser.parse(iifeWrapper(code), { + const ast = Parser.parse(code, { ecmaVersion: "latest", locations: true, + allowReturnOutsideFunction: true, }) - const lineOffsets: number[] = [0] + const lineOffsets: number[] = [] let offset = 0 for (const line of code.split("\n")) { lineOffsets.push(offset) @@ -26,9 +25,18 @@ export function validateJsTemplate( } let hasReturnStatement = false - walk(ast, { - ReturnStatement(node) { - hasReturnStatement = !!node.argument + walk.ancestor(ast, { + ReturnStatement(node, _state, ancestors) { + if ( + // it returns a value + node.argument && + // and it is top level + ancestors.length === 2 && + ancestors[0].type === "Program" && + ancestors[1].type === "ReturnStatement" + ) { + hasReturnStatement = true + } }, CallExpression(node) { const callee: any = node.callee diff --git a/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts index d0c25182f7..cd9fe4684c 100644 --- a/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts +++ b/packages/builder/src/components/common/CodeEditor/validator/tests/js.spec.ts @@ -49,6 +49,24 @@ describe("js validator", () => { ]) }) + it("checks that returns are at top level", () => { + const text = ` + function call(){ + return 1 + }` + const validators = {} + + const result = validateJsTemplate(text, validators) + expect(result).toEqual([ + { + from: 0, + message: "Your code must return a value.", + severity: "error", + to: text.length, + }, + ]) + }) + describe("helpers", () => { const validators: CodeValidator = { helperFunction: { From 9efd1a76f0d5964728c35ef88439df3667341e5e Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Wed, 19 Feb 2025 14:05:15 +0100 Subject: [PATCH 32/50] Fix getting proper contexts --- packages/builder/src/stores/builder/screenComponent.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/builder/src/stores/builder/screenComponent.ts b/packages/builder/src/stores/builder/screenComponent.ts index bb10bb2307..310bf2172c 100644 --- a/packages/builder/src/stores/builder/screenComponent.ts +++ b/packages/builder/src/stores/builder/screenComponent.ts @@ -8,6 +8,7 @@ import { UIComponentError, ComponentDefinition, DependsOnComponentSetting, + Screen, } from "@budibase/types" import { queries } from "./queries" import { views } from "./views" @@ -66,6 +67,7 @@ export const screenComponentErrorList = derived( if (!$selectedScreen) { return [] } + const screen = $selectedScreen const datasources = { ...reduceBy("_id", $tables.list), @@ -79,7 +81,9 @@ export const screenComponentErrorList = derived( const errors: UIComponentError[] = [] function checkComponentErrors(component: Component, ancestors: string[]) { - errors.push(...getInvalidDatasources(component, datasources, definitions)) + errors.push( + ...getInvalidDatasources(screen, component, datasources, definitions) + ) errors.push(...getMissingRequiredSettings(component, 
definitions)) errors.push(...getMissingAncestors(component, definitions, ancestors)) @@ -95,6 +99,7 @@ export const screenComponentErrorList = derived( ) function getInvalidDatasources( + screen: Screen, component: Component, datasources: Record, definitions: Record From 885873ef298da3d211d15fc9e49afe9c47f22489 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 19 Feb 2025 15:23:56 +0000 Subject: [PATCH 33/50] Fix CouchDB datasource. --- packages/backend-core/src/sql/utils.ts | 2 +- packages/server/src/integrations/couchdb.ts | 21 +-- .../src/integrations/tests/couchdb.spec.ts | 141 +++++++++--------- 3 files changed, 84 insertions(+), 80 deletions(-) diff --git a/packages/backend-core/src/sql/utils.ts b/packages/backend-core/src/sql/utils.ts index b07854b2a0..746a949ef3 100644 --- a/packages/backend-core/src/sql/utils.ts +++ b/packages/backend-core/src/sql/utils.ts @@ -5,10 +5,10 @@ import { SqlQuery, Table, TableSourceType, + SEPARATOR, } from "@budibase/types" import { DEFAULT_BB_DATASOURCE_ID } from "../constants" import { Knex } from "knex" -import { SEPARATOR } from "../db" import environment from "../environment" const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` diff --git a/packages/server/src/integrations/couchdb.ts b/packages/server/src/integrations/couchdb.ts index 4004bc47c6..39d8e17243 100644 --- a/packages/server/src/integrations/couchdb.ts +++ b/packages/server/src/integrations/couchdb.ts @@ -62,12 +62,16 @@ const SCHEMA: Integration = { type: DatasourceFieldType.STRING, required: true, }, + rev: { + type: DatasourceFieldType.STRING, + required: true, + }, }, }, }, } -class CouchDBIntegration implements IntegrationBase { +export class CouchDBIntegration implements IntegrationBase { private readonly client: Database constructor(config: CouchDBConfig) { @@ -82,7 +86,8 @@ class CouchDBIntegration implements IntegrationBase { connected: false, } try { - response.connected = await this.client.exists() + await this.client.allDocs({ limit: 1 }) + response.connected = true } catch (e: any) { response.error = e.message as string } @@ -99,13 +104,9 @@ class CouchDBIntegration implements IntegrationBase { } async read(query: { json: string | object }) { - const parsed = this.parse(query) - const params = { - include_docs: true, - ...parsed, - } + const params = { include_docs: true, ...this.parse(query) } const result = await this.client.allDocs(params) - return result.rows.map(row => row.doc) + return result.rows.map(row => row.doc!) 
} async update(query: { json: string | object }) { @@ -121,8 +122,8 @@ class CouchDBIntegration implements IntegrationBase { return await this.client.get(query.id) } - async delete(query: { id: string }) { - return await this.client.remove(query.id) + async delete(query: { id: string; rev: string }) { + return await this.client.remove(query.id, query.rev) } } diff --git a/packages/server/src/integrations/tests/couchdb.spec.ts b/packages/server/src/integrations/tests/couchdb.spec.ts index 6cb0c438c0..bc8c4fd38e 100644 --- a/packages/server/src/integrations/tests/couchdb.spec.ts +++ b/packages/server/src/integrations/tests/couchdb.spec.ts @@ -1,84 +1,87 @@ -jest.mock("@budibase/backend-core", () => { - const core = jest.requireActual("@budibase/backend-core") - return { - ...core, - db: { - ...core.db, - DatabaseWithConnection: function () { - return { - allDocs: jest.fn().mockReturnValue({ rows: [] }), - put: jest.fn(), - get: jest.fn().mockReturnValue({ _rev: "a" }), - remove: jest.fn(), - } - }, - }, - } -}) +import { env } from "@budibase/backend-core" +import { CouchDBIntegration } from "../couchdb" +import { generator } from "@budibase/backend-core/tests" -import { default as CouchDBIntegration } from "../couchdb" +function couchSafeID(): string { + // CouchDB IDs must start with a letter, so we prepend an 'a'. + return `a${generator.guid()}` +} -class TestConfiguration { - integration: any +function doc(data: Record): string { + return JSON.stringify({ _id: couchSafeID(), ...data }) +} - constructor( - config: any = { url: "http://somewhere", database: "something" } - ) { - this.integration = new CouchDBIntegration.integration(config) - } +function query(data?: Record): { json: string } { + return { json: doc(data || {}) } } describe("CouchDB Integration", () => { - let config: any + let couchdb: CouchDBIntegration beforeEach(() => { - config = new TestConfiguration() - }) - - it("calls the create method with the correct params", async () => { - const doc = { - test: 1, - } - await config.integration.create({ - json: JSON.stringify(doc), - }) - expect(config.integration.client.put).toHaveBeenCalledWith(doc) - }) - - it("calls the read method with the correct params", async () => { - const doc = { - name: "search", - } - - await config.integration.read({ - json: JSON.stringify(doc), - }) - - expect(config.integration.client.allDocs).toHaveBeenCalledWith({ - include_docs: true, - name: "search", + couchdb = new CouchDBIntegration({ + url: env.COUCH_DB_URL, + database: couchSafeID(), }) }) - it("calls the update method with the correct params", async () => { - const doc = { - _id: "1234", - name: "search", - } - - await config.integration.update({ - json: JSON.stringify(doc), - }) - - expect(config.integration.client.put).toHaveBeenCalledWith({ - ...doc, - _rev: "a", - }) + it("successfully connects", async () => { + const { connected } = await couchdb.testConnection() + expect(connected).toBe(true) }) - it("calls the delete method with the correct params", async () => { - const id = "1234" - await config.integration.delete({ id }) - expect(config.integration.client.remove).toHaveBeenCalledWith(id) + it("can create documents", async () => { + const { id, ok, rev } = await couchdb.create(query({ test: 1 })) + expect(id).toBeDefined() + expect(ok).toBe(true) + expect(rev).toBeDefined() + }) + + it("can read created documents", async () => { + const { id, ok, rev } = await couchdb.create(query({ test: 1 })) + expect(id).toBeDefined() + expect(ok).toBe(true) + expect(rev).toBeDefined() + + 
const docs = await couchdb.read(query()) + expect(docs).toEqual([ + { + _id: id, + _rev: rev, + test: 1, + createdAt: expect.any(String), + updatedAt: expect.any(String), + }, + ]) + }) + + it("can update documents", async () => { + const { id, ok, rev } = await couchdb.create(query({ test: 1 })) + expect(ok).toBe(true) + + const { id: newId, rev: newRev } = await couchdb.update( + query({ _id: id, _rev: rev, test: 2 }) + ) + const docs = await couchdb.read(query()) + expect(docs).toEqual([ + { + _id: newId, + _rev: newRev, + test: 2, + createdAt: expect.any(String), + updatedAt: expect.any(String), + }, + ]) + }) + + it("can delete documents", async () => { + const { id, ok, rev } = await couchdb.create(query({ test: 1 })) + expect(ok).toBe(true) + + const deleteResponse = await couchdb.delete({ id, rev }) + expect(deleteResponse.ok).toBe(true) + + const docs = await couchdb.read(query()) + expect(docs).toBeEmpty() }) }) From ddf64b9f70790f969bdf4a3eb1fe532adc031d5c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 19 Feb 2025 16:51:09 +0000 Subject: [PATCH 34/50] Improve APM traces around automations. --- .../backend-core/src/context/mainContext.ts | 2 +- packages/server/src/automations/utils.ts | 82 +++--- packages/server/src/threads/automation.ts | 250 ++++++++++-------- 3 files changed, 170 insertions(+), 164 deletions(-) diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 6a00c125ad..8e0c71ff18 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -123,7 +123,7 @@ export async function doInAutomationContext(params: { task: () => T }): Promise { await ensureSnippetContext() - return newContext( + return await newContext( { tenantId: getTenantIDFromAppID(params.appId), appId: params.appId, diff --git a/packages/server/src/automations/utils.ts b/packages/server/src/automations/utils.ts index 3ec8f41621..e135c123f4 100644 --- a/packages/server/src/automations/utils.ts +++ b/packages/server/src/automations/utils.ts @@ -40,39 +40,35 @@ function loggingArgs(job: AutomationJob) { } export async function processEvent(job: AutomationJob) { - return tracer.trace( - "processEvent", - { resource: "automation" }, - async span => { - const appId = job.data.event.appId! - const automationId = job.data.automation._id! + return tracer.trace("processEvent", async span => { + const appId = job.data.event.appId! + const automationId = job.data.automation._id! 
- span?.addTags({ - appId, - automationId, - job: { - id: job.id, - name: job.name, - attemptsMade: job.attemptsMade, - opts: { - attempts: job.opts.attempts, - priority: job.opts.priority, - delay: job.opts.delay, - repeat: job.opts.repeat, - backoff: job.opts.backoff, - lifo: job.opts.lifo, - timeout: job.opts.timeout, - jobId: job.opts.jobId, - removeOnComplete: job.opts.removeOnComplete, - removeOnFail: job.opts.removeOnFail, - stackTraceLimit: job.opts.stackTraceLimit, - preventParsingData: job.opts.preventParsingData, - }, - }, - }) + span.addTags({ + appId, + automationId, + job: { + id: job.id, + name: job.name, + attemptsMade: job.attemptsMade, + attempts: job.opts.attempts, + priority: job.opts.priority, + delay: job.opts.delay, + repeat: job.opts.repeat, + backoff: job.opts.backoff, + lifo: job.opts.lifo, + timeout: job.opts.timeout, + jobId: job.opts.jobId, + removeOnComplete: job.opts.removeOnComplete, + removeOnFail: job.opts.removeOnFail, + stackTraceLimit: job.opts.stackTraceLimit, + preventParsingData: job.opts.preventParsingData, + }, + }) - const task = async () => { - try { + const task = async () => { + try { + return await tracer.trace("task", async () => { if (isCronTrigger(job.data.automation) && !job.data.event.timestamp) { // Requires the timestamp at run time job.data.event.timestamp = Date.now() @@ -81,25 +77,19 @@ export async function processEvent(job: AutomationJob) { console.log("automation running", ...loggingArgs(job)) const runFn = () => Runner.run(job) - const result = await quotas.addAutomation(runFn, { - automationId, - }) + const result = await quotas.addAutomation(runFn, { automationId }) console.log("automation completed", ...loggingArgs(job)) return result - } catch (err) { - span?.addTags({ error: true }) - console.error( - `automation was unable to run`, - err, - ...loggingArgs(job) - ) - return { err } - } + }) + } catch (err) { + span.addTags({ error: true }) + console.error(`automation was unable to run`, err, ...loggingArgs(job)) + return { err } } - - return await context.doInAutomationContext({ appId, automationId, task }) } - ) + + return await context.doInAutomationContext({ appId, automationId, task }) + }) } export async function updateTestHistory( diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 6ee467023f..8b2aac662c 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -310,87 +310,83 @@ class Orchestrator { } async execute(): Promise { - return tracer.trace( - "Orchestrator.execute", - { resource: "automation" }, - async span => { - span?.addTags({ appId: this.appId, automationId: this.automation._id }) + return await tracer.trace("execute", async span => { + span.addTags({ appId: this.appId, automationId: this.automation._id }) - const job = cloneDeep(this.job) - delete job.data.event.appId - delete job.data.event.metadata + const job = cloneDeep(this.job) + delete job.data.event.appId + delete job.data.event.metadata - if (this.isCron() && !job.data.event.timestamp) { - job.data.event.timestamp = Date.now() - } - - const trigger: AutomationTriggerResult = { - id: job.data.automation.definition.trigger.id, - stepId: job.data.automation.definition.trigger.stepId, - inputs: null, - outputs: job.data.event, - } - const result: AutomationResults = { trigger, steps: [trigger] } - - const ctx: AutomationContext = { - trigger: trigger.outputs, - steps: [trigger.outputs], - stepsById: {}, - stepsByName: {}, - user: 
trigger.outputs.user, - } - await enrichBaseContext(ctx) - - const timeout = - this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT - - try { - await helpers.withTimeout(timeout, async () => { - const [stepOutputs, executionTime] = await utils.time(() => - this.executeSteps(ctx, job.data.automation.definition.steps) - ) - - result.steps.push(...stepOutputs) - - console.info( - `Automation ID: ${ - this.automation._id - } Execution time: ${executionTime.toMs()} milliseconds`, - { - _logKey: "automation", - executionTime, - } - ) - }) - } catch (e: any) { - if (e.errno === "ETIME") { - span?.addTags({ timedOut: true }) - console.warn(`Automation execution timed out after ${timeout}ms`) - } - } - - let errorCount = 0 - if (this.isProdApp() && this.isCron() && this.hasErrored(ctx)) { - errorCount = (await this.incrementErrorCount()) || 0 - } - - if (errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) { - await this.stopCron("errors", { result }) - span?.addTags({ shouldStop: true }) - } else { - await this.logResult(result) - } - - return result + if (this.isCron() && !job.data.event.timestamp) { + job.data.event.timestamp = Date.now() } - ) + + const trigger: AutomationTriggerResult = { + id: job.data.automation.definition.trigger.id, + stepId: job.data.automation.definition.trigger.stepId, + inputs: null, + outputs: job.data.event, + } + const result: AutomationResults = { trigger, steps: [trigger] } + + const ctx: AutomationContext = { + trigger: trigger.outputs, + steps: [trigger.outputs], + stepsById: {}, + stepsByName: {}, + user: trigger.outputs.user, + } + await enrichBaseContext(ctx) + + const timeout = + this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT + + try { + await helpers.withTimeout(timeout, async () => { + const [stepOutputs, executionTime] = await utils.time(() => + this.executeSteps(ctx, job.data.automation.definition.steps) + ) + + result.steps.push(...stepOutputs) + + console.info( + `Automation ID: ${ + this.automation._id + } Execution time: ${executionTime.toMs()} milliseconds`, + { + _logKey: "automation", + executionTime, + } + ) + }) + } catch (e: any) { + if (e.errno === "ETIME") { + span?.addTags({ timedOut: true }) + console.warn(`Automation execution timed out after ${timeout}ms`) + } + } + + let errorCount = 0 + if (this.isProdApp() && this.isCron() && this.hasErrored(ctx)) { + errorCount = (await this.incrementErrorCount()) || 0 + } + + if (errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) { + await this.stopCron("errors", { result }) + span?.addTags({ shouldStop: true }) + } else { + await this.logResult(result) + } + + return result + }) } private async executeSteps( ctx: AutomationContext, steps: AutomationStep[] ): Promise { - return tracer.trace("Orchestrator.executeSteps", async () => { + return await tracer.trace("executeSteps", async () => { let stepIndex = 0 const results: AutomationStepResult[] = [] @@ -446,74 +442,92 @@ class Orchestrator { step: LoopStep, stepToLoop: AutomationStep ): Promise { - await processObject(step.inputs, prepareContext(ctx)) + return await tracer.trace("executeLoopStep", async span => { + await processObject(step.inputs, prepareContext(ctx)) - const maxIterations = getLoopMaxIterations(step) - const items: Record[] = [] - let iterations = 0 - let iterable: any[] = [] - try { - iterable = getLoopIterable(step) - } catch (err) { - return stepFailure(stepToLoop, { - status: AutomationStepStatus.INCORRECT_TYPE, - }) - } - - for (; iterations < iterable.length; iterations++) { - const currentItem = 
iterable[iterations] - - if (iterations === maxIterations) { - return stepFailure(stepToLoop, { - status: AutomationStepStatus.MAX_ITERATIONS, + const maxIterations = getLoopMaxIterations(step) + const items: Record[] = [] + let iterations = 0 + let iterable: any[] = [] + try { + iterable = getLoopIterable(step) + } catch (err) { + span.addTags({ + status: AutomationStepStatus.INCORRECT_TYPE, iterations, }) - } - - if (matchesLoopFailureCondition(step, currentItem)) { return stepFailure(stepToLoop, { - status: AutomationStepStatus.FAILURE_CONDITION, + status: AutomationStepStatus.INCORRECT_TYPE, }) } - ctx.loop = { currentItem } - const result = await this.executeStep(ctx, stepToLoop) - items.push(result.outputs) - ctx.loop = undefined - } + for (; iterations < iterable.length; iterations++) { + const currentItem = iterable[iterations] - const status = - iterations === 0 ? AutomationStatus.NO_CONDITION_MET : undefined - return stepSuccess(stepToLoop, { status, iterations, items }) + if (iterations === maxIterations) { + span.addTags({ + status: AutomationStepStatus.MAX_ITERATIONS, + iterations, + }) + return stepFailure(stepToLoop, { + status: AutomationStepStatus.MAX_ITERATIONS, + iterations, + }) + } + + if (matchesLoopFailureCondition(step, currentItem)) { + span.addTags({ + status: AutomationStepStatus.FAILURE_CONDITION, + iterations, + }) + return stepFailure(stepToLoop, { + status: AutomationStepStatus.FAILURE_CONDITION, + }) + } + + ctx.loop = { currentItem } + const result = await this.executeStep(ctx, stepToLoop) + items.push(result.outputs) + ctx.loop = undefined + } + + const status = + iterations === 0 ? AutomationStatus.NO_CONDITION_MET : undefined + return stepSuccess(stepToLoop, { status, iterations, items }) + }) } private async executeBranchStep( ctx: AutomationContext, step: BranchStep ): Promise { - const { branches, children } = step.inputs + return await tracer.trace("executeBranchStep", async span => { + const { branches, children } = step.inputs - for (const branch of branches) { - if (await branchMatches(ctx, branch)) { - return [ - stepSuccess(step, { - branchName: branch.name, - status: `${branch.name} branch taken`, - branchId: `${branch.id}`, - }), - ...(await this.executeSteps(ctx, children?.[branch.id] || [])), - ] + for (const branch of branches) { + if (await branchMatches(ctx, branch)) { + span.addTags({ branchName: branch.name, branchId: branch.id }) + return [ + stepSuccess(step, { + branchName: branch.name, + status: `${branch.name} branch taken`, + branchId: `${branch.id}`, + }), + ...(await this.executeSteps(ctx, children?.[branch.id] || [])), + ] + } } - } - return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })] + span.addTags({ status: AutomationStatus.NO_CONDITION_MET }) + return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })] + }) } private async executeStep( ctx: AutomationContext, step: Readonly ): Promise { - return tracer.trace("Orchestrator.executeStep", async span => { + return await tracer.trace(step.stepId, async span => { span.addTags({ step: { stepId: step.stepId, @@ -524,6 +538,7 @@ class Orchestrator { internal: step.internal, deprecated: step.deprecated, }, + inputsKeys: Object.keys(step.inputs), }) if (this.stopped) { @@ -557,6 +572,7 @@ class Orchestrator { ;(outputs as any).status = AutomationStatus.STOPPED } + span.addTags({ outputsKeys: Object.keys(outputs) }) return stepSuccess(step, outputs, inputs) }) } From 28553fc684c89871d5e8aec64df9a551ddf0ca38 Mon Sep 17 00:00:00 2001 From: Christos 
Alexiou Date: Thu, 20 Feb 2025 03:02:34 +0200 Subject: [PATCH 35/50] formatting and add special minio version --- hosting/single/Dockerfile | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile index e4858d4af0..043cd3dd73 100644 --- a/hosting/single/Dockerfile +++ b/hosting/single/Dockerfile @@ -1,5 +1,5 @@ ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1 -FROM node:20-slim as build +FROM node:20-slim AS build # install node-gyp dependencies RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq @@ -34,7 +34,7 @@ COPY packages/worker/dist packages/worker/dist COPY packages/worker/pm2.config.js packages/worker/pm2.config.js -FROM $BASEIMG as runner +FROM $BASEIMG AS runner ARG TARGETARCH ENV TARGETARCH $TARGETARCH #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service) @@ -67,6 +67,11 @@ RUN mkdir -p /var/log/nginx && \ # setup minio WORKDIR /minio + +# a 2022 version of minio that supports gateway mode +COPY scripts/resources/minio /minio + +# handles the installation of minio in non-aas environments COPY scripts/install-minio.sh ./install.sh RUN chmod +x install.sh && ./install.sh From d4a2b6d0610fd27272f8d93ca65b68fc0e20a362 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 03:02:42 +0200 Subject: [PATCH 36/50] handle minio with gateway --- hosting/single/runner.sh | 47 ++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index e06a197ad5..d6b77e9e42 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -19,14 +19,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME [[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app # export CUSTOM_DOMAIN=budi001.custom.com -# Azure App Service customisations -if [[ "${TARGETBUILD}" = "aas" ]]; then - export DATA_DIR="${DATA_DIR:-/home}" - WEBSITES_ENABLE_APP_SERVICE_STORAGE=true - /etc/init.d/ssh start -else - export DATA_DIR=${DATA_DIR:-/data} -fi +export DATA_DIR=${DATA_DIR:-/data} mkdir -p ${DATA_DIR} # Mount NFS or GCP Filestore if env vars exist for it if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then @@ -42,8 +35,7 @@ if [ -f "${DATA_DIR}/.env" ]; then for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done fi # randomise any unset environment variables -for ENV_VAR in "${ENV_VARS[@]}" -do +for ENV_VAR in "${ENV_VARS[@]}"; do if [[ -z "${!ENV_VAR}" ]]; then eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')" fi @@ -58,17 +50,15 @@ fi if [ ! 
-f "${DATA_DIR}/.env" ]; then touch ${DATA_DIR}/.env - for ENV_VAR in "${ENV_VARS[@]}" - do + for ENV_VAR in "${ENV_VARS[@]}"; do temp=$(eval "echo \$$ENV_VAR") - echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env + echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env done - for ENV_VAR in "${DOCKER_VARS[@]}" - do + for ENV_VAR in "${DOCKER_VARS[@]}"; do temp=$(eval "echo \$$ENV_VAR") - echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env + echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env done - echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env + echo "COUCH_DB_URL=${COUCH_DB_URL}" >>${DATA_DIR}/.env fi # Read in the .env file and export the variables @@ -79,31 +69,42 @@ ln -s ${DATA_DIR}/.env /worker/.env # make these directories in runner, incase of mount mkdir -p ${DATA_DIR}/minio mkdir -p ${DATA_DIR}/redis -chown -R couchdb:couchdb ${DATA_DIR}/couch +#mkdir -p ${DATA_DIR}/couch +#chown -R couchdb:couchdb ${DATA_DIR}/couch REDIS_CONFIG="/etc/redis/redis.conf" sed -i "s#DATA_DIR#${DATA_DIR}#g" "${REDIS_CONFIG}" if [[ -n "${USE_DEFAULT_REDIS_CONFIG}" ]]; then - REDIS_CONFIG="" + REDIS_CONFIG="" fi if [[ -n "${REDIS_PASSWORD}" ]]; then - redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 & + redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD >/dev/stdout 2>&1 & else - redis-server "${REDIS_CONFIG}" > /dev/stdout 2>&1 & + redis-server "${REDIS_CONFIG}" >/dev/stdout 2>&1 & fi /bbcouch-runner.sh & # only start minio if use s3 isn't passed if [[ -z "${USE_S3}" ]]; then - /minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 & + if [[ $TARGETBUILD == aas ]]; then + echo "Starting MinIO in Azure Gateway mode" + if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" ]]; then + echo "AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY must be set when deploying in Azure App Service mode" + exit 1 + fi + /minio/minio gateway azure --console-address ":9001" >/dev/stdout 2>&1 & + else + echo "Starting MinIO in standalone mode" + /minio/minio server --console-address ":9001" ${DATA_DIR}/minio >/dev/stdout 2>&1 & + fi fi /etc/init.d/nginx restart if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then # Add monthly cron job to renew certbot certificate - echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >> /etc/cron.d/certificate-renew + echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >>/etc/cron.d/certificate-renew chmod +x /etc/cron.d/certificate-renew # Request the certbot certificate /app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN} From 1650fdc75ea9fe5e15c9983fdd6f689f2a261b51 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 03:03:32 +0200 Subject: [PATCH 37/50] minio from dockerfile COPY --- scripts/install-minio.sh | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/scripts/install-minio.sh b/scripts/install-minio.sh index fede984377..b885453f2a 100755 --- a/scripts/install-minio.sh +++ b/scripts/install-minio.sh @@ -1,10 +1,18 @@ #!/bin/bash -if [[ $TARGETARCH == arm* ]] ; -then + +if [[ $TARGETBUILD == "aas" ]]; then + echo "A aas-compatible version of Minio is already installed." 
+ exit 0 +fi + +if [[ $TARGETARCH == arm* ]]; then echo "INSTALLING ARM64 MINIO" + rm -f minio wget https://dl.min.io/server/minio/release/linux-arm64/minio else echo "INSTALLING AMD64 MINIO" + rm -f minio wget https://dl.min.io/server/minio/release/linux-amd64/minio fi -chmod +x minio \ No newline at end of file + +chmod +x minio From 32f6fc3d32cbef84cfbe619c563a78a5d720da80 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 03:09:14 +0200 Subject: [PATCH 38/50] add minio with gateway --- scripts/resources/minio | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 scripts/resources/minio diff --git a/scripts/resources/minio b/scripts/resources/minio new file mode 100644 index 0000000000..c121cc0963 --- /dev/null +++ b/scripts/resources/minio @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63db3aa3c2299ebaf13b46c64523a589bd5bf272f9e971d17f1eaa55f6f1fd79 +size 118595584 From 49dda35358abcc38d70577854491e447edae9b73 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 03:09:42 +0200 Subject: [PATCH 39/50] track minio in lfs --- .gitattributes | 1 + 1 file changed, 1 insertion(+) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..85b026dd08 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +scripts/resources/minio filter=lfs diff=lfs merge=lfs -text From 406c60c9737248b626a4d6863746e98d55c8e2e8 Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Thu, 20 Feb 2025 09:24:49 +0000 Subject: [PATCH 40/50] Bump version to 3.4.14 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index b6eb31f2b0..79a0eac346 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.4.13", + "version": "3.4.14", "npmClient": "yarn", "concurrency": 20, "command": { From 4092f4c3e158d67401883d94c5c6cbd85fd1805c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 20 Feb 2025 10:11:04 +0000 Subject: [PATCH 41/50] Fix loops being given empty strings. 
--- .../src/automations/tests/steps/loop.spec.ts | 22 +++++++++++++++++++ packages/server/src/threads/automation.ts | 8 +++++-- 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/packages/server/src/automations/tests/steps/loop.spec.ts b/packages/server/src/automations/tests/steps/loop.spec.ts index 883732330f..19f7e5506f 100644 --- a/packages/server/src/automations/tests/steps/loop.spec.ts +++ b/packages/server/src/automations/tests/steps/loop.spec.ts @@ -7,6 +7,8 @@ import { CreateRowStepOutputs, FieldType, FilterCondition, + AutomationStatus, + AutomationStepStatus, } from "@budibase/types" import { createAutomationBuilder } from "../utilities/AutomationTestBuilder" import TestConfiguration from "../../../tests/utilities/TestConfiguration" @@ -560,5 +562,25 @@ describe("Attempt to run a basic loop automation", () => { status: "stopped", }) }) + + it("should not fail if queryRows returns nothing", async () => { + const table = await config.api.table.save(basicTable()) + const results = await createAutomationBuilder(config) + .onAppAction() + .queryRows({ + tableId: table._id!, + }) + .loop({ + option: LoopStepType.ARRAY, + binding: "{{ steps.1.rows }}", + }) + .serverLog({ text: "Message {{loop.currentItem}}" }) + .test({ fields: {} }) + + expect(results.steps[1].outputs.success).toBe(true) + expect(results.steps[1].outputs.status).toBe( + AutomationStepStatus.NO_ITERATIONS + ) + }) }) }) diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 8b2aac662c..762da1cbc1 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -68,7 +68,11 @@ function getLoopIterable(step: LoopStep): any[] { let input = step.inputs.binding if (option === LoopStepType.ARRAY && typeof input === "string") { - input = JSON.parse(input) + if (input === "") { + input = [] + } else { + input = JSON.parse(input) + } } if (option === LoopStepType.STRING && Array.isArray(input)) { @@ -492,7 +496,7 @@ class Orchestrator { } const status = - iterations === 0 ? AutomationStatus.NO_CONDITION_MET : undefined + iterations === 0 ? AutomationStepStatus.NO_ITERATIONS : undefined return stepSuccess(stepToLoop, { status, iterations, items }) }) } From ef9cbfce5936cb26fe981b6e2a4903ae3bd9bc1d Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 20 Feb 2025 10:16:13 +0000 Subject: [PATCH 42/50] Fix lint. 
--- packages/server/src/automations/tests/steps/loop.spec.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/server/src/automations/tests/steps/loop.spec.ts b/packages/server/src/automations/tests/steps/loop.spec.ts index 19f7e5506f..2bdf33b253 100644 --- a/packages/server/src/automations/tests/steps/loop.spec.ts +++ b/packages/server/src/automations/tests/steps/loop.spec.ts @@ -7,7 +7,6 @@ import { CreateRowStepOutputs, FieldType, FilterCondition, - AutomationStatus, AutomationStepStatus, } from "@budibase/types" import { createAutomationBuilder } from "../utilities/AutomationTestBuilder" From 1328076d03f88d656edb1a188c24d121904297fa Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Thu, 20 Feb 2025 10:26:46 +0000 Subject: [PATCH 43/50] Bump version to 3.4.15 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index 79a0eac346..91980e0a15 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.4.14", + "version": "3.4.15", "npmClient": "yarn", "concurrency": 20, "command": { From dba89a678c246dd2daaeb5e561452f3d9066e145 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 20:35:24 +0200 Subject: [PATCH 44/50] make minio binary executable --- hosting/single/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile index 043cd3dd73..e6c1142ab8 100644 --- a/hosting/single/Dockerfile +++ b/hosting/single/Dockerfile @@ -70,6 +70,7 @@ WORKDIR /minio # a 2022 version of minio that supports gateway mode COPY scripts/resources/minio /minio +RUN chmod +x minio # handles the installation of minio in non-aas environments COPY scripts/install-minio.sh ./install.sh From afe293de5648dfa35b02e3552d5cb16d1c5ea9da Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 20:40:17 +0200 Subject: [PATCH 45/50] linting fix --- hosting/single/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hosting/single/Dockerfile b/hosting/single/Dockerfile index e6c1142ab8..1f449e7376 100644 --- a/hosting/single/Dockerfile +++ b/hosting/single/Dockerfile @@ -36,11 +36,11 @@ COPY packages/worker/pm2.config.js packages/worker/pm2.config.js FROM $BASEIMG AS runner ARG TARGETARCH -ENV TARGETARCH $TARGETARCH +ENV TARGETARCH=$TARGETARCH #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service) # e.g. docker build --build-arg TARGETBUILD=aas .... 
ARG TARGETBUILD=single -ENV TARGETBUILD $TARGETBUILD +ENV TARGETBUILD=$TARGETBUILD # install base dependencies RUN apt-get update && \ From 18ddcd092ef7e77c48e4598166b69ef30a7c7e44 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 21:04:44 +0200 Subject: [PATCH 46/50] simplify env setup and handle MINIO_ missing --- hosting/single/runner.sh | 80 +++++++++++++++++++++++----------------- 1 file changed, 46 insertions(+), 34 deletions(-) diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index d6b77e9e42..42305cf62e 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -1,45 +1,57 @@ #!/bin/bash -declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD") -declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL") -# Check the env vars set in Dockerfile have come through, AAS seems to drop them -[[ -z "${APP_PORT}" ]] && export APP_PORT=4001 -[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd -[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION -[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80 -[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker -[[ -z "${MINIO_URL}" ]] && [[ -z "${USE_S3}" ]] && export MINIO_URL=http://127.0.0.1:9000 -[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production -[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU -[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app -[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379 -[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1 -[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002 -[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002 -[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001 -[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app -# export CUSTOM_DOMAIN=budi001.custom.com -export DATA_DIR=${DATA_DIR:-/data} -mkdir -p ${DATA_DIR} -# Mount NFS or GCP Filestore if env vars exist for it -if [[ ! -z ${FILESHARE_IP} && ! 
-z ${FILESHARE_NAME} ]]; then +echo "Starting runner.sh" + +# set defaults for Docker-related variables +export APP_PORT="${APP_PORT:-4001}" +export ARCHITECTURE="${ARCHITECTURE:-amd}" +export BUDIBASE_ENVIRONMENT="${BUDIBASE_ENVIRONMENT:-PRODUCTION}" +export CLUSTER_PORT="${CLUSTER_PORT:-80}" +export DEPLOYMENT_ENVIRONMENT="${DEPLOYMENT_ENVIRONMENT:-docker}" + +# only set MINIO_URL if neither MINIO_URL nor USE_S3 is set +if [[ -z "${MINIO_URL}" && -z "${USE_S3}" ]]; then + export MINIO_URL="http://127.0.0.1:9000" +fi + +export NODE_ENV="${NODE_ENV:-production}" +export POSTHOG_TOKEN="${POSTHOG_TOKEN:-phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU}" +export ACCOUNT_PORTAL_URL="${ACCOUNT_PORTAL_URL:-https://account.budibase.app}" +export REDIS_URL="${REDIS_URL:-127.0.0.1:6379}" +export SELF_HOSTED="${SELF_HOSTED:-1}" +export WORKER_PORT="${WORKER_PORT:-4002}" +export WORKER_URL="${WORKER_URL:-http://127.0.0.1:4002}" +export APPS_URL="${APPS_URL:-http://127.0.0.1:4001}" +export SERVER_TOP_LEVEL_PATH="${SERVER_TOP_LEVEL_PATH:-/app}" + +# set DATA_DIR and ensure the directory exists +export DATA_DIR="${DATA_DIR:-/data}" +mkdir -p "${DATA_DIR}" + +# mount NFS or GCP Filestore if FILESHARE_IP and FILESHARE_NAME are set +if [[ -n "${FILESHARE_IP}" && -n "${FILESHARE_NAME}" ]]; then echo "Mounting NFS share" apt update && apt install -y nfs-common nfs-kernel-server echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}" - mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR} + mount -o nolock "${FILESHARE_IP}:/${FILESHARE_NAME}" "${DATA_DIR}" echo "Mounting result: $?" fi -if [ -f "${DATA_DIR}/.env" ]; then - # Read in the .env file and export the variables - for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done +# source environment variables from a .env file if it exists in DATA_DIR +if [[ -f "${DATA_DIR}/.env" ]]; then + set -a # Automatically export all variables loaded from .env + source "${DATA_DIR}/.env" + set +a fi -# randomise any unset environment variables -for ENV_VAR in "${ENV_VARS[@]}"; do - if [[ -z "${!ENV_VAR}" ]]; then - eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')" + +# randomize any unset sensitive environment variables using uuidgen +env_vars=(COUCHDB_USER COUCHDB_PASSWORD MINIO_ACCESS_KEY MINIO_SECRET_KEY INTERNAL_API_KEY JWT_SECRET REDIS_PASSWORD) +for var in "${env_vars[@]}"; do + if [[ -z "${!var}" ]]; then + export "$var"="$(uuidgen | tr -d '-')" fi done + if [[ -z "${COUCH_DB_URL}" ]]; then export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984 fi @@ -88,10 +100,10 @@ fi # only start minio if use s3 isn't passed if [[ -z "${USE_S3}" ]]; then - if [[ $TARGETBUILD == aas ]]; then + if [[ ${TARGETBUILD} == aas ]]; then echo "Starting MinIO in Azure Gateway mode" - if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" ]]; then - echo "AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY must be set when deploying in Azure App Service mode" + if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" || -z "${MINIO_ACCESS_KEY}" || -z "${MINIO_SECRET_KEY}" ]]; then + echo "The following environment variables must be set: AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, MINIO_ACCESS_KEY, MINIO_SECRET_KEY" exit 1 fi /minio/minio gateway azure --console-address ":9001" >/dev/stdout 2>&1 & From 77f5c05a5b7fd39fc1fd26ffb2372dd548ce83c6 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 21:29:55 +0200 Subject: [PATCH 47/50] debug messages --- hosting/single/runner.sh | 6 ++++-- 1 file changed, 4 
insertions(+), 2 deletions(-) diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index 42305cf62e..6f81cc2fbd 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -1,6 +1,6 @@ #!/bin/bash -echo "Starting runner.sh" +echo "Starting runner.sh..." # set defaults for Docker-related variables export APP_PORT="${APP_PORT:-4001}" @@ -96,7 +96,9 @@ if [[ -n "${REDIS_PASSWORD}" ]]; then else redis-server "${REDIS_CONFIG}" >/dev/stdout 2>&1 & fi -/bbcouch-runner.sh & + +echo "Starting callback CouchDB runner..." +./bbcouch-runner.sh & # only start minio if use s3 isn't passed if [[ -z "${USE_S3}" ]]; then From bf2fcea4338b58259623c7ea0af2d3f9c5674be2 Mon Sep 17 00:00:00 2001 From: Christos Alexiou Date: Thu, 20 Feb 2025 21:36:01 +0200 Subject: [PATCH 48/50] fix issue with couchdb startup --- hosting/single/runner.sh | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/hosting/single/runner.sh b/hosting/single/runner.sh index 6f81cc2fbd..1a81515d31 100644 --- a/hosting/single/runner.sh +++ b/hosting/single/runner.sh @@ -25,7 +25,11 @@ export APPS_URL="${APPS_URL:-http://127.0.0.1:4001}" export SERVER_TOP_LEVEL_PATH="${SERVER_TOP_LEVEL_PATH:-/app}" # set DATA_DIR and ensure the directory exists -export DATA_DIR="${DATA_DIR:-/data}" +if [[ ${TARGETBUILD} == "aas" ]]; then + export DATA_DIR="/home" +else + export DATA_DIR="${DATA_DIR:-/data}" +fi mkdir -p "${DATA_DIR}" # mount NFS or GCP Filestore if FILESHARE_IP and FILESHARE_NAME are set @@ -81,8 +85,8 @@ ln -s ${DATA_DIR}/.env /worker/.env # make these directories in runner, incase of mount mkdir -p ${DATA_DIR}/minio mkdir -p ${DATA_DIR}/redis -#mkdir -p ${DATA_DIR}/couch -#chown -R couchdb:couchdb ${DATA_DIR}/couch +mkdir -p ${DATA_DIR}/couch +chown -R couchdb:couchdb ${DATA_DIR}/couch REDIS_CONFIG="/etc/redis/redis.conf" sed -i "s#DATA_DIR#${DATA_DIR}#g" "${REDIS_CONFIG}" From de62913afdf96e7c8c02d8c2c16b8f037cca0966 Mon Sep 17 00:00:00 2001 From: jvcalderon Date: Thu, 20 Feb 2025 23:16:58 +0100 Subject: [PATCH 49/50] [Revert] store koa sessions in redis instead of cookies --- hosting/nginx.dev.conf | 6 ---- packages/worker/package.json | 1 - .../src/api/routes/global/tests/auth.spec.ts | 2 +- packages/worker/src/index.ts | 26 ++--------------- packages/worker/src/koa-redis.d.ts | 1 - yarn.lock | 28 ++----------------- 6 files changed, 5 insertions(+), 59 deletions(-) delete mode 100644 packages/worker/src/koa-redis.d.ts diff --git a/hosting/nginx.dev.conf b/hosting/nginx.dev.conf index a8cefe9ccc..747235e8ef 100644 --- a/hosting/nginx.dev.conf +++ b/hosting/nginx.dev.conf @@ -62,12 +62,6 @@ http { proxy_connect_timeout 120s; proxy_send_timeout 120s; proxy_http_version 1.1; - - # Enable buffering for potentially large OIDC configs - proxy_buffering on; - proxy_buffer_size 16k; - proxy_buffers 4 32k; - proxy_set_header Host $host; proxy_set_header Connection ""; diff --git a/packages/worker/package.json b/packages/worker/package.json index 28728272ca..53d14dacee 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -62,7 +62,6 @@ "koa-body": "4.2.0", "koa-compress": "4.0.1", "koa-passport": "4.1.4", - "koa-redis": "^4.0.1", "koa-send": "5.0.1", "koa-session": "5.13.1", "koa-static": "5.0.0", diff --git a/packages/worker/src/api/routes/global/tests/auth.spec.ts b/packages/worker/src/api/routes/global/tests/auth.spec.ts index f89cb4a027..bff959469e 100644 --- a/packages/worker/src/api/routes/global/tests/auth.spec.ts +++ 
b/packages/worker/src/api/routes/global/tests/auth.spec.ts @@ -311,7 +311,7 @@ describe("/api/global/auth", () => { }) }) - describe.skip("GET /api/global/auth/:tenantId/oidc/callback", () => { + describe("GET /api/global/auth/:tenantId/oidc/callback", () => { it("logs in", async () => { const email = `${generator.guid()}@example.com` diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index f382aa8a20..0547afab38 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -4,7 +4,7 @@ if (process.env.DD_APM_ENABLED) { // need to load environment first import env from "./environment" -import Application, { Middleware } from "koa" +import Application from "koa" import { bootstrap } from "global-agent" import * as db from "./db" import { sdk as proSdk } from "@budibase/pro" @@ -20,7 +20,6 @@ import { cache, features, } from "@budibase/backend-core" -import RedisStore from "koa-redis" db.init() import koaBody from "koa-body" @@ -53,28 +52,7 @@ app.proxy = true app.use(handleScimBody) app.use(koaBody({ multipart: true })) -const sessionMiddleware: Middleware = async (ctx: any, next: any) => { - const redisClient = await new redis.Client( - redis.utils.Databases.SESSIONS - ).init() - return koaSession( - { - // @ts-ignore - store: new RedisStore({ client: redisClient.getClient() }), - key: "koa:sess", - maxAge: 86400000, // one day - httpOnly: true, - secure: process.env.NODE_ENV === "production", - sameSite: "strict", - rolling: true, - renew: true, - }, - app - )(ctx, next) -} - -app.use(sessionMiddleware) - +app.use(koaSession(app)) app.use(middleware.correlation) app.use(middleware.pino) app.use(middleware.ip) diff --git a/packages/worker/src/koa-redis.d.ts b/packages/worker/src/koa-redis.d.ts deleted file mode 100644 index ad1b7a46f1..0000000000 --- a/packages/worker/src/koa-redis.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "koa-redis" {} diff --git a/yarn.lock b/yarn.lock index 8f611e224c..ceae41458c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2695,13 +2695,6 @@ dependencies: regenerator-runtime "^0.14.0" -"@babel/runtime@^7.8.3": - version "7.26.9" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.9.tgz#aa4c6facc65b9cb3f87d75125ffd47781b475433" - integrity sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg== - dependencies: - regenerator-runtime "^0.14.0" - "@babel/template@^7.22.15", "@babel/template@^7.22.5", "@babel/template@^7.25.9", "@babel/template@^7.3.3": version "7.25.9" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016" @@ -9048,14 +9041,7 @@ co-body@^5.1.1: raw-body "^2.2.0" type-is "^1.6.14" -co-wrap-all@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/co-wrap-all/-/co-wrap-all-1.0.0.tgz#370ae3e8333510a53f6b2f7fdfbe4568a11b7ecf" - integrity sha512-aru6gLi2vTUazr+MxVm3Rv6ST7/EKtFj9BrfkcOrbCO2Qv6LqJdE71m88HhHiBEviKw/ucVrwoGLrq2xHpOsJA== - dependencies: - co "^4.0.0" - -co@^4.0.0, co@^4.6.0: +co@^4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== @@ -13191,7 +13177,7 @@ ioredis@5.3.2: redis-parser "^3.0.0" standard-as-callback "^2.1.0" -ioredis@^4.14.1, ioredis@^4.28.5: +ioredis@^4.28.5: version "4.28.5" resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f" 
integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A== @@ -14691,16 +14677,6 @@ koa-pino-logger@4.0.0: dependencies: pino-http "^6.5.0" -koa-redis@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/koa-redis/-/koa-redis-4.0.1.tgz#57ac1b46d9ab851221a9f4952c1e8d4bf289db40" - integrity sha512-o2eTVNo1NBnloeUGhHed5Q2ZvJSLpUEj/+E1/7oH5EmH8WuQ+QLdl/VawkshxdFQ47W1p6V09lM3hCTu7D0YnQ== - dependencies: - "@babel/runtime" "^7.8.3" - co-wrap-all "^1.0.0" - debug "^4.1.1" - ioredis "^4.14.1" - koa-router@^10.0.0: version "10.1.1" resolved "https://registry.yarnpkg.com/koa-router/-/koa-router-10.1.1.tgz#20809f82648518b84726cd445037813cd99f17ff" From add89b87d230d755b949cc4d53cb2f6227777acf Mon Sep 17 00:00:00 2001 From: Budibase Staging Release Bot <> Date: Fri, 21 Feb 2025 14:02:45 +0000 Subject: [PATCH 50/50] Bump version to 3.4.16 --- lerna.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lerna.json b/lerna.json index 91980e0a15..bb71d10f41 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.4.15", + "version": "3.4.16", "npmClient": "yarn", "concurrency": 20, "command": {