Merge branch 'master' into execute-script-v2

This commit is contained in:
deanhannigan 2025-02-19 09:11:57 +00:00 committed by GitHub
commit 2b7754425a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 161 additions and 84 deletions

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.4.11",
"version": "3.4.12",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@ -3,6 +3,7 @@ import { newid } from "../utils"
import { Queue, QueueOptions, JobOptions } from "./queue"
import { helpers } from "@budibase/shared-core"
import { Job, JobId, JobInformation } from "bull"
import { cloneDeep } from "lodash"
function jobToJobInformation(job: Job): JobInformation {
let cron = ""
@ -33,12 +34,13 @@ function jobToJobInformation(job: Job): JobInformation {
}
}
interface JobMessage<T = any> extends Partial<Job<T>> {
// Shape of a message held by the in-memory test queue. Extends a partial
// Bull Job so tests can treat it like a real job where convenient.
export interface TestQueueMessage<T = any> extends Partial<Job<T>> {
// Unique message ID (generated via newid() when the message is added).
id: string
// Epoch milliseconds at which the message was enqueued.
timestamp: number
// Back-reference to the queue this message belongs to.
queue: Queue<T>
data: any
// Bull job options (repeat/jobId etc.), when supplied by the caller.
opts?: JobOptions
// Set when a repeatable (cron) job is fired explicitly via manualTrigger();
// lets the message handler bypass the "don't auto-run cron jobs" guard.
manualTrigger?: boolean
}
/**
@ -47,15 +49,15 @@ interface JobMessage<T = any> extends Partial<Job<T>> {
* internally to register when messages are available to the consumers - it can
* support many inputs and many consumers.
*/
class InMemoryQueue implements Partial<Queue> {
export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
_name: string
_opts?: QueueOptions
_messages: JobMessage[]
_messages: TestQueueMessage<T>[]
_queuedJobIds: Set<string>
_emitter: NodeJS.EventEmitter<{
message: [JobMessage]
completed: [Job]
removed: [JobMessage]
message: [TestQueueMessage<T>]
completed: [Job<T>]
removed: [TestQueueMessage<T>]
}>
_runCount: number
_addCount: number
@ -86,10 +88,12 @@ class InMemoryQueue implements Partial<Queue> {
*/
async process(concurrencyOrFunc: number | any, func?: any) {
func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
this._emitter.on("message", async message => {
this._emitter.on("message", async msg => {
const message = cloneDeep(msg)
// For the purpose of testing, don't trigger cron jobs immediately.
// Require the test to trigger them manually with timestamps.
if (message.opts?.repeat != null) {
if (!message.manualTrigger && message.opts?.repeat != null) {
return
}
@ -107,7 +111,7 @@ class InMemoryQueue implements Partial<Queue> {
if (resp.then != null) {
try {
await retryFunc(resp)
this._emitter.emit("completed", message as Job)
this._emitter.emit("completed", message as Job<T>)
} catch (e: any) {
console.error(e)
}
@ -124,7 +128,6 @@ class InMemoryQueue implements Partial<Queue> {
return this as any
}
// simply puts a message to the queue and emits to the queue for processing
/**
* Simple function to replicate the add message functionality of Bull, putting
* a new message on the queue. This then emits an event which will be used to
@ -133,7 +136,13 @@ class InMemoryQueue implements Partial<Queue> {
* a JSON message as this is required by Bull.
* @param repeat serves no purpose for the import queue.
*/
async add(data: any, opts?: JobOptions) {
async add(data: T | string, optsOrT?: JobOptions | T) {
if (typeof data === "string") {
throw new Error("doesn't support named jobs")
}
const opts = optsOrT as JobOptions
const jobId = opts?.jobId?.toString()
if (jobId && this._queuedJobIds.has(jobId)) {
console.log(`Ignoring already queued job ${jobId}`)
@ -148,7 +157,7 @@ class InMemoryQueue implements Partial<Queue> {
}
const pushMessage = () => {
const message: JobMessage = {
const message: TestQueueMessage = {
id: newid(),
timestamp: Date.now(),
queue: this as unknown as Queue,
@ -176,7 +185,7 @@ class InMemoryQueue implements Partial<Queue> {
async removeRepeatableByKey(id: string) {
for (const [idx, message] of this._messages.entries()) {
if (message.opts?.jobId?.toString() === id) {
if (message.id === id) {
this._messages.splice(idx, 1)
this._emitter.emit("removed", message)
return
@ -204,6 +213,16 @@ class InMemoryQueue implements Partial<Queue> {
return null
}
/**
 * Explicitly fires a queued job by ID. Used by tests to run repeatable
 * (cron) jobs, which the message listener otherwise skips.
 * @param id the ID of the queued message to fire.
 * @throws if no queued message has the given ID.
 */
manualTrigger(id: JobId) {
for (const message of this._messages) {
if (message.id === id) {
// Emit a copy flagged with manualTrigger so the "message" handler will
// process it even though it carries repeat options.
this._emitter.emit("message", { ...message, manualTrigger: true })
return
}
}
throw new Error(`Job with id ${id} not found`)
}
on(event: string, callback: (...args: any[]) => void): Queue {
// @ts-expect-error - this callback can be one of many types
this._emitter.on(event, callback)

View File

@ -1,2 +1,3 @@
export * from "./queue"
export * from "./constants"
export * from "./inMemoryQueue"

View File

@ -1,7 +1,7 @@
import env from "../../environment"
import { AutomationResults, Automation, App } from "@budibase/types"
import { automations } from "@budibase/pro"
import { db as dbUtils } from "@budibase/backend-core"
import { db as dbUtils, logging } from "@budibase/backend-core"
import sizeof from "object-sizeof"
const MAX_LOG_SIZE_MB = 5
@ -32,7 +32,16 @@ export async function storeLog(
if (bytes / MB_IN_BYTES > MAX_LOG_SIZE_MB) {
sanitiseResults(results)
}
await automations.logs.storeLog(automation, results)
try {
await automations.logs.storeLog(automation, results)
} catch (e: any) {
if (e.status === 413 && e.request?.data) {
// if content is too large we shouldn't log it
delete e.request.data
e.request.data = { message: "removed due to large size" }
}
logging.logAlert("Error writing automation log", e)
}
}
export async function checkAppMetadata(apps: App[]) {

View File

@ -21,6 +21,11 @@ describe("Attempt to run a basic loop automation", () => {
})
beforeEach(async () => {
const { automations } = await config.api.automation.fetch()
for (const automation of automations) {
await config.api.automation.delete(automation)
}
table = await config.api.table.save(basicTable())
await config.api.row.save(table._id!, {})
})

View File

@ -1,11 +1,15 @@
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import {
captureAutomationQueueMessages,
captureAutomationMessages,
captureAutomationRemovals,
captureAutomationResults,
triggerCron,
} from "../utilities"
import { automations } from "@budibase/pro"
import { AutomationStatus } from "@budibase/types"
import { AutomationData, AutomationStatus } from "@budibase/types"
import { MAX_AUTOMATION_RECURRING_ERRORS } from "../../../constants"
import { queue } from "@budibase/backend-core"
describe("cron trigger", () => {
const config = new TestConfiguration()
@ -33,7 +37,7 @@ describe("cron trigger", () => {
})
.save()
const messages = await captureAutomationQueueMessages(automation, () =>
const messages = await captureAutomationMessages(automation, () =>
config.api.application.publish()
)
expect(messages).toHaveLength(1)
@ -63,7 +67,7 @@ describe("cron trigger", () => {
})
it("should stop if the job fails more than 3 times", async () => {
const runner = await createAutomationBuilder(config)
const { automation } = await createAutomationBuilder(config)
.onCron({ cron: "* * * * *" })
.queryRows({
// @ts-expect-error intentionally sending invalid data
@ -71,28 +75,31 @@ describe("cron trigger", () => {
})
.save()
await config.api.application.publish()
const results = await captureAutomationResults(
runner.automation,
async () => {
await runner.trigger({ timeout: 1000, fields: {} })
await runner.trigger({ timeout: 1000, fields: {} })
await runner.trigger({ timeout: 1000, fields: {} })
await runner.trigger({ timeout: 1000, fields: {} })
await runner.trigger({ timeout: 1000, fields: {} })
}
const [message] = await captureAutomationMessages(automation, () =>
config.api.application.publish()
)
expect(results).toHaveLength(5)
await config.withProdApp(async () => {
const {
data: [latest, ..._],
} = await automations.logs.logSearch({
automationId: runner.automation._id,
let results: queue.TestQueueMessage<AutomationData>[] = []
const removed = await captureAutomationRemovals(automation, async () => {
results = await captureAutomationResults(automation, async () => {
for (let i = 0; i < MAX_AUTOMATION_RECURRING_ERRORS; i++) {
triggerCron(message)
}
})
})
expect(latest.status).toEqual(AutomationStatus.STOPPED_ERROR)
expect(removed).toHaveLength(1)
expect(removed[0].id).toEqual(message.id)
expect(results).toHaveLength(5)
const search = await automations.logs.logSearch({
automationId: automation._id,
status: AutomationStatus.STOPPED_ERROR,
})
expect(search.data).toHaveLength(1)
expect(search.data[0].status).toEqual(AutomationStatus.STOPPED_ERROR)
})
})

View File

@ -6,6 +6,7 @@ import { Knex } from "knex"
import { getQueue } from "../.."
import { Job } from "bull"
import { helpers } from "@budibase/shared-core"
import { queue } from "@budibase/backend-core"
let config: TestConfiguration
@ -20,6 +21,17 @@ export function afterAll() {
config.end()
}
// Returns the automation queue cast to the in-memory test implementation so
// tests can use test-only helpers such as manualTrigger().
export function getTestQueue(): queue.InMemoryQueue<AutomationData> {
return getQueue() as unknown as queue.InMemoryQueue<AutomationData>
}
// Manually fires a queued cron job. The in-memory test queue does not run
// repeatable jobs automatically, so tests use this to simulate the schedule
// firing.
export function triggerCron(message: Job<AutomationData>) {
// Guard: only repeatable messages with a cron expression may be triggered.
if (!message.opts?.repeat || !("cron" in message.opts.repeat)) {
throw new Error("Expected cron message")
}
getTestQueue().manualTrigger(message.id)
}
export async function runInProd(fn: any) {
env._set("NODE_ENV", "production")
let error
@ -34,9 +46,41 @@ export async function runInProd(fn: any) {
}
}
export async function captureAllAutomationQueueMessages(
// Runs `f` and collects every job removed from the automation queue while it
// executes (plus a short settle period for async work kicked off by `f`).
export async function captureAllAutomationRemovals(f: () => Promise<unknown>) {
const messages: Job<AutomationData>[] = []
const queue = getQueue()
const messageListener = async (message: Job<AutomationData>) => {
messages.push(message)
}
queue.on("removed", messageListener)
try {
await f()
// Queue messages tend to be sent asynchronously in API handlers, so there's
// no guarantee that awaiting this function will have queued anything yet.
// We wait here to make sure we're queued _after_ any existing async work.
await helpers.wait(100)
} finally {
queue.off("removed", messageListener)
}
return messages
}
// As captureAllAutomationRemovals, but filtered to removals belonging to the
// given automation (accepts either the automation object or its ID).
export async function captureAutomationRemovals(
automation: Automation | string,
f: () => Promise<unknown>
) {
const messages = await captureAllAutomationRemovals(f)
return messages.filter(
m =>
m.data.automation._id ===
(typeof automation === "string" ? automation : automation._id)
)
}
export async function captureAllAutomationMessages(f: () => Promise<unknown>) {
const messages: Job<AutomationData>[] = []
const queue = getQueue()
@ -58,11 +102,11 @@ export async function captureAllAutomationQueueMessages(
return messages
}
export async function captureAutomationQueueMessages(
export async function captureAutomationMessages(
automation: Automation | string,
f: () => Promise<unknown>
) {
const messages = await captureAllAutomationQueueMessages(f)
const messages = await captureAllAutomationMessages(f)
return messages.filter(
m =>
m.data.automation._id ===
@ -76,18 +120,22 @@ export async function captureAutomationQueueMessages(
*/
export async function captureAllAutomationResults(
f: () => Promise<unknown>
): Promise<Job<AutomationData>[]> {
const runs: Job<AutomationData>[] = []
): Promise<queue.TestQueueMessage<AutomationData>[]> {
const runs: queue.TestQueueMessage<AutomationData>[] = []
const queue = getQueue()
let messagesOutstanding = 0
const completedListener = async (job: Job<AutomationData>) => {
const completedListener = async (
job: queue.TestQueueMessage<AutomationData>
) => {
runs.push(job)
messagesOutstanding--
}
const messageListener = async (message: Job<AutomationData>) => {
const messageListener = async (
message: queue.TestQueueMessage<AutomationData>
) => {
// Don't count cron messages, as they don't get triggered automatically.
if (message.opts?.repeat != null) {
if (!message.manualTrigger && message.opts?.repeat != null) {
return
}
messagesOutstanding++

View File

@ -181,17 +181,6 @@ class Orchestrator {
await storeLog(automation, this.executionOutput)
}
async checkIfShouldStop(): Promise<boolean> {
const metadata = await this.getMetadata()
if (!metadata.errorCount || !this.isCron()) {
return false
}
if (metadata.errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
return true
}
return false
}
async getMetadata(): Promise<AutomationMetadata> {
const metadataId = generateAutomationMetadataID(this.automation._id!)
const db = context.getAppDB()
@ -200,24 +189,29 @@ class Orchestrator {
}
async incrementErrorCount() {
for (let attempt = 0; attempt < 3; attempt++) {
const db = context.getAppDB()
let err: Error | undefined = undefined
for (let attempt = 0; attempt < 10; attempt++) {
const metadata = await this.getMetadata()
metadata.errorCount ||= 0
metadata.errorCount++
const db = context.getAppDB()
try {
await db.put(metadata)
return
} catch (err) {
logging.logAlertWithInfo(
"Failed to update error count in automation metadata",
db.name,
this.automation._id!,
err
)
return metadata.errorCount
} catch (error: any) {
err = error
await helpers.wait(1000 + Math.random() * 1000)
}
}
logging.logAlertWithInfo(
"Failed to update error count in automation metadata",
db.name,
this.automation._id!,
err
)
return undefined
}
updateExecutionOutput(id: string, stepId: string, inputs: any, outputs: any) {
@ -295,28 +289,22 @@ class Orchestrator {
}
)
try {
await storeLog(this.automation, this.executionOutput)
} catch (e: any) {
if (e.status === 413 && e.request?.data) {
// if content is too large we shouldn't log it
delete e.request.data
e.request.data = { message: "removed due to large size" }
}
logging.logAlert("Error writing automation log", e)
}
let errorCount = 0
if (
isProdAppID(this.appId) &&
this.isCron() &&
isErrorInOutput(this.executionOutput)
) {
await this.incrementErrorCount()
if (await this.checkIfShouldStop()) {
await this.stopCron("errors")
span?.addTags({ shouldStop: true })
return
}
errorCount = (await this.incrementErrorCount()) || 0
}
if (errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
await this.stopCron("errors")
span?.addTags({ shouldStop: true })
} else {
await storeLog(this.automation, this.executionOutput)
}
return this.executionOutput
}
)
@ -749,7 +737,7 @@ export async function executeInThread(
})) as AutomationResponse
}
export const removeStalled = async (job: Job) => {
export const removeStalled = async (job: Job<AutomationData>) => {
const appId = job.data.event.appId
if (!appId) {
throw new Error("Unable to execute, event doesn't contain app ID.")