Merge branch 'master' into remove-mocks-1

Sam Rose 2025-02-24 09:18:03 +00:00 committed by GitHub
commit 384c5bab44
24 changed files with 649 additions and 364 deletions

.gitattributes vendored Normal file

@@ -0,0 +1 @@
scripts/resources/minio filter=lfs diff=lfs merge=lfs -text
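This rule routes the bundled minio binary through Git LFS, so only a small pointer file lives in the repository. A minimal sketch of how a rule like this is normally produced, assuming git-lfs is installed:

    # one-time LFS setup in a clone, then track the binary;
    # `git lfs track` appends the filter=lfs line above to .gitattributes
    git lfs install
    git lfs track "scripts/resources/minio"
    git add .gitattributes scripts/resources/minio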


@@ -62,12 +62,6 @@ http {
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;
proxy_set_header Host $host;
proxy_set_header Connection "";


@@ -1,5 +1,5 @@
ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
FROM node:20-slim as build
FROM node:20-slim AS build
# install node-gyp dependencies
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
@@ -34,13 +34,13 @@ COPY packages/worker/dist packages/worker/dist
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
FROM $BASEIMG as runner
FROM $BASEIMG AS runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV TARGETARCH=$TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
ENV TARGETBUILD=$TARGETBUILD
# install base dependencies
RUN apt-get update && \
@@ -67,6 +67,12 @@ RUN mkdir -p /var/log/nginx && \
# setup minio
WORKDIR /minio
# a 2022 version of minio that supports gateway mode
COPY scripts/resources/minio /minio
RUN chmod +x minio
# handles the installation of minio in non-aas environments
COPY scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh


@@ -1,53 +1,61 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
# Check the env vars set in Dockerfile have come through, AAS seems to drop them
[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && [[ -z "${USE_S3}" ]] && export MINIO_URL=http://127.0.0.1:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
# export CUSTOM_DOMAIN=budi001.custom.com
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
export DATA_DIR="${DATA_DIR:-/home}"
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else
export DATA_DIR=${DATA_DIR:-/data}
echo "Starting runner.sh..."
# set defaults for Docker-related variables
export APP_PORT="${APP_PORT:-4001}"
export ARCHITECTURE="${ARCHITECTURE:-amd}"
export BUDIBASE_ENVIRONMENT="${BUDIBASE_ENVIRONMENT:-PRODUCTION}"
export CLUSTER_PORT="${CLUSTER_PORT:-80}"
export DEPLOYMENT_ENVIRONMENT="${DEPLOYMENT_ENVIRONMENT:-docker}"
# only set MINIO_URL if neither MINIO_URL nor USE_S3 is set
if [[ -z "${MINIO_URL}" && -z "${USE_S3}" ]]; then
export MINIO_URL="http://127.0.0.1:9000"
fi
mkdir -p ${DATA_DIR}
# Mount NFS or GCP Filestore if env vars exist for it
if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
export NODE_ENV="${NODE_ENV:-production}"
export POSTHOG_TOKEN="${POSTHOG_TOKEN:-phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU}"
export ACCOUNT_PORTAL_URL="${ACCOUNT_PORTAL_URL:-https://account.budibase.app}"
export REDIS_URL="${REDIS_URL:-127.0.0.1:6379}"
export SELF_HOSTED="${SELF_HOSTED:-1}"
export WORKER_PORT="${WORKER_PORT:-4002}"
export WORKER_URL="${WORKER_URL:-http://127.0.0.1:4002}"
export APPS_URL="${APPS_URL:-http://127.0.0.1:4001}"
export SERVER_TOP_LEVEL_PATH="${SERVER_TOP_LEVEL_PATH:-/app}"
# set DATA_DIR and ensure the directory exists
if [[ ${TARGETBUILD} == "aas" ]]; then
export DATA_DIR="/home"
else
export DATA_DIR="${DATA_DIR:-/data}"
fi
mkdir -p "${DATA_DIR}"
# mount NFS or GCP Filestore if FILESHARE_IP and FILESHARE_NAME are set
if [[ -n "${FILESHARE_IP}" && -n "${FILESHARE_NAME}" ]]; then
echo "Mounting NFS share"
apt update && apt install -y nfs-common nfs-kernel-server
echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
mount -o nolock "${FILESHARE_IP}:/${FILESHARE_NAME}" "${DATA_DIR}"
echo "Mounting result: $?"
fi
if [ -f "${DATA_DIR}/.env" ]; then
# Read in the .env file and export the variables
for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
# source environment variables from a .env file if it exists in DATA_DIR
if [[ -f "${DATA_DIR}/.env" ]]; then
set -a # Automatically export all variables loaded from .env
source "${DATA_DIR}/.env"
set +a
fi
# randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
if [[ -z "${!ENV_VAR}" ]]; then
eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')"
# randomize any unset sensitive environment variables using uuidgen
env_vars=(COUCHDB_USER COUCHDB_PASSWORD MINIO_ACCESS_KEY MINIO_SECRET_KEY INTERNAL_API_KEY JWT_SECRET REDIS_PASSWORD)
for var in "${env_vars[@]}"; do
if [[ -z "${!var}" ]]; then
export "$var"="$(uuidgen | tr -d '-')"
fi
done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
fi
@@ -58,17 +66,15 @@ fi
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env
for ENV_VAR in "${ENV_VARS[@]}"
do
for ENV_VAR in "${ENV_VARS[@]}"; do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
done
for ENV_VAR in "${DOCKER_VARS[@]}"
do
for ENV_VAR in "${DOCKER_VARS[@]}"; do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
done
echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
echo "COUCH_DB_URL=${COUCH_DB_URL}" >>${DATA_DIR}/.env
fi
# Read in the .env file and export the variables
@@ -79,31 +85,44 @@ ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, in case of mount
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/redis
mkdir -p ${DATA_DIR}/couch
chown -R couchdb:couchdb ${DATA_DIR}/couch
REDIS_CONFIG="/etc/redis/redis.conf"
sed -i "s#DATA_DIR#${DATA_DIR}#g" "${REDIS_CONFIG}"
if [[ -n "${USE_DEFAULT_REDIS_CONFIG}" ]]; then
REDIS_CONFIG=""
REDIS_CONFIG=""
fi
if [[ -n "${REDIS_PASSWORD}" ]]; then
redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD >/dev/stdout 2>&1 &
else
redis-server "${REDIS_CONFIG}" > /dev/stdout 2>&1 &
redis-server "${REDIS_CONFIG}" >/dev/stdout 2>&1 &
fi
/bbcouch-runner.sh &
echo "Starting callback CouchDB runner..."
./bbcouch-runner.sh &
# only start minio if use s3 isn't passed
if [[ -z "${USE_S3}" ]]; then
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
if [[ ${TARGETBUILD} == aas ]]; then
echo "Starting MinIO in Azure Gateway mode"
if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" || -z "${MINIO_ACCESS_KEY}" || -z "${MINIO_SECRET_KEY}" ]]; then
echo "The following environment variables must be set: AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, MINIO_ACCESS_KEY, MINIO_SECRET_KEY"
exit 1
fi
/minio/minio gateway azure --console-address ":9001" >/dev/stdout 2>&1 &
else
echo "Starting MinIO in standalone mode"
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio >/dev/stdout 2>&1 &
fi
fi
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
# Add monthly cron job to renew certbot certificate
echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >> /etc/cron.d/certificate-renew
echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >>/etc/cron.d/certificate-renew
chmod +x /etc/cron.d/certificate-renew
# Request the certbot certificate
/app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}


@@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.4.13",
"version": "3.4.16",
"npmClient": "yarn",
"concurrency": 20,
"command": {


@@ -123,7 +123,7 @@ export async function doInAutomationContext<T>(params: {
task: () => T
}): Promise<T> {
await ensureSnippetContext()
return newContext(
return await newContext(
{
tenantId: getTenantIDFromAppID(params.appId),
appId: params.appId,


@@ -5,10 +5,10 @@ import {
SqlQuery,
Table,
TableSourceType,
SEPARATOR,
} from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
import { Knex } from "knex"
import { SEPARATOR } from "../db"
import environment from "../environment"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`


@@ -49,6 +49,7 @@
import type { EditorMode } from "@budibase/types"
import type { BindingCompletion, CodeValidator } from "@/types"
import { validateHbsTemplate } from "./validator/hbs"
import { validateJsTemplate } from "./validator/js"
export let label: string | undefined = undefined
export let completions: BindingCompletion[] = []
@@ -356,6 +357,9 @@
if (mode === EditorModes.Handlebars) {
const diagnostics = validateHbsTemplate(value, validations)
editor.dispatch(setDiagnostics(editor.state, diagnostics))
} else if (mode === EditorModes.JS) {
const diagnostics = validateJsTemplate(value, validations)
editor.dispatch(setDiagnostics(editor.state, diagnostics))
}
}


@@ -0,0 +1,101 @@
import { Parser } from "acorn"
import * as walk from "acorn-walk"
import type { Diagnostic } from "@codemirror/lint"
import { CodeValidator } from "@/types"
export function validateJsTemplate(
code: string,
validations: CodeValidator
): Diagnostic[] {
const diagnostics: Diagnostic[] = []
try {
const ast = Parser.parse(code, {
ecmaVersion: "latest",
locations: true,
allowReturnOutsideFunction: true,
})
const lineOffsets: number[] = []
let offset = 0
for (const line of code.split("\n")) {
lineOffsets.push(offset)
offset += line.length + 1 // +1 for newline character
}
let hasReturnStatement = false
walk.ancestor(ast, {
ReturnStatement(node, _state, ancestors) {
if (
// it returns a value
node.argument &&
// and it is top level
ancestors.length === 2 &&
ancestors[0].type === "Program" &&
ancestors[1].type === "ReturnStatement"
) {
hasReturnStatement = true
}
},
CallExpression(node) {
const callee: any = node.callee
if (
node.type === "CallExpression" &&
callee.object?.name === "helpers" &&
node.loc
) {
const functionName = callee.property.name
const from =
lineOffsets[node.loc.start.line - 1] + node.loc.start.column
const to = lineOffsets[node.loc.end.line - 1] + node.loc.end.column
if (!(functionName in validations)) {
diagnostics.push({
from,
to,
severity: "warning",
message: `"${functionName}" function does not exist.`,
})
return
}
const { arguments: expectedArguments } = validations[functionName]
if (
expectedArguments &&
node.arguments.length !== expectedArguments.length
) {
diagnostics.push({
from,
to,
severity: "error",
message: `Function "${functionName}" expects ${
expectedArguments.length
} parameters (${expectedArguments.join(", ")}), but got ${
node.arguments.length
}.`,
})
}
}
},
})
if (!hasReturnStatement) {
diagnostics.push({
from: 0,
to: code.length,
severity: "error",
message: "Your code must return a value.",
})
}
} catch (e: any) {
diagnostics.push({
from: 0,
to: code.length,
severity: "error",
message: `Syntax error: ${e.message}`,
})
}
return diagnostics
}


@@ -0,0 +1,156 @@
import { validateJsTemplate } from "../js"
import { CodeValidator } from "@/types"
describe("js validator", () => {
it("validates valid code", () => {
const text = "return 7"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("does not validate runtime errors", () => {
const text = "return a"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("validates multiline code", () => {
const text = "const foo='bar'\nreturn 123"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("allows return not being on the last line", () => {
const text = "const foo='bar'\nreturn 123\nconsole.log(foo)"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("throws on missing return", () => {
const text = "const foo='bar'\nbar='foo'"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 0,
message: "Your code must return a value.",
severity: "error",
to: 25,
},
])
})
it("checks that returns are at top level", () => {
const text = `
function call(){
return 1
}`
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 0,
message: "Your code must return a value.",
severity: "error",
to: text.length,
},
])
})
describe("helpers", () => {
const validators: CodeValidator = {
helperFunction: {
arguments: ["a", "b", "c"],
},
}
it("validates helpers with valid params", () => {
const text = "return helpers.helperFunction(1, 99, 'a')"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("throws on too few params", () => {
const text = "return helpers.helperFunction(100)"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 7,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`,
severity: "error",
to: 34,
},
])
})
it("throws on too many params", () => {
const text = "return helpers.helperFunction( 1, 99, 'a', 100)"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 7,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 4.`,
severity: "error",
to: 47,
},
])
})
it("validates helpers on inner functions", () => {
const text = `function call(){
return helpers.helperFunction(1, 99)
}
return call()`
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 46,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 2.`,
severity: "error",
to: 75,
},
])
})
it("validates multiple helpers", () => {
const text =
"return helpers.helperFunction(1, 99, 'a') + helpers.helperFunction(1) + helpers.another(1) + helpers.another()"
const validators: CodeValidator = {
helperFunction: {
arguments: ["a", "b", "c"],
},
another: { arguments: [] },
}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 44,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`,
severity: "error",
to: 69,
},
{
from: 72,
message: `Function "another" expects 0 parameters (), but got 1.`,
severity: "error",
to: 90,
},
])
})
})
})


@@ -377,6 +377,7 @@
value={jsValue ? decodeJSBinding(jsValue) : jsValue}
on:change={onChangeJSValue}
{completions}
{validations}
mode={EditorModes.JS}
bind:getCaretPosition
bind:insertAtPos


@@ -8,6 +8,7 @@ import {
UIComponentError,
ComponentDefinition,
DependsOnComponentSetting,
Screen,
} from "@budibase/types"
import { queries } from "./queries"
import { views } from "./views"
@@ -66,6 +67,7 @@ export const screenComponentErrorList = derived(
if (!$selectedScreen) {
return []
}
const screen = $selectedScreen
const datasources = {
...reduceBy("_id", $tables.list),
@@ -79,7 +81,9 @@
const errors: UIComponentError[] = []
function checkComponentErrors(component: Component, ancestors: string[]) {
errors.push(...getInvalidDatasources(component, datasources, definitions))
errors.push(
...getInvalidDatasources(screen, component, datasources, definitions)
)
errors.push(...getMissingRequiredSettings(component, definitions))
errors.push(...getMissingAncestors(component, definitions, ancestors))
@@ -95,6 +99,7 @@
)
function getInvalidDatasources(
screen: Screen,
component: Component,
datasources: Record<string, any>,
definitions: Record<string, ComponentDefinition>


@@ -7,6 +7,7 @@ import {
CreateRowStepOutputs,
FieldType,
FilterCondition,
AutomationStepStatus,
} from "@budibase/types"
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@@ -560,5 +561,25 @@ describe("Attempt to run a basic loop automation", () => {
status: "stopped",
})
})
it("should not fail if queryRows returns nothing", async () => {
const table = await config.api.table.save(basicTable())
const results = await createAutomationBuilder(config)
.onAppAction()
.queryRows({
tableId: table._id!,
})
.loop({
option: LoopStepType.ARRAY,
binding: "{{ steps.1.rows }}",
})
.serverLog({ text: "Message {{loop.currentItem}}" })
.test({ fields: {} })
expect(results.steps[1].outputs.success).toBe(true)
expect(results.steps[1].outputs.status).toBe(
AutomationStepStatus.NO_ITERATIONS
)
})
})
})


@@ -40,39 +40,35 @@ function loggingArgs(job: AutomationJob) {
}
export async function processEvent(job: AutomationJob) {
return tracer.trace(
"processEvent",
{ resource: "automation" },
async span => {
const appId = job.data.event.appId!
const automationId = job.data.automation._id!
return tracer.trace("processEvent", async span => {
const appId = job.data.event.appId!
const automationId = job.data.automation._id!
span?.addTags({
appId,
automationId,
job: {
id: job.id,
name: job.name,
attemptsMade: job.attemptsMade,
opts: {
attempts: job.opts.attempts,
priority: job.opts.priority,
delay: job.opts.delay,
repeat: job.opts.repeat,
backoff: job.opts.backoff,
lifo: job.opts.lifo,
timeout: job.opts.timeout,
jobId: job.opts.jobId,
removeOnComplete: job.opts.removeOnComplete,
removeOnFail: job.opts.removeOnFail,
stackTraceLimit: job.opts.stackTraceLimit,
preventParsingData: job.opts.preventParsingData,
},
},
})
span.addTags({
appId,
automationId,
job: {
id: job.id,
name: job.name,
attemptsMade: job.attemptsMade,
attempts: job.opts.attempts,
priority: job.opts.priority,
delay: job.opts.delay,
repeat: job.opts.repeat,
backoff: job.opts.backoff,
lifo: job.opts.lifo,
timeout: job.opts.timeout,
jobId: job.opts.jobId,
removeOnComplete: job.opts.removeOnComplete,
removeOnFail: job.opts.removeOnFail,
stackTraceLimit: job.opts.stackTraceLimit,
preventParsingData: job.opts.preventParsingData,
},
})
const task = async () => {
try {
const task = async () => {
try {
return await tracer.trace("task", async () => {
if (isCronTrigger(job.data.automation) && !job.data.event.timestamp) {
// Requires the timestamp at run time
job.data.event.timestamp = Date.now()
@@ -81,25 +77,19 @@ export async function processEvent(job: AutomationJob) {
console.log("automation running", ...loggingArgs(job))
const runFn = () => Runner.run(job)
const result = await quotas.addAutomation(runFn, {
automationId,
})
const result = await quotas.addAutomation(runFn, { automationId })
console.log("automation completed", ...loggingArgs(job))
return result
} catch (err) {
span?.addTags({ error: true })
console.error(
`automation was unable to run`,
err,
...loggingArgs(job)
)
return { err }
}
})
} catch (err) {
span.addTags({ error: true })
console.error(`automation was unable to run`, err, ...loggingArgs(job))
return { err }
}
return await context.doInAutomationContext({ appId, automationId, task })
}
)
return await context.doInAutomationContext({ appId, automationId, task })
})
}
export async function updateTestHistory(


@@ -62,12 +62,16 @@ const SCHEMA: Integration = {
type: DatasourceFieldType.STRING,
required: true,
},
rev: {
type: DatasourceFieldType.STRING,
required: true,
},
},
},
},
}
class CouchDBIntegration implements IntegrationBase {
export class CouchDBIntegration implements IntegrationBase {
private readonly client: Database
constructor(config: CouchDBConfig) {
@@ -82,7 +86,8 @@ class CouchDBIntegration implements IntegrationBase {
connected: false,
}
try {
response.connected = await this.client.exists()
await this.client.allDocs({ limit: 1 })
response.connected = true
} catch (e: any) {
response.error = e.message as string
}
@@ -99,13 +104,9 @@ class CouchDBIntegration implements IntegrationBase {
}
async read(query: { json: string | object }) {
const parsed = this.parse(query)
const params = {
include_docs: true,
...parsed,
}
const params = { include_docs: true, ...this.parse(query) }
const result = await this.client.allDocs(params)
return result.rows.map(row => row.doc)
return result.rows.map(row => row.doc!)
}
async update(query: { json: string | object }) {
@@ -121,8 +122,8 @@ class CouchDBIntegration implements IntegrationBase {
return await this.client.get(query.id)
}
async delete(query: { id: string }) {
return await this.client.remove(query.id)
async delete(query: { id: string; rev: string }) {
return await this.client.remove(query.id, query.rev)
}
}


@@ -1,84 +1,87 @@
jest.mock("@budibase/backend-core", () => {
const core = jest.requireActual("@budibase/backend-core")
return {
...core,
db: {
...core.db,
DatabaseWithConnection: function () {
return {
allDocs: jest.fn().mockReturnValue({ rows: [] }),
put: jest.fn(),
get: jest.fn().mockReturnValue({ _rev: "a" }),
remove: jest.fn(),
}
},
},
}
})
import { env } from "@budibase/backend-core"
import { CouchDBIntegration } from "../couchdb"
import { generator } from "@budibase/backend-core/tests"
import { default as CouchDBIntegration } from "../couchdb"
function couchSafeID(): string {
// CouchDB IDs must start with a letter, so we prepend an 'a'.
return `a${generator.guid()}`
}
class TestConfiguration {
integration: any
function doc(data: Record<string, any>): string {
return JSON.stringify({ _id: couchSafeID(), ...data })
}
constructor(
config: any = { url: "http://somewhere", database: "something" }
) {
this.integration = new CouchDBIntegration.integration(config)
}
function query(data?: Record<string, any>): { json: string } {
return { json: doc(data || {}) }
}
describe("CouchDB Integration", () => {
let config: any
let couchdb: CouchDBIntegration
beforeEach(() => {
config = new TestConfiguration()
})
it("calls the create method with the correct params", async () => {
const doc = {
test: 1,
}
await config.integration.create({
json: JSON.stringify(doc),
})
expect(config.integration.client.put).toHaveBeenCalledWith(doc)
})
it("calls the read method with the correct params", async () => {
const doc = {
name: "search",
}
await config.integration.read({
json: JSON.stringify(doc),
})
expect(config.integration.client.allDocs).toHaveBeenCalledWith({
include_docs: true,
name: "search",
couchdb = new CouchDBIntegration({
url: env.COUCH_DB_URL,
database: couchSafeID(),
})
})
it("calls the update method with the correct params", async () => {
const doc = {
_id: "1234",
name: "search",
}
await config.integration.update({
json: JSON.stringify(doc),
})
expect(config.integration.client.put).toHaveBeenCalledWith({
...doc,
_rev: "a",
})
it("successfully connects", async () => {
const { connected } = await couchdb.testConnection()
expect(connected).toBe(true)
})
it("calls the delete method with the correct params", async () => {
const id = "1234"
await config.integration.delete({ id })
expect(config.integration.client.remove).toHaveBeenCalledWith(id)
it("can create documents", async () => {
const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
expect(id).toBeDefined()
expect(ok).toBe(true)
expect(rev).toBeDefined()
})
it("can read created documents", async () => {
const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
expect(id).toBeDefined()
expect(ok).toBe(true)
expect(rev).toBeDefined()
const docs = await couchdb.read(query())
expect(docs).toEqual([
{
_id: id,
_rev: rev,
test: 1,
createdAt: expect.any(String),
updatedAt: expect.any(String),
},
])
})
it("can update documents", async () => {
const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
expect(ok).toBe(true)
const { id: newId, rev: newRev } = await couchdb.update(
query({ _id: id, _rev: rev, test: 2 })
)
const docs = await couchdb.read(query())
expect(docs).toEqual([
{
_id: newId,
_rev: newRev,
test: 2,
createdAt: expect.any(String),
updatedAt: expect.any(String),
},
])
})
it("can delete documents", async () => {
const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
expect(ok).toBe(true)
const deleteResponse = await couchdb.delete({ id, rev })
expect(deleteResponse.ok).toBe(true)
const docs = await couchdb.read(query())
expect(docs).toBeEmpty()
})
})


@@ -68,7 +68,11 @@ function getLoopIterable(step: LoopStep): any[] {
let input = step.inputs.binding
if (option === LoopStepType.ARRAY && typeof input === "string") {
input = JSON.parse(input)
if (input === "") {
input = []
} else {
input = JSON.parse(input)
}
}
if (option === LoopStepType.STRING && Array.isArray(input)) {
@@ -310,87 +314,83 @@ class Orchestrator {
}
async execute(): Promise<AutomationResults> {
return tracer.trace(
"Orchestrator.execute",
{ resource: "automation" },
async span => {
span?.addTags({ appId: this.appId, automationId: this.automation._id })
return await tracer.trace("execute", async span => {
span.addTags({ appId: this.appId, automationId: this.automation._id })
const job = cloneDeep(this.job)
delete job.data.event.appId
delete job.data.event.metadata
const job = cloneDeep(this.job)
delete job.data.event.appId
delete job.data.event.metadata
if (this.isCron() && !job.data.event.timestamp) {
job.data.event.timestamp = Date.now()
}
const trigger: AutomationTriggerResult = {
id: job.data.automation.definition.trigger.id,
stepId: job.data.automation.definition.trigger.stepId,
inputs: null,
outputs: job.data.event,
}
const result: AutomationResults = { trigger, steps: [trigger] }
const ctx: AutomationContext = {
trigger: trigger.outputs,
steps: [trigger.outputs],
stepsById: {},
stepsByName: {},
user: trigger.outputs.user,
}
await enrichBaseContext(ctx)
const timeout =
this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT
try {
await helpers.withTimeout(timeout, async () => {
const [stepOutputs, executionTime] = await utils.time(() =>
this.executeSteps(ctx, job.data.automation.definition.steps)
)
result.steps.push(...stepOutputs)
console.info(
`Automation ID: ${
this.automation._id
} Execution time: ${executionTime.toMs()} milliseconds`,
{
_logKey: "automation",
executionTime,
}
)
})
} catch (e: any) {
if (e.errno === "ETIME") {
span?.addTags({ timedOut: true })
console.warn(`Automation execution timed out after ${timeout}ms`)
}
}
let errorCount = 0
if (this.isProdApp() && this.isCron() && this.hasErrored(ctx)) {
errorCount = (await this.incrementErrorCount()) || 0
}
if (errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
await this.stopCron("errors", { result })
span?.addTags({ shouldStop: true })
} else {
await this.logResult(result)
}
return result
if (this.isCron() && !job.data.event.timestamp) {
job.data.event.timestamp = Date.now()
}
)
const trigger: AutomationTriggerResult = {
id: job.data.automation.definition.trigger.id,
stepId: job.data.automation.definition.trigger.stepId,
inputs: null,
outputs: job.data.event,
}
const result: AutomationResults = { trigger, steps: [trigger] }
const ctx: AutomationContext = {
trigger: trigger.outputs,
steps: [trigger.outputs],
stepsById: {},
stepsByName: {},
user: trigger.outputs.user,
}
await enrichBaseContext(ctx)
const timeout =
this.job.data.event.timeout || env.AUTOMATION_THREAD_TIMEOUT
try {
await helpers.withTimeout(timeout, async () => {
const [stepOutputs, executionTime] = await utils.time(() =>
this.executeSteps(ctx, job.data.automation.definition.steps)
)
result.steps.push(...stepOutputs)
console.info(
`Automation ID: ${
this.automation._id
} Execution time: ${executionTime.toMs()} milliseconds`,
{
_logKey: "automation",
executionTime,
}
)
})
} catch (e: any) {
if (e.errno === "ETIME") {
span?.addTags({ timedOut: true })
console.warn(`Automation execution timed out after ${timeout}ms`)
}
}
let errorCount = 0
if (this.isProdApp() && this.isCron() && this.hasErrored(ctx)) {
errorCount = (await this.incrementErrorCount()) || 0
}
if (errorCount >= MAX_AUTOMATION_RECURRING_ERRORS) {
await this.stopCron("errors", { result })
span?.addTags({ shouldStop: true })
} else {
await this.logResult(result)
}
return result
})
}
private async executeSteps(
ctx: AutomationContext,
steps: AutomationStep[]
): Promise<AutomationStepResult[]> {
return tracer.trace("Orchestrator.executeSteps", async () => {
return await tracer.trace("executeSteps", async () => {
let stepIndex = 0
const results: AutomationStepResult[] = []
@@ -446,74 +446,92 @@ class Orchestrator {
step: LoopStep,
stepToLoop: AutomationStep
): Promise<AutomationStepResult> {
await processObject(step.inputs, prepareContext(ctx))
return await tracer.trace("executeLoopStep", async span => {
await processObject(step.inputs, prepareContext(ctx))
const maxIterations = getLoopMaxIterations(step)
const items: Record<string, any>[] = []
let iterations = 0
let iterable: any[] = []
try {
iterable = getLoopIterable(step)
} catch (err) {
return stepFailure(stepToLoop, {
status: AutomationStepStatus.INCORRECT_TYPE,
})
}
for (; iterations < iterable.length; iterations++) {
const currentItem = iterable[iterations]
if (iterations === maxIterations) {
return stepFailure(stepToLoop, {
status: AutomationStepStatus.MAX_ITERATIONS,
const maxIterations = getLoopMaxIterations(step)
const items: Record<string, any>[] = []
let iterations = 0
let iterable: any[] = []
try {
iterable = getLoopIterable(step)
} catch (err) {
span.addTags({
status: AutomationStepStatus.INCORRECT_TYPE,
iterations,
})
}
if (matchesLoopFailureCondition(step, currentItem)) {
return stepFailure(stepToLoop, {
status: AutomationStepStatus.FAILURE_CONDITION,
status: AutomationStepStatus.INCORRECT_TYPE,
})
}
ctx.loop = { currentItem }
const result = await this.executeStep(ctx, stepToLoop)
items.push(result.outputs)
ctx.loop = undefined
}
for (; iterations < iterable.length; iterations++) {
const currentItem = iterable[iterations]
const status =
iterations === 0 ? AutomationStatus.NO_CONDITION_MET : undefined
return stepSuccess(stepToLoop, { status, iterations, items })
if (iterations === maxIterations) {
span.addTags({
status: AutomationStepStatus.MAX_ITERATIONS,
iterations,
})
return stepFailure(stepToLoop, {
status: AutomationStepStatus.MAX_ITERATIONS,
iterations,
})
}
if (matchesLoopFailureCondition(step, currentItem)) {
span.addTags({
status: AutomationStepStatus.FAILURE_CONDITION,
iterations,
})
return stepFailure(stepToLoop, {
status: AutomationStepStatus.FAILURE_CONDITION,
})
}
ctx.loop = { currentItem }
const result = await this.executeStep(ctx, stepToLoop)
items.push(result.outputs)
ctx.loop = undefined
}
const status =
iterations === 0 ? AutomationStepStatus.NO_ITERATIONS : undefined
return stepSuccess(stepToLoop, { status, iterations, items })
})
}
private async executeBranchStep(
ctx: AutomationContext,
step: BranchStep
): Promise<AutomationStepResult[]> {
const { branches, children } = step.inputs
return await tracer.trace("executeBranchStep", async span => {
const { branches, children } = step.inputs
for (const branch of branches) {
if (await branchMatches(ctx, branch)) {
return [
stepSuccess(step, {
branchName: branch.name,
status: `${branch.name} branch taken`,
branchId: `${branch.id}`,
}),
...(await this.executeSteps(ctx, children?.[branch.id] || [])),
]
for (const branch of branches) {
if (await branchMatches(ctx, branch)) {
span.addTags({ branchName: branch.name, branchId: branch.id })
return [
stepSuccess(step, {
branchName: branch.name,
status: `${branch.name} branch taken`,
branchId: `${branch.id}`,
}),
...(await this.executeSteps(ctx, children?.[branch.id] || [])),
]
}
}
}
return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })]
span.addTags({ status: AutomationStatus.NO_CONDITION_MET })
return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })]
})
}
private async executeStep(
ctx: AutomationContext,
step: Readonly<AutomationStep>
): Promise<AutomationStepResult> {
return tracer.trace("Orchestrator.executeStep", async span => {
return await tracer.trace(step.stepId, async span => {
span.addTags({
step: {
stepId: step.stepId,
@@ -524,6 +542,7 @@ class Orchestrator {
internal: step.internal,
deprecated: step.deprecated,
},
inputsKeys: Object.keys(step.inputs),
})
if (this.stopped) {
@@ -557,6 +576,7 @@
;(outputs as any).status = AutomationStatus.STOPPED
}
span.addTags({ outputsKeys: Object.keys(outputs) })
return stepSuccess(step, outputs, inputs)
})
}


@@ -62,7 +62,6 @@
"koa-body": "4.2.0",
"koa-compress": "4.0.1",
"koa-passport": "4.1.4",
"koa-redis": "^4.0.1",
"koa-send": "5.0.1",
"koa-session": "5.13.1",
"koa-static": "5.0.0",


@@ -311,7 +311,7 @@ describe("/api/global/auth", () => {
})
})
describe.skip("GET /api/global/auth/:tenantId/oidc/callback", () => {
describe("GET /api/global/auth/:tenantId/oidc/callback", () => {
it("logs in", async () => {
const email = `${generator.guid()}@example.com`


@@ -4,7 +4,7 @@ if (process.env.DD_APM_ENABLED) {
// need to load environment first
import env from "./environment"
import Application, { Middleware } from "koa"
import Application from "koa"
import { bootstrap } from "global-agent"
import * as db from "./db"
import { sdk as proSdk } from "@budibase/pro"
@@ -20,7 +20,6 @@ import {
cache,
features,
} from "@budibase/backend-core"
import RedisStore from "koa-redis"
db.init()
import koaBody from "koa-body"
@@ -53,28 +52,7 @@ app.proxy = true
app.use(handleScimBody)
app.use(koaBody({ multipart: true }))
const sessionMiddleware: Middleware = async (ctx: any, next: any) => {
const redisClient = await new redis.Client(
redis.utils.Databases.SESSIONS
).init()
return koaSession(
{
// @ts-ignore
store: new RedisStore({ client: redisClient.getClient() }),
key: "koa:sess",
maxAge: 86400000, // one day
httpOnly: true,
secure: process.env.NODE_ENV === "production",
sameSite: "strict",
rolling: true,
renew: true,
},
app
)(ctx, next)
}
app.use(sessionMiddleware)
app.use(koaSession(app))
app.use(middleware.correlation)
app.use(middleware.pino)
app.use(middleware.ip)


@@ -1 +0,0 @@
declare module "koa-redis" {}


@@ -1,10 +1,18 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
if [[ $TARGETBUILD == "aas" ]]; then
echo "A aas-compatible version of Minio is already installed."
exit 0
fi
if [[ $TARGETARCH == arm* ]]; then
echo "INSTALLING ARM64 MINIO"
rm -f minio
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
rm -f minio
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio
chmod +x minio

scripts/resources/minio Normal file

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:63db3aa3c2299ebaf13b46c64523a589bd5bf272f9e971d17f1eaa55f6f1fd79
size 118595584
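These three lines are the Git LFS pointer that stands in for the ~118 MB minio binary; the oid is the SHA-256 of the real object. A minimal sketch of working with the pointer, assuming git-lfs is installed:

    # confirm the checked-out file is a valid LFS pointer (exit code 0)
    git lfs pointer --check --file=scripts/resources/minio
    # download the real binary the pointer references
    git lfs pull --include="scripts/resources/minio"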


@@ -2695,13 +2695,6 @@
dependencies:
regenerator-runtime "^0.14.0"
"@babel/runtime@^7.8.3":
version "7.26.9"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.9.tgz#aa4c6facc65b9cb3f87d75125ffd47781b475433"
integrity sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg==
dependencies:
regenerator-runtime "^0.14.0"
"@babel/template@^7.22.15", "@babel/template@^7.22.5", "@babel/template@^7.25.9", "@babel/template@^7.3.3":
version "7.25.9"
resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016"
@@ -9048,14 +9041,7 @@ co-body@^5.1.1:
raw-body "^2.2.0"
type-is "^1.6.14"
co-wrap-all@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/co-wrap-all/-/co-wrap-all-1.0.0.tgz#370ae3e8333510a53f6b2f7fdfbe4568a11b7ecf"
integrity sha512-aru6gLi2vTUazr+MxVm3Rv6ST7/EKtFj9BrfkcOrbCO2Qv6LqJdE71m88HhHiBEviKw/ucVrwoGLrq2xHpOsJA==
dependencies:
co "^4.0.0"
co@^4.0.0, co@^4.6.0:
co@^4.6.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==
@@ -13191,7 +13177,7 @@ ioredis@5.3.2:
redis-parser "^3.0.0"
standard-as-callback "^2.1.0"
ioredis@^4.14.1, ioredis@^4.28.5:
ioredis@^4.28.5:
version "4.28.5"
resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f"
integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A==
@@ -14691,16 +14677,6 @@ koa-pino-logger@4.0.0:
dependencies:
pino-http "^6.5.0"
koa-redis@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/koa-redis/-/koa-redis-4.0.1.tgz#57ac1b46d9ab851221a9f4952c1e8d4bf289db40"
integrity sha512-o2eTVNo1NBnloeUGhHed5Q2ZvJSLpUEj/+E1/7oH5EmH8WuQ+QLdl/VawkshxdFQ47W1p6V09lM3hCTu7D0YnQ==
dependencies:
"@babel/runtime" "^7.8.3"
co-wrap-all "^1.0.0"
debug "^4.1.1"
ioredis "^4.14.1"
koa-router@^10.0.0:
version "10.1.1"
resolved "https://registry.yarnpkg.com/koa-router/-/koa-router-10.1.1.tgz#20809f82648518b84726cd445037813cd99f17ff"