Merge branch 'master' into remove-mocks-1

Commit 384c5bab44 by Sam Rose, 2025-02-24 09:18:03 +00:00 (committed by GitHub)
Signature: no known key found for this signature in database (GPG Key ID: B5690EEEBB952194)
24 changed files with 649 additions and 364 deletions

.gitattributes (vendored, new file, +1)

@@ -0,0 +1 @@
scripts/resources/minio filter=lfs diff=lfs merge=lfs -text

---

@@ -62,12 +62,6 @@ http {
     proxy_connect_timeout 120s;
     proxy_send_timeout 120s;
     proxy_http_version 1.1;
-    # Enable buffering for potentially large OIDC configs
-    proxy_buffering on;
-    proxy_buffer_size 16k;
-    proxy_buffers 4 32k;
     proxy_set_header Host $host;
     proxy_set_header Connection "";

---

@@ -1,5 +1,5 @@
 ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
-FROM node:20-slim as build
+FROM node:20-slim AS build

 # install node-gyp dependencies
 RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
@@ -34,13 +34,13 @@ COPY packages/worker/dist packages/worker/dist
 COPY packages/worker/pm2.config.js packages/worker/pm2.config.js

-FROM $BASEIMG as runner
+FROM $BASEIMG AS runner
 ARG TARGETARCH
-ENV TARGETARCH $TARGETARCH
+ENV TARGETARCH=$TARGETARCH
 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
 # e.g. docker build --build-arg TARGETBUILD=aas ....
 ARG TARGETBUILD=single
-ENV TARGETBUILD $TARGETBUILD
+ENV TARGETBUILD=$TARGETBUILD

 # install base dependencies
 RUN apt-get update && \
@@ -67,6 +67,12 @@ RUN mkdir -p /var/log/nginx && \
 # setup minio
 WORKDIR /minio
+
+# a 2022 version of minio that supports gateway mode
+COPY scripts/resources/minio /minio
+RUN chmod +x minio
+
+# handles the installation of minio in non-aas environments
 COPY scripts/install-minio.sh ./install.sh
 RUN chmod +x install.sh && ./install.sh

---

@@ -1,53 +1,61 @@
 #!/bin/bash
-declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
-declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
-# Check the env vars set in Dockerfile have come through, AAS seems to drop them
-[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
-[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
-[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
-[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
-[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
-[[ -z "${MINIO_URL}" ]] && [[ -z "${USE_S3}" ]] && export MINIO_URL=http://127.0.0.1:9000
-[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
-[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
-[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
-[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
-[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
-[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
-[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
-[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
-[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
-# export CUSTOM_DOMAIN=budi001.custom.com
-# Azure App Service customisations
-if [[ "${TARGETBUILD}" = "aas" ]]; then
-  export DATA_DIR="${DATA_DIR:-/home}"
-  WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
-  /etc/init.d/ssh start
-else
-  export DATA_DIR=${DATA_DIR:-/data}
-fi
-mkdir -p ${DATA_DIR}
-# Mount NFS or GCP Filestore if env vars exist for it
-if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
+echo "Starting runner.sh..."
+
+# set defaults for Docker-related variables
+export APP_PORT="${APP_PORT:-4001}"
+export ARCHITECTURE="${ARCHITECTURE:-amd}"
+export BUDIBASE_ENVIRONMENT="${BUDIBASE_ENVIRONMENT:-PRODUCTION}"
+export CLUSTER_PORT="${CLUSTER_PORT:-80}"
+export DEPLOYMENT_ENVIRONMENT="${DEPLOYMENT_ENVIRONMENT:-docker}"
+
+# only set MINIO_URL if neither MINIO_URL nor USE_S3 is set
+if [[ -z "${MINIO_URL}" && -z "${USE_S3}" ]]; then
+  export MINIO_URL="http://127.0.0.1:9000"
+fi
+
+export NODE_ENV="${NODE_ENV:-production}"
+export POSTHOG_TOKEN="${POSTHOG_TOKEN:-phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU}"
+export ACCOUNT_PORTAL_URL="${ACCOUNT_PORTAL_URL:-https://account.budibase.app}"
+export REDIS_URL="${REDIS_URL:-127.0.0.1:6379}"
+export SELF_HOSTED="${SELF_HOSTED:-1}"
+export WORKER_PORT="${WORKER_PORT:-4002}"
+export WORKER_URL="${WORKER_URL:-http://127.0.0.1:4002}"
+export APPS_URL="${APPS_URL:-http://127.0.0.1:4001}"
+export SERVER_TOP_LEVEL_PATH="${SERVER_TOP_LEVEL_PATH:-/app}"
+
+# set DATA_DIR and ensure the directory exists
+if [[ ${TARGETBUILD} == "aas" ]]; then
+  export DATA_DIR="/home"
+else
+  export DATA_DIR="${DATA_DIR:-/data}"
+fi
+mkdir -p "${DATA_DIR}"
+
+# mount NFS or GCP Filestore if FILESHARE_IP and FILESHARE_NAME are set
+if [[ -n "${FILESHARE_IP}" && -n "${FILESHARE_NAME}" ]]; then
   echo "Mounting NFS share"
   apt update && apt install -y nfs-common nfs-kernel-server
   echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
-  mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
+  mount -o nolock "${FILESHARE_IP}:/${FILESHARE_NAME}" "${DATA_DIR}"
   echo "Mounting result: $?"
 fi
-if [ -f "${DATA_DIR}/.env" ]; then
-  # Read in the .env file and export the variables
-  for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
+
+# source environment variables from a .env file if it exists in DATA_DIR
+if [[ -f "${DATA_DIR}/.env" ]]; then
+  set -a # Automatically export all variables loaded from .env
+  source "${DATA_DIR}/.env"
+  set +a
 fi
-# randomise any unset environment variables
-for ENV_VAR in "${ENV_VARS[@]}"
-do
-  if [[ -z "${!ENV_VAR}" ]]; then
-    eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')"
+
+# randomize any unset sensitive environment variables using uuidgen
+env_vars=(COUCHDB_USER COUCHDB_PASSWORD MINIO_ACCESS_KEY MINIO_SECRET_KEY INTERNAL_API_KEY JWT_SECRET REDIS_PASSWORD)
+for var in "${env_vars[@]}"; do
+  if [[ -z "${!var}" ]]; then
+    export "$var"="$(uuidgen | tr -d '-')"
   fi
 done
 if [[ -z "${COUCH_DB_URL}" ]]; then
   export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
 fi
@@ -58,17 +66,15 @@ fi
 if [ ! -f "${DATA_DIR}/.env" ]; then
   touch ${DATA_DIR}/.env
-  for ENV_VAR in "${ENV_VARS[@]}"
-  do
+  for ENV_VAR in "${ENV_VARS[@]}"; do
     temp=$(eval "echo \$$ENV_VAR")
-    echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+    echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
   done
-  for ENV_VAR in "${DOCKER_VARS[@]}"
-  do
+  for ENV_VAR in "${DOCKER_VARS[@]}"; do
     temp=$(eval "echo \$$ENV_VAR")
-    echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+    echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
   done
-  echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
+  echo "COUCH_DB_URL=${COUCH_DB_URL}" >>${DATA_DIR}/.env
 fi

 # Read in the .env file and export the variables
@@ -79,6 +85,7 @@ ln -s ${DATA_DIR}/.env /worker/.env
 # make these directories in runner, incase of mount
 mkdir -p ${DATA_DIR}/minio
 mkdir -p ${DATA_DIR}/redis
+mkdir -p ${DATA_DIR}/couch
 chown -R couchdb:couchdb ${DATA_DIR}/couch

 REDIS_CONFIG="/etc/redis/redis.conf"
@@ -89,21 +96,33 @@ if [[ -n "${USE_DEFAULT_REDIS_CONFIG}" ]]; then
 fi
 if [[ -n "${REDIS_PASSWORD}" ]]; then
-  redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
+  redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD >/dev/stdout 2>&1 &
 else
-  redis-server "${REDIS_CONFIG}" > /dev/stdout 2>&1 &
+  redis-server "${REDIS_CONFIG}" >/dev/stdout 2>&1 &
 fi
-/bbcouch-runner.sh &
+
+echo "Starting callback CouchDB runner..."
+./bbcouch-runner.sh &

 # only start minio if use s3 isn't passed
 if [[ -z "${USE_S3}" ]]; then
-  /minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
+  if [[ ${TARGETBUILD} == aas ]]; then
+    echo "Starting MinIO in Azure Gateway mode"
+    if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" || -z "${MINIO_ACCESS_KEY}" || -z "${MINIO_SECRET_KEY}" ]]; then
+      echo "The following environment variables must be set: AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, MINIO_ACCESS_KEY, MINIO_SECRET_KEY"
+      exit 1
+    fi
+    /minio/minio gateway azure --console-address ":9001" >/dev/stdout 2>&1 &
+  else
+    echo "Starting MinIO in standalone mode"
+    /minio/minio server --console-address ":9001" ${DATA_DIR}/minio >/dev/stdout 2>&1 &
+  fi
 fi

 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
   # Add monthly cron job to renew certbot certificate
-  echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >> /etc/cron.d/certificate-renew
+  echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >>/etc/cron.d/certificate-renew
   chmod +x /etc/cron.d/certificate-renew
   # Request the certbot certificate
   /app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}

---

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.4.13",
+  "version": "3.4.16",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

---

@@ -123,7 +123,7 @@ export async function doInAutomationContext<T>(params: {
   task: () => T
 }): Promise<T> {
   await ensureSnippetContext()
-  return newContext(
+  return await newContext(
     {
       tenantId: getTenantIDFromAppID(params.appId),
       appId: params.appId,

---

@@ -5,10 +5,10 @@ import {
   SqlQuery,
   Table,
   TableSourceType,
+  SEPARATOR,
 } from "@budibase/types"
 import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
 import { Knex } from "knex"
-import { SEPARATOR } from "../db"
 import environment from "../environment"

 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`

---

@@ -49,6 +49,7 @@
   import type { EditorMode } from "@budibase/types"
   import type { BindingCompletion, CodeValidator } from "@/types"
   import { validateHbsTemplate } from "./validator/hbs"
+  import { validateJsTemplate } from "./validator/js"

   export let label: string | undefined = undefined
   export let completions: BindingCompletion[] = []
@@ -356,6 +357,9 @@
     if (mode === EditorModes.Handlebars) {
       const diagnostics = validateHbsTemplate(value, validations)
       editor.dispatch(setDiagnostics(editor.state, diagnostics))
+    } else if (mode === EditorModes.JS) {
+      const diagnostics = validateJsTemplate(value, validations)
+      editor.dispatch(setDiagnostics(editor.state, diagnostics))
     }
   }
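For context on the `validations` value threaded through both branches above: the CodeValidator map pairs a helper name with its expected argument list. A minimal sketch of the shape, inferred from the tests later in this commit (the canonical definition lives in "@/types"):

```ts
// Inferred shape of CodeValidator; illustrative only, the real
// type is exported from "@/types" in this repo.
type CodeValidator = Record<string, { arguments?: string[] }>

const validations: CodeValidator = {
  helperFunction: { arguments: ["a", "b", "c"] }, // arity is checked
  another: { arguments: [] }, // zero-argument helper
}
```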

---

@@ -0,0 +1,101 @@
import { Parser } from "acorn"
import * as walk from "acorn-walk"
import type { Diagnostic } from "@codemirror/lint"
import { CodeValidator } from "@/types"
export function validateJsTemplate(
code: string,
validations: CodeValidator
): Diagnostic[] {
const diagnostics: Diagnostic[] = []
try {
const ast = Parser.parse(code, {
ecmaVersion: "latest",
locations: true,
allowReturnOutsideFunction: true,
})
const lineOffsets: number[] = []
let offset = 0
for (const line of code.split("\n")) {
lineOffsets.push(offset)
offset += line.length + 1 // +1 for newline character
}
let hasReturnStatement = false
walk.ancestor(ast, {
ReturnStatement(node, _state, ancestors) {
if (
// it returns a value
node.argument &&
// and it is top level
ancestors.length === 2 &&
ancestors[0].type === "Program" &&
ancestors[1].type === "ReturnStatement"
) {
hasReturnStatement = true
}
},
CallExpression(node) {
const callee: any = node.callee
if (
node.type === "CallExpression" &&
callee.object?.name === "helpers" &&
node.loc
) {
const functionName = callee.property.name
const from =
lineOffsets[node.loc.start.line - 1] + node.loc.start.column
const to = lineOffsets[node.loc.end.line - 1] + node.loc.end.column
if (!(functionName in validations)) {
diagnostics.push({
from,
to,
severity: "warning",
message: `"${functionName}" function does not exist.`,
})
return
}
const { arguments: expectedArguments } = validations[functionName]
if (
expectedArguments &&
node.arguments.length !== expectedArguments.length
) {
diagnostics.push({
from,
to,
severity: "error",
message: `Function "${functionName}" expects ${
expectedArguments.length
} parameters (${expectedArguments.join(", ")}), but got ${
node.arguments.length
}.`,
})
}
}
},
})
if (!hasReturnStatement) {
diagnostics.push({
from: 0,
to: code.length,
severity: "error",
message: "Your code must return a value.",
})
}
} catch (e: any) {
diagnostics.push({
from: 0,
to: code.length,
severity: "error",
message: `Syntax error: ${e.message}`,
})
}
return diagnostics
}
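To make the new validator's behavior concrete, a small usage sketch (the helper name `trim` is hypothetical; diagnostics come back as CodeMirror-style { from, to, severity, message } ranges):

```ts
import { validateJsTemplate } from "./validator/js"

// No top-level return: one error spanning the whole snippet.
validateJsTemplate("const x = 1", {})
// -> [{ from: 0, to: 11, severity: "error", message: "Your code must return a value." }]

// Known helper called with the wrong arity: error anchored to the call site.
validateJsTemplate("return helpers.trim()", { trim: { arguments: ["value"] } })
// -> one "error" diagnostic: Function "trim" expects 1 parameters (value), but got 0.
```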

---

@@ -0,0 +1,156 @@
import { validateJsTemplate } from "../js"
import { CodeValidator } from "@/types"
describe("js validator", () => {
it("validates valid code", () => {
const text = "return 7"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("does not validate runtime errors", () => {
const text = "return a"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("validates multiline code", () => {
const text = "const foo='bar'\nreturn 123"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("allows return not being on the last line", () => {
const text = "const foo='bar'\nreturn 123\nconsole.log(foo)"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("throws on missing return", () => {
const text = "const foo='bar'\nbar='foo'"
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 0,
message: "Your code must return a value.",
severity: "error",
to: 25,
},
])
})
it("checks that returns are at top level", () => {
const text = `
function call(){
return 1
}`
const validators = {}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 0,
message: "Your code must return a value.",
severity: "error",
to: text.length,
},
])
})
describe("helpers", () => {
const validators: CodeValidator = {
helperFunction: {
arguments: ["a", "b", "c"],
},
}
it("validates helpers with valid params", () => {
const text = "return helpers.helperFunction(1, 99, 'a')"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([])
})
it("throws on too few params", () => {
const text = "return helpers.helperFunction(100)"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 7,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`,
severity: "error",
to: 34,
},
])
})
it("throws on too many params", () => {
const text = "return helpers.helperFunction( 1, 99, 'a', 100)"
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 7,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 4.`,
severity: "error",
to: 47,
},
])
})
it("validates helpers on inner functions", () => {
const text = `function call(){
return helpers.helperFunction(1, 99)
}
return call()`
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 46,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 2.`,
severity: "error",
to: 75,
},
])
})
it("validates multiple helpers", () => {
const text =
"return helpers.helperFunction(1, 99, 'a') + helpers.helperFunction(1) + helpers.another(1) + helpers.another()"
const validators: CodeValidator = {
helperFunction: {
arguments: ["a", "b", "c"],
},
another: { arguments: [] },
}
const result = validateJsTemplate(text, validators)
expect(result).toEqual([
{
from: 44,
message: `Function "helperFunction" expects 3 parameters (a, b, c), but got 1.`,
severity: "error",
to: 69,
},
{
from: 72,
message: `Function "another" expects 0 parameters (), but got 1.`,
severity: "error",
to: 90,
},
])
})
})
})

---

@@ -377,6 +377,7 @@
         value={jsValue ? decodeJSBinding(jsValue) : jsValue}
         on:change={onChangeJSValue}
         {completions}
+        {validations}
         mode={EditorModes.JS}
         bind:getCaretPosition
         bind:insertAtPos

---

@@ -8,6 +8,7 @@ import {
   UIComponentError,
   ComponentDefinition,
   DependsOnComponentSetting,
+  Screen,
 } from "@budibase/types"
 import { queries } from "./queries"
 import { views } from "./views"
@@ -66,6 +67,7 @@ export const screenComponentErrorList = derived(
     if (!$selectedScreen) {
       return []
     }
+    const screen = $selectedScreen

     const datasources = {
       ...reduceBy("_id", $tables.list),
@@ -79,7 +81,9 @@ export const screenComponentErrorList = derived(
     const errors: UIComponentError[] = []
     function checkComponentErrors(component: Component, ancestors: string[]) {
-      errors.push(...getInvalidDatasources(component, datasources, definitions))
+      errors.push(
+        ...getInvalidDatasources(screen, component, datasources, definitions)
+      )
       errors.push(...getMissingRequiredSettings(component, definitions))
       errors.push(...getMissingAncestors(component, definitions, ancestors))
@@ -95,6 +99,7 @@ export const screenComponentErrorList = derived(
 )

 function getInvalidDatasources(
+  screen: Screen,
   component: Component,
   datasources: Record<string, any>,
   definitions: Record<string, ComponentDefinition>

---

@@ -7,6 +7,7 @@ import {
   CreateRowStepOutputs,
   FieldType,
   FilterCondition,
+  AutomationStepStatus,
 } from "@budibase/types"
 import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@@ -560,5 +561,25 @@ describe("Attempt to run a basic loop automation", () => {
         status: "stopped",
       })
     })
+
+    it("should not fail if queryRows returns nothing", async () => {
+      const table = await config.api.table.save(basicTable())
+      const results = await createAutomationBuilder(config)
+        .onAppAction()
+        .queryRows({
+          tableId: table._id!,
+        })
+        .loop({
+          option: LoopStepType.ARRAY,
+          binding: "{{ steps.1.rows }}",
+        })
+        .serverLog({ text: "Message {{loop.currentItem}}" })
+        .test({ fields: {} })
+
+      expect(results.steps[1].outputs.success).toBe(true)
+      expect(results.steps[1].outputs.status).toBe(
+        AutomationStepStatus.NO_ITERATIONS
+      )
+    })
   })
 })

---

@@ -40,21 +40,17 @@ function loggingArgs(job: AutomationJob) {
 }

 export async function processEvent(job: AutomationJob) {
-  return tracer.trace(
-    "processEvent",
-    { resource: "automation" },
-    async span => {
+  return tracer.trace("processEvent", async span => {
     const appId = job.data.event.appId!
     const automationId = job.data.automation._id!
-    span?.addTags({
+    span.addTags({
       appId,
       automationId,
       job: {
         id: job.id,
         name: job.name,
         attemptsMade: job.attemptsMade,
-        opts: {
         attempts: job.opts.attempts,
         priority: job.opts.priority,
         delay: job.opts.delay,
@@ -68,11 +64,11 @@ export async function processEvent(job: AutomationJob) {
         stackTraceLimit: job.opts.stackTraceLimit,
         preventParsingData: job.opts.preventParsingData,
       },
-      },
     })

     const task = async () => {
       try {
+        return await tracer.trace("task", async () => {
         if (isCronTrigger(job.data.automation) && !job.data.event.timestamp) {
           // Requires the timestamp at run time
           job.data.event.timestamp = Date.now()
@@ -81,25 +77,19 @@ export async function processEvent(job: AutomationJob) {
         console.log("automation running", ...loggingArgs(job))

         const runFn = () => Runner.run(job)
-        const result = await quotas.addAutomation(runFn, {
-          automationId,
-        })
+        const result = await quotas.addAutomation(runFn, { automationId })
         console.log("automation completed", ...loggingArgs(job))
         return result
+        })
       } catch (err) {
-        span?.addTags({ error: true })
-        console.error(
-          `automation was unable to run`,
-          err,
-          ...loggingArgs(job)
-        )
+        span.addTags({ error: true })
+        console.error(`automation was unable to run`, err, ...loggingArgs(job))
         return { err }
       }
     }

     return await context.doInAutomationContext({ appId, automationId, task })
-    }
-  )
+  })
 }

 export async function updateTestHistory(
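The change above drops the options object in favor of dd-trace's two-argument overload, where the callback receives the active span and the span closes when the returned promise settles. A minimal sketch of the pattern (names here are illustrative, not part of this commit):

```ts
import tracer from "dd-trace"

// Wrap an async unit of work in a span; the span is tagged up front
// and finished automatically when the promise resolves or rejects.
async function processJob(jobId: string): Promise<string> {
  return await tracer.trace("processJob", async span => {
    span.addTags({ jobId })
    // ... the actual work would happen here ...
    return "done"
  })
}
```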

---

@@ -62,12 +62,16 @@ const SCHEMA: Integration = {
           type: DatasourceFieldType.STRING,
           required: true,
         },
+        rev: {
+          type: DatasourceFieldType.STRING,
+          required: true,
+        },
       },
     },
   },
 }

-class CouchDBIntegration implements IntegrationBase {
+export class CouchDBIntegration implements IntegrationBase {
   private readonly client: Database

   constructor(config: CouchDBConfig) {
@@ -82,7 +86,8 @@ class CouchDBIntegration implements IntegrationBase {
       connected: false,
     }
     try {
-      response.connected = await this.client.exists()
+      await this.client.allDocs({ limit: 1 })
+      response.connected = true
     } catch (e: any) {
       response.error = e.message as string
     }
@@ -99,13 +104,9 @@ class CouchDBIntegration implements IntegrationBase {
   }

   async read(query: { json: string | object }) {
-    const parsed = this.parse(query)
-    const params = {
-      include_docs: true,
-      ...parsed,
-    }
+    const params = { include_docs: true, ...this.parse(query) }
     const result = await this.client.allDocs(params)
-    return result.rows.map(row => row.doc)
+    return result.rows.map(row => row.doc!)
   }

   async update(query: { json: string | object }) {
@@ -121,8 +122,8 @@ class CouchDBIntegration implements IntegrationBase {
     return await this.client.get(query.id)
   }

-  async delete(query: { id: string }) {
-    return await this.client.remove(query.id)
+  async delete(query: { id: string; rev: string }) {
+    return await this.client.remove(query.id, query.rev)
   }
 }
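The new `delete` signature reflects how CouchDB actually works: removing a document requires its current `_rev`, so callers must fetch or remember the revision first. A hedged sketch of the new call pattern (URL and database name are placeholders):

```ts
import { CouchDBIntegration } from "./couchdb"

const couchdb = new CouchDBIntegration({
  url: "http://localhost:5984", // placeholder
  database: "example", // placeholder
})

async function createThenDelete() {
  // create() responds with the document id and its current revision...
  const { id, rev } = await couchdb.create({ json: JSON.stringify({ test: 1 }) })
  // ...which the updated delete() now requires alongside the id.
  await couchdb.delete({ id, rev })
}
```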

---

@@ -1,84 +1,87 @@
-jest.mock("@budibase/backend-core", () => {
-  const core = jest.requireActual("@budibase/backend-core")
-  return {
-    ...core,
-    db: {
-      ...core.db,
-      DatabaseWithConnection: function () {
-        return {
-          allDocs: jest.fn().mockReturnValue({ rows: [] }),
-          put: jest.fn(),
-          get: jest.fn().mockReturnValue({ _rev: "a" }),
-          remove: jest.fn(),
-        }
-      },
-    },
-  }
-})
-
-import { default as CouchDBIntegration } from "../couchdb"
-
-class TestConfiguration {
-  integration: any
-
-  constructor(
-    config: any = { url: "http://somewhere", database: "something" }
-  ) {
-    this.integration = new CouchDBIntegration.integration(config)
-  }
-}
+import { env } from "@budibase/backend-core"
+import { CouchDBIntegration } from "../couchdb"
+import { generator } from "@budibase/backend-core/tests"
+
+function couchSafeID(): string {
+  // CouchDB IDs must start with a letter, so we prepend an 'a'.
+  return `a${generator.guid()}`
+}
+
+function doc(data: Record<string, any>): string {
+  return JSON.stringify({ _id: couchSafeID(), ...data })
+}
+
+function query(data?: Record<string, any>): { json: string } {
+  return { json: doc(data || {}) }
+}

 describe("CouchDB Integration", () => {
-  let config: any
+  let couchdb: CouchDBIntegration

   beforeEach(() => {
-    config = new TestConfiguration()
+    couchdb = new CouchDBIntegration({
+      url: env.COUCH_DB_URL,
+      database: couchSafeID(),
+    })
   })

-  it("calls the create method with the correct params", async () => {
-    const doc = {
-      test: 1,
-    }
-    await config.integration.create({
-      json: JSON.stringify(doc),
-    })
-    expect(config.integration.client.put).toHaveBeenCalledWith(doc)
-  })
+  it("successfully connects", async () => {
+    const { connected } = await couchdb.testConnection()
+    expect(connected).toBe(true)
+  })
+
+  it("can create documents", async () => {
+    const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
+    expect(id).toBeDefined()
+    expect(ok).toBe(true)
+    expect(rev).toBeDefined()
+  })
+
+  it("can read created documents", async () => {
+    const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
+    expect(id).toBeDefined()
+    expect(ok).toBe(true)
+    expect(rev).toBeDefined()
+
+    const docs = await couchdb.read(query())
+    expect(docs).toEqual([
+      {
+        _id: id,
+        _rev: rev,
+        test: 1,
+        createdAt: expect.any(String),
+        updatedAt: expect.any(String),
+      },
+    ])
+  })

-  it("calls the read method with the correct params", async () => {
-    const doc = {
-      name: "search",
-    }
-    await config.integration.read({
-      json: JSON.stringify(doc),
-    })
-    expect(config.integration.client.allDocs).toHaveBeenCalledWith({
-      include_docs: true,
-      name: "search",
-    })
-  })
+  it("can update documents", async () => {
+    const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
+    expect(ok).toBe(true)
+
+    const { id: newId, rev: newRev } = await couchdb.update(
+      query({ _id: id, _rev: rev, test: 2 })
+    )
+
+    const docs = await couchdb.read(query())
+    expect(docs).toEqual([
+      {
+        _id: newId,
+        _rev: newRev,
+        test: 2,
+        createdAt: expect.any(String),
+        updatedAt: expect.any(String),
+      },
+    ])
+  })

-  it("calls the update method with the correct params", async () => {
-    const doc = {
-      _id: "1234",
-      name: "search",
-    }
-    await config.integration.update({
-      json: JSON.stringify(doc),
-    })
-    expect(config.integration.client.put).toHaveBeenCalledWith({
-      ...doc,
-      _rev: "a",
-    })
-  })
-
-  it("calls the delete method with the correct params", async () => {
-    const id = "1234"
-    await config.integration.delete({ id })
-    expect(config.integration.client.remove).toHaveBeenCalledWith(id)
-  })
+  it("can delete documents", async () => {
+    const { id, ok, rev } = await couchdb.create(query({ test: 1 }))
+    expect(ok).toBe(true)
+
+    const deleteResponse = await couchdb.delete({ id, rev })
+    expect(deleteResponse.ok).toBe(true)
+
+    const docs = await couchdb.read(query())
+    expect(docs).toBeEmpty()
+  })
 })

---

@@ -68,8 +68,12 @@ function getLoopIterable(step: LoopStep): any[] {
   let input = step.inputs.binding

   if (option === LoopStepType.ARRAY && typeof input === "string") {
+    if (input === "") {
+      input = []
+    } else {
       input = JSON.parse(input)
+    }
   }

   if (option === LoopStepType.STRING && Array.isArray(input)) {
     input = input.join(",")
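The guard above exists because `JSON.parse("")` throws, so a loop bound to a step that returned no rows used to fail before its first iteration; an empty binding now yields an empty iterable and the loop exits cleanly with the NO_ITERATIONS status. A standalone sketch of the fixed behavior (`parseArrayBinding` is an illustrative name, not from this commit):

```ts
function parseArrayBinding(input: string): unknown[] {
  if (input === "") {
    return [] // JSON.parse("") would throw "Unexpected end of JSON input"
  }
  return JSON.parse(input)
}

parseArrayBinding("")          // -> []
parseArrayBinding("[1, 2, 3]") // -> [1, 2, 3]
```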
@@ -310,11 +314,8 @@
   }

   async execute(): Promise<AutomationResults> {
-    return tracer.trace(
-      "Orchestrator.execute",
-      { resource: "automation" },
-      async span => {
-        span?.addTags({ appId: this.appId, automationId: this.automation._id })
+    return await tracer.trace("execute", async span => {
+      span.addTags({ appId: this.appId, automationId: this.automation._id })

       const job = cloneDeep(this.job)
       delete job.data.event.appId
@@ -382,15 +383,14 @@
       }

       return result
-      }
-    )
+    })
   }

   private async executeSteps(
     ctx: AutomationContext,
     steps: AutomationStep[]
   ): Promise<AutomationStepResult[]> {
-    return tracer.trace("Orchestrator.executeSteps", async () => {
+    return await tracer.trace("executeSteps", async () => {
       let stepIndex = 0
       const results: AutomationStepResult[] = []
@@ -446,6 +446,7 @@
     step: LoopStep,
     stepToLoop: AutomationStep
   ): Promise<AutomationStepResult> {
+    return await tracer.trace("executeLoopStep", async span => {
       await processObject(step.inputs, prepareContext(ctx))

       const maxIterations = getLoopMaxIterations(step)
@@ -455,6 +456,10 @@
       try {
         iterable = getLoopIterable(step)
       } catch (err) {
+        span.addTags({
+          status: AutomationStepStatus.INCORRECT_TYPE,
+          iterations,
+        })
         return stepFailure(stepToLoop, {
           status: AutomationStepStatus.INCORRECT_TYPE,
         })
@@ -464,6 +469,10 @@
       const currentItem = iterable[iterations]

       if (iterations === maxIterations) {
+        span.addTags({
+          status: AutomationStepStatus.MAX_ITERATIONS,
+          iterations,
+        })
         return stepFailure(stepToLoop, {
           status: AutomationStepStatus.MAX_ITERATIONS,
           iterations,
@@ -471,6 +480,10 @@
       }

       if (matchesLoopFailureCondition(step, currentItem)) {
+        span.addTags({
+          status: AutomationStepStatus.FAILURE_CONDITION,
+          iterations,
+        })
         return stepFailure(stepToLoop, {
           status: AutomationStepStatus.FAILURE_CONDITION,
         })
@@ -483,18 +496,21 @@
       }

       const status =
-        iterations === 0 ? AutomationStatus.NO_CONDITION_MET : undefined
+        iterations === 0 ? AutomationStepStatus.NO_ITERATIONS : undefined
       return stepSuccess(stepToLoop, { status, iterations, items })
+    })
   }

   private async executeBranchStep(
     ctx: AutomationContext,
     step: BranchStep
   ): Promise<AutomationStepResult[]> {
+    return await tracer.trace("executeBranchStep", async span => {
       const { branches, children } = step.inputs
       for (const branch of branches) {
         if (await branchMatches(ctx, branch)) {
+          span.addTags({ branchName: branch.name, branchId: branch.id })
           return [
             stepSuccess(step, {
               branchName: branch.name,
@@ -506,14 +522,16 @@
         }
       }

+      span.addTags({ status: AutomationStatus.NO_CONDITION_MET })
       return [stepFailure(step, { status: AutomationStatus.NO_CONDITION_MET })]
+    })
   }

   private async executeStep(
     ctx: AutomationContext,
     step: Readonly<AutomationStep>
   ): Promise<AutomationStepResult> {
-    return tracer.trace("Orchestrator.executeStep", async span => {
+    return await tracer.trace(step.stepId, async span => {
       span.addTags({
         step: {
           stepId: step.stepId,
@@ -524,6 +542,7 @@
           internal: step.internal,
           deprecated: step.deprecated,
         },
+        inputsKeys: Object.keys(step.inputs),
       })

       if (this.stopped) {
@@ -557,6 +576,7 @@
         ;(outputs as any).status = AutomationStatus.STOPPED
       }

+      span.addTags({ outputsKeys: Object.keys(outputs) })
       return stepSuccess(step, outputs, inputs)
     })
   }

---

@@ -62,7 +62,6 @@
     "koa-body": "4.2.0",
     "koa-compress": "4.0.1",
     "koa-passport": "4.1.4",
-    "koa-redis": "^4.0.1",
     "koa-send": "5.0.1",
     "koa-session": "5.13.1",
     "koa-static": "5.0.0",

---

@@ -311,7 +311,7 @@ describe("/api/global/auth", () => {
     })
   })

-  describe.skip("GET /api/global/auth/:tenantId/oidc/callback", () => {
+  describe("GET /api/global/auth/:tenantId/oidc/callback", () => {
     it("logs in", async () => {
       const email = `${generator.guid()}@example.com`

---

@@ -4,7 +4,7 @@ if (process.env.DD_APM_ENABLED) {
 // need to load environment first
 import env from "./environment"

-import Application, { Middleware } from "koa"
+import Application from "koa"
 import { bootstrap } from "global-agent"
 import * as db from "./db"
 import { sdk as proSdk } from "@budibase/pro"
@@ -20,7 +20,6 @@ import {
   cache,
   features,
 } from "@budibase/backend-core"
-import RedisStore from "koa-redis"

 db.init()
 import koaBody from "koa-body"
@@ -53,28 +52,7 @@ app.proxy = true
 app.use(handleScimBody)
 app.use(koaBody({ multipart: true }))

-const sessionMiddleware: Middleware = async (ctx: any, next: any) => {
-  const redisClient = await new redis.Client(
-    redis.utils.Databases.SESSIONS
-  ).init()
-  return koaSession(
-    {
-      // @ts-ignore
-      store: new RedisStore({ client: redisClient.getClient() }),
-      key: "koa:sess",
-      maxAge: 86400000, // one day
-      httpOnly: true,
-      secure: process.env.NODE_ENV === "production",
-      sameSite: "strict",
-      rolling: true,
-      renew: true,
-    },
-    app
-  )(ctx, next)
-}
-
-app.use(sessionMiddleware)
+app.use(koaSession(app))
 app.use(middleware.correlation)
 app.use(middleware.pino)
 app.use(middleware.ip)

---

@@ -1 +0,0 @@
-declare module "koa-redis" {}

---

@@ -1,10 +1,18 @@
 #!/bin/bash
-if [[ $TARGETARCH == arm* ]] ;
-then
+
+if [[ $TARGETBUILD == "aas" ]]; then
+  echo "A aas-compatible version of Minio is already installed."
+  exit 0
+fi
+
+if [[ $TARGETARCH == arm* ]]; then
   echo "INSTALLING ARM64 MINIO"
+  rm -f minio
   wget https://dl.min.io/server/minio/release/linux-arm64/minio
 else
   echo "INSTALLING AMD64 MINIO"
+  rm -f minio
   wget https://dl.min.io/server/minio/release/linux-amd64/minio
 fi
 chmod +x minio

scripts/resources/minio (new file, +3)

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:63db3aa3c2299ebaf13b46c64523a589bd5bf272f9e971d17f1eaa55f6f1fd79
size 118595584

---

@@ -2695,13 +2695,6 @@
   dependencies:
     regenerator-runtime "^0.14.0"

-"@babel/runtime@^7.8.3":
-  version "7.26.9"
-  resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.9.tgz#aa4c6facc65b9cb3f87d75125ffd47781b475433"
-  integrity sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg==
-  dependencies:
-    regenerator-runtime "^0.14.0"
-
 "@babel/template@^7.22.15", "@babel/template@^7.22.5", "@babel/template@^7.25.9", "@babel/template@^7.3.3":
   version "7.25.9"
   resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016"
@@ -9048,14 +9041,7 @@ co-body@^5.1.1:
     raw-body "^2.2.0"
     type-is "^1.6.14"

-co-wrap-all@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/co-wrap-all/-/co-wrap-all-1.0.0.tgz#370ae3e8333510a53f6b2f7fdfbe4568a11b7ecf"
-  integrity sha512-aru6gLi2vTUazr+MxVm3Rv6ST7/EKtFj9BrfkcOrbCO2Qv6LqJdE71m88HhHiBEviKw/ucVrwoGLrq2xHpOsJA==
-  dependencies:
-    co "^4.0.0"
-
-co@^4.0.0, co@^4.6.0:
+co@^4.6.0:
   version "4.6.0"
   resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184"
   integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==
@@ -13191,7 +13177,7 @@ ioredis@5.3.2:
     redis-parser "^3.0.0"
     standard-as-callback "^2.1.0"

-ioredis@^4.14.1, ioredis@^4.28.5:
+ioredis@^4.28.5:
   version "4.28.5"
   resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.5.tgz#5c149e6a8d76a7f8fa8a504ffc85b7d5b6797f9f"
   integrity sha512-3GYo0GJtLqgNXj4YhrisLaNNvWSNwSS2wS4OELGfGxH8I69+XfNdnmV1AyN+ZqMh0i7eX+SWjrwFKDBDgfBC1A==
@@ -14691,16 +14677,6 @@ koa-pino-logger@4.0.0:
   dependencies:
     pino-http "^6.5.0"

-koa-redis@^4.0.1:
-  version "4.0.1"
-  resolved "https://registry.yarnpkg.com/koa-redis/-/koa-redis-4.0.1.tgz#57ac1b46d9ab851221a9f4952c1e8d4bf289db40"
-  integrity sha512-o2eTVNo1NBnloeUGhHed5Q2ZvJSLpUEj/+E1/7oH5EmH8WuQ+QLdl/VawkshxdFQ47W1p6V09lM3hCTu7D0YnQ==
-  dependencies:
-    "@babel/runtime" "^7.8.3"
-    co-wrap-all "^1.0.0"
-    debug "^4.1.1"
-    ioredis "^4.14.1"
-
 koa-router@^10.0.0:
   version "10.1.1"
   resolved "https://registry.yarnpkg.com/koa-router/-/koa-router-10.1.1.tgz#20809f82648518b84726cd445037813cd99f17ff"