Merge branch 'master' into chore/fix-npm-vulnerabilities

commit 8da83dac58
@@ -214,6 +214,7 @@ jobs:
echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
+echo "base_commit_excluding_merges=$base_commit_excluding_merges"
@@ -230,7 +231,7 @@ jobs:
base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'

-any_commit=$(git log --no-merges $base_commit...$pro_commit)
+any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)

if [ -n "$any_commit" ]; then
  echo $any_commit
@@ -96,10 +96,13 @@ EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]

FROM base as runner
+ARG TARGETARCH
+ENV TARGETARCH $TARGETARCH

ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
+EXPOSE 4984

RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
@@ -125,7 +128,12 @@ ADD clouseau/log4j.properties clouseau/clouseau.ini ./
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/

+# setup SQS
+WORKDIR /opt/sqs
+ADD sqs ./
+RUN chmod +x ./install.sh && ./install.sh
+
WORKDIR /
ADD runner.sh ./bbcouch-runner.sh
-RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau
-CMD ["./bbcouch-runner.sh"]
+RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
+CMD ["./bbcouch-runner.sh"]
@@ -1,139 +0,0 @@
# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
#
# Everything in this `base` image is adapted from the official `couchdb` image's
# Dockerfile. Only modifications related to upgrading from Debian bullseye to
# bookworm have been included. The `runner` image contains Budibase's
# customisations to the image, e.g. adding Clouseau.
FROM node:20-slim AS base

# Add CouchDB user account to make sure the IDs are assigned consistently
RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb

# be sure GPG and apt-transport-https are available and functional
RUN set -ex; \
    apt-get update; \
    apt-get install -y --no-install-recommends \
        apt-transport-https \
        ca-certificates \
        dirmngr \
        gnupg \
    ; \
    rm -rf /var/lib/apt/lists/*

# grab tini for signal handling and zombie reaping
# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
RUN set -eux; \
    apt-get update; \
    apt-get install -y --no-install-recommends tini; \
    rm -rf /var/lib/apt/lists/*; \
    tini --version

# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
ENV GPG_COUCH_KEY \
    # gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
    390EF70BB1EA12B2773962950EE62FB37A00258D
RUN set -eux; \
    apt-get update; \
    apt-get install -y curl; \
    export GNUPGHOME="$(mktemp -d)"; \
    curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
    gpg --batch --import keys.asc; \
    gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
    command -v gpgconf && gpgconf --kill all || :; \
    rm -rf "$GNUPGHOME"; \
    apt-key list; \
    apt purge -y --autoremove curl; \
    rm -rf /var/lib/apt/lists/*

ENV COUCHDB_VERSION 3.3.3

RUN . /etc/os-release; \
    echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
    tee /etc/apt/sources.list.d/couchdb.list >/dev/null

# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
RUN set -eux; \
    apt-get update; \
    \
    echo "couchdb couchdb/mode select none" | debconf-set-selections; \
    # we DO want recommends this time
    DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
        couchdb="$COUCHDB_VERSION"~bookworm \
    ; \
    # Undo symlinks to /var/log and /var/lib
    rmdir /var/lib/couchdb /var/log/couchdb; \
    rm /opt/couchdb/data /opt/couchdb/var/log; \
    mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
    chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
    chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
    # Remove file that sets logging to a file
    rm /opt/couchdb/etc/default.d/10-filelog.ini; \
    # Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
    find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
    # Setup directories and permissions for config. Technically these could be 555 and 444 respectively
    # but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
    find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
    find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
    # only local.d needs to be writable for the docker_entrypoint.sh
    chmod -f 0777 /opt/couchdb/etc/local.d; \
    # apt clean-up
    rm -rf /var/lib/apt/lists/*;

# Add configuration
COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/

COPY docker-entrypoint.sh /usr/local/bin
RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]

VOLUME /opt/couchdb/data

# 5984: Main CouchDB endpoint
# 4369: Erlang portmap daemon (epmd)
# 9100: CouchDB cluster communication port
EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]

FROM base as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH

ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
EXPOSE 4984

RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
    wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
    apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
    apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
    apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
    apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
    rm -rf /var/lib/apt/lists/

# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
    unzip clouseau-2.21.0-dist.zip && \
    mv clouseau-2.21.0 /opt/clouseau && \
    rm clouseau-2.21.0-dist.zip

WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./

# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/

# setup SQS
WORKDIR /opt/sqs
ADD sqs ./
RUN chmod +x ./install.sh && ./install.sh

WORKDIR /
ADD runner.v2.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs
CMD ["./bbcouch-runner.sh"]
@@ -70,9 +70,12 @@ sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouse
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &

# Start CouchDB.
-/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
+/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &

-# Wati for CouchDB to start up.
+# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
+/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &
+
+# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
    echo 'Waiting for CouchDB to start...';
    sleep 5;
@@ -82,4 +85,4 @@ done
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
-sleep infinity
+sleep infinity
@@ -1,88 +0,0 @@
#!/bin/bash

DATA_DIR=${DATA_DIR:-/data}
COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}

mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch

echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
    # Azure AppService uses /home for persistent data & SSH on port 2222
    DATA_DIR="${DATA_DIR:-/home}"
    WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
    mkdir -p $DATA_DIR/{search,minio,couch}
    mkdir -p $DATA_DIR/couch/{dbs,views}
    chown -R couchdb:couchdb $DATA_DIR/couch/
    apt update
    apt-get install -y openssh-server
    echo "root:Docker!" | chpasswd
    mkdir -p /tmp
    chmod +x /tmp/ssh_setup.sh \
        && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
    cp /etc/sshd_config /etc/ssh/sshd_config
    /etc/init.d/ssh restart
    sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
    sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "single" ]]; then
    # In the single image build, the Dockerfile specifies /data as a volume
    # mount, so we use that for all persistent data.
    sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
    sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then
    # We remove the database_dir and view_index_dir settings from the local.ini
    # in docker-compose because it will default to /opt/couchdb/data which is what
    # our docker-compose was using prior to us switching to using our own CouchDB
    # image.
    sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
    sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
    sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then
    # In Kubernetes the directory /opt/couchdb/data has a persistent volume
    # mount for storing database data.
    sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini

    # We remove the database_dir and view_index_dir settings from the local.ini
    # in Kubernetes because it will default to /opt/couchdb/data which is what
    # our Helm chart was using prior to us switching to using our own CouchDB
    # image.
    sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
    sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini

    # We remove the -name setting from the vm.args file in Kubernetes because
    # it will default to the pod FQDN, which is what's required for clustering
    # to work.
    sed -i "s/^-name .*$//g" /opt/couchdb/etc/vm.args
else
    # For all other builds, we use /data for persistent data.
    sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
    sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini

# Start Clouseau. Budibase won't function correctly without Clouseau running, it
# powers the search API endpoints which are used to do all sorts, including
# populating app grids.
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &

# Start CouchDB.
/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 &

# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues.
/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 &

# Wait for CouchDB to start up.
while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do
    echo 'Waiting for CouchDB to start...';
    sleep 5;
done

# CouchDB needs the `_users` and `_replicator` databases to exist before it will
# function correctly, so we create them here.
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users
curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator
sleep infinity
@@ -1,6 +1,6 @@
{
  "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.5",
+  "version": "2.29.12",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@@ -22,4 +22,4 @@
    "loadEnvFiles": false
  }
}
-}
+}
@@ -33,10 +33,10 @@
  "scripts": {
    "get-past-client-version": "node scripts/getPastClientVersion.js",
    "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
-    "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
-    "build:apps": "yarn build --scope @budibase/server --scope @budibase/worker",
+    "build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
+    "build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
+    "build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
    "build:cli": "yarn build --scope @budibase/cli",
-    "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
    "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
    "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
    "check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",
@@ -77,7 +77,6 @@
    "build:docker:single:sqs": "./scripts/build-single-image-sqs.sh",
    "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 --push ./hosting/couchdb",
-    "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.3.3-sqs --push ./hosting/couchdb",
    "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
    "release:helm": "node scripts/releaseHelmChart",
    "env:multi:enable": "lerna run --stream env:multi:enable",
@ -1,6 +1,7 @@
|
|||
import env from "../../environment"
|
||||
|
||||
export const getCouchInfo = (connection?: string) => {
|
||||
// clean out any auth credentials
|
||||
const urlInfo = getUrlInfo(connection)
|
||||
let username
|
||||
let password
|
||||
|
@@ -23,9 +24,16 @@ export const getCouchInfo = (connection?: string) => {
    throw new Error("CouchDB password not set")
  }
  const authCookie = Buffer.from(`${username}:${password}`).toString("base64")
+  let sqlUrl = env.COUCH_DB_SQL_URL
+  if (!sqlUrl && urlInfo.url) {
+    const parsed = new URL(urlInfo.url)
+    // attempt to connect on default port
+    sqlUrl = urlInfo.url.replace(parsed.port, "4984")
+  }
  return {
    url: urlInfo.url!,
-    sqlUrl: env.COUCH_DB_SQL_URL,
+    // clean out any auth credentials
+    sqlUrl: getUrlInfo(sqlUrl).url,
    auth: {
      username: username,
      password: password,
@@ -0,0 +1,22 @@
import env from "../../environment"
import { getCouchInfo } from "../couch"

const MAIN_COUCH_URL = "http://user:test@localhost:5984"

describe("connections", () => {
  beforeAll(() => {
    env._set("COUCH_DB_SQL_URL", "https://user:test@localhost:4984")
  })

  it("should strip URL credentials", () => {
    const response = getCouchInfo(MAIN_COUCH_URL)
    expect(response.url).toBe("http://localhost:5984")
    expect(response.sqlUrl).toBe("https://localhost:4984")
  })

  it("should return separate auth credentials", () => {
    const response = getCouchInfo(MAIN_COUCH_URL)
    expect(response.auth.username).toBe("user")
    expect(response.auth.password).toBe("test")
  })
})
@@ -200,6 +200,9 @@ const environment = {
  },
  ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
  DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
+  BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
+  BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
+  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
}

// clean up any environment variable edge cases
@@ -221,7 +221,7 @@ export class UserDB {
    const tenantId = getTenantId()
    const db = getGlobalDB()

-    let { email, _id, userGroups = [], roles } = user
+    const { email, _id, userGroups = [], roles } = user

    if (!email && !_id) {
      throw new Error("_id or email is required")
@@ -231,11 +231,10 @@ export class UserDB {
    if (_id) {
      // try to get existing user from db
      try {
-        dbUser = (await db.get(_id)) as User
-        if (email && dbUser.email !== email) {
-          throw "Email address cannot be changed"
+        dbUser = await usersCore.getById(_id)
+        if (email && dbUser.email !== email && !opts.allowChangingEmail) {
+          throw new Error("Email address cannot be changed")
        }
        email = dbUser.email
      } catch (e: any) {
        if (e.status === 404) {
          // do nothing, save this new user with the id specified - required for SSO auth
@@ -271,13 +270,13 @@ export class UserDB {

    // make sure we set the _id field for a new user
    // Also if this is a new user, associate groups with them
-    let groupPromises = []
+    const groupPromises = []
    if (!_id) {
-      _id = builtUser._id!
-
      if (userGroups.length > 0) {
        for (let groupId of userGroups) {
-          groupPromises.push(UserDB.groups.addUsers(groupId, [_id!]))
+          groupPromises.push(
+            UserDB.groups.addUsers(groupId, [builtUser._id!])
+          )
        }
      }
    }
@@ -288,6 +287,11 @@ export class UserDB {
    builtUser._rev = response.rev

    await eventHelpers.handleSaveEvents(builtUser, dbUser)
+    if (dbUser && builtUser.email !== dbUser.email) {
+      // Remove the platform email reference if the email changed
+      await platform.users.removeUser({ email: dbUser.email } as User)
+    }
+
    await platform.users.addUser(
      tenantId,
      builtUser._id!,
@@ -0,0 +1,188 @@
import { User, UserStatus } from "@budibase/types"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import { UserDB } from "../db"
import { searchExistingEmails } from "../lookup"

const db = UserDB

const config = new DBTestConfiguration()

const quotas = {
  addUsers: jest
    .fn()
    .mockImplementation(
      (_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
        cb && cb()
    ),
  removeUsers: jest
    .fn()
    .mockImplementation(
      (_change: number, _creatorsChange: number, cb?: () => Promise<any>) =>
        cb && cb()
    ),
}
const groups = {
  addUsers: jest.fn(),
  getBulk: jest.fn(),
  getGroupBuilderAppIds: jest.fn(),
}
const features = { isSSOEnforced: jest.fn(), isAppBuildersEnabled: jest.fn() }

describe("UserDB", () => {
  beforeAll(() => {
    db.init(quotas, groups, features)
  })

  describe("save", () => {
    describe("create", () => {
      it("creating a new user will persist it", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(async () => {
          const saveUserResponse = await db.save(user)

          const persistedUser = await db.getUserByEmail(email)
          expect(persistedUser).toEqual({
            ...user,
            _id: saveUserResponse._id,
            _rev: expect.stringMatching(/^1-\w+/),
            password: expect.not.stringMatching(user.password!),
            status: UserStatus.ACTIVE,
            createdAt: Date.now(),
            updatedAt: new Date().toISOString(),
          })
        })
      })

      it("the same email cannot be used twice in the same tenant", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(() => db.save(user))

        await config.doInTenant(() =>
          expect(db.save(user)).rejects.toThrow(
            `Email already in use: '${email}'`
          )
        )
      })

      it("the same email cannot be used twice in different tenants", async () => {
        const email = generator.email({})
        const user: User = structures.users.user({
          email,
          tenantId: config.getTenantId(),
        })

        await config.doInTenant(() => db.save(user))

        config.newTenant()
        await config.doInTenant(() =>
          expect(db.save(user)).rejects.toThrow(
            `Email already in use: '${email}'`
          )
        )
      })
    })

    describe("update", () => {
      let user: User

      beforeEach(async () => {
        user = await config.doInTenant(() =>
          db.save(
            structures.users.user({
              email: generator.email({}),
              tenantId: config.getTenantId(),
            })
          )
        )
      })

      it("can update user properties", async () => {
        await config.doInTenant(async () => {
          const updatedName = generator.first()
          user.firstName = updatedName

          await db.save(user)

          const persistedUser = await db.getUserByEmail(user.email)
          expect(persistedUser).toEqual(
            expect.objectContaining({
              _id: user._id,
              email: user.email,
              firstName: updatedName,
              lastName: user.lastName,
            })
          )
        })
      })

      it("email cannot be updated by default", async () => {
        await config.doInTenant(async () => {
          await expect(
            db.save({ ...user, email: generator.email({}) })
          ).rejects.toThrow("Email address cannot be changed")
        })
      })

      it("email can be updated if specified", async () => {
        await config.doInTenant(async () => {
          const newEmail = generator.email({})

          await db.save(
            { ...user, email: newEmail },
            { allowChangingEmail: true }
          )

          const persistedUser = await db.getUserByEmail(newEmail)
          expect(persistedUser).toEqual(
            expect.objectContaining({
              _id: user._id,
              email: newEmail,
              lastName: user.lastName,
              _rev: expect.stringMatching(/^2-\w+/),
            })
          )
        })
      })

      it("updating emails frees previous emails", async () => {
        await config.doInTenant(async () => {
          const previousEmail = user.email
          const newEmail = generator.email({})
          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [previousEmail]
          )

          await db.save(
            { ...user, email: newEmail },
            { allowChangingEmail: true }
          )

          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [newEmail]
          )

          await db.save(
            structures.users.user({
              email: previousEmail,
              tenantId: config.getTenantId(),
            })
          )

          expect(await searchExistingEmails([previousEmail, newEmail])).toEqual(
            [previousEmail, newEmail]
          )
        })
      })
    })
  })
})
@@ -1,33 +1,25 @@
<script>
-  import Tooltip from "./Tooltip.svelte"
  import Icon from "../Icon/Icon.svelte"
+  import AbsTooltip from "./AbsTooltip.svelte"

  export let tooltip = ""
  export let size = "M"
  export let disabled = true
-
-  let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class:container={!!tooltip}>
  <slot />
  {#if tooltip}
    <div class="icon-container">
-      <div
-        class="icon"
-        class:icon-small={size === "M" || size === "S"}
-        on:mouseover={() => (showTooltip = true)}
-        on:mouseleave={() => (showTooltip = false)}
-        on:focus
-      >
-        <Icon name="InfoOutline" size="S" {disabled} />
-      </div>
-      {#if showTooltip}
-        <div class="tooltip">
-          <Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
-        </div>
-      {/if}
+      <AbsTooltip text={tooltip}>
+        <div
+          class="icon"
+          class:icon-small={size === "M" || size === "S"}
+          on:focus
+        >
+          <Icon name="InfoOutline" size="S" {disabled} hoverable />
+        </div>
+      </AbsTooltip>
    </div>
  {/if}
</div>
@@ -44,14 +36,6 @@
    margin-left: 5px;
    margin-right: 5px;
  }
-  .tooltip {
-    position: absolute;
-    display: flex;
-    justify-content: center;
-    top: 15px;
-    z-index: 200;
-    width: 160px;
-  }
  .icon {
    transform: scale(0.75);
  }
@@ -112,7 +112,7 @@
  This action cannot be undone.
</ConfirmDialog>

-<Modal bind:this={testDataModal} width="30%">
+<Modal bind:this={testDataModal} width="30%" zIndex={5}>
  <TestDataModal />
</Modal>
@@ -148,7 +148,6 @@
  .header.scrolling {
    background: var(--background);
    border-bottom: var(--border-light);
    border-left: var(--border-light);
-    z-index: 1;
  }
@@ -8,11 +8,63 @@
  import { automationStore, selectedAutomation } from "stores/builder"
  import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
  import { cloneDeep } from "lodash/fp"
+  import { memo } from "@budibase/frontend-core"
+  import { AutomationEventType } from "@budibase/types"

  let failedParse = null
  let trigger = {}
  let schemaProperties = {}

+  const rowTriggers = [
+    AutomationEventType.ROW_DELETE,
+    AutomationEventType.ROW_UPDATE,
+    AutomationEventType.ROW_SAVE,
+  ]
+
+  /**
+   * Parses the automation test data and ensures it is valid
+   * @param {object} testData contains all config for the test
+   * @returns {object} valid testData
+   * @todo Parse *all* data for each trigger type and relay adequate feedback
+   */
+  const parseTestData = testData => {
+    const autoTrigger = $selectedAutomation?.definition?.trigger
+    const { tableId } = autoTrigger?.inputs || {}
+
+    // Ensure the tableId matches the trigger table for row trigger automations
+    if (
+      rowTriggers.includes(autoTrigger?.event) &&
+      testData?.row?.tableId !== tableId
+    ) {
+      return {
+        // Reset Core fields
+        row: { tableId },
+        meta: {},
+        id: "",
+        revision: "",
+      }
+    } else {
+      // Leave the core data as it is
+      return testData
+    }
+  }
+
+  /**
+   * Before executing a test run, relay if an automation is in a valid state
+   * @param {object} trigger The automation trigger config
+   * @returns {boolean} validation status
+   * @todo Parse *all* trigger types and relay adequate feedback
+   */
+  const isTriggerValid = trigger => {
+    if (rowTriggers.includes(trigger?.event) && !trigger?.inputs?.tableId) {
+      return false
+    }
+    return true
+  }
+
+  const memoTestData = memo(parseTestData($selectedAutomation.testData))
+  $: memoTestData.set(parseTestData($selectedAutomation.testData))

  $: {
    // clone the trigger so we're not mutating the reference
    trigger = cloneDeep($selectedAutomation.definition.trigger)
@@ -20,34 +72,45 @@
    // get the outputs so we can define the fields
    let schema = Object.entries(trigger.schema?.outputs?.properties || {})

-    if (trigger?.event === "app:trigger") {
+    if (trigger?.event === AutomationEventType.APP_TRIGGER) {
      schema = [["fields", { customType: "fields" }]]
    }

    schemaProperties = schema
  }

-  // check to see if there is existing test data in the store
-  $: testData = $selectedAutomation.testData || {}
-
  // Check the schema to see if required fields have been entered
-  $: isError = !trigger.schema.outputs.required.every(
-    required => testData[required] || required !== "row"
-  )
+  $: isError =
+    !isTriggerValid(trigger) ||
+    !trigger.schema.outputs.required.every(
+      required => $memoTestData?.[required] || required !== "row"
+    )

  function parseTestJSON(e) {
+    let jsonUpdate
+
    try {
-      const obj = JSON.parse(e.detail)
+      jsonUpdate = JSON.parse(e.detail)
      failedParse = null
-      automationStore.actions.addTestDataToAutomation(obj)
    } catch (e) {
      failedParse = "Invalid JSON"
+      return false
    }
+
+    if (rowTriggers.includes(trigger?.event)) {
+      const tableId = trigger?.inputs?.tableId
+
+      // Reset the tableId as it must match the trigger
+      if (jsonUpdate?.row?.tableId !== tableId) {
+        jsonUpdate.row.tableId = tableId
+      }
+    }
+
+    automationStore.actions.addTestDataToAutomation(jsonUpdate)
  }

  const testAutomation = async () => {
    try {
-      await automationStore.actions.test($selectedAutomation, testData)
+      await automationStore.actions.test($selectedAutomation, $memoTestData)
      $automationStore.showTestPanel = true
    } catch (error) {
      notifications.error(error)
@@ -85,7 +148,7 @@
{#if selectedValues}
  <div class="tab-content-padding">
    <AutomationBlockSetup
-      {testData}
+      testData={$memoTestData}
      {schemaProperties}
      isTestModal
      block={trigger}
(File diff suppressed because it is too large.)
@@ -1,19 +1,28 @@
<script>
  import { createEventDispatcher } from "svelte"
-  import RowSelectorTypes from "./RowSelectorTypes.svelte"
+  import PropField from "./PropField.svelte"
+  import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
+  import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
+  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
+  import { DatePicker, Select } from "@budibase/bbui"
+  import { FieldType } from "@budibase/types"

  const dispatch = createEventDispatcher()

-  export let value
+  export let value = {}
  export let bindings
  export let block
  export let isTestModal

-  let schemaFields
+  const { STRING, NUMBER, ARRAY } = FieldType
+
+  let schemaFields = []
+  let editableValue
+
+  $: editableValue = { ...value }

  $: {
    let fields = {}

    for (const [key, type] of Object.entries(block?.inputs?.fields ?? {})) {
      fields = {
        ...fields,
@@ -25,8 +34,8 @@
      },
    }

-    if (value[key] === type) {
-      value[key] = INITIAL_VALUES[type.toUpperCase()]
+    if (editableValue[key] === type) {
+      editableValue[key] = INITIAL_VALUES[type.toUpperCase()]
    }
  }
@@ -38,77 +47,58 @@
    NUMBER: null,
    DATETIME: null,
    STRING: "",
    OPTIONS: [],
-    ARRAY: [],
+    ARRAY: "",
  }

-  const coerce = (value, type) => {
-    const re = new RegExp(/{{([^{].*?)}}/g)
-    if (re.test(value)) {
-      return value
-    }
-
-    if (type === "boolean") {
-      if (typeof value === "boolean") {
-        return value
-      }
-      return value === "true"
-    }
-    if (type === "number") {
-      if (typeof value === "number") {
-        return value
-      }
-      return Number(value)
-    }
-    if (type === "options") {
-      return [value]
-    }
-    if (type === "array") {
-      if (Array.isArray(value)) {
-        return value
-      }
-      return value.split(",").map(x => x.trim())
-    }
-
-    if (type === "link") {
-      if (Array.isArray(value)) {
-        return value
-      }
-
-      return [value]
-    }
-
-    return value
-  }
-
-  const onChange = (e, field, type) => {
-    value[field] = coerce(e.detail, type)
-    dispatch("change", value)
+  const onChange = (e, field) => {
+    if (e.detail !== editableValue[field]) {
+      editableValue[field] = e.detail
+      dispatch("change", editableValue)
+    }
  }
</script>

-{#if schemaFields.length && isTestModal}
-  <div class="schema-fields">
+{#if schemaFields?.length && isTestModal}
+  <div class="fields">
    {#each schemaFields as [field, schema]}
-      <RowSelectorTypes
-        {isTestModal}
-        {field}
-        {schema}
-        {bindings}
-        {value}
-        {onChange}
-      />
+      <PropField label={field}>
+        {#if [STRING, NUMBER, ARRAY].includes(schema.type)}
+          <svelte:component
+            this={isTestModal ? ModalBindableInput : DrawerBindableInput}
+            panel={AutomationBindingPanel}
+            value={editableValue[field]}
+            on:change={e => onChange(e, field)}
+            type="string"
+            {bindings}
+            allowJS={true}
+            updateOnChange={false}
+            title={schema.name}
+            autocomplete="off"
+          />
+        {:else if schema.type === "boolean"}
+          <Select
+            on:change={e => onChange(e, field)}
+            value={editableValue[field]}
+            options={[
+              { label: "True", value: "true" },
+              { label: "False", value: "false" },
+            ]}
+          />
+        {:else if schema.type === "datetime"}
+          <DatePicker
+            value={editableValue[field]}
+            on:change={e => onChange(e, field)}
+          />
+        {/if}
+      </PropField>
    {/each}
  </div>
{/if}

<style>
-  .schema-fields {
-    display: grid;
-    grid-gap: var(--spacing-s);
-    margin-top: var(--spacing-s);
-  }
-  .schema-fields :global(label) {
-    text-transform: capitalize;
+  .fields {
+    display: flex;
+    flex-direction: column;
+    gap: var(--spacing-m);
  }
</style>
@@ -0,0 +1,60 @@
<script>
  import { Label } from "@budibase/bbui"

  export let label
  export let labelTooltip
  export let fullWidth = false
  export let componentWidth = 320
</script>

<div
  class="prop-field"
  class:fullWidth
  style={`--comp-width: ${componentWidth}px;`}
>
  <div class="prop-label" title={label}>
    <Label tooltip={labelTooltip}>{label}</Label>
  </div>
  <div class="prop-control">
    <slot />
  </div>
</div>

<style>
  .prop-field {
    display: grid;
    grid-template-columns: 1fr var(--comp-width);
  }

  .prop-field.fullWidth {
    grid-template-columns: 1fr;
  }

  .prop-field.fullWidth .prop-label {
    margin-bottom: var(--spacing-s);
  }

  .prop-label {
    display: flex;
    align-items: center;
    overflow: hidden;
  }

  .prop-label :global(> div) {
    width: 100%;
  }

  .prop-label :global(> div > label) {
    text-overflow: ellipsis;
    white-space: nowrap;
    overflow: hidden;
  }

  .prop-control {
    margin-left: var(--spacing-s);
  }

  .prop-field.fullWidth .prop-control {
    margin-left: 0px;
  }
</style>
@@ -1,28 +1,43 @@
<script>
  import { tables } from "stores/builder"
-  import { Select, Checkbox, Label } from "@budibase/bbui"
+  import {
+    ActionButton,
+    Popover,
+    Icon,
+    TooltipPosition,
+    TooltipType,
+  } from "@budibase/bbui"
  import { createEventDispatcher } from "svelte"
  import { FieldType } from "@budibase/types"

  import RowSelectorTypes from "./RowSelectorTypes.svelte"
  import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
-  import { TableNames } from "constants"
+  import { FIELDS } from "constants/backend"
+  import { capitalise } from "helpers"
+  import { memo } from "@budibase/frontend-core"
+  import PropField from "./PropField.svelte"
+  import { cloneDeep, isPlainObject, mergeWith } from "lodash"

  const dispatch = createEventDispatcher()
-  export let value
+
+  export let row
+  export let meta
  export let bindings
  export let isTestModal
  export let isUpdateRow

-  $: parsedBindings = bindings.map(binding => {
-    let clone = Object.assign({}, binding)
-    clone.icon = "ShareAndroid"
-    return clone
-  })
+  const typeToField = Object.values(FIELDS).reduce((acc, field) => {
+    acc[field.type] = field
+    return acc
+  }, {})
+
+  const memoStore = memo({
+    row,
+    meta,
+  })

  let table
+  // Row Schema Fields
  let schemaFields
  let attachmentTypes = [
    FieldType.ATTACHMENTS,
@@ -30,32 +45,123 @@
    FieldType.SIGNATURE_SINGLE,
  ]

-  $: {
-    table = $tables.list.find(table => table._id === value?.tableId)
-
-    // Just sorting attachment types to the bottom here for a cleaner UX
-    schemaFields = Object.entries(table?.schema ?? {}).sort(
-      ([, schemaA], [, schemaB]) =>
-        (schemaA.type === "attachment") - (schemaB.type === "attachment")
-    )
-
-    schemaFields.forEach(([, schema]) => {
-      if (!schema.autocolumn && !value[schema.name]) {
-        value[schema.name] = ""
-      }
-    })
-  }
-  const onChangeTable = e => {
-    value["tableId"] = e.detail
-    dispatch("change", value)
-  }
+  let customPopover
+  let popoverAnchor
+  let editableRow = {}
+  let editableFields = {}

-  const coerce = (value, type) => {
-    const re = new RegExp(/{{([^{].*?)}}/g)
-    if (re.test(value)) {
-      return value
+  // Avoid unnecessary updates
+  $: memoStore.set({
+    row,
+    meta,
+  })
+
+  $: parsedBindings = bindings.map(binding => {
+    let clone = Object.assign({}, binding)
+    clone.icon = "ShareAndroid"
+    return clone
+  })
+
+  $: tableId = $memoStore?.row?.tableId
+
+  $: initData(tableId, $memoStore?.meta?.fields, $memoStore?.row)
+
+  const initData = (tableId, metaFields, row) => {
+    if (!tableId) {
+      return
+    }
+
+    // Refresh the editable fields
+    editableFields = cloneDeep(metaFields || {})
+
+    // Refresh all the row data
+    editableRow = cloneDeep(row || {})
+
+    table = $tables.list.find(table => table._id === tableId)
+
+    if (table) {
+      editableRow["tableId"] = tableId
+
+      schemaFields = Object.entries(table?.schema ?? {})
+        .filter(entry => {
+          const [, field] = entry
+          return field.type !== "formula" && !field.autocolumn
+        })
+        .sort(([nameA], [nameB]) => {
+          return nameA < nameB ? -1 : 1
+        })
+
+      // Parse out any data not in the schema.
+      for (const column in editableFields) {
+        if (!Object.hasOwn(table?.schema, column)) {
+          delete editableFields[column]
+        }
+      }
+    }
+
+    // Go through the table schema and build out the editable content
+    for (const entry of schemaFields) {
+      const [key, fieldSchema] = entry
+
+      const emptyField =
+        editableRow[key] == null || editableRow[key]?.length === 0
+
+      // Put non-empty elements into the update and add their key to the fields list.
+      if (!emptyField && !Object.hasOwn(editableFields, key)) {
+        editableFields = {
+          ...editableFields,
+          [key]: {},
+        }
+      }
+
+      // Legacy - clearRelationships
+      // Init the field and add it to the update.
+      if (emptyField) {
+        if (editableFields[key]?.clearRelationships === true) {
+          const emptyField = coerce(
+            !Object.hasOwn($memoStore?.row, key) ? "" : $memoStore?.row[key],
+            fieldSchema.type
+          )
+
+          // remove this and place the field in the editable row.
+          delete editableFields[key]?.clearRelationships
+
+          // Default the field
+          editableRow = {
+            ...editableRow,
+            [key]: emptyField,
+          }
+        } else {
+          // Purge from the update as its presence is not necessary.
+          delete editableRow[key]
+        }
+      }
+    }
+
+    // Parse all known row schema keys
+    const schemaKeys = [
+      "tableId",
+      ...schemaFields.map(entry => {
+        const [key] = entry
+        return key
+      }),
+    ]
+
+    // Purge any row keys that are not present in the schema.
+    for (const rowKey of Object.keys(editableRow)) {
+      if (!schemaKeys.includes(rowKey)) {
+        delete editableRow[rowKey]
+        delete editableFields[rowKey]
+      }
+    }
+  }
+
+  // Row coerce
+  const coerce = (value, type) => {
+    const re = new RegExp(/{{([^{].*?)}}/g)
+    if (typeof value === "string" && re.test(value)) {
+      return value
    }
    if (type === "number") {
      if (typeof value === "number") {
@@ -66,6 +172,9 @@
      return value
    }
    if (type === "array") {
+      if (!value) {
+        return []
+      }
      if (Array.isArray(value)) {
        return value
      }
@@ -73,7 +182,9 @@
    }

    if (type === "link") {
-      if (Array.isArray(value)) {
+      if (!value) {
+        return []
+      } else if (Array.isArray(value)) {
        return value
      }
      return value.split(",").map(x => x.trim())
@@ -86,130 +197,176 @@
    return value
  }

-  const onChange = (e, field, type) => {
-    let newValue = {
-      ...value,
-      [field]: coerce(e.detail, type),
-    }
-    dispatch("change", newValue)
+  const isFullWidth = type => {
+    return (
+      attachmentTypes.includes(type) ||
+      type === FieldType.JSON ||
+      type === FieldType.LONGFORM
+    )
  }

-  const onChangeSetting = (field, key, value) => {
-    let newField = {}
-    newField[field] = {
-      [key]: value,
-    }
-
-    let updatedFields = {
-      ...meta?.fields,
-      ...newField,
-    }
-
-    dispatch("change", {
-      key: "meta",
-      fields: updatedFields,
-    })
+  const onChange = update => {
+    const customizer = (objValue, srcValue) => {
+      if (isPlainObject(objValue) && isPlainObject(srcValue)) {
+        const result = mergeWith({}, objValue, srcValue, customizer)
+        let outcome = Object.keys(result).reduce((acc, key) => {
+          if (result[key] !== null) {
+            acc[key] = result[key]
+          }
+          return acc
+        }, {})
+        return outcome
+      }
+      return srcValue
+    }
+
+    const result = mergeWith(
+      {},
+      {
+        row: editableRow,
+        meta: {
+          fields: editableFields,
+        },
+      },
+      update,
+      customizer
+    )
+    dispatch("change", result)
  }
-
-  // Ensure any nullish tableId values get set to empty string so
-  // that the select works
-  $: if (value?.tableId == null) value = { tableId: "" }
</script>

-<div class="schema-fields">
-  <Label>Table</Label>
-  <div class="field-width">
-    <Select
-      on:change={onChangeTable}
-      value={value.tableId}
-      options={$tables.list.filter(table => table._id !== TableNames.USERS)}
-      getOptionLabel={table => table.name}
-      getOptionValue={table => table._id}
-    />
-  </div>
-</div>
-{#if schemaFields.length}
-  {#each schemaFields as [field, schema]}
-    {#if !schema.autocolumn}
-      <div class:schema-fields={!attachmentTypes.includes(schema.type)}>
-        <Label>{field}</Label>
-        <div class:field-width={!attachmentTypes.includes(schema.type)}>
-          {#if isTestModal}
-            <RowSelectorTypes
-              {isTestModal}
-              {field}
-              {schema}
-              {bindings}
-              {value}
-              {onChange}
-            />
-          {:else}
-            <DrawerBindableSlot
-              title={value.title || field}
-              panel={AutomationBindingPanel}
-              type={schema.type}
-              {schema}
-              value={value[field]}
-              on:change={e => onChange(e, field)}
-              {bindings}
-              allowJS={true}
-              updateOnChange={false}
-              drawerLeft="260px"
-            >
-              <RowSelectorTypes
-                {isTestModal}
-                {field}
-                {schema}
-                bindings={parsedBindings}
-                {value}
-                {onChange}
-                useAttachmentBinding={meta?.fields?.[field]
-                  ?.useAttachmentBinding}
-                {onChangeSetting}
-              />
-            </DrawerBindableSlot>
-          {/if}
-
-          {#if isUpdateRow && schema.type === "link"}
-            <div class="checkbox-field">
-              <Checkbox
-                value={meta.fields?.[field]?.clearRelationships}
-                text={"Clear relationships if empty?"}
-                size={"S"}
-                on:change={e =>
-                  onChangeSetting(field, "clearRelationships", e.detail)}
-              />
-            </div>
-          {/if}
-        </div>
-      </div>
-    {/if}
-  {/each}
-{/if}
+{#each schemaFields || [] as [field, schema]}
+  {#if !schema.autocolumn && Object.hasOwn(editableFields, field)}
+    <PropField label={field} fullWidth={isFullWidth(schema.type)}>
+      <div class="prop-control-wrap">
+        {#if isTestModal}
+          <RowSelectorTypes
+            {isTestModal}
+            {field}
+            {schema}
+            bindings={parsedBindings}
+            value={editableRow}
+            meta={{
+              fields: editableFields,
+            }}
+            {onChange}
+          />
+        {:else}
+          <DrawerBindableSlot
+            title={$memoStore?.row?.title || field}
+            panel={AutomationBindingPanel}
+            type={schema.type}
+            {schema}
+            value={editableRow[field]}
+            on:change={e =>
+              onChange({
+                row: {
+                  [field]: e.detail,
+                },
+              })}
+            {bindings}
+            allowJS={true}
+            updateOnChange={false}
+            drawerLeft="260px"
+          >
+            <RowSelectorTypes
+              {isTestModal}
+              {field}
+              {schema}
+              bindings={parsedBindings}
+              value={editableRow}
+              meta={{
+                fields: editableFields,
+              }}
+              onChange={change => onChange(change)}
+            />
+          </DrawerBindableSlot>
+        {/if}
+      </div>
+    </PropField>
+  {/if}
+{/each}
+
+{#if table && schemaFields}
+  {#key editableFields}
+    <div
+      class="add-fields-btn"
+      class:empty={Object.is(editableFields, {})}
+      bind:this={popoverAnchor}
+    >
+      <ActionButton
+        icon="Add"
+        fullWidth
+        on:click={() => {
+          customPopover.show()
+        }}
+        disabled={!schemaFields}
+        >Add fields
+      </ActionButton>
+    </div>
+  {/key}
+{/if}
+
+<Popover
+  align="center"
+  bind:this={customPopover}
+  anchor={popoverAnchor}
+  useAnchorWidth
+  maxHeight={300}
+  resizable={false}
+  offset={10}
+>
+  <!-- svelte-ignore a11y-click-events-have-key-events -->
+  <!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
+  <ul class="spectrum-Menu" role="listbox">
+    {#each schemaFields || [] as [field, schema]}
+      {#if !schema.autocolumn}
+        <li
+          class="table_field spectrum-Menu-item"
+          class:is-selected={Object.hasOwn(editableFields, field)}
+          on:click={() => {
+            if (Object.hasOwn(editableFields, field)) {
+              delete editableFields[field]
+              onChange({
+                meta: { fields: editableFields },
+                row: { [field]: null },
+              })
+            } else {
+              editableFields[field] = {}
+              onChange({ meta: { fields: editableFields } })
+            }
+          }}
+        >
+          <Icon
+            name={typeToField?.[schema.type]?.icon}
+            color={"var(--spectrum-global-color-gray-600)"}
+            tooltip={capitalise(schema.type)}
+            tooltipType={TooltipType.Info}
+            tooltipPosition={TooltipPosition.Left}
+          />
+          <div class="field_name spectrum-Menu-itemLabel">{field}</div>
+          <svg
+            class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
+            focusable="false"
+            aria-hidden="true"
+          >
+            <use xlink:href="#spectrum-css-icon-Checkmark100" />
+          </svg>
+        </li>
+      {/if}
+    {/each}
+  </ul>
+</Popover>

<style>
-  .field-width {
-    width: 320px;
-  }
-
-  .schema-fields {
-    display: flex;
-    justify-content: space-between;
-    align-items: center;
-    flex-direction: row;
-    align-items: center;
-    gap: 10px;
-    flex: 1;
-    margin-bottom: 10px;
-  }
-  .schema-fields :global(label) {
-    text-transform: capitalize;
-  }
-  .checkbox-field {
-    padding-bottom: var(--spacing-s);
-    padding-left: 1px;
-    padding-top: var(--spacing-s);
-  }
-  .checkbox-field :global(label) {
-    text-transform: none;
-  }
+  .table_field {
+    display: flex;
+    padding: var(--spacing-s) var(--spacing-l);
+    gap: var(--spacing-s);
+  }
+
+  /* Override for general json field override */
+  .prop-control-wrap :global(.icon.json-slot-icon) {
+    right: 1px !important;
+  }
</style>
@@ -11,17 +11,18 @@
  import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
  import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
  import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
-  import Editor from "components/integration/QueryEditor.svelte"
+  import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
  import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"

  export let onChange
  export let field
  export let schema
  export let value
+  export let meta
  export let bindings
  export let isTestModal
-  export let useAttachmentBinding
-  export let onChangeSetting
+
+  $: fieldData = value[field]

  $: parsedBindings = bindings.map(binding => {
    let clone = Object.assign({}, binding)
@@ -35,14 +36,15 @@
    FieldType.SIGNATURE_SINGLE,
  ]

-  let previousBindingState = useAttachmentBinding
-
  function schemaHasOptions(schema) {
    return !!schema.constraints?.inclusion?.length
  }

  function handleAttachmentParams(keyValueObj) {
    let params = {}
+    if (!keyValueObj) {
+      return null
+    }

    if (!Array.isArray(keyValueObj) && keyValueObj) {
      keyValueObj = [keyValueObj]
@@ -50,45 +52,68 @@

    if (keyValueObj.length) {
      for (let param of keyValueObj) {
-        params[param.url] = param.filename
+        params[param.url || ""] = param.filename || ""
      }
    }
    return params
  }

-  async function handleToggleChange(toggleField, event) {
-    if (event.detail === true) {
-      value[toggleField] = []
-    } else {
-      value[toggleField] = ""
-    }
-    previousBindingState = event.detail
-    onChangeSetting(toggleField, "useAttachmentBinding", event.detail)
-    onChange({ detail: value[toggleField] }, toggleField)
-  }
+  const handleMediaUpdate = e => {
+    const media = e.detail || []
+    const isSingle =
+      schema.type === FieldType.ATTACHMENT_SINGLE ||
+      schema.type === FieldType.SIGNATURE_SINGLE
+    const parsedMedia = media.map(({ name, value }) => ({
+      url: name,
+      filename: value,
+    }))

-  $: if (useAttachmentBinding !== previousBindingState) {
-    if (useAttachmentBinding) {
-      value[field] = []
-    } else {
-      value[field] = ""
+    if (isSingle) {
+      const [singleMedia] = parsedMedia
+      // Return only the first entry
+      return singleMedia
+        ? {
+            url: singleMedia.url,
+            filename: singleMedia.filename,
+          }
+        : null
    }
-    previousBindingState = useAttachmentBinding
+
+    // Return the entire array
+    return parsedMedia
  }
</script>

{#if schemaHasOptions(schema) && schema.type !== "array"}
  <Select
-    on:change={e => onChange(e, field)}
-    value={value[field]}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+    value={fieldData}
    options={schema.constraints.inclusion}
  />
{:else if schema.type === "datetime"}
-  <DatePicker value={value[field]} on:change={e => onChange(e, field)} />
+  <DatePicker
+    value={fieldData}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+  />
{:else if schema.type === "boolean"}
  <Select
-    on:change={e => onChange(e, field)}
-    value={value[field]}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+    value={fieldData}
    options={[
      { label: "True", value: "true" },
      { label: "False", value: "false" },
@@ -96,83 +121,111 @@
  />
{:else if schemaHasOptions(schema) && schema.type === "array"}
  <Multiselect
-    bind:value={value[field]}
+    value={fieldData}
    options={schema.constraints.inclusion}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
  />
{:else if schema.type === "longform"}
-  <TextArea bind:value={value[field]} on:change={e => onChange(e, field)} />
+  <TextArea
+    value={fieldData}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+  />
{:else if schema.type === "json"}
  <span>
-    <Editor
-      editorHeight="150"
-      mode="json"
-      on:change={e => {
-        if (e.detail?.value !== value[field]) {
-          onChange(e, field, schema.type)
-        }
-      }}
-      value={value[field]}
-    />
+    <div class="field-wrap json-field">
+      <CodeEditor
+        value={fieldData}
+        on:change={e => {
+          onChange({
+            row: {
+              [field]: e.detail,
+            },
+          })
+        }}
+      />
+    </div>
  </span>
{:else if schema.type === "link"}
  <LinkedRowSelector
-    linkedRows={value[field]}
+    linkedRows={fieldData}
    {schema}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
    useLabel={false}
  />
{:else if schema.type === "bb_reference" || schema.type === "bb_reference_single"}
  <LinkedRowSelector
-    linkedRows={value[field]}
+    linkedRows={fieldData}
    {schema}
    linkedTableId={"ta_users"}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
    useLabel={false}
  />
{:else if attachmentTypes.includes(schema.type)}
  <div class="attachment-field-container">
    <div class="toggle-container">
      <Toggle
-        value={useAttachmentBinding}
+        value={meta?.fields?.[field]?.useAttachmentBinding}
        text={"Use bindings"}
        size={"XS"}
-        on:change={e => handleToggleChange(field, e)}
+        on:change={e => {
+          onChange({
+            row: {
+              [field]: null,
+            },
+            meta: {
+              fields: {
+                [field]: {
+                  useAttachmentBinding: e.detail,
+                },
+              },
+            },
+          })
+        }}
      />
    </div>

-    {#if !useAttachmentBinding}
+    {#if !meta?.fields?.[field]?.useAttachmentBinding}
      <div class="attachment-field-spacing">
        <KeyValueBuilder
-          on:change={async e => {
-            onChange(
-              {
-                detail:
-                  schema.type === FieldType.ATTACHMENT_SINGLE ||
-                  schema.type === FieldType.SIGNATURE_SINGLE
-                    ? e.detail.length > 0
-                      ? {
-                          url: e.detail[0].name,
-                          filename: e.detail[0].value,
-                        }
-                      : {}
-                    : e.detail.map(({ name, value }) => ({
-                        url: name,
-                        filename: value,
-                      })),
-              },
-              field
-            )
-          }}
+          on:change={e => {
+            onChange({
+              row: {
+                [field]: handleMediaUpdate(e),
+              },
+            })
+          }}
-          object={handleAttachmentParams(value[field])}
+          object={handleAttachmentParams(fieldData)}
          allowJS
          {bindings}
          keyBindings
-          customButtonText={"Add attachment"}
+          customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
+            ? "Add signature"
+            : "Add attachment"}
          keyPlaceholder={"URL"}
          valuePlaceholder={"Filename"}
          actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
-            schema.type === FieldType.SIGNATURE) &&
-            Object.keys(value[field]).length >= 1}
+            schema.type === FieldType.SIGNATURE_SINGLE) &&
+            fieldData}
        />
      </div>
    {:else}
@ -180,8 +233,13 @@
|
|||
<svelte:component
|
||||
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
|
||||
panel={AutomationBindingPanel}
|
||||
value={value[field]}
|
||||
on:change={e => onChange(e, field)}
|
||||
value={fieldData}
|
||||
on:change={e =>
|
||||
onChange({
|
||||
row: {
|
||||
[field]: e.detail,
|
||||
},
|
||||
})}
|
||||
type="string"
|
||||
bindings={parsedBindings}
|
||||
allowJS={true}
|
||||
|
@ -195,20 +253,41 @@
|
|||
<svelte:component
|
||||
this={isTestModal ? ModalBindableInput : DrawerBindableInput}
|
||||
panel={AutomationBindingPanel}
|
||||
value={value[field]}
|
||||
on:change={e => onChange(e, field)}
|
||||
value={fieldData}
|
||||
on:change={e =>
|
||||
onChange({
|
||||
row: {
|
||||
[field]: e.detail,
|
||||
},
|
||||
})}
|
||||
type="string"
|
||||
bindings={parsedBindings}
|
||||
allowJS={true}
|
||||
updateOnChange={false}
|
||||
title={schema.name}
|
||||
autocomplete="off"
|
||||
/>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.attachment-field-spacing,
|
||||
.json-input-spacing {
|
||||
margin-top: var(--spacing-s);
|
||||
margin-bottom: var(--spacing-l);
|
||||
.attachment-field-spacing {
|
||||
border: 1px solid var(--spectrum-global-color-gray-400);
|
||||
border-radius: 4px;
|
||||
padding: var(--spacing-s);
|
||||
}
|
||||
|
||||
.field-wrap.json-field {
|
||||
height: 120px;
|
||||
}
|
||||
|
||||
.field-wrap {
|
||||
box-sizing: border-box;
|
||||
border: 1px solid var(--spectrum-global-color-gray-400);
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.field-wrap :global(.cm-editor),
|
||||
.field-wrap :global(.cm-scroller) {
|
||||
border-radius: 4px;
|
||||
}
|
||||
</style>
|
||||
|
|
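Editor's note on the hunk above: the new handleMediaUpdate collapses the KeyValueBuilder change event into either a single attachment object or an array, depending on the field type. A minimal standalone sketch of that logic in TypeScript — the FieldType split and the { name, value } event shape come from the diff; the helper name toAttachmentValue is hypothetical:

type MediaEntry = { name: string; value: string }
type Attachment = { url: string; filename: string }

// Mirrors the single/array split in the new handleMediaUpdate
function toAttachmentValue(
  entries: MediaEntry[],
  isSingle: boolean
): Attachment | Attachment[] | null {
  const parsed = entries.map(({ name, value }) => ({
    url: name,
    filename: value,
  }))
  if (isSingle) {
    // Single attachment/signature fields keep only the first entry
    return parsed[0] ?? null
  }
  // Multi-attachment fields keep the whole array
  return parsed
}

// toAttachmentValue([{ name: "https://host/a.png", value: "a.png" }], true)
// -> { url: "https://host/a.png", filename: "a.png" }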
@@ -8,6 +8,7 @@

export let value
export let isTrigger
export let disabled = false

$: filteredTables = $tables.list.filter(table => {
  return !isTrigger || table._id !== TableNames.USERS

@@ -25,4 +26,5 @@

  options={filteredTables}
  getOptionLabel={table => table.name}
  getOptionValue={table => table._id}
  {disabled}
/>

@@ -23,6 +23,7 @@

export let disableBindings = false
export let forceModal = false
export let context = null
export let autocomplete

const dispatch = createEventDispatcher()

@@ -71,6 +72,7 @@

  on:blur={onBlur}
  {placeholder}
  {updateOnChange}
  {autocomplete}
/>
{#if !disabled && !disableBindings}
  <div
@@ -0,0 +1,12 @@

export { default as BindableCombobox } from "./BindableCombobox.svelte"
export { default as BindingPanel } from "./BindingPanel.svelte"
export { default as BindingSidePanel } from "./BindingSidePanel.svelte"
export { default as DrawerBindableCombobox } from "./DrawerBindableCombobox.svelte"
export { default as ClientBindingPanel } from "./ClientBindingPanel.svelte"
export { default as DrawerBindableInput } from "./DrawerBindableInput.svelte"
export { default as DrawerBindableSlot } from "./DrawerBindableSlot.svelte"
export { default as EvaluationSidePanel } from "./EvaluationSidePanel.svelte"
export { default as ModalBindableInput } from "./ModalBindableInput.svelte"
export { default as ServerBindingPanel } from "./ServerBindingPanel.svelte"
export { default as SnippetDrawer } from "./SnippetDrawer.svelte"
export { default as SnippetSidePanel } from "./SnippetSidePanel.svelte"

@@ -11,7 +11,7 @@

  notifications,
} from "@budibase/bbui"
import { AUTH_TYPE_LABELS, AUTH_TYPES } from "./authTypes"
import BindableCombobox from "components/common/bindings/BindableCombobox.svelte"
import { BindableCombobox } from "components/common/bindings"
import { getAuthBindings, getEnvironmentBindings } from "dataBinding"
import { environment, licensing, auth } from "stores/portal"
import CreateEditVariableModal from "components/portal/environment/CreateEditVariableModal.svelte"
@@ -157,7 +157,8 @@ const automationActions = store => ({
      )
    }
  },
  updateBlockInputs: async (block, data) => {
  processBlockInputs: async (block, data) => {
    // Create new modified block
    let newBlock = {
      ...block,

@@ -184,6 +185,14 @@ const automationActions = store => ({

    // Don't save if no changes were made
    if (JSON.stringify(newAutomation) === JSON.stringify(automation)) {
      return false
    }

    return newAutomation
  },
  updateBlockInputs: async (block, data) => {
    const newAutomation = await store.actions.processBlockInputs(block, data)
    if (newAutomation === false) {
      return
    }
    await store.actions.save(newAutomation)
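Editor's note: the store change above splits the old updateBlockInputs into processBlockInputs (pure, returning false when saving would be a no-op) and a thin wrapper that persists. A hedged TypeScript sketch of the pattern; newAutomationFrom and save are hypothetical stand-ins for the real store internals:

type Automation = { definition: unknown }

// Hypothetical stand-ins for the real store internals
declare function newAutomationFrom(block: unknown, data: unknown): Automation
declare function save(automation: Automation): Promise<void>

// Pure step: compute the updated automation, or false when nothing changed
async function processBlockInputs(
  current: Automation,
  block: unknown,
  data: unknown
): Promise<Automation | false> {
  const updated = newAutomationFrom(block, data)
  if (JSON.stringify(updated) === JSON.stringify(current)) {
    return false // don't save if no changes were made
  }
  return updated
}

// Thin wrapper: persist only when something actually changed
async function updateBlockInputs(
  current: Automation,
  block: unknown,
  data: unknown
) {
  const updated = await processBlockInputs(current, block, data)
  if (updated === false) {
    return
  }
  await save(updated)
}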
@@ -1 +1 @@

Subproject commit e8f2c5a14780e1f61ec3896821ba5f93d486eb72
Subproject commit 11379517b76264a7f938c2d520bd259f586edada

@@ -1,23 +0,0 @@

// eslint-disable-next-line @typescript-eslint/no-unused-vars
module SendgridMock {
  class Email {
    constructor() {
      // @ts-ignore
      this.apiKey = null
    }

    setApiKey(apiKey: any) {
      // @ts-ignore
      this.apiKey = apiKey
    }

    async send(msg: any) {
      if (msg.to === "invalid@example.com") {
        throw "Invalid"
      }
      return msg
    }
  }

  module.exports = new Email()
}

@@ -94,7 +94,7 @@

  "koa2-ratelimit": "1.1.1",
  "lodash": "4.17.21",
  "memorystream": "0.3.1",
  "mongodb": "^6.3.0",
  "mongodb": "6.7.0",
  "mssql": "11.0.1",
  "mysql2": "3.9.8",
  "node-fetch": "2.6.7",
@@ -25,7 +25,9 @@ export async function searchView(
    ctx.throw(400, `This method only supports viewsV2`)
  }

  const viewFields = Object.keys(view.schema || {})
  const viewFields = Object.entries(view.schema || {})
    .filter(([_, value]) => value.visible)
    .map(([key]) => key)
  const { body } = ctx.request

  // Enrich saved query with ephemeral query params.
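Editor's note: the searchView change swaps Object.keys for Object.entries so that only columns explicitly marked visible are kept. A minimal sketch of the filter, assuming a schema shape of Record<string, { visible?: boolean }>:

// Keep only the names of columns explicitly marked visible
function visibleFields(schema: Record<string, { visible?: boolean }>) {
  return Object.entries(schema || {})
    .filter(([_, value]) => value.visible)
    .map(([key]) => key)
}

// visibleFields({ id: { visible: true }, Price: { visible: false } })
// -> ["id"]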
@@ -33,11 +33,6 @@ async function parseSchema(view: CreateViewRequest) {
    p[fieldName] = fieldSchema
    return p
  }, {} as Record<string, RequiredKeys<ViewUIFieldMetadata>>)
  for (let [key, column] of Object.entries(finalViewSchema)) {
    if (!column.visible && !column.readonly) {
      delete finalViewSchema[key]
    }
  }
  return finalViewSchema
}

@@ -218,6 +218,10 @@ describe.each([
        order: 1,
        width: 100,
      },
      Category: {
        visible: false,
        icon: "ic",
      },
    },
    id: createdView.id,
    version: 2,

@@ -269,9 +273,8 @@ describe.each([
    ...newView,
    schema: {
      id: { visible: true },
      Price: {
        visible: true,
      },
      Price: { visible: true },
      Category: { visible: false },
    },
    id: expect.any(String),
    version: 2,

@@ -759,6 +762,7 @@ describe.each([
        order: 1,
        width: 100,
      },
      Category: { visible: false, icon: "ic" },
    },
    id: view.id,
    version: 2,

@@ -873,30 +877,23 @@ describe.each([
    await db.getDB(config.appId!).put(tableToUpdate)

    view = await config.api.viewV2.get(view.id)
    await config.api.viewV2.update({
      ...view,
      schema: {
        ...view.schema,
        Price: {
          visible: false,
    await config.api.viewV2.update(
      {
        ...view,
        schema: {
          ...view.schema,
          Price: {
            visible: false,
          },
        },
      },
    })

    expect(await config.api.viewV2.get(view.id)).toEqual(
      expect.objectContaining({
        schema: {
          id: expect.objectContaining({
            visible: false,
          }),
          Price: expect.objectContaining({
            visible: false,
          }),
          Category: expect.objectContaining({
            visible: true,
          }),
      {
        status: 400,
        body: {
          message: 'You can\'t hide "id" because it is a required field.',
          status: 400,
        },
      })
    }
    )
  })
})

@@ -938,7 +935,6 @@ describe.each([
      Category: { visible: true },
    },
  })
  expect(res.schema?.Price).toBeUndefined()

  const view = await config.api.viewV2.get(res.id)
  const updatedTable = await config.api.table.get(table._id!)

@@ -1205,6 +1201,7 @@ describe.each([
    ],
    schema: {
      id: { visible: true },
      one: { visible: false },
      two: { visible: true },
    },
  })

@@ -49,7 +49,6 @@ export async function checkMissingMigrations(
  const queue = getAppMigrationQueue()

  if (
    queue &&
    latestMigration &&
    getTimestamp(currentVersion) < getTimestamp(latestMigration)
  ) {

@@ -10,6 +10,6 @@ export const MIGRATIONS: AppMigration[] = [
  {
    id: "20240604153647_initial_sqs",
    func: m20240604153647_initial_sqs,
    disabled: !env.SQS_SEARCH_ENABLE,
    disabled: !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE),
  },
]

@@ -40,7 +40,7 @@ const migration = async () => {
  // only do initial search if environment is using SQS already
  // initial search makes sure that all the indexes have been created
  // and are ready to use, avoiding any initial waits for large tables
  if (env.SQS_SEARCH_ENABLE) {
  if (env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE) {
    const tables = await sdk.tables.getAllInternalTables()
    // do these one by one - running in parallel could cause problems
    for (let table of tables) {
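Editor's note: both migration hunks above widen the SQS gate from a single flag to either of two. A small sketch of the gating predicate, assuming env vars are exposed as truthy strings:

// The SQS migration is enabled when either flag is set
function sqsMigrationEnabled(env: {
  SQS_MIGRATION_ENABLE?: string
  SQS_SEARCH_ENABLE?: string
}): boolean {
  return !!(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE)
}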
@@ -66,64 +66,69 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
  }
}

async function sqsDisabled(cb: () => Promise<void>) {
  await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
  await config.withEnv({ [envVar]: "" }, cb)
}

async function sqsEnabled(cb: () => Promise<void>) {
  await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
  await config.withEnv({ [envVar]: "1" }, cb)
}

beforeAll(async () => {
  await sqsDisabled(async () => {
    await config.init()
    const table = await config.api.table.save(basicTable())
    tableId = table._id!
    const db = dbCore.getDB(config.appId!)
    // old link document
    await db.put(oldLinkDocument())
  })
})

describe("SQS migration", () => {
  it("test migration runs as expected against an older DB", async () => {
    const db = dbCore.getDB(config.appId!)
    // confirm nothing exists initially
    await sqsDisabled(async () => {
      let error: any | undefined
      try {
        await db.get(SQLITE_DESIGN_DOC_ID)
      } catch (err: any) {
        error = err
      }
      expect(error).toBeDefined()
      expect(error.status).toBe(404)
    })
    await sqsEnabled(async () => {
      await processMigrations(config.appId!, MIGRATIONS)
      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
      expect(designDoc.sql.tables).toBeDefined()
      const mainTableDef = designDoc.sql.tables[tableId]
      expect(mainTableDef).toBeDefined()
      expect(mainTableDef.fields[prefix("name")]).toEqual({
        field: "name",
        type: SQLiteType.TEXT,
describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
  "SQS migration with (%s)",
  envVar => {
    beforeAll(async () => {
      await sqsDisabled(envVar, async () => {
        await config.init()
        const table = await config.api.table.save(basicTable())
        tableId = table._id!
        const db = dbCore.getDB(config.appId!)
        // old link document
        await db.put(oldLinkDocument())
      })
      expect(mainTableDef.fields[prefix("description")]).toEqual({
        field: "description",
        type: SQLiteType.TEXT,
      })

      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
      expect(linkDoc.tableId).toEqual(
        generateJunctionTableID(tableId1, tableId2)
      )
      // should have swapped the documents
      expect(linkDoc.doc1.tableId).toEqual(tableId2)
      expect(linkDoc.doc1.rowId).toEqual(rowId2)
      expect(linkDoc.doc2.tableId).toEqual(tableId1)
      expect(linkDoc.doc2.rowId).toEqual(rowId1)
    })
  })
})

    it("test migration runs as expected against an older DB", async () => {
      const db = dbCore.getDB(config.appId!)
      // confirm nothing exists initially
      await sqsDisabled(envVar, async () => {
        let error: any | undefined
        try {
          await db.get(SQLITE_DESIGN_DOC_ID)
        } catch (err: any) {
          error = err
        }
        expect(error).toBeDefined()
        expect(error.status).toBe(404)
      })
      await sqsEnabled(envVar, async () => {
        await processMigrations(config.appId!, MIGRATIONS)
        const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
        expect(designDoc.sql.tables).toBeDefined()
        const mainTableDef = designDoc.sql.tables[tableId]
        expect(mainTableDef).toBeDefined()
        expect(mainTableDef.fields[prefix("name")]).toEqual({
          field: "name",
          type: SQLiteType.TEXT,
        })
        expect(mainTableDef.fields[prefix("description")]).toEqual({
          field: "description",
          type: SQLiteType.TEXT,
        })

        const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
        const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
        expect(linkDoc.tableId).toEqual(
          generateJunctionTableID(tableId1, tableId2)
        )
        // should have swapped the documents
        expect(linkDoc.doc1.tableId).toEqual(tableId2)
        expect(linkDoc.doc1.rowId).toEqual(rowId2)
        expect(linkDoc.doc2.tableId).toEqual(tableId1)
        expect(linkDoc.doc2.rowId).toEqual(rowId1)
      })
    })
  }
)
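Editor's note: the test rewrite above wraps the whole suite in describe.each so the same assertions run once per flag. A reduced sketch of the structure, using Jest's describe.each with a typed parameter (the assertion is a placeholder):

type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
  "SQS migration with (%s)",
  envVar => {
    it("runs the same assertions under each flag", () => {
      // envVar is the flag being exercised in this iteration
      expect(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"]).toContain(envVar)
    })
  }
)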
@@ -11,26 +11,26 @@ export type AppMigrationJob = {
  appId: string
}

let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
// always create app migration queue - so that events can be pushed and read from it
// across the different api and automation services
const appMigrationQueue = queue.createQueue<AppMigrationJob>(
  queue.JobQueue.APP_MIGRATION,
  {
    jobOptions: {
      attempts: MAX_ATTEMPTS,
      removeOnComplete: true,
      removeOnFail: true,
    },
    maxStalledCount: MAX_ATTEMPTS,
    removeStalledCb: async (job: Job) => {
      logging.logAlert(
        `App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
      )
    },
  }
)

export function init() {
  appMigrationQueue = queue.createQueue<AppMigrationJob>(
    queue.JobQueue.APP_MIGRATION,
    {
      jobOptions: {
        attempts: MAX_ATTEMPTS,
        removeOnComplete: true,
        removeOnFail: true,
      },
      maxStalledCount: MAX_ATTEMPTS,
      removeStalledCb: async (job: Job) => {
        logging.logAlert(
          `App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
        )
      },
    }
  )

  return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
}
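Editor's note: the queue change moves creation from init() to module load, so any service importing the module can push or read jobs, while init() now only attaches the processor. A sketch of the shape; the createQueue/processMessage declarations are hypothetical stand-ins for the real queue helpers:

// Hypothetical stand-ins for the real queue helpers
type Queue<T> = {
  process(concurrency: number, fn: (job: T) => Promise<void>): void
}
declare function createQueue<T>(name: string): Queue<T>
declare function processMessage(job: { appId: string }): Promise<void>

// Created at module load: producers and consumers share one queue instance
const appMigrationQueue = createQueue<{ appId: string }>("app-migration")

// init() only wires up the consumer side
export function init() {
  return appMigrationQueue.process(1, processMessage)
}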
@@ -100,7 +100,10 @@ export function getError(err: any) {
}

export function guardAttachment(attachmentObject: any) {
  if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
  if (
    attachmentObject &&
    (!("url" in attachmentObject) || !("filename" in attachmentObject))
  ) {
    const providedKeys = Object.keys(attachmentObject).join(", ")
    throw new Error(
      `Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`

@@ -135,7 +138,9 @@ export async function sendAutomationAttachmentsToStorage(
  }

  for (const [prop, attachments] of Object.entries(attachmentRows)) {
    if (Array.isArray(attachments)) {
    if (!attachments) {
      continue
    } else if (Array.isArray(attachments)) {
      if (attachments.length) {
        row[prop] = await Promise.all(
          attachments.map(attachment => generateAttachmentRow(attachment))
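Editor's note: the guard now tolerates a null/undefined attachment and only validates the key shape when an object is actually present. A self-contained TypeScript sketch of the new behaviour:

// Throws only when an object is present but missing either required key
function guardAttachment(attachment: any) {
  if (
    attachment &&
    (!("url" in attachment) || !("filename" in attachment))
  ) {
    const providedKeys = Object.keys(attachment).join(", ")
    throw new Error(
      `Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`
    )
  }
}

guardAttachment(null) // ok: nothing to validate
guardAttachment({ url: "https://host/a.png", filename: "a.png" }) // ok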
@@ -1,4 +1,5 @@
import * as automationUtils from "./automationUtils"
import { isPlainObject } from "lodash"

type ObjValue = {
  [key: string]: string | ObjValue

@@ -18,6 +19,10 @@ function replaceBindingsRecursive(
  value: string | ObjValue,
  loopStepNumber: number
) {
  if (value === null || value === undefined) {
    return value
  }

  if (typeof value === "object") {
    for (const [innerKey, innerValue] of Object.entries(value)) {
      if (typeof innerValue === "string") {

@@ -25,7 +30,11 @@ function replaceBindingsRecursive(
          innerValue,
          `steps.${loopStepNumber}`
        )
      } else if (typeof innerValue === "object") {
      } else if (
        innerValue &&
        isPlainObject(innerValue) &&
        Object.keys(innerValue).length > 0
      ) {
        value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
      }
    }
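Editor's note: the loopUtils fix skips null/undefined values and only recurses into non-empty plain objects (lodash's isPlainObject rejects arrays, Dates, class instances and the like, which previously tripped the typeof === "object" branch). A reduced sketch of the traversal; the string replacement here is a simplified stand-in for the real binding-substitution helper:

import { isPlainObject } from "lodash"

// Rewrites "loop"-style bindings in place, skipping nullish values
function replaceRecursive(value: any, loopStepNumber: number): any {
  if (value === null || value === undefined) {
    return value
  }
  if (typeof value === "object") {
    for (const [key, inner] of Object.entries(value)) {
      if (typeof inner === "string") {
      // simplified stand-in for the real substitution helper
        value[key] = inner.replace("loop", `steps.${loopStepNumber}`)
      } else if (
        inner &&
        isPlainObject(inner) &&
        Object.keys(inner).length > 0
      ) {
        value[key] = replaceRecursive(inner, loopStepNumber)
      }
    }
  }
  return value
}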
@@ -7,8 +7,8 @@ import {
  AutomationStepType,
  AutomationIOType,
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"
import environment from "../../environment"

enum Model {
  GPT_35_TURBO = "gpt-3.5-turbo",

@@ -60,7 +60,7 @@ export const definition: AutomationStepSchema = {
}

export async function run({ inputs }: AutomationStepInput) {
  if (!environment.OPENAI_API_KEY) {
  if (!env.OPENAI_API_KEY) {
    return {
      success: false,
      response:

@@ -77,7 +77,7 @@ export async function run({ inputs }: AutomationStepInput) {

  try {
    const openai = new OpenAI({
      apiKey: environment.OPENAI_API_KEY,
      apiKey: env.OPENAI_API_KEY,
    })

    const completion = await openai.chat.completions.create({
@@ -82,39 +82,73 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
  }
  const tableId = inputs.row.tableId

  // clear any undefined, null or empty string properties so that they aren't updated
  for (let propKey of Object.keys(inputs.row)) {
    const clearRelationships =
      inputs.meta?.fields?.[propKey]?.clearRelationships
    if (
      (inputs.row[propKey] == null || inputs.row[propKey]?.length === 0) &&
      !clearRelationships
    ) {
      delete inputs.row[propKey]
    }
  // Base update
  let rowUpdate: Record<string, any>

  // Legacy
  // Find previously set values and add them to the update. Ensure empty relationships
  // are added to the update if clearRelationships is true
  const legacyUpdated = Object.keys(inputs.row || {}).reduce(
    (acc: Record<string, any>, key: string) => {
      const isEmpty = inputs.row[key] == null || inputs.row[key]?.length === 0
      const fieldConfig = inputs.meta?.fields || {}

      if (isEmpty) {
        if (
          Object.hasOwn(fieldConfig, key) &&
          fieldConfig[key].clearRelationships === true
        ) {
          // Explicitly clear the field on update
          acc[key] = []
        }
      } else {
        // Keep non-empty values
        acc[key] = inputs.row[key]
      }
      return acc
    },
    {}
  )

  // The source of truth for inclusion in the update is: inputs.meta?.fields
  const parsedUpdate = Object.keys(inputs.meta?.fields || {}).reduce(
    (acc: Record<string, any>, key: string) => {
      const fieldConfig = inputs.meta?.fields?.[key] || {}
      // Ignore legacy config.
      if (Object.hasOwn(fieldConfig, "clearRelationships")) {
        return acc
      }
      acc[key] =
        !inputs.row[key] || inputs.row[key]?.length === 0 ? "" : inputs.row[key]
      return acc
    },
    {}
  )

  rowUpdate = {
    tableId,
    ...parsedUpdate,
    ...legacyUpdated,
  }

  try {
    if (tableId) {
      inputs.row = await automationUtils.cleanUpRow(
        inputs.row.tableId,
        inputs.row
      )
      rowUpdate = await automationUtils.cleanUpRow(tableId, rowUpdate)

      inputs.row = await automationUtils.sendAutomationAttachmentsToStorage(
        inputs.row.tableId,
        inputs.row
      rowUpdate = await automationUtils.sendAutomationAttachmentsToStorage(
        tableId,
        rowUpdate
      )
    }
    // have to clean up the row, remove the table from it
    const ctx: any = buildCtx(appId, emitter, {
      body: {
        ...inputs.row,
        ...rowUpdate,
        _id: inputs.rowId,
      },
      params: {
        rowId: inputs.rowId,
        tableId: tableId,
        tableId,
      },
    })
    await rowController.patch(ctx)
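Editor's note: the rewritten step builds two partial updates — a legacy pass over inputs.row honouring clearRelationships, and a meta-driven pass over inputs.meta.fields — and then merges them, with legacy values winning on key collisions. A reduced sketch of the merge, assuming the same inputs shape as the diff; buildRowUpdate is a hypothetical helper name:

type Inputs = {
  row: Record<string, any>
  meta?: { fields?: Record<string, { clearRelationships?: boolean }> }
}

function buildRowUpdate(tableId: string, inputs: Inputs) {
  const fieldConfig = inputs.meta?.fields || {}

  // Legacy pass: keep non-empty values; clear relationships when asked
  const legacyUpdated: Record<string, any> = {}
  for (const key of Object.keys(inputs.row || {})) {
    const isEmpty = inputs.row[key] == null || inputs.row[key]?.length === 0
    if (isEmpty) {
      if (fieldConfig[key]?.clearRelationships === true) {
        legacyUpdated[key] = [] // explicitly clear the field on update
      }
    } else {
      legacyUpdated[key] = inputs.row[key]
    }
  }

  // Meta-driven pass: meta.fields is the source of truth for inclusion
  const parsedUpdate: Record<string, any> = {}
  for (const key of Object.keys(fieldConfig)) {
    if (Object.hasOwn(fieldConfig[key], "clearRelationships")) {
      continue // ignore legacy config
    }
    parsedUpdate[key] =
      !inputs.row[key] || inputs.row[key]?.length === 0 ? "" : inputs.row[key]
  }

  // Legacy values take precedence when both passes set a key
  return { tableId, ...parsedUpdate, ...legacyUpdated }
}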
@@ -4,6 +4,7 @@ import { loopAutomation } from "../../tests/utilities/structures"
import { context } from "@budibase/backend-core"
import * as setup from "./utilities"
import { Table } from "@budibase/types"
import * as loopUtils from "../loopUtils"
import { LoopInput, LoopStepType } from "../../definitions/automations"

describe("Attempt to run a basic loop automation", () => {

@@ -51,4 +52,98 @@ describe("Attempt to run a basic loop automation", () => {
    })
    expect(resp.steps[2].outputs.iterations).toBe(1)
  })

  describe("replaceFakeBindings", () => {
    it("should replace loop bindings in nested objects", () => {
      const originalStepInput = {
        schema: {
          name: {
            type: "string",
            constraints: {
              type: "string",
              length: { maximum: null },
              presence: false,
            },
            name: "name",
            display: { type: "Text" },
          },
        },
        row: {
          tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
          name: "{{ loop.currentItem.pokemon }}",
        },
      }

      const loopStepNumber = 3

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual({
        schema: {
          name: {
            type: "string",
            constraints: {
              type: "string",
              length: { maximum: null },
              presence: false,
            },
            name: "name",
            display: { type: "Text" },
          },
        },
        row: {
          tableId: "ta_aaad4296e9f74b12b1b90ef7a84afcad",
          name: "{{ steps.3.currentItem.pokemon }}",
        },
      })
    })

    it("should handle null values in nested objects", () => {
      const originalStepInput = {
        nullValue: null,
        nestedNull: {
          someKey: null,
        },
        validValue: "{{ loop.someValue }}",
      }

      const loopStepNumber = 2

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual({
        nullValue: null,
        nestedNull: {
          someKey: null,
        },
        validValue: "{{ steps.2.someValue }}",
      })
    })

    it("should handle empty objects and arrays", () => {
      const originalStepInput = {
        emptyObject: {},
        emptyArray: [],
        nestedEmpty: {
          emptyObj: {},
          emptyArr: [],
        },
      }

      const loopStepNumber = 1

      const result = loopUtils.replaceFakeBindings(
        originalStepInput,
        loopStepNumber
      )

      expect(result).toEqual(originalStepInput)
    })
  })
})
@@ -1,6 +1,4 @@
const setup = require("./utilities")

import environment from "../../environment"
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { OpenAI } from "openai"

jest.mock("openai", () => ({

@@ -26,42 +24,41 @@ const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>
const OPENAI_PROMPT = "What is the meaning of life?"

describe("test the openai action", () => {
  let config = setup.getConfig()
  let config = getConfig()
  let resetEnv: () => void | undefined

  beforeAll(async () => {
    await config.init()
  })

  beforeEach(() => {
    environment.OPENAI_API_KEY = "abc123"
    resetEnv = config.setCoreEnv({ OPENAI_API_KEY: "abc123" })
  })

  afterAll(setup.afterAll)
  afterEach(() => {
    resetEnv()
  })

  afterAll(_afterAll)

  it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
    delete environment.OPENAI_API_KEY

    let res = await setup.runStep("OPENAI", {
      prompt: OPENAI_PROMPT,
    await config.withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
      let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
      expect(res.response).toEqual(
        "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
      )
      expect(res.success).toBeFalsy()
    })
    expect(res.response).toEqual(
      "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
    )
    expect(res.success).toBeFalsy()
  })

  it("should be able to receive a response from ChatGPT given a prompt", async () => {
    const res = await setup.runStep("OPENAI", {
      prompt: OPENAI_PROMPT,
    })
    const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
    expect(res.response).toEqual("This is a test")
    expect(res.success).toBeTruthy()
  })

  it("should present the correct error message when a prompt is not provided", async () => {
    const res = await setup.runStep("OPENAI", {
      prompt: null,
    })
    const res = await runStep("OPENAI", { prompt: null })
    expect(res.response).toEqual(
      "Budibase OpenAI Automation Failed: No prompt supplied"
    )

@@ -84,7 +81,7 @@ describe("test the openai action", () => {
    } as any)
  )

  const res = await setup.runStep("OPENAI", {
  const res = await runStep("OPENAI", {
    prompt: OPENAI_PROMPT,
  })
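Editor's note: the test now swaps direct mutation of the environment module for setCoreEnv/withCoreEnv, which snapshot values, apply overrides, and restore afterwards. A minimal sketch of such a scoped-override helper (illustrative only; the real helpers live on TestConfiguration):

// Apply overrides, run the callback, then restore the previous values
async function withEnv<T>(
  env: Record<string, string | undefined>,
  overrides: Record<string, string>,
  cb: () => Promise<T>
): Promise<T> {
  const previous: Record<string, string | undefined> = {}
  for (const key of Object.keys(overrides)) {
    previous[key] = env[key]
    env[key] = overrides[key]
  }
  try {
    return await cb()
  } finally {
    // Restore even if the callback throws
    for (const key of Object.keys(previous)) {
      env[key] = previous[key]
    }
  }
}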
@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "App Action",
  event: "app:trigger",
  event: AutomationEventType.APP_TRIGGER,
  icon: "Apps",
  tagline: "Automation fired from the frontend",
  description: "Trigger an automation from an action inside your app",

@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Cron Trigger",
  event: "cron:trigger",
  event: AutomationEventType.CRON_TRIGGER,
  icon: "Clock",
  tagline: "Cron Trigger (<b>{{inputs.cron}}</b>)",
  description: "Triggers automation on a cron schedule.",

@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Deleted",
  event: "row:delete",
  event: AutomationEventType.ROW_DELETE,
  icon: "TableRowRemoveCenter",
  tagline: "Row is deleted from {{inputs.enriched.table.name}}",
  description: "Fired when a row is deleted from your database",

@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Created",
  event: "row:save",
  event: AutomationEventType.ROW_SAVE,
  icon: "TableRowAddBottom",
  tagline: "Row is added to {{inputs.enriched.table.name}}",
  description: "Fired when a row is added to your database",

@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Row Updated",
  event: "row:update",
  event: AutomationEventType.ROW_UPDATE,
  icon: "Refresh",
  tagline: "Row is updated in {{inputs.enriched.table.name}}",
  description: "Fired when a row is updated in your database",

@@ -4,11 +4,12 @@ import {
  AutomationStepType,
  AutomationTriggerSchema,
  AutomationTriggerStepId,
  AutomationEventType,
} from "@budibase/types"

export const definition: AutomationTriggerSchema = {
  name: "Webhook",
  event: "web:trigger",
  event: AutomationEventType.WEBHOOK_TRIGGER,
  icon: "Send",
  tagline: "Webhook endpoint is hit",
  description: "Trigger an automation when a HTTP POST webhook is hit",
@@ -13,6 +13,7 @@ import {
  Row,
  AutomationData,
  AutomationJob,
  AutomationEventType,
  UpdatedRowEventEmitter,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"

@@ -71,28 +72,31 @@ async function queueRelevantRowAutomations(
  })
}

emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
emitter.on(
  AutomationEventType.ROW_SAVE,
  async function (event: UpdatedRowEventEmitter) {
    /* istanbul ignore next */
    if (!event || !event.row || !event.row.tableId) {
      return
    }
    await queueRelevantRowAutomations(event, AutomationEventType.ROW_SAVE)
  }
)

emitter.on(AutomationEventType.ROW_UPDATE, async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:save")
  await queueRelevantRowAutomations(event, AutomationEventType.ROW_UPDATE)
})

emitter.on("row:update", async function (event) {
emitter.on(AutomationEventType.ROW_DELETE, async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:update")
})

emitter.on("row:delete", async function (event) {
  /* istanbul ignore next */
  if (!event || !event.row || !event.row.tableId) {
    return
  }
  await queueRelevantRowAutomations(event, "row:delete")
  await queueRelevantRowAutomations(event, AutomationEventType.ROW_DELETE)
})

export async function externalTrigger(

@@ -118,7 +122,6 @@ export async function externalTrigger(
    }
    params.fields = coercedFields
  }

  const data: AutomationData = { automation, event: params as any }
  if (getResponses) {
    data.event = {
@@ -75,16 +75,12 @@ const environment = {
  AUTOMATION_MAX_ITERATIONS:
    parseIntSafe(process.env.AUTOMATION_MAX_ITERATIONS) ||
    DEFAULTS.AUTOMATION_MAX_ITERATIONS,
  SENDGRID_API_KEY: process.env.SENDGRID_API_KEY,
  DYNAMO_ENDPOINT: process.env.DYNAMO_ENDPOINT,
  QUERY_THREAD_TIMEOUT: QUERY_THREAD_TIMEOUT,
  AUTOMATION_THREAD_TIMEOUT:
    parseIntSafe(process.env.AUTOMATION_THREAD_TIMEOUT) ||
    DEFAULT_AUTOMATION_TIMEOUT,
  BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
  BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
  PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR,
  OPENAI_API_KEY: process.env.OPENAI_API_KEY,
  MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB,
  SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS,
  // SQL

@@ -92,6 +88,7 @@ const environment = {
  SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
  SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
  // flags
  ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
  DISABLE_THREADING: process.env.DISABLE_THREADING,
@@ -160,14 +160,10 @@ export function enrichSchema(
  for (const key of Object.keys(schema)) {
    // if nothing specified in view, then it is not visible
    const ui = view.schema?.[key] || { visible: false }
    if (ui.visible === false) {
      schema[key].visible = false
    } else {
      schema[key] = {
        ...schema[key],
        ...ui,
        order: anyViewOrder ? ui?.order ?? undefined : schema[key].order,
      }
    schema[key] = {
      ...schema[key],
      ...ui,
      order: anyViewOrder ? ui?.order ?? undefined : schema[key].order,
    }
  }
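Editor's note: the enrichSchema change drops the special case for hidden columns — whether visible is true or false, the view's UI metadata is now spread over the table schema in one place. A reduced sketch of the merge, assuming the same field shapes as the diff:

type UIField = { visible?: boolean; order?: number }

function enrich(
  schema: Record<string, UIField>,
  viewSchema: Record<string, UIField>,
  anyViewOrder: boolean
) {
  for (const key of Object.keys(schema)) {
    // if nothing is specified in the view, the field is not visible
    const ui = viewSchema?.[key] || { visible: false }
    schema[key] = {
      ...schema[key],
      ...ui,
      order: anyViewOrder ? ui?.order ?? undefined : schema[key].order,
    }
  }
  return schema
}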
@@ -8,6 +8,7 @@ import {
  tenancy,
  users,
  cache,
  env as coreEnv,
} from "@budibase/backend-core"
import { watch } from "../watch"
import * as automations from "../automations"

@@ -132,8 +133,8 @@ export async function startup(
  // check and create admin user if required
  // this must be run after the api has been initialised due to
  // the app user sync
  const bbAdminEmail = env.BB_ADMIN_USER_EMAIL,
    bbAdminPassword = env.BB_ADMIN_USER_PASSWORD
  const bbAdminEmail = coreEnv.BB_ADMIN_USER_EMAIL,
    bbAdminPassword = coreEnv.BB_ADMIN_USER_PASSWORD
  if (
    env.SELF_HOSTED &&
    !env.MULTI_TENANCY &&
@@ -14,20 +14,27 @@ describe("check BB_ADMIN environment variables", () => {
  await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {
    await config.withEnv(
      {
        BB_ADMIN_USER_EMAIL: EMAIL,
        BB_ADMIN_USER_PASSWORD: PASSWORD,
        MULTI_TENANCY: "0",
        SELF_HOSTED: "1",
      },
      async () => {
        await startup({ rerun: true })
        const user = await users.getGlobalUserByEmail(EMAIL, {
          cleanup: false,
        })
        expect(user).toBeDefined()
        expect(user?.password).toBeDefined()
        expect(await utils.compare(PASSWORD, user?.password!)).toEqual(true)
      }
      () =>
        config.withCoreEnv(
          {
            BB_ADMIN_USER_EMAIL: EMAIL,
            BB_ADMIN_USER_PASSWORD: PASSWORD,
          },
          async () => {
            await startup({ rerun: true })
            const user = await users.getGlobalUserByEmail(EMAIL, {
              cleanup: false,
            })
            expect(user).toBeDefined()
            expect(user?.password).toBeDefined()
            expect(await utils.compare(PASSWORD, user?.password!)).toEqual(
              true
            )
          }
        )
    )
  })
})
@@ -290,7 +290,7 @@ export default class TestConfiguration {
   * that can be called to reset the environment variables to their original values.
   */
  setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
    const oldEnv = cloneDeep(env)
    const oldEnv = cloneDeep(coreEnv)

    let key: keyof typeof newEnvVars
    for (key in newEnvVars) {
@@ -24,6 +24,7 @@ import {
  Query,
  Webhook,
  WebhookActionType,
  AutomationEventType,
} from "@budibase/types"
import { LoopInput, LoopStepType } from "../../definitions/automations"
import { merge } from "lodash"

@@ -305,7 +306,7 @@ export function loopAutomation(
  trigger: {
    id: "a",
    type: "TRIGGER",
    event: "row:save",
    event: AutomationEventType.ROW_SAVE,
    stepId: AutomationTriggerStepId.ROW_SAVED,
    inputs: {
      tableId,

@@ -347,7 +348,7 @@ export function collectAutomation(tableId?: string): Automation {
  trigger: {
    id: "a",
    type: "TRIGGER",
    event: "row:save",
    event: AutomationEventType.ROW_SAVE,
    stepId: AutomationTriggerStepId.ROW_SAVED,
    inputs: {
      tableId,
@@ -50,6 +50,13 @@ export const TYPE_TRANSFORM_MAP: any = {
    [undefined]: undefined,
    parse: parseArrayString,
  },
  [FieldType.BB_REFERENCE]: {
    //@ts-ignore
    [null]: [],
    //@ts-ignore
    [undefined]: undefined,
    parse: parseArrayString,
  },
  [FieldType.STRING]: {
    "": null,
    //@ts-ignore

@@ -113,6 +120,9 @@ export const TYPE_TRANSFORM_MAP: any = {
    [undefined]: undefined,
    parse: parseArrayString,
  },
  [FieldType.ATTACHMENT_SINGLE]: {
    "": null,
  },
  [FieldType.BOOLEAN]: {
    "": null,
    //@ts-ignore
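Editor's note: the new map entries give BB_REFERENCE and ATTACHMENT_SINGLE explicit coercions for empty inputs (null becomes [] for references, "" becomes null for single attachments). A simplified sketch of how a lookup map like this is typically consumed — the transform helper and its exact lookup rules are illustrative, not the real inputProcessing code:

const TRANSFORMS: Record<string, Record<string, any>> = {
  bb_reference: { null: [] },       // empty reference becomes an empty array
  attachment_single: { "": null },  // empty string becomes null
}

// Look up a coercion for the raw value; fall back to the value itself
function transform(fieldType: string, raw: any) {
  const map = TRANSFORMS[fieldType]
  if (map && String(raw) in map) {
    return map[String(raw)]
  }
  return raw
}

// transform("bb_reference", null) -> []
// transform("attachment_single", "") -> null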
@@ -209,10 +209,22 @@ describe("rowProcessor - inputProcessing", () => {

  const { row } = await inputProcessing(userId, table, newRow)

  if (userValue === undefined) {
    // The 'user' field is omitted
    expect(row).toEqual({
      name: "Jack",
    })
  } else {
    // The update is processed if null or "". 'user' is changed to an empty array.
    expect(row).toEqual({
      name: "Jack",
      user: [],
    })
  }

  expect(
    bbReferenceProcessor.processInputBBReferences
  ).not.toHaveBeenCalled()
  expect(row).toEqual(newRow)
}
)
@@ -255,6 +255,15 @@ export type BucketedContent = AutomationAttachmentContent & {
  path: string
}

export enum AutomationEventType {
  ROW_SAVE = "row:save",
  ROW_UPDATE = "row:update",
  ROW_DELETE = "row:delete",
  APP_TRIGGER = "app:trigger",
  CRON_TRIGGER = "cron:trigger",
  WEBHOOK_TRIGGER = "web:trigger",
}

export type UpdatedRowEventEmitter = {
  row: Row
  oldRow: Row
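Editor's note: centralising the event names in AutomationEventType lets the emitter, trigger definitions, and test structures share one set of constants instead of scattered string literals. A usage sketch, assuming a plain Node EventEmitter:

import { EventEmitter } from "events"

enum AutomationEventType {
  ROW_SAVE = "row:save",
  ROW_DELETE = "row:delete",
}

const emitter = new EventEmitter()

// Subscribers and publishers agree on the same constant
emitter.on(AutomationEventType.ROW_SAVE, event => {
  console.log("row saved", event)
})
emitter.emit(AutomationEventType.ROW_SAVE, { row: { tableId: "ta_123" } })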
@@ -3,4 +3,5 @@ export interface SaveUserOpts {
  requirePassword?: boolean
  currentUserId?: string
  skipPasswordValidation?: boolean
  allowChangingEmail?: boolean
}
@@ -3,12 +3,6 @@ import env from "../../../environment"
import { env as coreEnv } from "@budibase/backend-core"
import nodeFetch from "node-fetch"

// When we come to move to SQS fully and move away from Clouseau, we will need
// to flip this to true (or remove it entirely). This will then be used to
// determine if we should show the maintenance page that links to the SQS
// migration docs.
const sqsRequired = false

let sqsAvailable: boolean
async function isSqsAvailable() {
  // We cache this value for the duration of the Node process because we don't

@@ -30,7 +24,7 @@ async function isSqsAvailable() {
}

async function isSqsMissing() {
  return sqsRequired && !(await isSqsAvailable())
  return env.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
}

export const fetch = async (ctx: Ctx) => {
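Editor's note: with the hard-coded sqsRequired flag gone, the maintenance check is driven directly by SQS_SEARCH_ENABLE — the maintenance page only shows when SQS is expected but unreachable. A sketch of the predicate, with a hypothetical probe standing in for the real availability check:

// Hypothetical probe; the real check pings the SQS service and caches the result
declare function isSqsAvailable(): Promise<boolean>

async function isSqsMissing(env: { SQS_SEARCH_ENABLE?: string }) {
  // Only flag maintenance when SQS is enabled but not reachable
  return !!env.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
}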
@@ -574,6 +574,41 @@ describe("scim", () => {

    expect(events.user.updated).toHaveBeenCalledTimes(1)
  })

  it("an existing user's email can be updated", async () => {
    const newEmail = structures.generator.email()
    const body: ScimUpdateRequest = {
      schemas: ["urn:ietf:params:scim:api:messages:2.0:PatchOp"],
      Operations: [
        {
          op: "Replace",
          path: 'emails[type eq "work"].value',
          value: newEmail,
        },
      ],
    }

    const response = await patchScimUser({ id: user.id, body })

    const expectedScimUser: ScimUserResponse = {
      ...user,
      emails: [
        {
          value: newEmail,
          type: "work",
          primary: true,
        },
      ],
    }
    expect(response).toEqual(expectedScimUser)

    const persistedUser = await config.api.scimUsersAPI.find(user.id)
    expect(persistedUser).toEqual(expectedScimUser)

    expect((await config.api.users.getUser(user.id)).body).toEqual(
      expect.objectContaining({ _id: user.id, email: newEmail })
    )
  })
})

describe("DELETE /api/global/scim/v2/users/:id", () => {
yarn.lock

@@ -3637,10 +3637,10 @@
    semver "^7.3.5"
    tar "^6.1.11"

"@mongodb-js/saslprep@^1.1.0":
  version "1.1.4"
  resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.4.tgz#24ec1c4915a65f5c506bb88c081731450d91bb1c"
  integrity sha512-8zJ8N1x51xo9hwPh6AWnKdLGEC5N3lDa6kms1YHmFBoRhTpJR6HG8wWk0td1MVCu9cD4YBrvjZEtd5Obw0Fbnw==
"@mongodb-js/saslprep@^1.1.5":
  version "1.1.7"
  resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.7.tgz#d1700facfd6916c50c2c88fd6d48d363a56c702f"
  integrity sha512-dCHW/oEX0KJ4NjDULBo3JiOaK5+6axtpBbS+ao2ZInoAL9/YRQLhXzSNAFz7hP4nzLkIqsfYAK/PDE3+XHny0Q==
  dependencies:
    sparse-bitfield "^3.0.3"

@@ -7976,10 +7976,10 @@ bser@2.1.1:
  dependencies:
    node-int64 "^0.4.0"

bson@^6.2.0:
  version "6.3.0"
  resolved "https://registry.yarnpkg.com/bson/-/bson-6.3.0.tgz#d47acba525ba7d7eb0e816c10538bce26a337fe0"
  integrity sha512-balJfqwwTBddxfnidJZagCBPP/f48zj9Sdp3OJswREOgsJzHiQSaOIAtApSgDQFYgHqAvFkp53AFSqjMDZoTFw==
bson@^6.7.0:
  version "6.8.0"
  resolved "https://registry.yarnpkg.com/bson/-/bson-6.8.0.tgz#5063c41ba2437c2b8ff851b50d9e36cb7aaa7525"
  integrity sha512-iOJg8pr7wq2tg/zSlCCHMi3hMm5JTOxLTagf3zxhcenHsFp+c6uOs6K7W5UE7A4QIJGtqh/ZovFNMP4mOPJynQ==

btoa@^1.2.1:
  version "1.2.1"

@@ -16022,13 +16022,13 @@ mongodb-connection-string-url@^3.0.0:
    "@types/whatwg-url" "^11.0.2"
    whatwg-url "^13.0.0"

mongodb@^6.3.0:
  version "6.3.0"
  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-6.3.0.tgz#ec9993b19f7ed2ea715b903fcac6171c9d1d38ca"
  integrity sha512-tt0KuGjGtLUhLoU263+xvQmPHEGTw5LbcNC73EoFRYgSHwZt5tsoJC110hDyO1kjQzpgNrpdcSza9PknWN4LrA==
mongodb@6.7.0:
  version "6.7.0"
  resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-6.7.0.tgz#f86e51e6530e6a2ca4a99d7cfdf6f409223ac199"
  integrity sha512-TMKyHdtMcO0fYBNORiYdmM25ijsHs+Njs963r4Tro4OQZzqYigAzYQouwWRg4OIaiLRUEGUh/1UAcH5lxdSLIA==
  dependencies:
    "@mongodb-js/saslprep" "^1.1.0"
    bson "^6.2.0"
    "@mongodb-js/saslprep" "^1.1.5"
    bson "^6.7.0"
    mongodb-connection-string-url "^3.0.0"

ms@2.1.2: