Merge branch 'master' into BUDI-9068/type-sidepanel
commit 1dc21128e8

@@ -0,0 +1 @@
+scripts/resources/minio filter=lfs diff=lfs merge=lfs -text

@@ -30,7 +30,7 @@ env:

 jobs:
   lint:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -47,7 +47,7 @@ jobs:
      - run: yarn lint

   build:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -76,7 +76,7 @@ jobs:
          fi

   helm-lint:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -88,7 +88,7 @@ jobs:
      - run: cd charts/budibase && helm lint .

   test-libraries:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -122,7 +122,7 @@ jobs:
          fi

   test-worker:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -151,11 +151,22 @@ jobs:
          yarn test --verbose --reporters=default --reporters=github-actions

   test-server:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
         datasource:
-          [mssql, mysql, postgres, postgres_legacy, mongodb, mariadb, oracle, sqs, none]
+          [
+            mssql,
+            mysql,
+            postgres,
+            postgres_legacy,
+            mongodb,
+            mariadb,
+            oracle,
+            sqs,
+            elasticsearch,
+            none,
+          ]
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -192,6 +203,8 @@ jobs:
            docker pull budibase/oracle-database:23.2-slim-faststart
          elif [ "${{ matrix.datasource }}" == "postgres_legacy" ]; then
            docker pull postgres:9.5.25
+         elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
+           docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
          fi
          docker pull minio/minio &
          docker pull redis &
@@ -240,7 +253,7 @@ jobs:
          yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions

   check-pro-submodule:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo and submodules
@@ -299,7 +312,7 @@ jobs:
          fi

   check-lockfile:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo

@@ -1,5 +1,5 @@
 ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
-FROM node:20-slim as build
+FROM node:20-slim AS build

 # install node-gyp dependencies
 RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
@@ -34,13 +34,13 @@ COPY packages/worker/dist packages/worker/dist
 COPY packages/worker/pm2.config.js packages/worker/pm2.config.js


-FROM $BASEIMG as runner
+FROM $BASEIMG AS runner
 ARG TARGETARCH
-ENV TARGETARCH $TARGETARCH
+ENV TARGETARCH=$TARGETARCH
 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
 # e.g. docker build --build-arg TARGETBUILD=aas ....
 ARG TARGETBUILD=single
-ENV TARGETBUILD $TARGETBUILD
+ENV TARGETBUILD=$TARGETBUILD

 # install base dependencies
 RUN apt-get update && \
@@ -67,6 +67,12 @@ RUN mkdir -p /var/log/nginx && \

 # setup minio
 WORKDIR /minio

+# a 2022 version of minio that supports gateway mode
+COPY scripts/resources/minio /minio
+RUN chmod +x minio
+
+# handles the installation of minio in non-aas environments
 COPY scripts/install-minio.sh ./install.sh
 RUN chmod +x install.sh && ./install.sh
@@ -86,7 +92,7 @@ COPY hosting/single/ssh/sshd_config /etc/
 COPY hosting/single/ssh/ssh_setup.sh /tmp

 # setup letsencrypt certificate
-RUN apt-get install -y certbot python3-certbot-nginx
+RUN apt-get update && apt-get install -y certbot python3-certbot-nginx
 COPY hosting/letsencrypt /app/letsencrypt
 RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh

@@ -1,53 +1,61 @@
 #!/bin/bash
-declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "DATA_DIR" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
-declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONMENT" "CLUSTER_PORT" "DEPLOYMENT_ENVIRONMENT" "MINIO_URL" "NODE_ENV" "POSTHOG_TOKEN" "REDIS_URL" "SELF_HOSTED" "WORKER_PORT" "WORKER_URL" "TENANT_FEATURE_FLAGS" "ACCOUNT_PORTAL_URL")
-# Check the env vars set in Dockerfile have come through, AAS seems to drop them
-[[ -z "${APP_PORT}" ]] && export APP_PORT=4001
-[[ -z "${ARCHITECTURE}" ]] && export ARCHITECTURE=amd
-[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
-[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
-[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
-[[ -z "${MINIO_URL}" ]] && [[ -z "${USE_S3}" ]] && export MINIO_URL=http://127.0.0.1:9000
-[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
-[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
-[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
-[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
-[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
-[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
-[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
-[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
-[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
-# export CUSTOM_DOMAIN=budi001.custom.com

-# Azure App Service customisations
-if [[ "${TARGETBUILD}" = "aas" ]]; then
-  export DATA_DIR="${DATA_DIR:-/home}"
-  WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
-  /etc/init.d/ssh start
-else
-  export DATA_DIR=${DATA_DIR:-/data}
+echo "Starting runner.sh..."
+
+# set defaults for Docker-related variables
+export APP_PORT="${APP_PORT:-4001}"
+export ARCHITECTURE="${ARCHITECTURE:-amd}"
+export BUDIBASE_ENVIRONMENT="${BUDIBASE_ENVIRONMENT:-PRODUCTION}"
+export CLUSTER_PORT="${CLUSTER_PORT:-80}"
+export DEPLOYMENT_ENVIRONMENT="${DEPLOYMENT_ENVIRONMENT:-docker}"
+
+# only set MINIO_URL if neither MINIO_URL nor USE_S3 is set
+if [[ -z "${MINIO_URL}" && -z "${USE_S3}" ]]; then
+  export MINIO_URL="http://127.0.0.1:9000"
 fi
-mkdir -p ${DATA_DIR}
-# Mount NFS or GCP Filestore if env vars exist for it
-if [[ ! -z ${FILESHARE_IP} && ! -z ${FILESHARE_NAME} ]]; then
+
+export NODE_ENV="${NODE_ENV:-production}"
+export POSTHOG_TOKEN="${POSTHOG_TOKEN:-phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU}"
+export ACCOUNT_PORTAL_URL="${ACCOUNT_PORTAL_URL:-https://account.budibase.app}"
+export REDIS_URL="${REDIS_URL:-127.0.0.1:6379}"
+export SELF_HOSTED="${SELF_HOSTED:-1}"
+export WORKER_PORT="${WORKER_PORT:-4002}"
+export WORKER_URL="${WORKER_URL:-http://127.0.0.1:4002}"
+export APPS_URL="${APPS_URL:-http://127.0.0.1:4001}"
+export SERVER_TOP_LEVEL_PATH="${SERVER_TOP_LEVEL_PATH:-/app}"
+
+# set DATA_DIR and ensure the directory exists
+if [[ ${TARGETBUILD} == "aas" ]]; then
+  export DATA_DIR="/home"
+else
+  export DATA_DIR="${DATA_DIR:-/data}"
+fi
+mkdir -p "${DATA_DIR}"
+
+# mount NFS or GCP Filestore if FILESHARE_IP and FILESHARE_NAME are set
+if [[ -n "${FILESHARE_IP}" && -n "${FILESHARE_NAME}" ]]; then
   echo "Mounting NFS share"
   apt update && apt install -y nfs-common nfs-kernel-server
   echo "Mount file share ${FILESHARE_IP}:/${FILESHARE_NAME} to ${DATA_DIR}"
-  mount -o nolock ${FILESHARE_IP}:/${FILESHARE_NAME} ${DATA_DIR}
+  mount -o nolock "${FILESHARE_IP}:/${FILESHARE_NAME}" "${DATA_DIR}"
   echo "Mounting result: $?"
 fi

-if [ -f "${DATA_DIR}/.env" ]; then
-  # Read in the .env file and export the variables
-  for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
+# source environment variables from a .env file if it exists in DATA_DIR
+if [[ -f "${DATA_DIR}/.env" ]]; then
+  set -a # Automatically export all variables loaded from .env
+  source "${DATA_DIR}/.env"
+  set +a
 fi
-# randomise any unset environment variables
-for ENV_VAR in "${ENV_VARS[@]}"
-do
-  if [[ -z "${!ENV_VAR}" ]]; then
-    eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')"
+
+# randomize any unset sensitive environment variables using uuidgen
+env_vars=(COUCHDB_USER COUCHDB_PASSWORD MINIO_ACCESS_KEY MINIO_SECRET_KEY INTERNAL_API_KEY JWT_SECRET REDIS_PASSWORD)
+for var in "${env_vars[@]}"; do
+  if [[ -z "${!var}" ]]; then
+    export "$var"="$(uuidgen | tr -d '-')"
   fi
 done

 if [[ -z "${COUCH_DB_URL}" ]]; then
   export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
 fi
@@ -58,17 +66,15 @@ fi

 if [ ! -f "${DATA_DIR}/.env" ]; then
   touch ${DATA_DIR}/.env
-  for ENV_VAR in "${ENV_VARS[@]}"
-  do
+  for ENV_VAR in "${ENV_VARS[@]}"; do
     temp=$(eval "echo \$$ENV_VAR")
-    echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+    echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
   done
-  for ENV_VAR in "${DOCKER_VARS[@]}"
-  do
+  for ENV_VAR in "${DOCKER_VARS[@]}"; do
     temp=$(eval "echo \$$ENV_VAR")
-    echo "$ENV_VAR=$temp" >> ${DATA_DIR}/.env
+    echo "$ENV_VAR=$temp" >>${DATA_DIR}/.env
   done
-  echo "COUCH_DB_URL=${COUCH_DB_URL}" >> ${DATA_DIR}/.env
+  echo "COUCH_DB_URL=${COUCH_DB_URL}" >>${DATA_DIR}/.env
 fi

 # Read in the .env file and export the variables
@@ -79,31 +85,44 @@ ln -s ${DATA_DIR}/.env /worker/.env
 # make these directories in runner, incase of mount
 mkdir -p ${DATA_DIR}/minio
 mkdir -p ${DATA_DIR}/redis
+mkdir -p ${DATA_DIR}/couch
 chown -R couchdb:couchdb ${DATA_DIR}/couch

 REDIS_CONFIG="/etc/redis/redis.conf"
 sed -i "s#DATA_DIR#${DATA_DIR}#g" "${REDIS_CONFIG}"

 if [[ -n "${USE_DEFAULT_REDIS_CONFIG}" ]]; then
   REDIS_CONFIG=""
 fi

 if [[ -n "${REDIS_PASSWORD}" ]]; then
-  redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
+  redis-server "${REDIS_CONFIG}" --requirepass $REDIS_PASSWORD >/dev/stdout 2>&1 &
 else
-  redis-server "${REDIS_CONFIG}" > /dev/stdout 2>&1 &
+  redis-server "${REDIS_CONFIG}" >/dev/stdout 2>&1 &
 fi
-/bbcouch-runner.sh &
+
+echo "Starting callback CouchDB runner..."
+./bbcouch-runner.sh &

 # only start minio if use s3 isn't passed
 if [[ -z "${USE_S3}" ]]; then
-  /minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
+  if [[ ${TARGETBUILD} == aas ]]; then
+    echo "Starting MinIO in Azure Gateway mode"
+    if [[ -z "${AZURE_STORAGE_ACCOUNT}" || -z "${AZURE_STORAGE_KEY}" || -z "${MINIO_ACCESS_KEY}" || -z "${MINIO_SECRET_KEY}" ]]; then
+      echo "The following environment variables must be set: AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, MINIO_ACCESS_KEY, MINIO_SECRET_KEY"
+      exit 1
+    fi
+    /minio/minio gateway azure --console-address ":9001" >/dev/stdout 2>&1 &
+  else
+    echo "Starting MinIO in standalone mode"
+    /minio/minio server --console-address ":9001" ${DATA_DIR}/minio >/dev/stdout 2>&1 &
+  fi
 fi

 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
   # Add monthly cron job to renew certbot certificate
-  echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >> /etc/cron.d/certificate-renew
+  echo -n "* * 2 * * root exec /app/letsencrypt/certificate-renew.sh ${CUSTOM_DOMAIN}" >>/etc/cron.d/certificate-renew
   chmod +x /etc/cron.d/certificate-renew
   # Request the certbot certificate
   /app/letsencrypt/certificate-request.sh ${CUSTOM_DOMAIN}

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.4.15",
+  "version": "3.4.20",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -90,7 +90,7 @@
   $: requestEval(runtimeExpression, context, snippets)
   $: bindingHelpers = new BindingHelpers(getCaretPosition, insertAtPos)

-  $: bindingOptions = bindingsToCompletions(bindings, editorMode)
+  $: bindingOptions = bindingsToCompletions(enrichedBindings, editorMode)
   $: helperOptions = allowHelpers ? getHelperCompletions(editorMode) : []
   $: snippetsOptions =
     usingJS && useSnippets && snippets?.length ? snippets : []

@@ -484,7 +484,7 @@ const automationActions = (store: AutomationStore) => ({
       branches.forEach((branch, bIdx) => {
         children[branch.id].forEach(
           (bBlock: AutomationStep, sIdx: number, array: AutomationStep[]) => {
-            const ended = array.length - 1 === sIdx && !branches.length
+            const ended = array.length - 1 === sIdx
             treeTraverse(bBlock, pathToCurrentNode, sIdx, bIdx, ended)
           }
         )
@@ -505,7 +505,6 @@ const automationActions = (store: AutomationStore) => ({
     blocks.forEach((block, idx, array) => {
       treeTraverse(block, null, idx, null, array.length - 1 === idx)
     })
-
     return blockRefs
   },

@@ -81,11 +81,11 @@ export const screenComponentErrorList = derived(
     const errors: UIComponentError[] = []

     function checkComponentErrors(component: Component, ancestors: string[]) {
+      errors.push(...getMissingAncestors(component, definitions, ancestors))
       errors.push(
         ...getInvalidDatasources(screen, component, datasources, definitions)
       )
       errors.push(...getMissingRequiredSettings(component, definitions))
-      errors.push(...getMissingAncestors(component, definitions, ancestors))

       for (const child of component._children || []) {
         checkComponentErrors(child, [...ancestors, component._component])
@@ -239,7 +239,10 @@ function getMissingAncestors(
   ancestors: string[]
 ): UIComponentError[] {
   const definition = definitions[component._component]
+  if (ancestors.some(a => !a.startsWith(BudibasePrefix))) {
+    // We don't have a way to know what components are used within a plugin component
+    return []
+  }
   if (!definition?.requiredAncestors?.length) {
     return []
   }

@@ -4492,6 +4492,12 @@
           }
         ]
       },
+      {
+        "type": "text",
+        "label": "Zoom level",
+        "key": "defaultZoom",
+        "defaultValue": "1"
+      },
       {
         "type": "event",
         "label": "On change",

@@ -28,7 +28,7 @@
     "apexcharts": "^3.48.0",
     "dayjs": "^1.10.8",
     "downloadjs": "1.4.7",
-    "html5-qrcode": "^2.2.1",
+    "html5-qrcode": "^2.3.8",
     "leaflet": "^1.7.1",
     "sanitize-html": "^2.13.0",
     "screenfull": "^6.0.1",

@@ -20,6 +20,7 @@
   export let beepFrequency = 2637
   export let customFrequency = 1046
   export let preferredCamera = "environment"
+  export let defaultZoom = 1
   export let validator

   const dispatch = createEventDispatcher()
@@ -58,6 +59,14 @@
     html5QrCode
       .start(cameraSetting, cameraConfig, onScanSuccess)
       .then(() => {
+        if (defaultZoom > 1) {
+          const cameraOptions =
+            html5QrCode.getRunningTrackCameraCapabilities()
+          const zoom = cameraOptions.zoomFeature()
+          if (zoom.isSupported()) {
+            zoom.apply(defaultZoom)
+          }
+        }
         resolve({ initialised: true })
       })
       .catch(err => {

@@ -17,6 +17,7 @@
   export let beepFrequency
   export let customFrequency
   export let preferredCamera
+  export let defaultZoom
   export let helpText = null

   let fieldState
@@ -56,6 +57,7 @@
     {beepFrequency}
     {customFrequency}
     {preferredCamera}
+    {defaultZoom}
     validator={fieldState.validator}
   />
 {/if}

@@ -1,142 +0,0 @@
-import ClientApp from "./components/ClientApp.svelte"
-import UpdatingApp from "./components/UpdatingApp.svelte"
-import {
-  builderStore,
-  appStore,
-  blockStore,
-  componentStore,
-  environmentStore,
-  dndStore,
-  eventStore,
-  hoverStore,
-  stateStore,
-  routeStore,
-} from "./stores"
-import loadSpectrumIcons from "@budibase/bbui/spectrum-icons-vite.js"
-import { get } from "svelte/store"
-import { initWebsocket } from "./websocket.js"
-
-// Provide svelte and svelte/internal as globals for custom components
-import * as svelte from "svelte"
-import * as internal from "svelte/internal"
-
-window.svelte_internal = internal
-window.svelte = svelte
-
-// Initialise spectrum icons
-loadSpectrumIcons()
-
-let app
-
-const loadBudibase = async () => {
-  // Update builder store with any builder flags
-  builderStore.set({
-    ...get(builderStore),
-    inBuilder: !!window["##BUDIBASE_IN_BUILDER##"],
-    layout: window["##BUDIBASE_PREVIEW_LAYOUT##"],
-    screen: window["##BUDIBASE_PREVIEW_SCREEN##"],
-    selectedComponentId: window["##BUDIBASE_SELECTED_COMPONENT_ID##"],
-    previewId: window["##BUDIBASE_PREVIEW_ID##"],
-    theme: window["##BUDIBASE_PREVIEW_THEME##"],
-    customTheme: window["##BUDIBASE_PREVIEW_CUSTOM_THEME##"],
-    previewDevice: window["##BUDIBASE_PREVIEW_DEVICE##"],
-    navigation: window["##BUDIBASE_PREVIEW_NAVIGATION##"],
-    hiddenComponentIds: window["##BUDIBASE_HIDDEN_COMPONENT_IDS##"],
-    usedPlugins: window["##BUDIBASE_USED_PLUGINS##"],
-    location: window["##BUDIBASE_LOCATION##"],
-    snippets: window["##BUDIBASE_SNIPPETS##"],
-    componentErrors: window["##BUDIBASE_COMPONENT_ERRORS##"],
-  })
-
-  // Set app ID - this window flag is set by both the preview and the real
-  // server rendered app HTML
-  appStore.actions.setAppId(window["##BUDIBASE_APP_ID##"])
-
-  // Set the flag used to determine if the app is being loaded via an iframe
-  appStore.actions.setAppEmbedded(
-    window["##BUDIBASE_APP_EMBEDDED##"] === "true"
-  )
-
-  if (window.MIGRATING_APP) {
-    new UpdatingApp({
-      target: window.document.body,
-    })
-    return
-  }
-
-  // Fetch environment info
-  if (!get(environmentStore)?.loaded) {
-    await environmentStore.actions.fetchEnvironment()
-  }
-
-  // Register handler for runtime events from the builder
-  window.handleBuilderRuntimeEvent = (type, data) => {
-    if (!window["##BUDIBASE_IN_BUILDER##"]) {
-      return
-    }
-    if (type === "event-completed") {
-      eventStore.actions.resolveEvent(data)
-    } else if (type === "eject-block") {
-      const block = blockStore.actions.getBlock(data)
-      block?.eject()
-    } else if (type === "dragging-new-component") {
-      const { dragging, component } = data
-      if (dragging) {
-        const definition =
-          componentStore.actions.getComponentDefinition(component)
-        dndStore.actions.startDraggingNewComponent({ component, definition })
-      } else {
-        dndStore.actions.reset()
-      }
-    } else if (type === "request-context") {
-      const { selectedComponentInstance, screenslotInstance } =
-        get(componentStore)
-      const instance = selectedComponentInstance || screenslotInstance
-      const context = instance?.getDataContext()
-      let stringifiedContext = null
-      try {
-        stringifiedContext = JSON.stringify(context)
-      } catch (error) {
-        // Ignore - invalid context
-      }
-      eventStore.actions.dispatchEvent("provide-context", {
-        context: stringifiedContext,
-      })
-    } else if (type === "hover-component") {
-      hoverStore.actions.hoverComponent(data, false)
-    } else if (type === "builder-meta") {
-      builderStore.actions.setMetadata(data)
-    } else if (type === "builder-state") {
-      const [[key, value]] = Object.entries(data)
-      stateStore.actions.setValue(key, value)
-    } else if (type === "builder-url-test-data") {
-      const { route, testValue } = data
-      routeStore.actions.setTestUrlParams(route, testValue)
-    }
-  }
-
-  // Register any custom components
-  if (window["##BUDIBASE_CUSTOM_COMPONENTS##"]) {
-    window["##BUDIBASE_CUSTOM_COMPONENTS##"].forEach(component => {
-      componentStore.actions.registerCustomComponent(component)
-    })
-  }
-
-  // Make a callback available for custom component bundles to register
-  // themselves at runtime
-  window.registerCustomComponent =
-    componentStore.actions.registerCustomComponent
-
-  // Initialise websocket
-  initWebsocket()
-
-  // Create app if one hasn't been created yet
-  if (!app) {
-    app = new ClientApp({
-      target: window.document.body,
-    })
-  }
-}
-
-// Attach to window so the HTML template can call this when it loads
-window.loadBudibase = loadBudibase

@@ -1 +1 @@
-Subproject commit 45f5673d5e5ab3c22deb6663cea2e31a628aa133
+Subproject commit e3843dd4eaced68ae063355b77df200dbc789c98

@@ -1,24 +0,0 @@
-const elastic: any = {}
-
-elastic.Client = function () {
-  this.index = jest.fn().mockResolvedValue({ body: [] })
-  this.search = jest.fn().mockResolvedValue({
-    body: {
-      hits: {
-        hits: [
-          {
-            _source: {
-              name: "test",
-            },
-          },
-        ],
-      },
-    },
-  })
-  this.update = jest.fn().mockResolvedValue({ body: [] })
-  this.delete = jest.fn().mockResolvedValue({ body: [] })
-
-  this.close = jest.fn()
-}
-
-module.exports = elastic

@@ -1,5 +1,6 @@
-MSSQL_SHA=sha256:3b913841850a4d57fcfcb798be06acc88ea0f2acc5418bc0c140a43e91c4a545
+MSSQL_SHA=sha256:d252932ef839c24c61c1139cc98f69c85ca774fa7c6bfaaa0015b7eb02b9dc87
 MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
 POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
 MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
 MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8
+ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0

@@ -11,6 +11,7 @@ import {
   UploadPluginResponse,
   FetchPluginResponse,
   DeletePluginResponse,
+  PluginMetadata,
 } from "@budibase/types"
 import env from "../../../environment"
 import { clientAppSocket } from "../../../websockets"
@@ -53,10 +54,11 @@ export async function create(
   const { source, url, headers, githubToken } = ctx.request.body

   try {
-    let metadata
-    let directory
+    let metadata: PluginMetadata
+    let directory: string

     // Generating random name as a backup and needed for url
-    let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
+    const name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)

     switch (source) {
       case PluginSource.NPM: {
@@ -81,12 +83,14 @@ export async function create(
         directory = directoryUrl
         break
       }
+      default:
+        ctx.throw(400, "Invalid source")
     }

-    pluginCore.validate(metadata?.schema)
+    pluginCore.validate(metadata.schema)

     // Only allow components in cloud
-    if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
+    if (!env.SELF_HOSTED && metadata.schema?.type !== PluginType.COMPONENT) {
       throw new Error(
         "Only component plugins are supported outside of self-host"
       )
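
Note: the added `default: ctx.throw(400, "Invalid source")` branch is what allows `metadata` to be typed as a non-optional `PluginMetadata`. A minimal, self-contained TypeScript sketch of the idea (hypothetical stand-ins, not the actual Budibase controller; `fail` plays the role of Koa's never-returning `ctx.throw`):

```typescript
// Hypothetical illustration: a `never`-returning call in the `default`
// case means every path out of the switch either assigns `metadata` or
// throws, so TypeScript's definite-assignment analysis accepts the
// non-optional access afterwards.
type PluginMetadata = { schema?: { type?: string } } // simplified shape

enum PluginSource {
  NPM = "npm",
  URL = "url",
}

function fail(status: number, message: string): never {
  throw new Error(`${status}: ${message}`) // stand-in for ctx.throw()
}

function create(source: PluginSource): PluginMetadata {
  let metadata: PluginMetadata
  switch (source) {
    case PluginSource.NPM:
      metadata = { schema: { type: "component" } }
      break
    case PluginSource.URL:
      metadata = { schema: { type: "datasource" } }
      break
    default:
      fail(400, "Invalid source") // no fall-through path survives this
  }
  return metadata // ok: assigned on every reachable path
}
```
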
File diff suppressed because it is too large

@@ -165,7 +165,8 @@ describe("/datasources", () => {
 })

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {
@@ -590,7 +591,8 @@ if (descriptions.length) {
 }

 const datasources = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
+  plus: true,
+  exclude: [DatabaseName.SQS, DatabaseName.ORACLE],
 })

 if (datasources.length) {

@@ -9,7 +9,8 @@ import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {

@@ -1,9 +1,6 @@
 import * as setup from "./utilities"

-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"

 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -80,7 +77,7 @@ function encodeJS(binding: string) {
   return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
 }

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -1,8 +1,5 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import {
   context,
   db as dbCore,
@@ -60,7 +57,7 @@ jest.mock("@budibase/pro", () => ({
   },
 }))

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(
@@ -3553,6 +3550,31 @@ if (descriptions.length) {
           limit: 1,
         }).toContainExactly([row])
       })
+
+      isInternal &&
+        describe("search by _id for relations", () => {
+          it("can filter by the related _id", async () => {
+            await expectSearch({
+              query: {
+                equal: { "rel._id": row.rel[0]._id },
+              },
+            }).toContainExactly([row])
+
+            await expectSearch({
+              query: {
+                equal: { "rel._id": row.rel[1]._id },
+              },
+            }).toContainExactly([row])
+          })
+
+          it("can filter by the related _id and find nothing", async () => {
+            await expectSearch({
+              query: {
+                equal: { "rel._id": "rel_none" },
+              },
+            }).toFindNothing()
+          })
+        })
     })

     !isInternal &&

@@ -1,11 +1,3 @@
-// Directly mock the AWS SDK
-jest.mock("@aws-sdk/s3-request-presigner", () => ({
-  getSignedUrl: jest.fn(() => {
-    return `http://example.com`
-  }),
-}))
-jest.mock("@aws-sdk/client-s3")
-
 import { Datasource, SourceName } from "@budibase/types"
 import { setEnv } from "../../../environment"
 import { getRequest, getConfig, afterAll as _afterAll } from "./utilities"
@@ -92,7 +84,17 @@ describe("/static", () => {
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
-    expect(res.body.signedUrl).toEqual("http://example.com")
+
+    expect(res.body.signedUrl).toStartWith(
+      "https://foo.s3.eu-west-1.amazonaws.com/bar?"
+    )
+    expect(res.body.signedUrl).toContain("X-Amz-Algorithm=AWS4-HMAC-SHA256")
+    expect(res.body.signedUrl).toContain("X-Amz-Credential=bb")
+    expect(res.body.signedUrl).toContain("X-Amz-Date=")
+    expect(res.body.signedUrl).toContain("X-Amz-Signature=")
+    expect(res.body.signedUrl).toContain("X-Amz-Expires=900")
+    expect(res.body.signedUrl).toContain("X-Amz-SignedHeaders=host")

     expect(res.body.publicUrl).toEqual(
       `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
     )
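
Aside: the assertions above check the SigV4 query parameters by substring. An equivalent, slightly stricter check can be written with the standard URL API; a small sketch (the URL below is a made-up example, not output captured from the test):

```typescript
// Parse a presigned URL and assert on its SigV4 query parameters
// structurally rather than by substring matching.
const signedUrl =
  "https://foo.s3.eu-west-1.amazonaws.com/bar?" +
  "X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Expires=900&X-Amz-SignedHeaders=host"

const params = new URL(signedUrl).searchParams
console.assert(params.get("X-Amz-Algorithm") === "AWS4-HMAC-SHA256")
console.assert(params.get("X-Amz-Expires") === "900") // 15-minute expiry
console.assert(params.get("X-Amz-SignedHeaders") === "host")
```
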
@@ -28,17 +28,14 @@ import * as setup from "./utilities"
 import * as uuid from "uuid"

 import { generator } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
 import timekeeper from "timekeeper"

 const { basicTable } = setup.structures
 const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -37,17 +37,14 @@ import {
   ViewV2Type,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
 import { context, db, events, roles, setEnv } from "@budibase/backend-core"
 import { mockChatGPTResponse } from "../../../tests/utilities/mocks/openai"
 import nock from "nock"

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -1,5 +1,5 @@
 import * as automation from "../index"
-import { Table, AutomationStatus } from "@budibase/types"
+import { Table, AutomationStatus, EmptyFilterOption } from "@budibase/types"
 import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
 import TestConfiguration from "../../tests/utilities/TestConfiguration"

@@ -280,4 +280,23 @@ describe("Branching automations", () => {

     expect(results.steps[2].outputs.message).toContain("Special user")
   })
+
+  it("should not fail with empty conditions", async () => {
+    const results = await createAutomationBuilder(config)
+      .onAppAction()
+      .branch({
+        specialBranch: {
+          steps: stepBuilder => stepBuilder.serverLog({ text: "Hello!" }),
+          condition: {
+            onEmptyFilter: EmptyFilterOption.RETURN_NONE,
+          },
+        },
+      })
+      .test({ fields: { test_trigger: true } })
+
+    expect(results.steps[0].outputs.success).toEqual(false)
+    expect(results.steps[0].outputs.status).toEqual(
+      AutomationStatus.NO_CONDITION_MET
+    )
+  })
 })

@@ -9,7 +9,8 @@ import { generator } from "@budibase/backend-core/tests"
 import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {

@@ -10,7 +10,7 @@ import {
 import { Client, ClientOptions } from "@elastic/elasticsearch"
 import { HOST_ADDRESS } from "./utils"

-interface ElasticsearchConfig {
+export interface ElasticsearchConfig {
   url: string
   ssl?: boolean
   ca?: string
@@ -99,9 +99,9 @@ const SCHEMA: Integration = {
   },
 }

-class ElasticSearchIntegration implements IntegrationBase {
+export class ElasticSearchIntegration implements IntegrationBase {
   private config: ElasticsearchConfig
-  private client
+  private client: Client

   constructor(config: ElasticsearchConfig) {
     this.config = config
@@ -132,20 +132,23 @@ class ElasticSearchIntegration implements IntegrationBase {
     }
   }

-  async create(query: { index: string; json: object }) {
-    const { index, json } = query
+  async create(query: {
+    index: string
+    json: object
+    extra?: Record<string, string>
+  }) {
+    const { index, json, extra } = query

     try {
       const result = await this.client.index({
         index,
         body: json,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error writing to elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

@@ -160,41 +163,46 @@ class ElasticSearchIntegration implements IntegrationBase {
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

-  async update(query: { id: string; index: string; json: object }) {
-    const { id, index, json } = query
+  async update(query: {
+    id: string
+    index: string
+    json: object
+    extra?: Record<string, string>
+  }) {
+    const { id, index, json, extra } = query
     try {
       const result = await this.client.update({
         id,
         index,
         body: json,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

-  async delete(query: { id: string; index: string }) {
-    const { id, index } = query
+  async delete(query: {
+    id: string
+    index: string
+    extra?: Record<string, string>
+  }) {
+    const { id, index, extra } = query
     try {
       const result = await this.client.delete({
         id,
         index,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error deleting from elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }
 }
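
Aside: the new optional `extra` bag is spread straight into the client call, so callers can forward any per-request Elasticsearch option. A minimal sketch of the pattern as a standalone function (not the integration class itself); the integration tests below use it to pass `refresh: "true"` so a write is immediately visible to the next search:

```typescript
import { Client } from "@elastic/elasticsearch"

// Index a document, forwarding optional per-request parameters.
async function createDoc(
  client: Client,
  index: string,
  json: object,
  extra?: Record<string, string>
) {
  const result = await client.index({
    index,
    body: json,
    ...extra, // e.g. { refresh: "true" } in the integration tests
  })
  return result.body
}
```
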
@@ -1,83 +1,81 @@
-import { default as ElasticSearchIntegration } from "../elasticsearch"
-
-jest.mock("@elastic/elasticsearch")
-
-class TestConfiguration {
-  integration: any
-
-  constructor(config: any = {}) {
-    this.integration = new ElasticSearchIntegration.integration(config)
-  }
-}
-
-describe("Elasticsearch Integration", () => {
-  let config: any
-  let indexName = "Users"
-
-  beforeEach(() => {
-    config = new TestConfiguration()
-  })
-
-  it("calls the create method with the correct params", async () => {
-    const body = {
-      name: "Hello",
-    }
-    await config.integration.create({
-      index: indexName,
-      json: body,
-    })
-    expect(config.integration.client.index).toHaveBeenCalledWith({
-      index: indexName,
-      body,
-    })
-  })
-
-  it("calls the read method with the correct params", async () => {
-    const body = {
-      query: {
-        term: {
-          name: "kimchy",
-        },
-      },
-    }
-    const response = await config.integration.read({
-      index: indexName,
-      json: body,
-    })
-    expect(config.integration.client.search).toHaveBeenCalledWith({
-      index: indexName,
-      body,
-    })
-    expect(response).toEqual(expect.any(Array))
-  })
-
-  it("calls the update method with the correct params", async () => {
-    const body = {
-      name: "updated",
-    }
-
-    const response = await config.integration.update({
-      id: "1234",
-      index: indexName,
-      json: body,
-    })
-
-    expect(config.integration.client.update).toHaveBeenCalledWith({
-      id: "1234",
-      index: indexName,
-      body,
-    })
-    expect(response).toEqual(expect.any(Array))
-  })
-
-  it("calls the delete method with the correct params", async () => {
-    const body = {
-      id: "1234",
-    }
-
-    const response = await config.integration.delete(body)
-
-    expect(config.integration.client.delete).toHaveBeenCalledWith(body)
-    expect(response).toEqual(expect.any(Array))
-  })
-})
+import { Datasource } from "@budibase/types"
+import { ElasticsearchConfig, ElasticSearchIntegration } from "../elasticsearch"
+import { generator } from "@budibase/backend-core/tests"
+import { DatabaseName, datasourceDescribe } from "./utils"
+
+const describes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })
+
+if (describes.length) {
+  describe.each(describes)("Elasticsearch Integration", ({ dsProvider }) => {
+    let datasource: Datasource
+    let integration: ElasticSearchIntegration
+
+    let index: string
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+    })
+
+    beforeEach(() => {
+      index = generator.guid()
+      integration = new ElasticSearchIntegration(
+        datasource.config! as ElasticsearchConfig
+      )
+    })
+
+    it("can create a record", async () => {
+      await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "Hello" }])
+    })
+
+    it("can update a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+
+      await integration.update({
+        id: create._id,
+        index,
+        json: { doc: { name: "World" } },
+        extra: { refresh: "true" },
+      })
+
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "World" }])
+    })
+
+    it("can delete a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+
+      await integration.delete({
+        id: create._id,
+        index,
+        extra: { refresh: "true" },
+      })
+
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([])
+    })
+  })
+}

@@ -0,0 +1,54 @@
+import { Datasource, SourceName } from "@budibase/types"
+import { GenericContainer, Wait } from "testcontainers"
+import { testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
+import { ELASTICSEARCH_IMAGE } from "./images"
+import { ElasticsearchConfig } from "../../elasticsearch"
+
+let ports: Promise<testContainerUtils.Port[]>
+
+export async function getDatasource(): Promise<Datasource> {
+  if (!ports) {
+    ports = startContainer(
+      new GenericContainer(ELASTICSEARCH_IMAGE)
+        .withExposedPorts(9200)
+        .withEnvironment({
+          // We need to set the discovery type to single-node to avoid the
+          // cluster waiting for other nodes to join before starting up.
+          "discovery.type": "single-node",
+          // We disable security to avoid having to do any auth against the
+          // container, and to disable SSL. With SSL enabled it uses a self
+          // signed certificate that we'd have to ignore anyway.
+          "xpack.security.enabled": "false",
+        })
+        .withWaitStrategy(
+          Wait.forHttp(
+            // Single node clusters never reach status green, so we wait for
+            // yellow instead.
+            "/_cluster/health?wait_for_status=yellow&timeout=10s",
+            9200
+          ).withStartupTimeout(60000)
+        )
+        // We gave the container a tmpfs data directory. Without this, I found
+        // that the default data directory was very small and the container
+        // easily filled it up. This caused the cluster to go into a red status
+        // and stop responding to requests.
+        .withTmpFs({ "/usr/share/elasticsearch/data": "rw" })
+    )
+  }
+
+  const port = (await ports).find(x => x.container === 9200)?.host
+  if (!port) {
+    throw new Error("Elasticsearch port not found")
+  }
+
+  const config: ElasticsearchConfig = {
+    url: `http://127.0.0.1:${port}`,
+  }
+
+  return {
+    type: "datasource",
+    source: SourceName.ELASTICSEARCH,
+    config,
+  }
+}

@@ -12,3 +12,4 @@ export const POSTGRES_IMAGE = `postgres@${process.env.POSTGRES_SHA}`
 export const POSTGRES_LEGACY_IMAGE = `postgres:9.5.25`
 export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
 export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`
+export const ELASTICSEARCH_IMAGE = `elasticsearch@${process.env.ELASTICSEARCH_SHA}`

@ -6,6 +6,7 @@ import * as mysql from "./mysql"
|
||||||
import * as mssql from "./mssql"
|
import * as mssql from "./mssql"
|
||||||
import * as mariadb from "./mariadb"
|
import * as mariadb from "./mariadb"
|
||||||
import * as oracle from "./oracle"
|
import * as oracle from "./oracle"
|
||||||
|
import * as elasticsearch from "./elasticsearch"
|
||||||
import { testContainerUtils } from "@budibase/backend-core/tests"
|
import { testContainerUtils } from "@budibase/backend-core/tests"
|
||||||
import { Knex } from "knex"
|
import { Knex } from "knex"
|
||||||
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
|
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
|
||||||
|
@ -23,22 +24,32 @@ export enum DatabaseName {
  MARIADB = "mariadb",
  ORACLE = "oracle",
  SQS = "sqs",
  ELASTICSEARCH = "elasticsearch",
}

const DATASOURCE_PLUS = [
  DatabaseName.POSTGRES,
  DatabaseName.POSTGRES_LEGACY,
  DatabaseName.MYSQL,
  DatabaseName.SQL_SERVER,
  DatabaseName.MARIADB,
  DatabaseName.ORACLE,
  DatabaseName.SQS,
]

const providers: Record<DatabaseName, DatasourceProvider> = {
  [DatabaseName.POSTGRES]: postgres.getDatasource,
  [DatabaseName.POSTGRES_LEGACY]: postgres.getLegacyDatasource,
  [DatabaseName.MONGODB]: mongodb.getDatasource,
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
  [DatabaseName.ORACLE]: oracle.getDatasource,
  [DatabaseName.SQS]: async () => undefined,
}

export interface DatasourceDescribeOpts {
  only?: DatabaseName[]
  exclude?: DatabaseName[]
}
const providers: Record<DatabaseName, DatasourceProvider> = {
  // datasource_plus entries
  [DatabaseName.POSTGRES]: postgres.getDatasource,
  [DatabaseName.POSTGRES_LEGACY]: postgres.getLegacyDatasource,
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
  [DatabaseName.ORACLE]: oracle.getDatasource,
  [DatabaseName.SQS]: async () => undefined,

  // rest
  [DatabaseName.ELASTICSEARCH]: elasticsearch.getDatasource,
  [DatabaseName.MONGODB]: mongodb.getDatasource,
}
export interface DatasourceDescribeReturnPromise {

@ -103,6 +114,20 @@ function createDummyTest() {
  })
}

interface OnlyOpts {
  only: DatabaseName[]
}

interface PlusOpts {
  plus: true
  exclude?: DatabaseName[]
}

export type DatasourceDescribeOpts = OnlyOpts | PlusOpts

// If you ever want to rename this function, be mindful that you will also need
// to modify src/tests/filters/index.js to make sure that we're correctly
// filtering datasource/non-datasource tests in CI.
export function datasourceDescribe(opts: DatasourceDescribeOpts) {
  // tests that call this need a lot longer timeouts
  jest.setTimeout(120000)
@ -111,17 +136,15 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
    createDummyTest()
  }

  const { only, exclude } = opts

  if (only && exclude) {
    throw new Error("you can only supply one of 'only' or 'exclude'")
  }

  let databases = Object.values(DatabaseName)
  if (only) {
    databases = only
  } else if (exclude) {
    databases = databases.filter(db => !exclude.includes(db))
  }
  let databases: DatabaseName[] = []
  if ("only" in opts) {
    databases = opts.only
  } else if ("plus" in opts) {
    databases = Object.values(DatabaseName)
      .filter(db => DATASOURCE_PLUS.includes(db))
      .filter(db => !opts.exclude?.includes(db))
  } else {
    throw new Error("invalid options")
  }
  if (process.env.DATASOURCE) {

@ -156,6 +179,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
    isMSSQL: dbName === DatabaseName.SQL_SERVER,
    isOracle: dbName === DatabaseName.ORACLE,
    isMariaDB: dbName === DatabaseName.MARIADB,
    isElasticsearch: dbName === DatabaseName.ELASTICSEARCH,
  }))
}
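To make the reworked options contract concrete, here is a hypothetical pair of call sites under the new union type — `only` pins an explicit list of engines, while `plus: true` selects every DATASOURCE_PLUS engine minus any exclusions:

// All datasource_plus engines except MSSQL (a sketch, not from the commit):
const plusDescriptions = datasourceDescribe({
  plus: true,
  exclude: [DatabaseName.SQL_SERVER],
})

// Exactly one engine:
const esDescriptions = datasourceDescribe({
  only: [DatabaseName.ELASTICSEARCH],
})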
@ -23,7 +23,7 @@ export async function getDatasource(): Promise<Datasource> {
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
            "/opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P Password_123 -q 'SELECT 1'"
          ).withStartupTimeout(20000)
        )
    )

@ -44,7 +44,8 @@ export async function getDatasource(): Promise<Datasource> {
      user: "sa",
      password: "Password_123",
      options: {
        encrypt: false,
        encrypt: true,
        trustServerCertificate: true,
      },
    },
  }
@ -7,6 +7,7 @@ import {
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
import { isInternal } from "../tables/utils"

export const removeInvalidFilters = (
  filters: SearchFilters,

@ -70,6 +71,10 @@ export const getQueryableFields = async (
  opts?: { noRelationships?: boolean }
): Promise<string[]> => {
  const result = []
  if (isInternal({ table })) {
    result.push("_id")
  }

  for (const field of Object.keys(table.schema).filter(
    f => allowedFields.includes(f) && table.schema[f].visible !== false
  )) {

@ -113,14 +118,13 @@ export const getQueryableFields = async (
    return result
  }

  const result = [
    "_id", // Querying by _id is always allowed, even if it's never part of the schema
  ]
  // Querying by _id is always allowed, even if it's never part of the schema
  const result = ["_id"]

  if (fields == null) {
    fields = Object.keys(table.schema)
  }
  result.push(...(await extractTableFields(table, fields, [table._id!])))

  return result
  return Array.from(new Set(result))
}
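The `new Set` wrap matters because two relationship columns can point at the same related table, in which case the extraction loop pushes the same table-prefixed key twice. A small illustration with hypothetical values:

const result = ["_id", "aux2_1.title", "aux2Table.title", "aux2_2.title", "aux2Table.title"]
Array.from(new Set(result))
// => ["_id", "aux2_1.title", "aux2Table.title", "aux2_2.title"]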
@ -10,16 +10,13 @@ import {
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"

import {
  DatabaseName,
  datasourceDescribe,
} from "../../../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../../../integrations/tests/utils"
import { tableForDatasource } from "../../../../../tests/utilities/structures"

// These test cases are only for things that cannot be tested through the API
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })

if (descriptions.length) {
  describe.each(descriptions)(
@ -250,6 +250,8 @@ describe("query utils", () => {
      expect(result).toEqual([
        "_id",
        "name",
        "aux._id",
        "auxTable._id",
        "aux.title",
        "auxTable.title",
        "aux.name",

@ -284,7 +286,14 @@ describe("query utils", () => {
      const result = await config.doInContext(config.appId, () => {
        return getQueryableFields(table)
      })
      expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
      expect(result).toEqual([
        "_id",
        "name",
        "aux._id",
        "auxTable._id",
        "aux.name",
        "auxTable.name",
      ])
    })

    it("excludes all relationship fields if hidden", async () => {

@ -387,10 +396,14 @@ describe("query utils", () => {
        "_id",
        "name",
        // aux1 primitive props
        "aux1._id",
        "aux1Table._id",
        "aux1.name",
        "aux1Table.name",

        // aux2 primitive props
        "aux2._id",
        "aux2Table._id",
        "aux2.title",
        "aux2Table.title",
      ])

@ -405,14 +418,18 @@ describe("query utils", () => {
        "name",

        // aux2_1 primitive props
        "aux2_1._id",
        "aux2Table._id",
        "aux2_1.title",
        "aux2Table.title",

        // aux2_2 primitive props
        "aux2_2._id",
        "aux2_2.title",
        "aux2Table.title",

        // table primitive props
        "table._id",
        "TestTable._id",
        "table.name",
        "TestTable.name",
      ])

@ -427,14 +444,18 @@ describe("query utils", () => {
        "title",

        // aux1_1 primitive props
        "aux1_1._id",
        "aux1Table._id",
        "aux1_1.name",
        "aux1Table.name",

        // aux1_2 primitive props
        "aux1_2._id",
        "aux1_2.name",
        "aux1Table.name",

        // table primitive props
        "table._id",
        "TestTable._id",
        "table.name",
        "TestTable.name",
      ])

@ -481,6 +502,8 @@ describe("query utils", () => {
        "name",

        // deep 1 aux primitive props
        "aux._id",
        "auxTable._id",
        "aux.title",
        "auxTable.title",
      ])

@ -495,6 +518,8 @@ describe("query utils", () => {
        "title",

        // deep 1 dependency primitive props
        "table._id",
        "TestTable._id",
        "table.name",
        "TestTable.name",
      ])
@ -1,108 +0,0 @@
import {
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  Table,
  TableSourceType,
  ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import sdk from "../../.."

jest.mock("../../views", () => ({
  ...jest.requireActual("../../views"),
  enrichSchema: jest.fn().mockImplementation(v => ({ ...v, mocked: true })),
}))

describe("table sdk", () => {
  describe("enrichViewSchemas", () => {
    const basicTable: Table = {
      _id: generator.guid(),
      name: "TestTable",
      type: "table",
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      schema: {
        name: {
          type: FieldType.STRING,
          name: "name",
          visible: true,
          width: 80,
          order: 2,
          constraints: {
            type: "string",
          },
        },
        description: {
          type: FieldType.STRING,
          name: "description",
          visible: true,
          width: 200,
          constraints: {
            type: "string",
          },
        },
        id: {
          type: FieldType.NUMBER,
          name: "id",
          visible: true,
          order: 1,
          constraints: {
            type: "number",
          },
        },
        hiddenField: {
          type: FieldType.STRING,
          name: "hiddenField",
          visible: false,
          constraints: {
            type: "string",
          },
        },
      },
    }

    it("should fetch the default schema if not overriden", async () => {
      const tableId = basicTable._id!
      function getTable() {
        const view: ViewV2 = {
          version: 2,
          id: generator.guid(),
          name: generator.guid(),
          tableId,
        }
        return view
      }
      const view1 = getTable()
      const view2 = getTable()
      const view3 = getTable()
      const res = await sdk.tables.enrichViewSchemas({
        ...basicTable,
        views: {
          [view1.name]: view1,
          [view2.name]: view2,
          [view3.name]: view3,
        },
      })

      expect(sdk.views.enrichSchema).toHaveBeenCalledTimes(3)

      expect(res).toEqual({
        ...basicTable,
        views: {
          [view1.name]: {
            ...view1,
            mocked: true,
          },
          [view2.name]: {
            ...view2,
            mocked: true,
          },
          [view3.name]: {
            ...view3,
            mocked: true,
          },
        },
      })
    })
  })
})
@ -7,7 +7,7 @@ import { default as queries } from "./app/queries"
import { default as rows } from "./app/rows"
import { default as links } from "./app/links"
import { default as users } from "./users"
import { default as plugins } from "./plugins"
import * as plugins from "./plugins"
import * as views from "./app/views"
import * as permissions from "./app/permissions"
import * as rowActions from "./app/rowActions"
@ -1,5 +1,41 @@
import * as plugins from "./plugins"

export default {
  ...plugins,
}
import { KoaFile, Plugin, PluginSource, PluginType } from "@budibase/types"
import {
  db as dbCore,
  objectStore,
  plugins as pluginCore,
  tenancy,
} from "@budibase/backend-core"
import { fileUpload } from "../../api/controllers/plugin/file"
import env from "../../environment"
import { clientAppSocket } from "../../websockets"
import { sdk as pro } from "@budibase/pro"

export async function fetch(type?: PluginType): Promise<Plugin[]> {
  const db = tenancy.getGlobalDB()
  const response = await db.allDocs(
    dbCore.getPluginParams(null, {
      include_docs: true,
    })
  )
  let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
  plugins = await objectStore.enrichPluginURLs(plugins)
  if (type) {
    return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
  } else {
    return plugins
  }
}

export async function processUploaded(plugin: KoaFile, source: PluginSource) {
  const { metadata, directory } = await fileUpload(plugin)
  pluginCore.validate(metadata.schema)

  // Only allow components in cloud
  if (!env.SELF_HOSTED && metadata.schema?.type !== PluginType.COMPONENT) {
    throw new Error("Only component plugins are supported outside of self-host")
  }

  const doc = await pro.plugins.storePlugin(metadata, directory, source)
  clientAppSocket?.emit("plugin-update", { name: doc.name, hash: doc.hash })
  return doc
}
@ -1,41 +0,0 @@
import { KoaFile, Plugin, PluginSource, PluginType } from "@budibase/types"
import {
  db as dbCore,
  objectStore,
  plugins as pluginCore,
  tenancy,
} from "@budibase/backend-core"
import { fileUpload } from "../../api/controllers/plugin/file"
import env from "../../environment"
import { clientAppSocket } from "../../websockets"
import { sdk as pro } from "@budibase/pro"

export async function fetch(type?: PluginType): Promise<Plugin[]> {
  const db = tenancy.getGlobalDB()
  const response = await db.allDocs(
    dbCore.getPluginParams(null, {
      include_docs: true,
    })
  )
  let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
  plugins = await objectStore.enrichPluginURLs(plugins)
  if (type) {
    return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
  } else {
    return plugins
  }
}

export async function processUploaded(plugin: KoaFile, source?: PluginSource) {
  const { metadata, directory } = await fileUpload(plugin)
  pluginCore.validate(metadata?.schema)

  // Only allow components in cloud
  if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
    throw new Error("Only component plugins are supported outside of self-host")
  }

  const doc = await pro.plugins.storePlugin(metadata, directory, source)
  clientAppSocket?.emit("plugin-update", { name: doc.name, hash: doc.hash })
  return doc
}
@ -367,6 +367,8 @@ class Orchestrator {
        if (e.errno === "ETIME") {
          span?.addTags({ timedOut: true })
          console.warn(`Automation execution timed out after ${timeout}ms`)
        } else {
          throw e
        }
      }
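The added else branch changes the semantics from "log timeouts, swallow everything" to "log timeouts, rethrow everything else". Reduced to a sketch with hypothetical names, the handler now has this shape:

try {
  await executeSteps()
} catch (e: any) {
  if (e.errno === "ETIME") {
    // Expected: the automation hit its configured timeout. Tag and log it.
  } else {
    // Unexpected: let real failures propagate to the caller.
    throw e
  }
}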
@ -1,4 +1,4 @@
import { Plugin } from "@budibase/types"
import { Plugin, PluginUpload } from "@budibase/types"
import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"

@ -8,31 +8,31 @@ import stream
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
const AUTOMATION_PATH = join(budibaseTempDir(), "automation")

export const getPluginMetadata = async (path: string) => {
  let metadata: any = {}
  try {
    const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
    const schema = fs.readFileSync(join(path, "schema.json"), "utf8")

    metadata.schema = JSON.parse(schema)
    metadata.package = JSON.parse(pkg)

    if (
      !metadata.package.name ||
      !metadata.package.version ||
      !metadata.package.description
    ) {
      throw new Error(
        "package.json is missing one of 'name', 'version' or 'description'."
      )
    }
  } catch (err: any) {
    throw new Error(
      `Unable to process schema.json/package.json in plugin. ${err.message}`
    )
  }

  return { metadata, directory: path }
}
export const getPluginMetadata = async (
  path: string
): Promise<PluginUpload> => {
  let pkg: any
  let schema: any
  try {
    pkg = JSON.parse(fs.readFileSync(join(path, "package.json"), "utf8"))
    schema = JSON.parse(fs.readFileSync(join(path, "schema.json"), "utf8"))
    if (!pkg.name) {
      throw new Error("package.json is missing 'name'.")
    }
    if (!pkg.version) {
      throw new Error("package.json is missing 'version'.")
    }
    if (!pkg.description) {
      throw new Error("package.json is missing 'description'.")
    }
  } catch (err: any) {
    throw new Error(
      `Unable to process schema.json/package.json in plugin. ${err.message}`,
      { cause: err }
    )
  }

  return { metadata: { package: pkg, schema }, directory: path }
}

async function getPluginImpl(path: string, plugin: Plugin) {
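One side effect of the rewrite worth noting: passing `{ cause: err }` to the Error constructor (an ES2022 feature, available in Node 16.9+) keeps the original parse or validation failure attached to the wrapped error. A hypothetical caller could inspect it like so:

try {
  await getPluginMetadata("/tmp/not-a-plugin") // hypothetical path
} catch (err: any) {
  console.error(err.message) // "Unable to process schema.json/package.json in plugin. ..."
  console.error(err.cause) // the underlying JSON.parse or missing-field error
}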
@ -3,7 +3,8 @@ import env from "./environment"
import chokidar from "chokidar"
import fs from "fs"
import { constants, tenancy } from "@budibase/backend-core"
import pluginsSdk from "./sdk/plugins"
import { processUploaded } from "./sdk/plugins"
import { PluginSource } from "@budibase/types"

export function watch() {
  const watchPath = path.join(env.PLUGINS_DIR, "./**/*.tar.gz")

@ -27,7 +28,7 @@ export function watch() {
      const split = path.split("/")
      const name = split[split.length - 1]
      console.log("Importing plugin:", path)
      await pluginsSdk.processUploaded({ name, path })
      await processUploaded({ name, path }, PluginSource.FILE)
    } catch (err: any) {
      const message = err?.message ? err?.message : err
      console.error("Failed to import plugin:", message)
@ -24,10 +24,7 @@ export interface Plugin extends Document {
  source: PluginSource
  package: { [key: string]: any }
  hash: string
  schema: {
    type: PluginType
    [key: string]: any
  }
  schema: PluginSchema
  iconFileName?: string
  // Populated on read
  jsUrl?: string

@ -36,3 +33,24 @@ export interface Plugin extends Document {
}

export const PLUGIN_TYPE_ARR = Object.values(PluginType)

export interface PluginSchema {
  type: PluginType
  [key: string]: any
}

interface Package {
  name: string
  version: string
  description: string
}

export interface PluginMetadata {
  schema: PluginSchema
  package: Package
}

export interface PluginUpload {
  metadata: PluginMetadata
  directory: string
}
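Tying the new types together, a hypothetical value that satisfies them — this is the shape getPluginMetadata now returns and processUploaded consumes:

const upload: PluginUpload = {
  metadata: {
    package: { name: "my-plugin", version: "1.0.0", description: "An example plugin" },
    schema: { type: PluginType.COMPONENT },
  },
  directory: "/tmp/plugin-extract", // hypothetical extraction directory
}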
@ -3,6 +3,7 @@ import { Row, DocumentType, Table, Datasource } from "../documents"
import { SortOrder, SortType } from "../api"
import { Knex } from "knex"
import { Aggregation } from "./row"
import _ from "lodash"

export enum BasicOperator {
  EQUAL = "equal",

@ -83,7 +84,7 @@ type RangeFilter = Record<
type LogicalFilter = { conditions: SearchFilters[] }

export function isLogicalFilter(filter: any): filter is LogicalFilter {
  return "conditions" in filter
  return _.isPlainObject(filter) && "conditions" in filter
}

export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter
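The `_.isPlainObject` guard exists because the `in` operator throws a TypeError when its right-hand side is not an object, so the old check blew up on null or primitive filter values. A few hypothetical probes of the new behaviour:

isLogicalFilter({ conditions: [] }) // true
isLogicalFilter(null) // false (previously: TypeError)
isLogicalFilter("equal") // false (previously: TypeError)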
@ -63,11 +63,6 @@ const sessionMiddleware: Middleware = async (ctx: any, next: any) => {
      store: new RedisStore({ client: redisClient.getClient() }),
      key: "koa:sess",
      maxAge: 86400000, // one day
      httpOnly: true,
      secure: process.env.NODE_ENV === "production",
      sameSite: "strict",
      rolling: true,
      renew: true,
    },
    app
  )(ctx, next)
@ -1,10 +1,18 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
if [[ $TARGETBUILD == "aas" ]]; then
  echo "An aas-compatible version of Minio is already installed."
  exit 0
fi

if [[ $TARGETARCH == arm* ]]; then
  echo "INSTALLING ARM64 MINIO"
  rm -f minio
  wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
  echo "INSTALLING AMD64 MINIO"
  rm -f minio
  wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi

chmod +x minio
@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:63db3aa3c2299ebaf13b46c64523a589bd5bf272f9e971d17f1eaa55f6f1fd79
size 118595584
@ -12782,10 +12782,10 @@ html-tag@^2.0.0:
    is-self-closing "^1.0.1"
    kind-of "^6.0.0"

html5-qrcode@^2.2.1:
html5-qrcode@^2.3.8:
  version "2.3.7"
  version "2.3.8"
  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.3.7.tgz#09ed2ca7473a47bd551088c15fcfcb7cb409a5be"
  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.3.8.tgz#0b0cdf7a9926cfd4be530e13a51db47592adfa0d"
  integrity sha512-Jmlok9Ynm49hgVXkdupWryf8o430proIFoQsRl1LmTg4Rq461W72omylR9yw9tsEMtswMEw3wacUM5y0agOBQA==
  integrity sha512-jsr4vafJhwoLVEDW3n1KvPnCCXWaQfRng0/EEYk1vNcQGcG/htAdhJX0be8YyqMoSz7+hZvOZSTAepsabiuhiQ==

htmlparser2@^8.0.0:
  version "8.0.1"