Merge remote-tracking branch 'origin/master' into fix/branding-cookie-update
commit 9b9ddc7a6b

@@ -1,4 +1,101 @@
-FROM couchdb:3.2.1
+# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
+#
+# Everything in this `base` image is adapted from the official `couchdb` image's
+# Dockerfile. Only modifications related to upgrading from Debian bullseye to
+# bookworm have been included. The `runner` image contains Budibase's
+# customisations to the image, e.g. adding Clouseau.
+FROM node:20-slim AS base
+
+# Add CouchDB user account to make sure the IDs are assigned consistently
+RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb
+
+# be sure GPG and apt-transport-https are available and functional
+RUN set -ex; \
+    apt-get update; \
+    apt-get install -y --no-install-recommends \
+        apt-transport-https \
+        ca-certificates \
+        dirmngr \
+        gnupg \
+    ; \
+    rm -rf /var/lib/apt/lists/*
+
+# grab tini for signal handling and zombie reaping
+# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
+RUN set -eux; \
+    apt-get update; \
+    apt-get install -y --no-install-recommends tini; \
+    rm -rf /var/lib/apt/lists/*; \
+    tini --version
+
+# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
+ENV GPG_COUCH_KEY \
+# gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
+    390EF70BB1EA12B2773962950EE62FB37A00258D
+RUN set -eux; \
+    apt-get update; \
+    apt-get install -y curl; \
+    export GNUPGHOME="$(mktemp -d)"; \
+    curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
+    gpg --batch --import keys.asc; \
+    gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
+    command -v gpgconf && gpgconf --kill all || :; \
+    rm -rf "$GNUPGHOME"; \
+    apt-key list; \
+    apt purge -y --autoremove curl; \
+    rm -rf /var/lib/apt/lists/*
+
+ENV COUCHDB_VERSION 3.3.3
+
+RUN . /etc/os-release; \
+    echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
+        tee /etc/apt/sources.list.d/couchdb.list >/dev/null
+
+# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
+RUN set -eux; \
+    apt-get update; \
+    \
+    echo "couchdb couchdb/mode select none" | debconf-set-selections; \
+# we DO want recommends this time
+    DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
+        couchdb="$COUCHDB_VERSION"~bookworm \
+    ; \
+# Undo symlinks to /var/log and /var/lib
+    rmdir /var/lib/couchdb /var/log/couchdb; \
+    rm /opt/couchdb/data /opt/couchdb/var/log; \
+    mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
+    chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
+    chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
+# Remove file that sets logging to a file
+    rm /opt/couchdb/etc/default.d/10-filelog.ini; \
+# Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
+    find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
+# Setup directories and permissions for config. Technically these could be 555 and 444 respectively
+# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
+    find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
+    find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
+# only local.d needs to be writable for the docker_entrypoint.sh
+    chmod -f 0777 /opt/couchdb/etc/local.d; \
+# apt clean-up
+    rm -rf /var/lib/apt/lists/*;
+
+# Add configuration
+COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
+# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/
+
+COPY docker-entrypoint.sh /usr/local/bin
+RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
+ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
+
+VOLUME /opt/couchdb/data
+
+# 5984: Main CouchDB endpoint
+# 4369: Erlang portmap daemon (epmd)
+# 9100: CouchDB cluster communication port
+EXPOSE 5984 4369 9100
+CMD ["/opt/couchdb/bin/couchdb"]
+
+FROM base as runner
+
 ENV COUCHDB_USER admin
 ENV COUCHDB_PASSWORD admin
@@ -6,9 +103,9 @@ EXPOSE 5984

 RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
     wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
-    apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
+    apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
     apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
-    apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \
+    apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
     apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
     rm -rf /var/lib/apt/lists/
@@ -4,7 +4,7 @@
 name=clouseau@127.0.0.1

 ; set this to the same distributed Erlang cookie used by the CouchDB nodes
-cookie=monster
+cookie=COUCHDB_ERLANG_COOKIE

 ; the path where you would like to store the search index files
 dir=DATA_DIR/search
@@ -0,0 +1,8 @@
+; CouchDB Configuration Settings
+
+; Custom settings should be made in this file. They will override settings
+; in default.ini, but unlike changes made to default.ini, this file won't be
+; overwritten on server upgrade.
+
+[chttpd]
+bind_address = any
@@ -12,7 +12,7 @@

 # erlang cookie for clouseau security
 -name couchdb@127.0.0.1
--setcookie monster
+-setcookie COUCHDB_ERLANG_COOKIE

 # Ensure that the Erlang VM listens on a known port
 -kernel inet_dist_listen_min 9100
@@ -0,0 +1,122 @@
+#!/bin/bash
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+set -e
+
+# first arg is `-something` or `+something`
+if [ "${1#-}" != "$1" ] || [ "${1#+}" != "$1" ]; then
+	set -- /opt/couchdb/bin/couchdb "$@"
+fi
+
+# first arg is the bare word `couchdb`
+if [ "$1" = 'couchdb' ]; then
+	shift
+	set -- /opt/couchdb/bin/couchdb "$@"
+fi
+
+if [ "$1" = '/opt/couchdb/bin/couchdb' ]; then
+	# this is where runtime configuration changes will be written.
+	# we need to explicitly touch it here in case /opt/couchdb/etc has
+	# been mounted as an external volume, in which case it won't exist.
+	# If running as the couchdb user (i.e. container starts as root),
+	# write permissions will be granted below.
+	touch /opt/couchdb/etc/local.d/docker.ini
+
+	# if user is root, assume running under the couchdb user (default)
+	# and ensure it is able to access files and directories that may be mounted externally
+	if [ "$(id -u)" = '0' ]; then
+		# Check that we own everything in /opt/couchdb and fix if necessary. We also
+		# add the `-f` flag in all the following invocations because there may be
+		# cases where some of these ownership and permissions issues are non-fatal
+		# (e.g. a config file owned by root with o+r is actually fine), and we don't
+		# want to be too aggressive about crashing here ...
+		find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +
+
+		# Ensure that data files have the correct permissions. We were previously
+		# preventing any access to these files outside of couchdb:couchdb, but it
+		# turns out that CouchDB itself does not set such restrictive permissions
+		# when it creates the files. The approach taken here ensures that the
+		# contents of the datadir have the same permissions as they had when they
+		# were initially created. This should minimize any startup delay.
+		find /opt/couchdb/data -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
+		find /opt/couchdb/data -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
+
+		# Do the same thing for configuration files and directories. Technically
+		# CouchDB only needs read access to the configuration files as all online
+		# changes will be applied to the "docker.ini" file below, but we set 644
+		# for the sake of consistency.
+		find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
+		find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
+	fi
+
+	if [ ! -z "$NODENAME" ] && ! grep "couchdb@" /opt/couchdb/etc/vm.args; then
+		echo "-name couchdb@$NODENAME" >> /opt/couchdb/etc/vm.args
+	fi
+
+	if [ "$COUCHDB_USER" ] && [ "$COUCHDB_PASSWORD" ]; then
+		# Create admin only if not already present
+		if ! grep -Pzoqr "\[admins\]\n$COUCHDB_USER =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
+			printf "\n[admins]\n%s = %s\n" "$COUCHDB_USER" "$COUCHDB_PASSWORD" >> /opt/couchdb/etc/local.d/docker.ini
+		fi
+	fi
+
+	if [ "$COUCHDB_SECRET" ]; then
+		# Set secret only if not already present
+		if ! grep -Pzoqr "\[chttpd_auth\]\nsecret =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
+			printf "\n[chttpd_auth]\nsecret = %s\n" "$COUCHDB_SECRET" >> /opt/couchdb/etc/local.d/docker.ini
+		fi
+	fi
+
+	if [ "$COUCHDB_ERLANG_COOKIE" ]; then
+		cookieFile='/opt/couchdb/.erlang.cookie'
+		if [ -e "$cookieFile" ]; then
+			if [ "$(cat "$cookieFile" 2>/dev/null)" != "$COUCHDB_ERLANG_COOKIE" ]; then
+				echo >&2
+				echo >&2 "warning: $cookieFile contents do not match COUCHDB_ERLANG_COOKIE"
+				echo >&2
+			fi
+		else
+			echo "$COUCHDB_ERLANG_COOKIE" > "$cookieFile"
+		fi
+		chown couchdb:couchdb "$cookieFile"
+		chmod 600 "$cookieFile"
+	fi
+
+	if [ "$(id -u)" = '0' ]; then
+		chown -f couchdb:couchdb /opt/couchdb/etc/local.d/docker.ini || true
+	fi
+
+	# if we don't find an [admins] section followed by a non-comment, display a warning
+	if ! grep -Pzoqr '\[admins\]\n[^;]\w+' /opt/couchdb/etc/default.d/*.ini /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
+		# The - option suppresses leading tabs but *not* spaces. :)
+		cat >&2 <<-'EOWARN'
+			*************************************************************
+			ERROR: CouchDB 3.0+ will no longer run in "Admin Party"
+			       mode. You *MUST* specify an admin user and
+			       password, either via your own .ini file mapped
+			       into the container at /opt/couchdb/etc/local.ini
+			       or inside /opt/couchdb/etc/local.d, or with
+			       "-e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password"
+			       to set it via "docker run".
+			*************************************************************
+		EOWARN
+		exit 1
+	fi
+
+	if [ "$(id -u)" = '0' ]; then
+		export HOME=$(echo ~couchdb)
+		exec setpriv --reuid=couchdb --regid=couchdb --clear-groups "$@"
+	fi
+fi
+
+exec "$@"
@@ -1,6 +1,7 @@
 #!/bin/bash

 DATA_DIR=${DATA_DIR:-/data}
+COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}

 mkdir -p ${DATA_DIR}
 mkdir -p ${DATA_DIR}/couch/{dbs,views}
@@ -60,6 +61,9 @@ else
     sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
 fi

+sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
+sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini
+
 # Start Clouseau. Budibase won't function correctly without Clouseau running, it
 # powers the search API endpoints which are used to do all sorts, including
 # populating app grids.
@@ -3,7 +3,6 @@ FROM node:20-slim as build
 # install node-gyp dependencies
 RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
-

 # copy and install dependencies
 WORKDIR /app
 COPY package.json .
@@ -39,10 +38,9 @@ COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
 COPY packages/string-templates packages/string-templates


-FROM budibase/couchdb as runner
+FROM budibase/couchdb:v3.3.3 as runner
 ARG TARGETARCH
 ENV TARGETARCH $TARGETARCH
-ENV NODE_MAJOR 20
 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
 # e.g. docker build --build-arg TARGETBUILD=aas ....
 ARG TARGETBUILD=single
@@ -60,10 +58,8 @@ RUN apt install -y software-properties-common apt-transport-https ca-certificate
     && apt install postgresql-client-15 -y \
     && apt remove software-properties-common apt-transport-https gpg -y

-# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
-WORKDIR /nodejs
-COPY scripts/install-node.sh ./install.sh
-RUN chmod +x install.sh && ./install.sh
+# We use pm2 in order to run multiple node processes in a single container
+RUN npm install --global pm2

 # setup nginx
 COPY hosting/single/nginx/nginx.conf /etc/nginx
@@ -97,10 +97,12 @@ fi
 sleep 10

 pushd app
-pm2 start -l /dev/stdout --name app "yarn run:docker"
+pm2 start --name app "yarn run:docker"
 popd
 pushd worker
-pm2 start -l /dev/stdout --name worker "yarn run:docker"
+pm2 start --name worker "yarn run:docker"
 popd
 echo "end of runner.sh, sleeping ..."
+
+tail -f $HOME/.pm2/logs/*.log
 sleep infinity
@@ -1,5 +1,5 @@
 {
-  "version": "2.19.6",
+  "version": "2.20.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -43,6 +43,7 @@
   flex-direction: row;
   justify-content: flex-start;
   align-items: stretch;
+  overflow-y: scroll !important;
   flex: 1 1 auto;
   overflow-x: hidden;
 }
@@ -20,3 +20,9 @@
 >
   <slot />
 </p>
+
+<style>
+  p {
+    text-wrap: pretty;
+  }
+</style>
@@ -21,4 +21,8 @@
   h1 {
     font-family: var(--font-accent);
   }
+
+  h1 {
+    text-wrap: balance;
+  }
 </style>
@@ -130,6 +130,7 @@
   flex-grow: 1;
   padding: 23px 23px 80px;
   box-sizing: border-box;
+  overflow-x: hidden;
 }

 .header.scrolling {
@@ -1,15 +1,25 @@
 <script>
   import EditComponentPopover from "../EditComponentPopover.svelte"
-  import { FieldTypeToComponentMap } from "../FieldConfiguration/utils"
   import { Toggle, Icon } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { cloneDeep } from "lodash/fp"
-  import { componentStore } from "stores/builder"
+  import { FIELDS } from "constants/backend"

   export let item
   export let anchor

   const dispatch = createEventDispatcher()

+  $: fieldIconLookupMap = buildFieldIconLookupMap(FIELDS)
+
+  const buildFieldIconLookupMap = fields => {
+    let map = {}
+    Object.values(fields).forEach(fieldInfo => {
+      map[fieldInfo.type] = fieldInfo.icon
+    })
+    return map
+  }
+
   const onToggle = item => {
     return e => {
       item.active = e.detail
@@ -24,13 +34,6 @@
       return { ...setting, nested: true }
     })
   }
-
-  const getIcon = () => {
-    const component = `@budibase/standard-components/${
-      FieldTypeToComponentMap[item.columnType]
-    }`
-    return componentStore.getDefinition(component).icon
-  }
 </script>

 <div class="list-item-body">
@@ -42,7 +45,7 @@
     on:change
   >
     <div slot="header" class="type-icon">
-      <Icon name={getIcon()} />
+      <Icon name={fieldIconLookupMap[item.columnType]} />
       <span>{item.field}</span>
     </div>
 </EditComponentPopover>
@@ -77,7 +77,7 @@
 </DatasourceOption>
 <DatasourceOption
   on:click={() => internalTableModal.show({ promptUpload: true })}
-  title="Upload data"
+  title="Upload CSV / JSON"
   description="Non-relational"
   {disabled}
 >
@@ -10,7 +10,7 @@
 {#if $admin.cloud && $auth?.user?.accountPortalAccess}
   <Button
     cta
-    size="S"
+    size="M"
     on:click
     on:click={() => {
       window.open($admin.accountPortalUrl + "/portal/upgrade", "_blank")
@@ -21,7 +21,7 @@
 {:else if !$admin.cloud && sdk.users.isAdmin($auth.user)}
   <Button
     cta
-    size="S"
+    size="M"
     on:click={() => $goto("/builder/portal/account/upgrade")}
     on:click
   >
@@ -49,10 +49,13 @@

 {#if sdk.users.isAdmin($auth.user) && diagnosticInfo}
   <Layout noPadding>
-    <Layout gap="XS">
+    <Layout gap="XS" noPadding>
       <Heading size="M">Diagnostics</Heading>
-      Please include this diagnostic information in support requests and github issues
-      by clicking the button on the top right to copy to clipboard.
+      <Body>
+        Please include this diagnostic information in support requests and
+        github issues by clicking the button on the top right to copy to
+        clipboard.
+      </Body>
       <Divider />
       <Body size="M">
         <section>
@@ -75,17 +75,7 @@ export function createQueriesStore() {
   }

   const preview = async query => {
-    const parameters = query.parameters.reduce(
-      (acc, next) => ({
-        ...acc,
-        [next.name]: next.default,
-      }),
-      {}
-    )
-    const result = await API.previewQuery({
-      ...query,
-      parameters,
-    })
+    const result = await API.previewQuery(query)
     // Assume all the fields are strings and create a basic schema from the
     // unique fields returned by the server
     const schema = {}
@@ -89,13 +89,13 @@
           {
             "label": "Column",
             "value": "column",
-            "barIcon": "ViewColumn",
+            "barIcon": "TableSelectColumn",
             "barTitle": "Column layout"
           },
           {
             "label": "Row",
             "value": "row",
-            "barIcon": "ViewRow",
+            "barIcon": "TableSelectRow",
             "barTitle": "Row layout"
           }
         ],
@@ -298,13 +298,13 @@
           {
             "label": "Column",
             "value": "column",
-            "barIcon": "ViewColumn",
+            "barIcon": "TableSelectColumn",
             "barTitle": "Column layout"
           },
          {
             "label": "Row",
             "value": "row",
-            "barIcon": "ViewRow",
+            "barIcon": "TableSelectRow",
             "barTitle": "Row layout"
           }
         ],
@@ -460,6 +460,10 @@
         "label": "Variant",
         "key": "type",
         "options": [
+          {
+            "label": "Action",
+            "value": "cta"
+          },
           {
             "label": "Primary",
             "value": "primary"
@@ -468,10 +472,6 @@
             "label": "Secondary",
             "value": "secondary"
           },
-          {
-            "label": "Action",
-            "value": "cta"
-          },
           {
             "label": "Warning",
             "value": "warning"
@@ -481,7 +481,7 @@
             "value": "overBackground"
           }
         ],
-        "defaultValue": "primary"
+        "defaultValue": "cta"
       },
       {
         "type": "select",
@@ -602,13 +602,13 @@
           {
             "label": "Column",
             "value": "column",
-            "barIcon": "ViewColumn",
+            "barIcon": "TableSelectColumn",
             "barTitle": "Column layout"
           },
           {
             "label": "Row",
             "value": "row",
-            "barIcon": "ViewRow",
+            "barIcon": "TableSelectRow",
             "barTitle": "Row layout"
           }
         ],
@@ -5917,13 +5917,13 @@
           {
             "label": "Column",
             "value": "column",
-            "barIcon": "ViewRow",
+            "barIcon": "TableSelectColumn",
             "barTitle": "Column layout"
           },
           {
             "label": "Row",
             "value": "row",
-            "barIcon": "ViewColumn",
+            "barIcon": "TableSelectRow",
             "barTitle": "Row layout"
           }
         ],
@@ -11,7 +11,7 @@
 export let text = ""
 export let onClick
 export let size = "M"
-export let type = "primary"
+export let type = "cta"
 export let quiet = false

 // For internal use only for now - not defined in the manifest
@@ -1 +1 @@
-Subproject commit 336bf2184cf632fdc2bffbad5628e8b15dd381bd
+Subproject commit 60e47a8249fd6291a6bc20fe3fe6776b11938fa1
@@ -13,8 +13,8 @@
     "build": "node ./scripts/build.js",
     "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
     "check:types": "tsc -p tsconfig.json --noEmit --paths null",
-    "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=esm --external:handlebars",
-    "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=esm",
+    "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
+    "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
     "build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
@@ -114,7 +114,6 @@
     "undici-types": "^6.0.1",
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
-    "vm2": "^3.9.19",
     "worker-farm": "1.7.0",
     "xml2js": "0.5.0"
   },
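Note on the esbuild change above: switching the isolated-vm bundles from --format=esm to --format=iife with --global-name makes each bundle a plain script that assigns its exports to a named global instead of an ES module. Presumably this is what lets the bundles be evaluated directly inside an isolated-vm context, where running a classic script is the simplest option. A rough, illustrative sketch of the output shape (helperA/helperB are hypothetical names, not the real bundle contents):

  // --format=esm output ends with:
  export { helperA, helperB }

  // --format=iife --global-name=helpers output looks like:
  var helpers = (() => {
    // ...bundled code...
    return { helperA, helperB }
  })()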
@@ -20,6 +20,7 @@ import {
   type ExecuteQueryRequest,
   type ExecuteQueryResponse,
   type Row,
+  QueryParameter,
 } from "@budibase/types"
 import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
@@ -118,6 +119,21 @@ function getAuthConfig(ctx: UserCtx) {
   return authConfigCtx
 }

+function enrichParameters(
+  queryParameters: QueryParameter[],
+  requestParameters: { [key: string]: string } = {}
+): {
+  [key: string]: string
+} {
+  // make sure parameters are fully enriched with defaults
+  for (let parameter of queryParameters) {
+    if (!requestParameters[parameter.name]) {
+      requestParameters[parameter.name] = parameter.default
+    }
+  }
+  return requestParameters
+}
+
 export async function preview(ctx: UserCtx) {
   const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
     ctx.request.body.datasourceId
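A quick illustration of enrichParameters with hypothetical values (assuming QueryParameter carries a name and a default, per the loop above): declared parameters contribute their defaults, and anything the request supplies explicitly wins.

  const declared: QueryParameter[] = [
    { name: "page", default: "1" },
    { name: "q", default: "" },
  ]
  enrichParameters(declared, { q: "couch" })
  // => { q: "couch", page: "1" } (request value kept, defaults fill the gaps)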
@@ -142,6 +158,68 @@ export async function preview(ctx: UserCtx) {

   const authConfigCtx: any = getAuthConfig(ctx)

+  function getFieldMetadata(field: any, key: string): QuerySchema {
+    const makeQuerySchema = (
+      type: FieldType,
+      name: string,
+      subtype?: string
+    ): QuerySchema => ({
+      type,
+      name,
+      subtype,
+    })
+    // Because custom queries have no fixed schema, we dynamically determine the schema,
+    // however types cannot be determined from null. We have no 'unknown' type, so we default to string.
+    let type = typeof field,
+      fieldMetadata = makeQuerySchema(FieldType.STRING, key)
+    if (field != null)
+      switch (type) {
+        case "boolean":
+          fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
+          break
+        case "object":
+          if (field instanceof Date) {
+            fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
+          } else if (Array.isArray(field)) {
+            if (field.some(item => JsonUtils.hasSchema(item))) {
+              fieldMetadata = makeQuerySchema(
+                FieldType.JSON,
+                key,
+                JsonFieldSubType.ARRAY
+              )
+            } else {
+              fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
+            }
+          } else {
+            fieldMetadata = makeQuerySchema(FieldType.JSON, key)
+          }
+          break
+        case "number":
+          fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
+          break
+      }
+    return fieldMetadata
+  }
+
+  function buildNestedSchema(
+    nestedSchemaFields: {
+      [key: string]: Record<string, string | QuerySchema>
+    },
+    key: string,
+    fieldArray: any[]
+  ) {
+    let schema: { [key: string]: any } = {}
+    // build the schema by aggregating all row objects in the array
+    for (const item of fieldArray) {
+      if (JsonUtils.hasSchema(item)) {
+        for (const [key, value] of Object.entries(item)) {
+          schema[key] = getFieldMetadata(value, key)
+        }
+      }
+    }
+    nestedSchemaFields[key] = schema
+  }
+
   function getSchemaFields(
     rows: any[],
     keys: string[]
@@ -155,51 +233,16 @@ export async function preview(ctx: UserCtx) {
     const nestedSchemaFields: {
       [key: string]: Record<string, string | QuerySchema>
     } = {}
-    const makeQuerySchema = (
-      type: FieldType,
-      name: string,
-      subtype?: string
-    ): QuerySchema => ({
-      type,
-      name,
-      subtype,
-    })
     if (rows?.length > 0) {
-      for (let key of [...new Set(keys)] as string[]) {
-        const field = rows[0][key]
-        let type = typeof field,
-          fieldMetadata = makeQuerySchema(FieldType.STRING, key)
-        if (field)
-          switch (type) {
-            case "boolean":
-              fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
-              break
-            case "object":
-              if (field instanceof Date) {
-                fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
-              } else if (Array.isArray(field)) {
-                if (JsonUtils.hasSchema(field[0])) {
-                  fieldMetadata = makeQuerySchema(
-                    FieldType.JSON,
-                    key,
-                    JsonFieldSubType.ARRAY
-                  )
-                } else {
-                  fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
-                }
-                nestedSchemaFields[key] = getSchemaFields(
-                  field,
-                  Object.keys(field[0])
-                ).previewSchema
-              } else {
-                fieldMetadata = makeQuerySchema(FieldType.JSON, key)
-              }
-              break
-            case "number":
-              fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
-              break
-          }
+      for (let key of new Set(keys)) {
+        const fieldMetadata = getFieldMetadata(rows[0][key], key)
         previewSchema[key] = fieldMetadata
+        if (
+          fieldMetadata.type === FieldType.JSON &&
+          fieldMetadata.subtype === JsonFieldSubType.ARRAY
+        ) {
+          buildNestedSchema(nestedSchemaFields, key, rows[0][key])
+        }
       }
     }
     return { previewSchema, nestedSchemaFields }
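To see what the refactor preserves, a sketch with hypothetical rows: the first row drives the top-level preview schema, and any array-of-objects field additionally gets a nested schema aggregated across all of its items by buildNestedSchema.

  const rows = [
    { count: 3, tags: ["a", "b"], contacts: [{ address: "123 Lane" }] },
  ]
  // getSchemaFields(rows, ["count", "tags", "contacts"]) would yield roughly:
  // previewSchema.count    -> { type: "number", name: "count" }
  // previewSchema.tags     -> { type: "array", name: "tags" }
  // previewSchema.contacts -> { type: "json", name: "contacts", subtype: "array" }
  // nestedSchemaFields.contacts.address -> { type: "string", name: "address" }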
@@ -211,7 +254,7 @@ export async function preview(ctx: UserCtx) {
     datasource,
     queryVerb,
     fields,
-    parameters,
+    parameters: enrichParameters(parameters),
     transformer,
     queryId,
     schema,
@@ -266,15 +309,6 @@ async function execute(
   if (!opts.isAutomation) {
     authConfigCtx = getAuthConfig(ctx)
   }
-  const enrichedParameters = ctx.request.body.parameters || {}
-  // make sure parameters are fully enriched with defaults
-  if (query && query.parameters) {
-    for (let parameter of query.parameters) {
-      if (!enrichedParameters[parameter.name]) {
-        enrichedParameters[parameter.name] = parameter.default
-      }
-    }
-  }

   // call the relevant CRUD method on the integration class
   try {
@@ -284,7 +318,10 @@
       queryVerb: query.queryVerb,
       fields: query.fields,
       pagination: ctx.request.body.pagination,
-      parameters: enrichedParameters,
+      parameters: enrichParameters(
+        query.parameters,
+        ctx.request.body.parameters
+      ),
       transformer: query.transformer,
       queryId: ctx.params.queryId,
       // have to pass down to the thread runner - can't put into context now
@@ -3,11 +3,10 @@ import Joi from "joi"

 const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")

-export function queryValidation() {
-  return Joi.object({
-    _id: Joi.string(),
-    _rev: Joi.string(),
-    name: Joi.string().required(),
+function baseQueryValidation() {
+  return {
+    _id: OPTIONAL_STRING,
+    _rev: OPTIONAL_STRING,
     fields: Joi.object().required(),
     datasourceId: Joi.string().required(),
     readable: Joi.boolean(),
@@ -17,11 +16,19 @@ export function queryValidation() {
         default: Joi.string().allow(""),
       })
     ),
-    queryVerb: Joi.string().allow().required(),
+    queryVerb: Joi.string().required(),
     extra: Joi.object().optional(),
     schema: Joi.object({}).required().unknown(true),
     transformer: OPTIONAL_STRING,
     flags: Joi.object().optional(),
+    queryId: OPTIONAL_STRING,
+  }
+}
+
+export function queryValidation() {
+  return Joi.object({
+    ...baseQueryValidation(),
+    name: Joi.string().required(),
   }).unknown(true)
 }

@@ -32,19 +39,10 @@ export function generateQueryValidation() {

 export function generateQueryPreviewValidation() {
   // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    _id: OPTIONAL_STRING,
-    _rev: OPTIONAL_STRING,
-    readable: Joi.boolean().optional(),
-    fields: Joi.object().required(),
-    queryVerb: Joi.string().required(),
-    name: OPTIONAL_STRING,
-    flags: Joi.object().optional(),
-    schema: Joi.object().optional(),
-    extra: Joi.object().optional(),
-    datasourceId: Joi.string().required(),
-    transformer: OPTIONAL_STRING,
-    parameters: Joi.object({}).required().unknown(true),
-    queryId: OPTIONAL_STRING,
-  }).unknown(true))
+  return auth.joiValidator.body(
+    Joi.object({
+      ...baseQueryValidation(),
+      name: OPTIONAL_STRING,
+    }).unknown(true)
+  )
 }
@@ -1,10 +1,13 @@
-import ScriptRunner from "../../utilities/scriptRunner"
 import { Ctx } from "@budibase/types"
+import { IsolatedVM } from "../../jsRunner/vm"

 export async function execute(ctx: Ctx) {
   const { script, context } = ctx.request.body
-  const runner = new ScriptRunner(script, context)
-  ctx.body = runner.execute()
+  const vm = new IsolatedVM()
+  const result = vm.withContext(context, () =>
+    vm.execute(`(function(){\n${script}\n})();`)
+  )
+  ctx.body = result
 }

 export async function save(ctx: Ctx) {
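One detail worth noting in the controller above: a bare return is a syntax error at the top level of a script, so wrapping the user script in an immediately-invoked function preserves the contract that scripts end with a return statement. A minimal sketch (the wrapper string is the one used above; the script value is hypothetical):

  const script = "return 40 + 2" // hypothetical user-supplied script
  const wrapped = `(function(){\n${script}\n})();`
  // Evaluating `wrapped` as a script yields 42; without the wrapper the
  // top-level `return` would be a SyntaxError.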
@@ -8,8 +8,8 @@ import {
   paramResource,
 } from "../../middleware/resourceId"
 import {
-  generateQueryPreviewValidation,
   generateQueryValidation,
+  generateQueryPreviewValidation,
 } from "../controllers/query/validation"

 const { BUILDER, PermissionType, PermissionLevel } = permissions
@@ -8,7 +8,6 @@ import { mocks } from "@budibase/backend-core/tests"
 mocks.licenses.useBackups()

 describe("/backups", () => {
-  let request = setup.getRequest()
   let config = setup.getConfig()

   afterAll(setup.afterAll)
@@ -59,10 +58,8 @@ describe("/backups", () => {
     await config.createScreen()
     const exportRes = await config.api.backup.createBackup(appId)
     expect(exportRes.backupId).toBeDefined()
-    const importRes = await config.api.backup.importBackup(
-      appId,
-      exportRes.backupId
-    )
+    await config.api.backup.waitForBackupToComplete(appId, exportRes.backupId)
+    await config.api.backup.importBackup(appId, exportRes.backupId)
   })
 })
@@ -7,6 +7,7 @@ import sdk from "../../../sdk"

 import tk from "timekeeper"
 import { mocks } from "@budibase/backend-core/tests"
+import { QueryPreview } from "@budibase/types"

 tk.freeze(mocks.date.MOCK_DATE)

@@ -63,14 +64,17 @@ describe("/datasources", () => {
     datasource: any,
     fields: { path: string; queryString: string }
   ) {
-    return config.previewQuery(
-      request,
-      config,
-      datasource,
-      fields,
-      undefined,
-      ""
-    )
+    const queryPreview: QueryPreview = {
+      fields,
+      datasourceId: datasource._id,
+      parameters: [],
+      transformer: null,
+      queryVerb: "read",
+      name: datasource.name,
+      schema: {},
+      readable: true,
+    }
+    return config.api.query.previewQuery(queryPreview)
   }

   it("should invalidate changed or removed variables", async () => {
@@ -14,6 +14,7 @@ jest.mock("pg", () => {
 import * as setup from "./utilities"
 import { mocks } from "@budibase/backend-core/tests"
 import { env, events } from "@budibase/backend-core"
+import { QueryPreview } from "@budibase/types"

 const structures = setup.structures

@@ -120,16 +121,19 @@ describe("/api/env/variables", () => {
       .expect(200)
     expect(response.body.datasource._id).toBeDefined()

-    const query = {
+    const queryPreview: QueryPreview = {
       datasourceId: response.body.datasource._id,
-      parameters: {},
+      parameters: [],
       fields: {},
       queryVerb: "read",
       name: response.body.datasource.name,
+      transformer: null,
+      schema: {},
+      readable: true,
     }
     const res = await request
       .post(`/api/queries/preview`)
-      .send(query)
+      .send(queryPreview)
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
@@ -139,7 +143,7 @@ describe("/api/env/variables", () => {
     delete response.body.datasource.config
     expect(events.query.previewed).toBeCalledWith(
       response.body.datasource,
-      query
+      queryPreview
     )
     expect(pg.Client).toHaveBeenCalledWith({ password: "test", ssl: undefined })
   })
@@ -1,5 +1,7 @@
 import tk from "timekeeper"

+const pg = require("pg")
+
 // Mock out postgres for this
 jest.mock("pg")
 jest.mock("node-fetch")
@@ -22,7 +24,13 @@ import { checkCacheForDynamicVariable } from "../../../../threads/utils"

 const { basicQuery, basicDatasource } = setup.structures
 import { events, db as dbCore } from "@budibase/backend-core"
-import { Datasource, Query, SourceName } from "@budibase/types"
+import {
+  Datasource,
+  Query,
+  SourceName,
+  QueryPreview,
+  QueryParameter,
+} from "@budibase/types"

 tk.freeze(Date.now())
@@ -218,28 +226,26 @@ describe("/queries", () => {

   describe("preview", () => {
     it("should be able to preview the query", async () => {
-      const query = {
+      const queryPreview: QueryPreview = {
         datasourceId: datasource._id,
-        parameters: {},
-        fields: {},
         queryVerb: "read",
-        name: datasource.name,
+        fields: {},
+        parameters: [],
+        transformer: "return data",
+        name: datasource.name!,
+        schema: {},
+        readable: true,
       }
-      const res = await request
-        .post(`/api/queries/preview`)
-        .send(query)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
+      const responseBody = await config.api.query.previewQuery(queryPreview)
       // these responses come from the mock
-      expect(res.body.schema).toEqual({
+      expect(responseBody.schema).toEqual({
         a: { type: "string", name: "a" },
         b: { type: "number", name: "b" },
       })
-      expect(res.body.rows.length).toEqual(1)
+      expect(responseBody.rows.length).toEqual(1)
       expect(events.query.previewed).toBeCalledTimes(1)
       delete datasource.config
-      expect(events.query.previewed).toBeCalledWith(datasource, query)
+      expect(events.query.previewed).toBeCalledWith(datasource, queryPreview)
     })

     it("should apply authorization to endpoint", async () => {
@@ -249,6 +255,128 @@ describe("/queries", () => {
         url: `/api/queries/preview`,
       })
     })
+
+    it("should not error when trying to generate a nested schema for an empty array", async () => {
+      const queryPreview: QueryPreview = {
+        datasourceId: datasource._id,
+        parameters: [],
+        fields: {},
+        queryVerb: "read",
+        name: datasource.name!,
+        transformer: "return data",
+        schema: {},
+        readable: true,
+      }
+      const rows = [
+        {
+          contacts: [],
+        },
+      ]
+      pg.queryMock.mockImplementation(() => ({
+        rows,
+      }))
+
+      const responseBody = await config.api.query.previewQuery(queryPreview)
+      expect(responseBody).toEqual({
+        nestedSchemaFields: {},
+        rows,
+        schema: {
+          contacts: { type: "array", name: "contacts" },
+        },
+      })
+      expect(responseBody.rows.length).toEqual(1)
+      delete datasource.config
+    })
+
+    it("should generate a nested schema based on all the nested items", async () => {
+      const queryPreview: QueryPreview = {
+        datasourceId: datasource._id,
+        parameters: [],
+        fields: {},
+        queryVerb: "read",
+        name: datasource.name!,
+        transformer: "return data",
+        schema: {},
+        readable: true,
+      }
+      const rows = [
+        {
+          contacts: [
+            {
+              address: "123 Lane",
+            },
+            {
+              address: "456 Drive",
+            },
+            {
+              postcode: "BT1 12N",
+              lat: 54.59,
+              long: -5.92,
+            },
+            {
+              city: "Belfast",
+            },
+            {
+              address: "789 Avenue",
+              phoneNumber: "0800-999-5555",
+            },
+            {
+              name: "Name",
+              isActive: false,
+            },
+          ],
+        },
+      ]
+      pg.queryMock.mockImplementation(() => ({
+        rows,
+      }))
+
+      const responseBody = await config.api.query.previewQuery(queryPreview)
+      expect(responseBody).toEqual({
+        nestedSchemaFields: {
+          contacts: {
+            address: {
+              type: "string",
+              name: "address",
+            },
+            postcode: {
+              type: "string",
+              name: "postcode",
+            },
+            lat: {
+              type: "number",
+              name: "lat",
+            },
+            long: {
+              type: "number",
+              name: "long",
+            },
+            city: {
+              type: "string",
+              name: "city",
+            },
+            phoneNumber: {
+              type: "string",
+              name: "phoneNumber",
+            },
+            name: {
+              type: "string",
+              name: "name",
+            },
+            isActive: {
+              type: "boolean",
+              name: "isActive",
+            },
+          },
+        },
+        rows,
+        schema: {
+          contacts: { type: "json", name: "contacts", subtype: "array" },
+        },
+      })
+      expect(responseBody.rows.length).toEqual(1)
+      delete datasource.config
+    })
   })

   describe("execute", () => {
@ -283,7 +411,17 @@ describe("/queries", () => {
|
||||||
|
|
||||||
describe("variables", () => {
|
describe("variables", () => {
|
||||||
async function preview(datasource: Datasource, fields: any) {
|
async function preview(datasource: Datasource, fields: any) {
|
||||||
return config.previewQuery(request, config, datasource, fields, undefined)
|
const queryPreview: QueryPreview = {
|
||||||
|
datasourceId: datasource._id!,
|
||||||
|
parameters: [],
|
||||||
|
fields,
|
||||||
|
queryVerb: "read",
|
||||||
|
name: datasource.name!,
|
||||||
|
transformer: "return data",
|
||||||
|
schema: {},
|
||||||
|
readable: true,
|
||||||
|
}
|
||||||
|
return await config.api.query.previewQuery(queryPreview)
|
||||||
}
|
}
|
||||||
|
|
||||||
it("should work with static variables", async () => {
|
it("should work with static variables", async () => {
|
||||||
|
@ -293,31 +431,31 @@ describe("/queries", () => {
|
||||||
variable2: "1",
|
variable2: "1",
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
const res = await preview(datasource, {
|
const responseBody = await preview(datasource, {
|
||||||
path: "www.{{ variable }}.com",
|
path: "www.{{ variable }}.com",
|
||||||
queryString: "test={{ variable2 }}",
|
queryString: "test={{ variable2 }}",
|
||||||
})
|
})
|
||||||
// these responses come from the mock
|
// these responses come from the mock
|
||||||
expect(res.body.schema).toEqual({
|
expect(responseBody.schema).toEqual({
|
||||||
opts: { type: "json", name: "opts" },
|
opts: { type: "json", name: "opts" },
|
||||||
url: { type: "string", name: "url" },
|
url: { type: "string", name: "url" },
|
||||||
value: { type: "string", name: "value" },
|
value: { type: "string", name: "value" },
|
||||||
})
|
})
|
||||||
expect(res.body.rows[0].url).toEqual("http://www.google.com?test=1")
|
expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
|
||||||
})
|
})
|

 it("should work with dynamic variables", async () => {
 const { datasource } = await config.dynamicVariableDatasource()
-const res = await preview(datasource, {
+const responseBody = await preview(datasource, {
 path: "www.google.com",
 queryString: "test={{ variable3 }}",
 })
-expect(res.body.schema).toEqual({
+expect(responseBody.schema).toEqual({
 opts: { type: "json", name: "opts" },
 url: { type: "string", name: "url" },
 value: { type: "string", name: "value" },
 })
-expect(res.body.rows[0].url).toContain("doctype%20html")
+expect(responseBody.rows[0].url).toContain("doctype%20html")
 })

 it("check that it automatically retries on fail with cached dynamics", async () => {
@@ -331,16 +469,16 @@ describe("/queries", () => {
 // check its in cache
 const contents = await checkCacheForDynamicVariable(base._id, "variable3")
 expect(contents.rows.length).toEqual(1)
-const res = await preview(datasource, {
+const responseBody = await preview(datasource, {
 path: "www.failonce.com",
 queryString: "test={{ variable3 }}",
 })
-expect(res.body.schema).toEqual({
+expect(responseBody.schema).toEqual({
 fails: { type: "number", name: "fails" },
 opts: { type: "json", name: "opts" },
 url: { type: "string", name: "url" },
 })
-expect(res.body.rows[0].fails).toEqual(1)
+expect(responseBody.rows[0].fails).toEqual(1)
 })

 it("deletes variables when linked query is deleted", async () => {
@@ -371,24 +509,37 @@ describe("/queries", () => {
 async function previewGet(
 datasource: Datasource,
 fields: any,
-params: any
+params: QueryParameter[]
 ) {
-return config.previewQuery(request, config, datasource, fields, params)
+const queryPreview: QueryPreview = {
+datasourceId: datasource._id!,
+parameters: params,
+fields,
+queryVerb: "read",
+name: datasource.name!,
+transformer: "return data",
+schema: {},
+readable: true,
+}
+return await config.api.query.previewQuery(queryPreview)
 }

 async function previewPost(
 datasource: Datasource,
 fields: any,
-params: any
+params: QueryParameter[]
 ) {
-return config.previewQuery(
-request,
-config,
-datasource,
+const queryPreview: QueryPreview = {
+datasourceId: datasource._id!,
+parameters: params,
 fields,
-params,
-"create"
-)
+queryVerb: "create",
+name: datasource.name!,
+transformer: null,
+schema: {},
+readable: false,
+}
+return await config.api.query.previewQuery(queryPreview)
 }

 it("should parse global and query level header mappings", async () => {
@@ -400,7 +551,7 @@ describe("/queries", () => {
 emailHdr: "{{[user].[email]}}",
 },
 })
-const res = await previewGet(
+const responseBody = await previewGet(
 datasource,
 {
 path: "www.google.com",
@@ -410,17 +561,17 @@ describe("/queries", () => {
 secondHdr: "1234",
 },
 },
-undefined
+[]
 )

-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)
 expect(parsedRequest.opts.headers).toEqual({
 test: "headerVal",
 emailHdr: userDetails.email,
 queryHdr: userDetails.firstName,
 secondHdr: "1234",
 })
-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?email=" + userDetails.email.replace("@", "%40")
 )
 })
@@ -430,21 +581,21 @@ describe("/queries", () => {

 const datasource = await config.restDatasource()

-const res = await previewGet(
+const responseBody = await previewGet(
 datasource,
 {
 path: "www.google.com",
 queryString:
 "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
 },
-{
-myEmail: "{{[user].[email]}}",
-myName: "{{[user].[firstName]}}",
-testParam: "1234",
-}
+[
+{ name: "myEmail", default: "{{[user].[email]}}" },
+{ name: "myName", default: "{{[user].[firstName]}}" },
+{ name: "testParam", default: "1234" },
+]
 )

-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?test=" +
 userDetails.email.replace("@", "%40") +
 "&testName=" +
@@ -457,7 +608,7 @@ describe("/queries", () => {
 const userDetails = config.getUserDetails()
 const datasource = await config.restDatasource()

-const res = await previewPost(
+const responseBody = await previewPost(
 datasource,
 {
 path: "www.google.com",
@@ -466,16 +617,14 @@ describe("/queries", () => {
 "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
 bodyType: "text",
 },
-{
-testParam: "1234",
-}
+[{ name: "testParam", default: "1234" }]
 )

-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)
 expect(parsedRequest.opts.body).toEqual(
 `This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
 )
-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?testParam=1234"
 )
 })
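The recurring change in these tests is the parameter shape: loose { key: value } maps become QueryParameter[] arrays. A small sketch of the conversion, with the interface inferred from how the tests use it (the real QueryParameter type lives in @budibase/types and may carry more fields):

// Shape inferred from usage in this diff; hypothetical local declaration.
interface QueryParameterSketch {
  name: string
  default: string
}

// Old style: { testParam: "1234", userRef: "{{[user].[firstName]}}" }
// New style, as passed to previewGet/previewPost above:
const params: QueryParameterSketch[] = [
  { name: "testParam", default: "1234" },
  { name: "userRef", default: "{{[user].[firstName]}}" },
]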
@@ -484,7 +633,7 @@ describe("/queries", () => {
 const userDetails = config.getUserDetails()
 const datasource = await config.restDatasource()

-const res = await previewPost(
+const responseBody = await previewPost(
 datasource,
 {
 path: "www.google.com",
@@ -493,16 +642,16 @@ describe("/queries", () => {
 '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
 bodyType: "json",
 },
-{
-testParam: "1234",
-userRef: "{{[user].[firstName]}}",
-}
+[
+{ name: "testParam", default: "1234" },
+{ name: "userRef", default: "{{[user].[firstName]}}" },
+]
 )

-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)
 const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
 expect(parsedRequest.opts.body).toEqual(test)
-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?testParam=1234"
 )
 })
@@ -511,7 +660,7 @@ describe("/queries", () => {
 const userDetails = config.getUserDetails()
 const datasource = await config.restDatasource()

-const res = await previewPost(
+const responseBody = await previewPost(
 datasource,
 {
 path: "www.google.com",
@@ -521,17 +670,17 @@ describe("/queries", () => {
 "<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
 bodyType: "xml",
 },
-{
-testParam: "1234",
-userId: "{{[user].[firstName]}}",
-}
+[
+{ name: "testParam", default: "1234" },
+{ name: "userId", default: "{{[user].[firstName]}}" },
+]
 )

-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)
 const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>${userDetails.firstName}</ref> <somestring>testing</somestring> </note>`

 expect(parsedRequest.opts.body).toEqual(test)
-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?testParam=1234"
 )
 })
@@ -540,7 +689,7 @@ describe("/queries", () => {
 const userDetails = config.getUserDetails()
 const datasource = await config.restDatasource()

-const res = await previewPost(
+const responseBody = await previewPost(
 datasource,
 {
 path: "www.google.com",
@@ -549,13 +698,13 @@ describe("/queries", () => {
 '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
 bodyType: "form",
 },
-{
-testParam: "1234",
-userRef: "{{[user].[firstName]}}",
-}
+[
+{ name: "testParam", default: "1234" },
+{ name: "userRef", default: "{{[user].[firstName]}}" },
+]
 )

-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)

 const emailData = parsedRequest.opts.body._streams[1]
 expect(emailData).toEqual(userDetails.email)
@@ -566,7 +715,7 @@ describe("/queries", () => {
 const userRef = parsedRequest.opts.body._streams[7]
 expect(userRef).toEqual(userDetails.firstName)

-expect(res.body.rows[0].url).toEqual(
+expect(responseBody.rows[0].url).toEqual(
 "http://www.google.com?testParam=1234"
 )
 })
@@ -575,7 +724,7 @@ describe("/queries", () => {
 const userDetails = config.getUserDetails()
 const datasource = await config.restDatasource()

-const res = await previewPost(
+const responseBody = await previewPost(
 datasource,
 {
 path: "www.google.com",
@@ -584,12 +733,12 @@ describe("/queries", () => {
 '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
 bodyType: "encoded",
 },
-{
-testParam: "1234",
-userRef: "{{[user].[firstName]}}",
-}
+[
+{ name: "testParam", default: "1234" },
+{ name: "userRef", default: "{{[user].[firstName]}}" },
+]
 )
-const parsedRequest = JSON.parse(res.body.extra.raw)
+const parsedRequest = JSON.parse(responseBody.extra.raw)

 expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
 expect(parsedRequest.opts.body.queryCode).toEqual("1234")
@@ -2135,5 +2135,48 @@ describe.each([
 }
 )
 })

+it("should not carry over context between formulas", async () => {
+const js = Buffer.from(`return $("[text]");`).toString("base64")
+const table = await config.createTable({
+name: "table",
+type: "table",
+schema: {
+text: {
+name: "text",
+type: FieldType.STRING,
+},
+formula: {
+name: "formula",
+type: FieldType.FORMULA,
+formula: `{{ js "${js}"}}`,
+formulaType: FormulaType.DYNAMIC,
+},
+},
+})
+
+for (let i = 0; i < 10; i++) {
+await config.api.row.save(table._id!, { text: `foo${i}` })
+}
+
+const { rows } = await config.api.row.search(table._id!)
+expect(rows).toHaveLength(10)
+
+const formulaValues = rows.map(r => r.formula)
+expect(formulaValues).toEqual(
+expect.arrayContaining([
+"foo0",
+"foo1",
+"foo2",
+"foo3",
+"foo4",
+"foo5",
+"foo6",
+"foo7",
+"foo8",
+"foo9",
+])
+)
+})
 })
 })
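The new formula test encodes the JS body as base64 before embedding it in a {{ js "..." }} binding, which is how dynamic formulas are stored. A quick sketch of that round trip (the decode step is added here for illustration):

// Encode a JS snippet for a dynamic formula binding, then decode it the way
// a runner would; the binding syntax is taken directly from the test above.
const js = Buffer.from(`return $("[text]");`).toString("base64")
const formula = `{{ js "${js}"}}`
const decoded = Buffer.from(js, "base64").toString()
// decoded === `return $("[text]");`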
@@ -368,10 +368,12 @@ describe("/tables", () => {
 .set(config.defaultHeaders())
 .expect("Content-Type", /json/)
 .expect(200)
-const fetchedTable = res.body[0]
-expect(fetchedTable.name).toEqual(testTable.name)
-expect(fetchedTable.type).toEqual("table")
-expect(fetchedTable.sourceType).toEqual("internal")
+
+const table = res.body.find((t: Table) => t._id === testTable._id)
+expect(table).toBeDefined()
+expect(table.name).toEqual(testTable.name)
+expect(table.type).toEqual("table")
+expect(table.sourceType).toEqual("internal")
 })

 it("should apply authorization to endpoint", async () => {
@@ -26,6 +26,7 @@ async function start() {

 start().catch(err => {
 console.error(`Failed server startup - ${err.message}`)
+throw err
 })

 export function getServer() {
@@ -23,7 +23,7 @@ const DEFAULTS = {
 AUTOMATION_THREAD_TIMEOUT: 12000,
 AUTOMATION_SYNC_TIMEOUT: 120000,
 AUTOMATION_MAX_ITERATIONS: 200,
-JS_PER_EXECUTION_TIME_LIMIT_MS: 1000,
+JS_PER_EXECUTION_TIME_LIMIT_MS: 1500,
 TEMPLATE_REPOSITORY: "app",
 PLUGINS_DIR: "/plugins",
 FORKED_PROCESS_NAME: "main",
@@ -113,6 +113,7 @@ const environment = {
 process.env[key] = value
 // @ts-ignore
 environment[key] = value
+cleanVariables()
 },
 isTest: coreEnv.isTest,
 isJest: coreEnv.isJest,
@@ -128,18 +129,22 @@ const environment = {
 },
 }

-// clean up any environment variable edge cases
-for (let [key, value] of Object.entries(environment)) {
-// handle the edge case of "0" to disable an environment variable
-if (value === "0") {
-// @ts-ignore
-environment[key] = 0
-}
-// handle the edge case of "false" to disable an environment variable
-if (value === "false") {
-// @ts-ignore
-environment[key] = 0
-}
-}
+function cleanVariables() {
+// clean up any environment variable edge cases
+for (let [key, value] of Object.entries(environment)) {
+// handle the edge case of "0" to disable an environment variable
+if (value === "0") {
+// @ts-ignore
+environment[key] = 0
+}
+// handle the edge case of "false" to disable an environment variable
+if (value === "false") {
+// @ts-ignore
+environment[key] = 0
+}
+}
+}
+
+cleanVariables()

 export default environment
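cleanVariables keeps the existing edge-case handling ("0" or "false" disables a variable) but is now a named function that also runs whenever a variable is set at runtime. The same coercion, reduced to a standalone sketch:

// Same edge-case handling as cleanVariables above, over a plain object.
// Environment variables always arrive as strings, so "0" and "false" are
// coerced to a falsy 0 to mean "disabled"; everything else is left alone.
function cleanSketch(env: Record<string, any>) {
  for (const [key, value] of Object.entries(env)) {
    if (value === "0" || value === "false") {
      env[key] = 0
    }
  }
  return env
}

cleanSketch({ FEATURE_X: "false", LIMIT: "1500" }) // => { FEATURE_X: 0, LIMIT: "1500" }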
File diff suppressed because one or more lines are too long
@@ -1,4 +1,2 @@
-import { EJSON } from "bson"
-
-export { deserialize } from "bson"
-export const toJson = EJSON.deserialize
+export const deserialize = require("bson").deserialize
+export const toJson = require("bson").EJSON.deserialize
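Both versions of this module expose the same two helpers: bson's raw deserialize, and EJSON.deserialize aliased as toJson to turn extended-JSON values into plain JSON. A sketch of how they chain (the input buffer is assumed):

// deserialize: BSON buffer -> JS object (possibly with EJSON wrapper types);
// toJson: EJSON values -> plain JSON-safe values.
const { deserialize, EJSON } = require("bson")
const toJson = EJSON.deserialize

const doc = deserialize(someBsonBuffer, { validation: { utf8: false } }) // someBsonBuffer is hypothetical
const plain = toJson(doc)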
File diff suppressed because one or more lines are too long
@@ -2,9 +2,8 @@ const {
 getJsHelperList,
 } = require("../../../../string-templates/src/helpers/list.js")

-const helpers = getJsHelperList()
-
 export default {
-...helpers,
+...getJsHelperList(),
 // pointing stripProtocol to a unexisting function to be able to declare it on isolated-vm
 // @ts-ignore
 // eslint-disable-next-line no-undef
@@ -7,22 +7,19 @@ export const enum BundleType {
 BSON = "bson",
 }

-const bundleSourceCode = {
-[BundleType.HELPERS]: "../bundles/index-helpers.ivm.bundle.js",
-[BundleType.BSON]: "../bundles/bson.ivm.bundle.js",
+const bundleSourceFile: Record<BundleType, string> = {
+[BundleType.HELPERS]: "./index-helpers.ivm.bundle.js",
+[BundleType.BSON]: "./bson.ivm.bundle.js",
 }
+const bundleSourceCode: Partial<Record<BundleType, string>> = {}

 export function loadBundle(type: BundleType) {
-if (environment.isJest()) {
-return fs.readFileSync(require.resolve(bundleSourceCode[type]), "utf-8")
+let sourceCode = bundleSourceCode[type]
+if (sourceCode) {
+return sourceCode
 }
-
-switch (type) {
-case BundleType.HELPERS:
-return require("../bundles/index-helpers.ivm.bundle.js")
-case BundleType.BSON:
-return require("../bundles/bson.ivm.bundle.js")
-default:
-utils.unreachable(type)
-}
+
+sourceCode = fs.readFileSync(require.resolve(bundleSourceFile[type]), "utf-8")
+bundleSourceCode[type] = sourceCode
+return sourceCode
 }
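loadBundle now memoises the file read instead of branching on the Jest environment, so each bundle is read from disk at most once per process. The pattern, reduced to its core (names hypothetical):

import fs from "fs"

// Generic lazy file-content cache in the same style as loadBundle above.
const cache: Partial<Record<string, string>> = {}

function loadOnce(file: string): string {
  let contents = cache[file]
  if (contents) {
    return contents
  }
  contents = fs.readFileSync(require.resolve(file), "utf-8")
  cache[file] = contents
  return contents
}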
@@ -1,68 +1,49 @@
-import vm from "vm"
-import env from "../environment"
-import { setJSRunner, setOnErrorLog } from "@budibase/string-templates"
-import { context, logging, timers } from "@budibase/backend-core"
-import tracer from "dd-trace"
 import { serializeError } from "serialize-error"
+import env from "../environment"
+import {
+JsErrorTimeout,
+setJSRunner,
+setOnErrorLog,
+} from "@budibase/string-templates"
+import { context, logging } from "@budibase/backend-core"
+import tracer from "dd-trace"

-type TrackerFn = <T>(f: () => T) => T
+import { IsolatedVM } from "./vm"

 export function init() {
-setJSRunner((js: string, ctx: vm.Context) => {
+setJSRunner((js: string, ctx: Record<string, any>) => {
 return tracer.trace("runJS", {}, span => {
-const perRequestLimit = env.JS_PER_REQUEST_TIMEOUT_MS
-let track: TrackerFn = f => f()
-if (perRequestLimit) {
-const bbCtx = tracer.trace("runJS.getCurrentContext", {}, span =>
-context.getCurrentContext()
-)
+try {
+const bbCtx = context.getCurrentContext()
+
+const vm = bbCtx?.vm
+? bbCtx.vm
+: new IsolatedVM({
+memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
+invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
+isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
+}).withHelpers()
+
 if (bbCtx) {
-if (!bbCtx.jsExecutionTracker) {
-span?.addTags({
-createdExecutionTracker: true,
-})
-bbCtx.jsExecutionTracker = tracer.trace(
-"runJS.createExecutionTimeTracker",
-{},
-span => timers.ExecutionTimeTracker.withLimit(perRequestLimit)
-)
-}
-span?.addTags({
-js: {
-limitMS: bbCtx.jsExecutionTracker.limitMs,
-elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
-},
-})
-// We call checkLimit() here to prevent paying the cost of creating
-// a new VM context below when we don't need to.
-tracer.trace("runJS.checkLimitAndBind", {}, span => {
-bbCtx.jsExecutionTracker!.checkLimit()
-track = bbCtx.jsExecutionTracker!.track.bind(
-bbCtx.jsExecutionTracker
-)
-})
+// If we have a context, we want to persist it to reuse the isolate
+bbCtx.vm = vm
 }
+const { helpers, ...rest } = ctx
+return vm.withContext(rest, () => vm.execute(js))
+} catch (error: any) {
+if (error.message === "Script execution timed out.") {
+throw new JsErrorTimeout()
+}
+throw error
 }
-
-ctx = {
-...ctx,
-alert: undefined,
-setInterval: undefined,
-setTimeout: undefined,
-}
-
-vm.createContext(ctx)
-return track(() =>
-vm.runInNewContext(js, ctx, {
-timeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
-})
-)
 })
 })

 if (env.LOG_JS_ERRORS) {
 setOnErrorLog((error: Error) => {
-logging.logWarn(JSON.stringify(serializeError(error)))
+logging.logWarn(
+`Error while executing js: ${JSON.stringify(serializeError(error))}`
+)
 })
 }
 }
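The rewritten runner creates one IsolatedVM per request context and stashes it on bbCtx, so subsequent bindings in the same request reuse the warm isolate instead of paying startup cost again. A compressed sketch of that reuse-or-create pattern (the context shape is simplified):

// Simplified version of the logic in init() above; RequestCtx is hypothetical.
interface RequestCtx {
  vm?: { execute(js: string): any }
}

function getVm(
  bbCtx: RequestCtx | undefined,
  create: () => { execute(js: string): any }
) {
  const vm = bbCtx?.vm ?? create()
  if (bbCtx) {
    bbCtx.vm = vm // persist so the next runJS call in this request reuses the isolate
  }
  return vm
}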
@@ -1,22 +1,4 @@
-// import { validate as isValidUUID } from "uuid"
+import { validate as isValidUUID } from "uuid"

-jest.mock("@budibase/handlebars-helpers/lib/math", () => {
-const actual = jest.requireActual("@budibase/handlebars-helpers/lib/math")
-
-return {
-...actual,
-random: () => 10,
-}
-})
-jest.mock("@budibase/handlebars-helpers/lib/uuid", () => {
-const actual = jest.requireActual("@budibase/handlebars-helpers/lib/uuid")
-
-return {
-...actual,
-uuid: () => "f34ebc66-93bd-4f7c-b79b-92b5569138bc",
-}
-})
-
 import { processStringSync, encodeJSBinding } from "@budibase/string-templates"

 const { runJsHelpersTests } = require("@budibase/string-templates/test/utils")
@@ -27,7 +9,7 @@ import TestConfiguration from "../../tests/utilities/TestConfiguration"

 tk.freeze("2021-01-21T12:00:00")

-describe("jsRunner", () => {
+describe("jsRunner (using isolated-vm)", () => {
 const config = new TestConfiguration()

 beforeAll(async () => {
@@ -36,6 +18,10 @@ describe("jsRunner", () => {
 await config.init()
 })

+afterAll(() => {
+config.end()
+})
+
 const processJS = (js: string, context?: object) => {
 return config.doInContext(config.getAppId(), async () =>
 processStringSync(encodeJSBinding(js), context || {})
@@ -47,10 +33,14 @@ describe("jsRunner", () => {
 expect(output).toBe(3)
 })

-// TODO This should be reenabled when running on isolated-vm
-it.skip("should prevent sandbox escape", async () => {
+it("it can execute sloppy javascript", async () => {
+const output = await processJS(`a=2;b=3;return a + b`)
+expect(output).toBe(5)
+})
+
+it("should prevent sandbox escape", async () => {
 const output = await processJS(
-`return this.constructor.constructor("return process")()`
+`return this.constructor.constructor("return process.env")()`
 )
 expect(output).toBe("Error while executing JS")
 })
@@ -58,26 +48,26 @@ describe("jsRunner", () => {
 describe("helpers", () => {
 runJsHelpersTests({
 funcWrap: (func: any) => config.doInContext(config.getAppId(), func),
-// testsToSkip: ["random", "uuid"],
+testsToSkip: ["random", "uuid"],
 })

-// describe("uuid", () => {
-// it("uuid helper returns a valid uuid", async () => {
-// const result = await processJS("return helpers.uuid()")
-// expect(result).toBeDefined()
-// expect(isValidUUID(result)).toBe(true)
-// })
-// })
+describe("uuid", () => {
+it("uuid helper returns a valid uuid", async () => {
+const result = await processJS("return helpers.uuid()")
+expect(result).toBeDefined()
+expect(isValidUUID(result)).toBe(true)
+})
+})

-// describe("random", () => {
-// it("random helper returns a valid number", async () => {
-// const min = 1
-// const max = 8
-// const result = await processJS(`return helpers.random(${min}, ${max})`)
-// expect(result).toBeDefined()
-// expect(result).toBeGreaterThanOrEqual(min)
-// expect(result).toBeLessThanOrEqual(max)
-// })
-// })
+describe("random", () => {
+it("random helper returns a valid number", async () => {
+const min = 1
+const max = 8
+const result = await processJS(`return helpers.random(${min}, ${max})`)
+expect(result).toBeDefined()
+expect(result).toBeGreaterThanOrEqual(min)
+expect(result).toBeLessThanOrEqual(max)
+})
+})
 })
 })
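The sandbox-escape test relies on the classic constructor-chain trick: from any object you can reach Function via .constructor.constructor and compile code in the defining realm. A note on what the probe expands to:

// `this.constructor.constructor` resolves to Function, so this compiles and
// calls a new function in the realm where the object was created. Under the
// old `vm` module that realm could reach the host's `process`; inside
// isolated-vm there is no `process`, so evaluation fails and the runner
// surfaces "Error while executing JS" instead of leaking host state.
const probe = `return this.constructor.constructor("return process.env")()`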
@@ -1,270 +1 @@
-import ivm from "isolated-vm"
-import bson from "bson"
-
-import url from "url"
-import crypto from "crypto"
-import querystring from "querystring"
-
-import { BundleType, loadBundle } from "../bundles"
-import { VM } from "@budibase/types"
-
-class ExecutionTimeoutError extends Error {
-constructor(message: string) {
-super(message)
-this.name = "ExecutionTimeoutError"
-}
-}
-
-class ModuleHandler {
-private modules: {
-import: string
-moduleKey: string
-module: ivm.Module
-}[] = []
-
-private generateRandomKey = () => `i${crypto.randomUUID().replace(/-/g, "")}`
-
-registerModule(module: ivm.Module, imports: string) {
-this.modules.push({
-moduleKey: this.generateRandomKey(),
-import: imports,
-module: module,
-})
-}
-
-generateImports() {
-return this.modules
-.map(m => `import ${m.import} from "${m.moduleKey}"`)
-.join(";")
-}
-
-getModule(key: string) {
-const module = this.modules.find(m => m.moduleKey === key)
-return module?.module
-}
-}
-
-export class IsolatedVM implements VM {
-private isolate: ivm.Isolate
-private vm: ivm.Context
-private jail: ivm.Reference
-private invocationTimeout: number
-private isolateAccumulatedTimeout?: number
-
-// By default the wrapper returns itself
-private codeWrapper: (code: string) => string = code => code
-
-private moduleHandler = new ModuleHandler()
-
-private readonly resultKey = "results"
-
-constructor({
-memoryLimit,
-invocationTimeout,
-isolateAccumulatedTimeout,
-}: {
-memoryLimit: number
-invocationTimeout: number
-isolateAccumulatedTimeout?: number
-}) {
-this.isolate = new ivm.Isolate({ memoryLimit })
-this.vm = this.isolate.createContextSync()
-this.jail = this.vm.global
-this.jail.setSync("global", this.jail.derefInto())
-
-this.addToContext({
-[this.resultKey]: { out: "" },
-})
-
-this.invocationTimeout = invocationTimeout
-this.isolateAccumulatedTimeout = isolateAccumulatedTimeout
-}
-
-withHelpers() {
-const urlModule = this.registerCallbacks({
-resolve: url.resolve,
-parse: url.parse,
-})
-
-const querystringModule = this.registerCallbacks({
-escape: querystring.escape,
-})
-
-this.addToContext({
-helpersStripProtocol: new ivm.Callback((str: string) => {
-var parsed = url.parse(str) as any
-parsed.protocol = ""
-return parsed.format()
-}),
-})
-
-const injectedRequire = `const require=function req(val) {
-switch (val) {
-case "url": return ${urlModule};
-case "querystring": return ${querystringModule};
-}
-}`
-const helpersSource = loadBundle(BundleType.HELPERS)
-const helpersModule = this.isolate.compileModuleSync(
-`${injectedRequire};${helpersSource}`
-)
-
-helpersModule.instantiateSync(this.vm, specifier => {
-if (specifier === "crypto") {
-const cryptoModule = this.registerCallbacks({
-randomUUID: crypto.randomUUID,
-})
-const module = this.isolate.compileModuleSync(
-`export default ${cryptoModule}`
-)
-module.instantiateSync(this.vm, specifier => {
-throw new Error(`No imports allowed. Required: ${specifier}`)
-})
-return module
-}
-throw new Error(`No imports allowed. Required: ${specifier}`)
-})
-
-this.moduleHandler.registerModule(helpersModule, "helpers")
-return this
-}
-
-withContext(context: Record<string, any>) {
-this.addToContext(context)
-
-return this
-}
-
-withParsingBson(data: any) {
-this.addToContext({
-bsonData: bson.BSON.serialize({ data }),
-})
-
-// If we need to parse bson, we follow the next steps:
-// 1. Serialise the data from potential BSON to buffer before passing it to the isolate
-// 2. Deserialise the data within the isolate, to get the original data
-// 3. Process script
-// 4. Stringify the result in order to convert the result from BSON to json
-this.codeWrapper = code =>
-`(function(){
-const data = deserialize(bsonData, { validation: { utf8: false } }).data;
-const result = ${code}
-return toJson(result);
-})();`
-
-const bsonSource = loadBundle(BundleType.BSON)
-
-this.addToContext({
-textDecoderCb: new ivm.Callback(
-(args: {
-constructorArgs: any
-functionArgs: Parameters<InstanceType<typeof TextDecoder>["decode"]>
-}) => {
-const result = new TextDecoder(...args.constructorArgs).decode(
-...args.functionArgs
-)
-return result
-}
-),
-})
-
-// "Polyfilling" text decoder. `bson.deserialize` requires decoding. We are creating a bridge function so we don't need to inject the full library
-const textDecoderPolyfill = class TextDecoder {
-constructorArgs
-
-constructor(...constructorArgs: any) {
-this.constructorArgs = constructorArgs
-}
-
-decode(...input: any) {
-// @ts-ignore
-return textDecoderCb({
-constructorArgs: this.constructorArgs,
-functionArgs: input,
-})
-}
-}.toString()
-const bsonModule = this.isolate.compileModuleSync(
-`${textDecoderPolyfill};${bsonSource}`
-)
-bsonModule.instantiateSync(this.vm, specifier => {
-throw new Error(`No imports allowed. Required: ${specifier}`)
-})
-
-this.moduleHandler.registerModule(bsonModule, "{deserialize, toJson}")
-
-return this
-}
-
-execute(code: string): any {
-if (this.isolateAccumulatedTimeout) {
-const cpuMs = Number(this.isolate.cpuTime) / 1e6
-if (cpuMs > this.isolateAccumulatedTimeout) {
-throw new ExecutionTimeoutError(
-`CPU time limit exceeded (${cpuMs}ms > ${this.isolateAccumulatedTimeout}ms)`
-)
-}
-}
-
-code = `${this.moduleHandler.generateImports()};results.out=${this.codeWrapper(
-code
-)};`
-
-const script = this.isolate.compileModuleSync(code)
-
-script.instantiateSync(this.vm, specifier => {
-const module = this.moduleHandler.getModule(specifier)
-if (module) {
-return module
-}
-
-throw new Error(`"${specifier}" import not allowed`)
-})
-
-script.evaluateSync({ timeout: this.invocationTimeout })
-
-const result = this.getFromContext(this.resultKey)
-return result.out
-}
-
-private registerCallbacks(functions: Record<string, any>) {
-const libId = crypto.randomUUID().replace(/-/g, "")
-
-const x: Record<string, string> = {}
-for (const [funcName, func] of Object.entries(functions)) {
-const key = `f${libId}${funcName}cb`
-x[funcName] = key
-
-this.addToContext({
-[key]: new ivm.Callback((...params: any[]) => (func as any)(...params)),
-})
-}
-
-const mod =
-`{` +
-Object.entries(x)
-.map(([key, func]) => `${key}: ${func}`)
-.join() +
-"}"
-return mod
-}
-
-private addToContext(context: Record<string, any>) {
-for (let key in context) {
-const value = context[key]
-this.jail.setSync(
-key,
-typeof value === "function"
-? value
-: new ivm.ExternalCopy(value).copyInto({ release: true })
-)
-}
-}
-
-private getFromContext(key: string) {
-const ref = this.vm.global.getSync(key, { reference: true })
-const result = ref.copySync()
-ref.release()
-return result
-}
-}
+export * from "./isolated-vm"
@@ -0,0 +1,246 @@
+import ivm from "isolated-vm"
+import bson from "bson"
+
+import url from "url"
+import crypto from "crypto"
+import querystring from "querystring"
+
+import { BundleType, loadBundle } from "../bundles"
+import { VM } from "@budibase/types"
+import environment from "../../environment"
+
+class ExecutionTimeoutError extends Error {
+constructor(message: string) {
+super(message)
+this.name = "ExecutionTimeoutError"
+}
+}
+
+export class IsolatedVM implements VM {
+private isolate: ivm.Isolate
+private vm: ivm.Context
+private jail: ivm.Reference
+private invocationTimeout: number
+private isolateAccumulatedTimeout?: number
+
+// By default the wrapper returns itself
+private codeWrapper: (code: string) => string = code => code
+
+private readonly resultKey = "results"
+private runResultKey: string
+
+constructor({
+memoryLimit,
+invocationTimeout,
+isolateAccumulatedTimeout,
+}: {
+memoryLimit?: number
+invocationTimeout?: number
+isolateAccumulatedTimeout?: number
+} = {}) {
+this.isolate = new ivm.Isolate({
+memoryLimit: memoryLimit || environment.JS_RUNNER_MEMORY_LIMIT,
+})
+this.vm = this.isolate.createContextSync()
+this.jail = this.vm.global
+this.jail.setSync("global", this.jail.derefInto())
+
+this.runResultKey = crypto.randomUUID()
+this.addToContext({
+[this.resultKey]: { [this.runResultKey]: "" },
+})
+
+this.invocationTimeout =
+invocationTimeout || environment.JS_PER_INVOCATION_TIMEOUT_MS
+this.isolateAccumulatedTimeout = isolateAccumulatedTimeout
+}
+
+withHelpers() {
+const urlModule = this.registerCallbacks({
+resolve: url.resolve,
+parse: url.parse,
+})
+
+const querystringModule = this.registerCallbacks({
+escape: querystring.escape,
+})
+
+const cryptoModule = this.registerCallbacks({
+randomUUID: crypto.randomUUID,
+})
+
+this.addToContext({
+helpersStripProtocol: new ivm.Callback((str: string) => {
+var parsed = url.parse(str) as any
+parsed.protocol = ""
+return parsed.format()
+}),
+})
+
+const injectedRequire = `require=function req(val) {
+switch (val) {
+case "url": return ${urlModule};
+case "querystring": return ${querystringModule};
+case "crypto": return ${cryptoModule};
+}
+}`
+const helpersSource = loadBundle(BundleType.HELPERS)
+const script = this.isolate.compileScriptSync(
+`${injectedRequire};${helpersSource};helpers=helpers.default`
+)
+
+script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
+new Promise(() => {
+script.release()
+})
+
+return this
+}
+
+withContext<T>(context: Record<string, any>, executeWithContext: () => T) {
+this.addToContext(context)
+
+try {
+return executeWithContext()
+} finally {
+this.removeFromContext(Object.keys(context))
+}
+}
+
+withParsingBson(data: any) {
+this.addToContext({
+bsonData: bson.BSON.serialize({ data }),
+})
+
+// If we need to parse bson, we follow the next steps:
+// 1. Serialise the data from potential BSON to buffer before passing it to the isolate
+// 2. Deserialise the data within the isolate, to get the original data
+// 3. Process script
+// 4. Stringify the result in order to convert the result from BSON to json
+this.codeWrapper = code =>
+`(function(){
+const data = bson.deserialize(bsonData, { validation: { utf8: false } }).data;
+const result = ${code}
+return bson.toJson(result);
+})();`
+
+const bsonSource = loadBundle(BundleType.BSON)
+
+this.addToContext({
+textDecoderCb: new ivm.Callback(
+(args: {
+constructorArgs: any
+functionArgs: Parameters<InstanceType<typeof TextDecoder>["decode"]>
+}) => {
+const result = new TextDecoder(...args.constructorArgs).decode(
+...args.functionArgs
+)
+return result
+}
+),
+})
+
+// "Polyfilling" text decoder. `bson.deserialize` requires decoding. We are creating a bridge function so we don't need to inject the full library
+const textDecoderPolyfill = class TextDecoderMock {
+constructorArgs
+
+constructor(...constructorArgs: any) {
+this.constructorArgs = constructorArgs
+}
+
+decode(...input: any) {
+// @ts-ignore
+return textDecoderCb({
+constructorArgs: this.constructorArgs,
+functionArgs: input,
+})
+}
+}
+.toString()
+.replace(/TextDecoderMock/, "TextDecoder")
+
+const script = this.isolate.compileScriptSync(
+`${textDecoderPolyfill};${bsonSource}`
+)
+script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
+new Promise(() => {
+script.release()
+})
+
+return this
+}
+
+execute(code: string): any {
+if (this.isolateAccumulatedTimeout) {
+const cpuMs = Number(this.isolate.cpuTime) / 1e6
+if (cpuMs > this.isolateAccumulatedTimeout) {
+throw new ExecutionTimeoutError(
+`CPU time limit exceeded (${cpuMs}ms > ${this.isolateAccumulatedTimeout}ms)`
+)
+}
+}
+
+code = `results['${this.runResultKey}']=${this.codeWrapper(code)}`
+
+const script = this.isolate.compileScriptSync(code)
+
+script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
+new Promise(() => {
+script.release()
+})
+
+// We can't rely on the script run result as it will not work for non-transferable values
+const result = this.getFromContext(this.resultKey)
+return result[this.runResultKey]
+}
+
+private registerCallbacks(functions: Record<string, any>) {
+const libId = crypto.randomUUID().replace(/-/g, "")
+
+const x: Record<string, string> = {}
+for (const [funcName, func] of Object.entries(functions)) {
+const key = `f${libId}${funcName}cb`
+x[funcName] = key
+
+this.addToContext({
+[key]: new ivm.Callback((...params: any[]) => (func as any)(...params)),
+})
+}
+
+const mod =
+`{` +
+Object.entries(x)
+.map(([key, func]) => `${key}: ${func}`)
+.join() +
+"}"
+return mod
+}
+
+private addToContext(context: Record<string, any>) {
+for (let key in context) {
+const value = context[key]
+this.jail.setSync(
+key,
+typeof value === "function"
+? value
+: new ivm.ExternalCopy(value).copyInto({ release: true })
+)
+}
+}
+
+private removeFromContext(keys: string[]) {
+for (let key of keys) {
+this.jail.deleteSync(key)
+}
+}
+
+private getFromContext(key: string) {
+const ref = this.vm.global.getSync(key, { reference: true })
+const result = ref.copySync()
+
+new Promise(() => {
+ref.release()
+})
+return result
+}
+}
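Putting the new class together, a plausible end-to-end use of the isolate (values illustrative; omitted options fall back to the environment defaults wired into the constructor):

import { IsolatedVM } from "./isolated-vm"

const vm = new IsolatedVM({
  memoryLimit: 64, // MB; defaults to JS_RUNNER_MEMORY_LIMIT when omitted
  invocationTimeout: 1000, // ms per execute() call
  isolateAccumulatedTimeout: 5000, // ms of total CPU time across calls
}).withHelpers()

// withContext adds the keys to the jail, runs the callback, then removes them,
// so state cannot leak from one binding evaluation into the next.
const out = vm.withContext({ a: 2, b: 3 }, () => vm.execute(`a + b`))
// out === 5, read back through the `results` object rather than the script's
// own return value, which would not survive for non-transferable types.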
@@ -84,7 +84,8 @@ describe("syncGlobalUsers", () => {
 await syncGlobalUsers()

 const metadata = await rawUserMetadata()
-expect(metadata).toHaveLength(2)
+
+expect(metadata).toHaveLength(2 + 1) // ADMIN user created in test bootstrap still in the application
 expect(metadata).toContainEqual(
 expect.objectContaining({
 _id: db.generateUserMetadataID(user1._id!),
@@ -121,7 +122,7 @@ describe("syncGlobalUsers", () => {
 await syncGlobalUsers()

 const metadata = await rawUserMetadata()
-expect(metadata).toHaveLength(1) //ADMIN user created in test bootstrap still in the application
+expect(metadata).toHaveLength(1) // ADMIN user created in test bootstrap still in the application
 })
 })
 })
@@ -76,14 +76,6 @@ mocks.licenses.useUnlimited()

 dbInit()

-type DefaultUserValues = {
-globalUserId: string
-email: string
-firstName: string
-lastName: string
-csrfToken: string
-}
-
 export interface TableToBuild extends Omit<Table, "sourceId" | "sourceType"> {
 sourceId?: string
 sourceType?: TableSourceType
@@ -99,14 +91,17 @@ export default class TestConfiguration {
 prodApp: any
 prodAppId: any
 user: any
-globalUserId: any
 userMetadataId: any
 table?: Table
 automation: any
 datasource?: Datasource
 tenantId?: string
-defaultUserValues: DefaultUserValues
 api: API
+csrfToken?: string
+
+private get globalUserId() {
+return this.user._id
+}

 constructor(openServer = true) {
 if (openServer) {
@@ -121,21 +116,10 @@ export default class TestConfiguration {
 }
 this.appId = null
 this.allApps = []
-this.defaultUserValues = this.populateDefaultUserValues()
-
 this.api = new API(this)
 }

-populateDefaultUserValues(): DefaultUserValues {
-return {
-globalUserId: `us_${newid()}`,
-email: generator.email(),
-firstName: generator.first(),
-lastName: generator.last(),
-csrfToken: generator.hash(),
-}
-}
-
 getRequest() {
 return this.request
 }
@@ -162,10 +146,10 @@ export default class TestConfiguration {

 getUserDetails() {
 return {
-globalId: this.defaultUserValues.globalUserId,
-email: this.defaultUserValues.email,
-firstName: this.defaultUserValues.firstName,
-lastName: this.defaultUserValues.lastName,
+globalId: this.globalUserId,
+email: this.user.email,
+firstName: this.user.firstName,
+lastName: this.user.lastName,
 }
 }

@@ -300,15 +284,27 @@ export default class TestConfiguration {
 }

 // USER / AUTH
-async globalUser({
-id = this.defaultUserValues.globalUserId,
-firstName = this.defaultUserValues.firstName,
-lastName = this.defaultUserValues.lastName,
-builder = true,
-admin = false,
-email = this.defaultUserValues.email,
-roles,
-}: any = {}): Promise<User> {
+async globalUser(
+config: {
+id?: string
+firstName?: string
+lastName?: string
+builder?: boolean
+admin?: boolean
+email?: string
+roles?: any
+} = {}
+): Promise<User> {
+const {
+id = `us_${newid()}`,
+firstName = generator.first(),
+lastName = generator.last(),
+builder = true,
+admin = false,
+email = generator.email(),
+roles,
+} = config
+
 const db = tenancy.getTenantDB(this.getTenantId())
 let existing
 try {
@@ -327,7 +323,7 @@ export default class TestConfiguration {
 await sessions.createASession(id, {
 sessionId: "sessionid",
 tenantId: this.getTenantId(),
-csrfToken: this.defaultUserValues.csrfToken,
+csrfToken: this.csrfToken,
 })
 if (builder) {
 user.builder = { global: true }
@@ -357,14 +353,16 @@ export default class TestConfiguration {
 roles?: UserRoles
 } = {}
 ): Promise<User> {
-let { id, firstName, lastName, email, builder, admin, roles } = user
-firstName = firstName || this.defaultUserValues.firstName
-lastName = lastName || this.defaultUserValues.lastName
-email = email || this.defaultUserValues.email
-roles = roles || {}
-if (builder == null) {
-builder = true
-}
+const {
+id,
+firstName = generator.first(),
+lastName = generator.last(),
+email = generator.email(),
+builder = true,
+admin,
+roles,
+} = user
+
 const globalId = !id ? `us_${Math.random()}` : `us_${id}`
 const resp = await this.globalUser({
 id: globalId,
@@ -373,7 +371,7 @@ export default class TestConfiguration {
 email,
 builder,
 admin,
-roles,
+roles: roles || {},
 })
 await cache.user.invalidateUser(globalId)
 return resp
@@ -448,7 +446,7 @@ export default class TestConfiguration {
 defaultHeaders(extras = {}, prodApp = false) {
 const tenantId = this.getTenantId()
 const authObj: AuthToken = {
-userId: this.defaultUserValues.globalUserId,
+userId: this.globalUserId,
 sessionId: "sessionid",
 tenantId,
 }
@@ -457,7 +455,7 @@ export default class TestConfiguration {
 const headers: any = {
 Accept: "application/json",
 Cookie: [`${constants.Cookie.Auth}=${authToken}`],
-[constants.Header.CSRF_TOKEN]: this.defaultUserValues.csrfToken,
+[constants.Header.CSRF_TOKEN]: this.csrfToken,
 Host: this.tenantHost(),
 ...extras,
 }
@@ -487,7 +485,7 @@ export default class TestConfiguration {

 async basicRoleHeaders() {
 return await this.roleHeaders({
-email: this.defaultUserValues.email,
+email: generator.email(),
 builder: false,
 prodApp: true,
 roleId: roles.BUILTIN_ROLE_IDS.BASIC,
@@ -495,7 +493,7 @@ export default class TestConfiguration {
 }

 async roleHeaders({
-email = this.defaultUserValues.email,
+email = generator.email(),
 roleId = roles.BUILTIN_ROLE_IDS.ADMIN,
 builder = false,
 prodApp = true,
@@ -519,11 +517,12 @@ export default class TestConfiguration {
 }

 async newTenant(appName = newid()): Promise<App> {
-this.defaultUserValues = this.populateDefaultUserValues()
+this.csrfToken = generator.hash()
+
 this.tenantId = structures.tenant.id()
 this.user = await this.globalUser()
-this.globalUserId = this.user._id
-this.userMetadataId = generateUserMetadataID(this.globalUserId)
+this.userMetadataId = generateUserMetadataID(this.user._id)
 return this.createApp(appName)
 }

@@ -533,7 +532,7 @@ export default class TestConfiguration {

 // API

-async generateApiKey(userId = this.defaultUserValues.globalUserId) {
+async generateApiKey(userId = this.user._id) {
 const db = tenancy.getTenantDB(this.getTenantId())
 const id = dbCore.generateDevInfoID(userId)
 let devInfo: any
@ -867,28 +866,6 @@ export default class TestConfiguration {
|
||||||
|
|
||||||
// QUERY
|
// QUERY
|
||||||
|
|
||||||
async previewQuery(
|
|
||||||
request: any,
|
|
||||||
config: any,
|
|
||||||
datasource: any,
|
|
||||||
fields: any,
|
|
||||||
params: any,
|
|
||||||
verb?: string
|
|
||||||
) {
|
|
||||||
return request
|
|
||||||
.post(`/api/queries/preview`)
|
|
||||||
.send({
|
|
||||||
datasourceId: datasource._id,
|
|
||||||
parameters: params || {},
|
|
||||||
fields,
|
|
||||||
queryVerb: verb || "read",
|
|
||||||
name: datasource.name,
|
|
||||||
})
|
|
||||||
.set(config.defaultHeaders())
|
|
||||||
.expect("Content-Type", /json/)
|
|
||||||
.expect(200)
|
|
||||||
}
|
|
||||||
|
|
||||||
async createQuery(config?: any) {
|
async createQuery(config?: any) {
|
||||||
if (!this.datasource && !config) {
|
if (!this.datasource && !config) {
|
||||||
throw "No datasource created for query."
|
throw "No datasource created for query."
|
||||||
|
|
|
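Note that `basicRoleHeaders` and `roleHeaders` now default to `generator.email()`, so each call mints a fresh user unless an email is passed explicitly. A hedged sketch of a call site under that assumption (`config` is a `TestConfiguration` instance as used elsewhere in these tests):

// Hedged sketch: pin the email to reuse one user across calls; by default each
// call now generates a fresh address (and therefore a fresh user).
const email = generator.email()
const basicHeaders = await config.roleHeaders({
  email,
  roleId: roles.BUILTIN_ROLE_IDS.BASIC,
})
const builderHeaders = await config.roleHeaders({ email, builder: true })
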
@@ -31,6 +31,19 @@ export class BackupAPI extends TestAPI {
     return result.body as CreateAppBackupResponse
   }

+  waitForBackupToComplete = async (appId: string, backupId: string) => {
+    for (let i = 0; i < 10; i++) {
+      await new Promise(resolve => setTimeout(resolve, 1000))
+      const result = await this.request
+        .get(`/api/apps/${appId}/backups/${backupId}/file`)
+        .set(this.config.defaultHeaders())
+      if (result.status === 200) {
+        return
+      }
+    }
+    throw new Error("Backup did not complete")
+  }
+
   importBackup = async (
     appId: string,
     backupId: string

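The new helper polls the backup file endpoint once a second for up to ten seconds, so tests no longer race the export job. A hedged usage sketch; the creating call's name and the `config.api.backup` accessor are assumed from the surrounding test utilities:

// Assumed usage in a test: create a backup, then block until it is downloadable.
const backup = await config.api.backup.createBackup(appId)
await config.api.backup.waitForBackupToComplete(appId, backup.backupId)
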
@@ -1,6 +1,7 @@
 import TestConfiguration from "../TestConfiguration"
 import {
   Query,
+  QueryPreview,
   type ExecuteQueryRequest,
   type ExecuteQueryResponse,
 } from "@budibase/types"

@@ -41,4 +42,19 @@ export class QueryAPI extends TestAPI {

     return res.body
   }

+  previewQuery = async (queryPreview: QueryPreview) => {
+    const res = await this.request
+      .post(`/api/queries/preview`)
+      .send(queryPreview)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+
+    if (res.status !== 200) {
+      throw new Error(JSON.stringify(res.body))
+    }
+
+    return res.body
+  }
 }

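This `previewQuery` replaces the loose helper deleted from TestConfiguration above: callers now build a typed `QueryPreview` instead of passing positional datasource/fields/params arguments. A hedged sketch of a migrated call site (field names taken from the `Query`/`QueryPreview` types in this diff; the values are illustrative):

// Hypothetical migrated test: the old previewQuery(request, config, datasource,
// fields, params, verb) call becomes a single typed payload.
const preview = await config.api.query.previewQuery({
  datasourceId: datasource._id!,
  name: datasource.name!,
  queryVerb: "read",
  fields: {},
  parameters: [],
  transformer: "return data",
  schema: {},
  readable: true,
})
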
@@ -366,7 +366,7 @@ export function basicDatasource(): { datasource: Datasource } {

 export function basicQuery(datasourceId: string): Query {
   return {
-    datasourceId: datasourceId,
+    datasourceId,
     name: "New Query",
     parameters: [],
     fields: {},

@@ -7,10 +7,10 @@ export interface QueryEvent {
   datasource: Datasource
   queryVerb: string
   fields: { [key: string]: any }
-  parameters: { [key: string]: any }
+  parameters: { [key: string]: unknown }
   pagination?: any
   transformer: any
-  queryId: string
+  queryId?: string
   environmentVariables?: Record<string, string>
   ctx?: any
   schema?: Record<string, QuerySchema | string>

@@ -7,7 +7,7 @@ import {
   QueryVariable,
   QueryResponse,
 } from "./definitions"
-import ScriptRunner from "../utilities/scriptRunner"
+import { IsolatedVM } from "../jsRunner/vm"
 import { getIntegration } from "../integrations"
 import { processStringSync } from "@budibase/string-templates"
 import { context, cache, auth } from "@budibase/backend-core"

@@ -26,7 +26,7 @@ class QueryRunner {
   fields: any
   parameters: any
   pagination: any
-  transformer: any
+  transformer: string
   cachedVariables: any[]
   ctx: any
   queryResponse: any

@@ -43,7 +43,7 @@ class QueryRunner {
     this.parameters = input.parameters
     this.pagination = input.pagination
     this.transformer = input.transformer
-    this.queryId = input.queryId
+    this.queryId = input.queryId!
     this.schema = input.schema
     this.noRecursiveQuery = flags.noRecursiveQuery
     this.cachedVariables = []

@@ -127,11 +127,17 @@ class QueryRunner {

     // transform as required
     if (transformer) {
-      const runner = new ScriptRunner(transformer, {
+      transformer = `(function(){\n${transformer}\n})();`
+      let vm = new IsolatedVM()
+      if (datasource.source === SourceName.MONGODB) {
+        vm = vm.withParsingBson(rows)
+      }
+
+      const ctx = {
         data: rows,
         params: enrichedParameters,
-      })
-      rows = runner.execute()
+      }
+      rows = vm.withContext(ctx, () => vm.execute(transformer))
     }

     // if the request fails we retry once, invalidating the cached value

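The transformer is now wrapped in an IIFE and run inside an isolated VM with the rows and parameters injected as context. A hedged sketch of what the `withContext`/`execute` contract implies; `FakeIsolatedVM` is a hypothetical stand-in built on Node's `vm` module to show the call shape only, not the real sandboxing in `../jsRunner/vm`:

// Hypothetical stand-in matching the VM interface further down in this diff.
import vm from "node:vm"

class FakeIsolatedVM {
  private context: Record<string, any> = {}

  withContext<T>(context: Record<string, any>, fn: () => T): T {
    // Make the context visible to execute() for the duration of fn().
    this.context = context
    try {
      return fn()
    } finally {
      this.context = {}
    }
  }

  execute(code: string): any {
    return vm.runInNewContext(code, { ...this.context }, { timeout: 1000 })
  }
}

// Usage mirrors QueryRunner above: wrap the user script so `return` is legal.
const fake = new FakeIsolatedVM()
const transformer = `(function(){\nreturn data.length\n})();`
const result = fake.withContext({ data: [1, 2, 3], params: {} }, () =>
  fake.execute(transformer)
) // result === 3
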
@@ -72,6 +72,9 @@ export class AttachmentCleanup {
         continue
       }
       rows.forEach(row => {
+        if (!Array.isArray(row[key])) {
+          return
+        }
         files = files.concat(
           row[key].map((attachment: any) => attachment.key)
         )

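The guard matters because deleted rows may never have had a value for an attachment column, and mapping over `undefined` would throw mid-cleanup. An illustrative sketch of the failure mode being fixed (values are hypothetical):

// Illustrative only: a row with no attachments used to blow up the cleanup pass.
const row: Record<string, any> = { attach: undefined }
// row["attach"].map(a => a.key) would throw: Cannot read properties of undefined
const keys: string[] = Array.isArray(row["attach"])
  ? row["attach"].map((a: any) => a.key)
  : [] // skipped safely, mirroring the early return above
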
@@ -103,6 +103,14 @@ describe("attachment cleanup", () => {
     expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
   })

+  it("should handle row deletion and not throw when attachments are undefined", async () => {
+    await AttachmentCleanup.rowDelete(table(), [
+      {
+        attach: undefined,
+      },
+    ])
+  })
+
   it("shouldn't cleanup attachments if row not updated", async () => {
     await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
     expect(mockedDeleteFiles).not.toBeCalled()

@@ -1,29 +0,0 @@
-import fetch from "node-fetch"
-import { VM, VMScript } from "vm2"
-
-const JS_TIMEOUT_MS = 1000
-
-class ScriptRunner {
-  vm: VM
-  results: { out: string }
-  script: VMScript
-
-  constructor(script: string, context: any) {
-    const code = `let fn = () => {\n${script}\n}; results.out = fn();`
-    this.vm = new VM({
-      timeout: JS_TIMEOUT_MS,
-    })
-    this.results = { out: "" }
-    this.vm.setGlobals(context)
-    this.vm.setGlobal("fetch", fetch)
-    this.vm.setGlobal("results", this.results)
-    this.script = new VMScript(code)
-  }
-
-  execute() {
-    this.vm.run(this.script)
-    return this.results.out
-  }
-}
-
-export default ScriptRunner

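This vm2-based runner is the component being retired; vm2 is deprecated and had repeated sandbox-escape CVEs, and the `.ivm.bundle.js` artifacts plus the vm2 removal from yarn.lock below suggest the replacement is layered on isolated-vm. A minimal hedged sketch of an equivalent on isolated-vm; the memory limit and transfer options are illustrative assumptions, not the real wrapper's configuration:

// Hedged sketch only: an isolated-vm based replacement for the deleted
// ScriptRunner. Assumes `isolated-vm` is installed; option values are guesses.
import ivm from "isolated-vm"

function runScript(script: string, context: Record<string, any>): any {
  const isolate = new ivm.Isolate({ memoryLimit: 64 })
  const vmContext = isolate.createContextSync()
  for (const [key, value] of Object.entries(context)) {
    // Copy each context value into the isolate; functions are not transferable.
    vmContext.global.setSync(key, new ivm.ExternalCopy(value).copyInto())
  }
  // Same wrapping trick as before, so a bare `return` works inside the script.
  return vmContext.evalSync(`(function(){\n${script}\n})();`, {
    timeout: 1000,
    copy: true,
  })
}
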
@@ -1,4 +1,4 @@
-const { atob } = require("../utilities")
+const { atob, isBackendService, isJSAllowed } = require("../utilities")
 const cloneDeep = require("lodash.clonedeep")
 const { LITERAL_MARKER } = require("../helpers/constants")
 const { getJsHelperList } = require("./list")

@@ -7,6 +7,9 @@ const { getJsHelperList } = require("./list")
 // This setter is used in the entrypoint (either index.js or index.mjs).
 let runJS
 module.exports.setJSRunner = runner => (runJS = runner)
+module.exports.removeJSRunner = () => {
+  runJS = undefined
+}

 let onErrorLog
 module.exports.setOnErrorLog = delegate => (onErrorLog = delegate)

@@ -39,7 +42,7 @@ const getContextValue = (path, context) => {

 // Evaluates JS code against a certain context
 module.exports.processJS = (handlebars, context) => {
-  if (process && process.env.NO_JS) {
+  if (!isJSAllowed() || (isBackendService() && !runJS)) {
     throw new Error("JS disabled in environment.")
   }
   try {

@@ -2,7 +2,7 @@ const vm = require("vm")
 const handlebars = require("handlebars")
 const { registerAll, registerMinimum } = require("./helpers/index")
 const processors = require("./processors")
-const { atob, btoa } = require("./utilities")
+const { atob, btoa, isBackendService } = require("./utilities")
 const manifest = require("../manifest.json")
 const {
   FIND_HBS_REGEX,

@@ -404,18 +404,25 @@ module.exports.JsErrorTimeout = errors.JsErrorTimeout

 module.exports.helpersToRemoveForJs = helpersToRemoveForJs

-if (process && !process.env.NO_JS) {
-  /**
-   * Use polyfilled vm to run JS scripts in a browser Env
-   */
-  javascript.setJSRunner((js, context) => {
-    context = {
-      ...context,
-      alert: undefined,
-      setInterval: undefined,
-      setTimeout: undefined,
-    }
-    vm.createContext(context)
-    return vm.runInNewContext(js, context, { timeout: 1000 })
-  })
+function defaultJSSetup() {
+  if (!isBackendService()) {
+    /**
+     * Use polyfilled vm to run JS scripts in a browser Env
+     */
+    javascript.setJSRunner((js, context) => {
+      context = {
+        ...context,
+        alert: undefined,
+        setInterval: undefined,
+        setTimeout: undefined,
+      }
+      vm.createContext(context)
+      return vm.runInNewContext(js, context, { timeout: 1000 })
+    })
+  } else {
+    javascript.removeJSRunner()
+  }
 }
+defaultJSSetup()
+
+module.exports.defaultJSSetup = defaultJSSetup

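With `defaultJSSetup()`, the browser vm polyfill is only registered outside backend services, so a server that wants JS bindings must register its own runner explicitly. A hedged sketch of that opt-in, assuming the `setJSRunner` export and the IsolatedVM wrapper seen earlier in this diff; the server's real bootstrap may wire this differently:

// Hypothetical backend bootstrap: register an isolated-vm backed runner,
// since defaultJSSetup() removes the runner when isBackendService() is true.
import { setJSRunner } from "@budibase/string-templates"
import { IsolatedVM } from "./jsRunner/vm"

setJSRunner((js: string, context: Record<string, any>) => {
  const vm = new IsolatedVM()
  return vm.withContext(context, () => vm.execute(js))
})
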
@@ -4,6 +4,14 @@ module.exports.FIND_HBS_REGEX = /{{([^{].*?)}}/g
 module.exports.FIND_ANY_HBS_REGEX = /{?{{([^{].*?)}}}?/g
 module.exports.FIND_TRIPLE_HBS_REGEX = /{{{([^{].*?)}}}/g

+module.exports.isBackendService = () => {
+  return typeof window === "undefined"
+}
+
+module.exports.isJSAllowed = () => {
+  return process && !process.env.NO_JS
+}
+
 // originally this could be done with a single regex using look behinds
 // but safari does not support this feature
 // original regex: /(?<!{){{[^{}]+}}(?!})/g

@@ -0,0 +1,27 @@
+jest.mock("../src/utilities", () => {
+  const utilities = jest.requireActual("../src/utilities")
+  return {
+    ...utilities,
+    isBackendService: jest.fn().mockReturnValue(true),
+  }
+})
+const { defaultJSSetup, processStringSync, encodeJSBinding } = require("../src")
+const { isBackendService } = require("../src/utilities")
+const mockedBackendService = jest.mocked(isBackendService)
+
+const binding = encodeJSBinding("return 1")
+describe("confirm VM is available when expected and when not", () => {
+  it("shouldn't have JS available in a backend service by default", () => {
+    defaultJSSetup()
+    const result = processStringSync(binding, {})
+    // shouldn't process at all
+    expect(result).toBe(binding)
+  })
+
+  it("should have JS available in frontend environments", () => {
+    mockedBackendService.mockReturnValue(false)
+    defaultJSSetup()
+    const result = processStringSync(binding, {})
+    expect(result).toBe(1)
+  })
+})

@@ -19,7 +19,7 @@ export interface Query extends Document {
 }

 export interface QueryPreview extends Omit<Query, "_id"> {
-  queryId: string
+  queryId?: string
 }

 export interface QueryParameter {

@@ -1,3 +1,4 @@
 export interface VM {
   execute(code: string): any
+  withContext<T>(context: Record<string, any>, executeWithContext: () => T): T
 }

@@ -49,7 +49,6 @@ function runBuild(entry, outfile) {
     preserveSymlinks: true,
     loader: {
       ".svelte": "copy",
-      ".ivm.bundle.js": "text",
     },
     metafile: true,
     external: [

@@ -70,7 +69,7 @@ function runBuild(entry, outfile) {
     platform: "node",
     outfile,
   }).then(result => {
-    glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => {
+    glob(`${process.cwd()}/src/**/*.{hbs,ivm.bundle.js}`, {}, (err, files) => {
       for (const file of files) {
         fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
       }

@@ -1,8 +0,0 @@
-#!/bin/bash
-apt-get install -y gnupg
-curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null
-echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
-apt-get update
-echo "INSTALLING NODE $NODE_MAJOR"
-apt-get install -y --no-install-recommends nodejs
-npm install --global yarn pm2

yarn.lock (12 changed lines)

@@ -6463,7 +6463,7 @@ acorn@^7.1.1:
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
   integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==

-acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0:
+acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0:
   version "8.11.3"
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
   integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==

@@ -10763,7 +10763,7 @@ fetch-cookie@0.11.0:
   dependencies:
     tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"

-fflate@^0.4.1, fflate@^0.4.8:
+fflate@^0.4.1:
   version "0.4.8"
   resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
   integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==

@@ -21633,14 +21633,6 @@ vlq@^0.2.2:
   resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
   integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==

-vm2@^3.9.19:
-  version "3.9.19"
-  resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
-  integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
-  dependencies:
-    acorn "^8.7.0"
-    acorn-walk "^8.2.0"
-
 vuvuzela@1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/vuvuzela/-/vuvuzela-1.0.3.tgz#3be145e58271c73ca55279dd851f12a682114b0b"