Merge branch 'master' of github.com:budibase/budibase into budi-7664-sqs-self-host-ui-for-detecting-lack-of-sqs-support-2

Sam Rose 2024-02-21 11:07:30 +00:00
commit bb793149f4
60 changed files with 1040 additions and 632 deletions

View File

@ -1,4 +1,101 @@
FROM couchdb:3.2.1
# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
#
# Everything in this `base` image is adapted from the official `couchdb` image's
# Dockerfile. Only modifications related to upgrading from Debian bullseye to
# bookworm have been included. The `runner` image contains Budibase's
# customisations to the image, e.g. adding Clouseau.
FROM node:20-slim AS base
# Add CouchDB user account to make sure the IDs are assigned consistently
RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb
# be sure GPG and apt-transport-https are available and functional
RUN set -ex; \
apt-get update; \
apt-get install -y --no-install-recommends \
apt-transport-https \
ca-certificates \
dirmngr \
gnupg \
; \
rm -rf /var/lib/apt/lists/*
# grab tini for signal handling and zombie reaping
# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
RUN set -eux; \
apt-get update; \
apt-get install -y --no-install-recommends tini; \
rm -rf /var/lib/apt/lists/*; \
tini --version
# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
ENV GPG_COUCH_KEY \
# gpg: rsa8192 2015-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
390EF70BB1EA12B2773962950EE62FB37A00258D
RUN set -eux; \
apt-get update; \
apt-get install -y curl; \
export GNUPGHOME="$(mktemp -d)"; \
curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
gpg --batch --import keys.asc; \
gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
command -v gpgconf && gpgconf --kill all || :; \
rm -rf "$GNUPGHOME"; \
apt-key list; \
apt purge -y --autoremove curl; \
rm -rf /var/lib/apt/lists/*
ENV COUCHDB_VERSION 3.3.3
RUN . /etc/os-release; \
echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
tee /etc/apt/sources.list.d/couchdb.list >/dev/null
# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
RUN set -eux; \
apt-get update; \
\
echo "couchdb couchdb/mode select none" | debconf-set-selections; \
# we DO want recommends this time
DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
couchdb="$COUCHDB_VERSION"~bookworm \
; \
# Undo symlinks to /var/log and /var/lib
rmdir /var/lib/couchdb /var/log/couchdb; \
rm /opt/couchdb/data /opt/couchdb/var/log; \
mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
# Remove file that sets logging to a file
rm /opt/couchdb/etc/default.d/10-filelog.ini; \
# Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
# Setup directories and permissions for config. Technically these could be 555 and 444 respectively
# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
# only local.d needs to be writable for the docker_entrypoint.sh
chmod -f 0777 /opt/couchdb/etc/local.d; \
# apt clean-up
rm -rf /var/lib/apt/lists/*;
# Add configuration
COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/
COPY docker-entrypoint.sh /usr/local/bin
RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
VOLUME /opt/couchdb/data
# 5984: Main CouchDB endpoint
# 4369: Erlang portmap daemon (epmd)
# 9100: CouchDB cluster communication port
EXPOSE 5984 4369 9100
CMD ["/opt/couchdb/bin/couchdb"]
FROM base as runner
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
@ -6,9 +103,9 @@ EXPOSE 5984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
rm -rf /var/lib/apt/lists/

View File

@ -4,7 +4,7 @@
name=clouseau@127.0.0.1
; set this to the same distributed Erlang cookie used by the CouchDB nodes
cookie=monster
cookie=COUCHDB_ERLANG_COOKIE
; the path where you would like to store the search index files
dir=DATA_DIR/search

View File

@ -0,0 +1,8 @@
; CouchDB Configuration Settings
; Custom settings should be made in this file. They will override settings
; in default.ini, but unlike changes made to default.ini, this file won't be
; overwritten on server upgrade.
[chttpd]
bind_address = any

View File

@ -12,7 +12,7 @@
# erlang cookie for clouseau security
-name couchdb@127.0.0.1
-setcookie monster
-setcookie COUCHDB_ERLANG_COOKIE
# Ensure that the Erlang VM listens on a known port
-kernel inet_dist_listen_min 9100

View File

@ -0,0 +1,122 @@
#!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
set -e
# first arg is `-something` or `+something`
if [ "${1#-}" != "$1" ] || [ "${1#+}" != "$1" ]; then
set -- /opt/couchdb/bin/couchdb "$@"
fi
# first arg is the bare word `couchdb`
if [ "$1" = 'couchdb' ]; then
shift
set -- /opt/couchdb/bin/couchdb "$@"
fi
if [ "$1" = '/opt/couchdb/bin/couchdb' ]; then
# this is where runtime configuration changes will be written.
# we need to explicitly touch it here in case /opt/couchdb/etc has
# been mounted as an external volume, in which case it won't exist.
# If running as the couchdb user (i.e. container starts as root),
# write permissions will be granted below.
touch /opt/couchdb/etc/local.d/docker.ini
# if user is root, assume running under the couchdb user (default)
# and ensure it is able to access files and directories that may be mounted externally
if [ "$(id -u)" = '0' ]; then
# Check that we own everything in /opt/couchdb and fix if necessary. We also
# add the `-f` flag in all the following invocations because there may be
# cases where some of these ownership and permissions issues are non-fatal
# (e.g. a config file owned by root with o+r is actually fine), and we don't
# want to be too aggressive about crashing here ...
find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +
# Ensure that data files have the correct permissions. We were previously
# preventing any access to these files outside of couchdb:couchdb, but it
# turns out that CouchDB itself does not set such restrictive permissions
# when it creates the files. The approach taken here ensures that the
# contents of the datadir have the same permissions as they had when they
# were initially created. This should minimize any startup delay.
find /opt/couchdb/data -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
find /opt/couchdb/data -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
# Do the same thing for configuration files and directories. Technically
# CouchDB only needs read access to the configuration files as all online
# changes will be applied to the "docker.ini" file below, but we set 644
# for the sake of consistency.
find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
fi
if [ ! -z "$NODENAME" ] && ! grep "couchdb@" /opt/couchdb/etc/vm.args; then
echo "-name couchdb@$NODENAME" >> /opt/couchdb/etc/vm.args
fi
if [ "$COUCHDB_USER" ] && [ "$COUCHDB_PASSWORD" ]; then
# Create admin only if not already present
if ! grep -Pzoqr "\[admins\]\n$COUCHDB_USER =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
printf "\n[admins]\n%s = %s\n" "$COUCHDB_USER" "$COUCHDB_PASSWORD" >> /opt/couchdb/etc/local.d/docker.ini
fi
fi
if [ "$COUCHDB_SECRET" ]; then
# Set secret only if not already present
if ! grep -Pzoqr "\[chttpd_auth\]\nsecret =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
printf "\n[chttpd_auth]\nsecret = %s\n" "$COUCHDB_SECRET" >> /opt/couchdb/etc/local.d/docker.ini
fi
fi
if [ "$COUCHDB_ERLANG_COOKIE" ]; then
cookieFile='/opt/couchdb/.erlang.cookie'
if [ -e "$cookieFile" ]; then
if [ "$(cat "$cookieFile" 2>/dev/null)" != "$COUCHDB_ERLANG_COOKIE" ]; then
echo >&2
echo >&2 "warning: $cookieFile contents do not match COUCHDB_ERLANG_COOKIE"
echo >&2
fi
else
echo "$COUCHDB_ERLANG_COOKIE" > "$cookieFile"
fi
chown couchdb:couchdb "$cookieFile"
chmod 600 "$cookieFile"
fi
if [ "$(id -u)" = '0' ]; then
chown -f couchdb:couchdb /opt/couchdb/etc/local.d/docker.ini || true
fi
# if we don't find an [admins] section followed by a non-comment, display a warning
if ! grep -Pzoqr '\[admins\]\n[^;]\w+' /opt/couchdb/etc/default.d/*.ini /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
# The - option suppresses leading tabs but *not* spaces. :)
cat >&2 <<-'EOWARN'
*************************************************************
ERROR: CouchDB 3.0+ will no longer run in "Admin Party"
mode. You *MUST* specify an admin user and
password, either via your own .ini file mapped
into the container at /opt/couchdb/etc/local.ini
or inside /opt/couchdb/etc/local.d, or with
"-e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password"
to set it via "docker run".
*************************************************************
EOWARN
exit 1
fi
if [ "$(id -u)" = '0' ]; then
export HOME=$(echo ~couchdb)
exec setpriv --reuid=couchdb --regid=couchdb --clear-groups "$@"
fi
fi
exec "$@"

View File

@ -1,6 +1,7 @@
#!/bin/bash
DATA_DIR=${DATA_DIR:-/data}
COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}
mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
@ -60,6 +61,9 @@ else
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini
# Start Clouseau. Budibase won't function correctly without Clouseau running, it
# powers the search API endpoints which are used to do all sorts, including
# populating app grids.

View File

@ -3,7 +3,6 @@ FROM node:20-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
# copy and install dependencies
WORKDIR /app
COPY package.json .
@ -39,10 +38,9 @@ COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
FROM budibase/couchdb:v3.3.3 as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 20
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -60,10 +58,8 @@ RUN apt install -y software-properties-common apt-transport-https ca-certificate
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# We use pm2 in order to run multiple node processes in a single container
RUN npm install --global pm2
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx

View File

@ -97,10 +97,12 @@ fi
sleep 10
pushd app
pm2 start -l /dev/stdout --name app "yarn run:docker"
pm2 start --name app "yarn run:docker"
popd
pushd worker
pm2 start -l /dev/stdout --name worker "yarn run:docker"
pm2 start --name worker "yarn run:docker"
popd
echo "end of runner.sh, sleeping ..."
tail -f $HOME/.pm2/logs/*.log
sleep infinity

View File

@ -1,5 +1,5 @@
{
"version": "2.19.5",
"version": "2.20.5",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit 8c446c4ba385592127fa31755d3b64467b291882
Subproject commit 4384bc742ca22fb1e9bf91843e65ae929daf17e2

View File

@ -3,6 +3,7 @@ import {
Event,
Datasource,
Query,
QueryPreview,
QueryCreatedEvent,
QueryUpdatedEvent,
QueryDeletedEvent,
@ -68,9 +69,9 @@ const run = async (count: number, timestamp?: string | number) => {
await publishEvent(Event.QUERIES_RUN, properties, timestamp)
}
const previewed = async (datasource: Datasource, query: Query) => {
const previewed = async (datasource: Datasource, query: QueryPreview) => {
const properties: QueryPreviewedEvent = {
queryId: query._id,
queryId: query.queryId,
datasourceId: datasource._id as string,
source: datasource.source,
queryVerb: query.queryVerb,

View File

@ -127,10 +127,14 @@
}
})
$: jsonArrays = bindings
.filter(x => x.fieldSchema?.type === "jsonarray")
.filter(
x =>
x.fieldSchema?.type === "jsonarray" ||
(x.fieldSchema?.type === "json" && x.fieldSchema?.subtype === "array")
)
.map(binding => {
const { providerId, readableBinding, runtimeBinding, tableId } = binding
const { name, type, prefixKeys } = binding.fieldSchema
const { name, type, prefixKeys, subtype } = binding.fieldSchema
return {
providerId,
label: readableBinding,
@ -138,7 +142,8 @@
fieldType: type,
tableId,
prefixKeys,
type: "jsonarray",
type: type === "jsonarray" ? "jsonarray" : "queryarray",
subtype,
value: `{{ literal ${runtimeBinding} }}`,
}
})

View File

@ -1,15 +1,25 @@
<script>
import EditComponentPopover from "../EditComponentPopover.svelte"
import { FieldTypeToComponentMap } from "../FieldConfiguration/utils"
import { Toggle, Icon } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import { cloneDeep } from "lodash/fp"
import { componentStore } from "stores/builder"
import { FIELDS } from "constants/backend"
export let item
export let anchor
const dispatch = createEventDispatcher()
$: fieldIconLookupMap = buildFieldIconLookupMap(FIELDS)
const buildFieldIconLookupMap = fields => {
let map = {}
Object.values(fields).forEach(fieldInfo => {
map[fieldInfo.type] = fieldInfo.icon
})
return map
}
const onToggle = item => {
return e => {
item.active = e.detail
@ -24,13 +34,6 @@
return { ...setting, nested: true }
})
}
const getIcon = () => {
const component = `@budibase/standard-components/${
FieldTypeToComponentMap[item.columnType]
}`
return componentStore.getDefinition(component).icon
}
</script>
<div class="list-item-body">
@ -42,7 +45,7 @@
on:change
>
<div slot="header" class="type-icon">
<Icon name={getIcon()} />
<Icon name={fieldIconLookupMap[item.columnType]} />
<span>{item.field}</span>
</div>
</EditComponentPopover>

View File

@ -85,6 +85,16 @@
activity = newActivity
dispatch("change", fields)
}
function isJsonArray(value) {
if (!value || typeof value === "string") {
return false
}
if (value.type === "array") {
return true
}
return value.type === "json" && value.subtype === "array"
}
</script>
<!-- Builds Objects with Key Value Pairs. Useful for building things like Request Headers. -->
@ -112,7 +122,9 @@
bind:value={field.name}
on:blur={changed}
/>
{#if options}
{#if isJsonArray(field.value)}
<Select readonly={true} value="Array" options={["Array"]} />
{:else if options}
<Select
bind:value={field.value}
{compare}
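A minimal sketch of how the new isJsonArray guard behaves on representative field values; the sample inputs below are illustrative, not taken from the commit:

// string values are raw user input, never rendered as the readonly Array select
isJsonArray("{{ literal [binding] }}") // false
// a field schema marked as a plain array
isJsonArray({ type: "array" }) // true
// the new case: a JSON field whose subtype marks it as an array
isJsonArray({ type: "json", subtype: "array" }) // true
isJsonArray({ type: "json" }) // false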

View File

@ -40,6 +40,7 @@
let schemaType
let autoSchema = {}
let nestedSchemaFields = {}
let rows = []
let keys = {}
@ -83,13 +84,14 @@
return
}
nestedSchemaFields = response.nestedSchemaFields
if (Object.keys(newQuery.schema).length === 0) {
// Assign this to a variable instead of directly to the newQuery.schema so that a user
// can change the table they're querying and have the schema update until they first
// edit it
autoSchema = response.schema
}
rows = response.rows
notifications.success("Query executed successfully")
@ -120,6 +122,7 @@
Object.keys(newQuery.schema).length === 0
? autoSchema
: newQuery.schema,
nestedSchemaFields,
})
notifications.success("Query saved successfully")

View File

@ -5,7 +5,6 @@
Label,
Input,
Select,
Divider,
Layout,
Icon,
Button,
@ -124,7 +123,6 @@
{#each query.fields.steps ?? [] as step, index}
<div class="block">
<div class="subblock">
<Divider noMargin />
<div class="blockSection">
<div class="block-options">
Stage {index + 1}

View File

@ -310,6 +310,7 @@ export const BannedSearchTypes = [
"formula", "formula",
"json", "json",
"jsonarray", "jsonarray",
"queryarray",
] ]
export const DatasourceTypes = { export const DatasourceTypes = {

View File

@ -425,7 +425,7 @@ const generateComponentContextBindings = (asset, componentContext) => {
table = info.table
// Determine what to prefix bindings with
if (datasource.type === "jsonarray") {
if (datasource.type === "jsonarray" || datasource.type === "queryarray") {
// For JSON arrays, use the array name as the readable prefix
const split = datasource.label.split(".")
readablePrefix = split[split.length - 1]
@ -904,6 +904,19 @@ export const getSchemaForDatasource = (asset, datasource, options) => {
schema = JSONUtils.getJSONArrayDatasourceSchema(tableSchema, datasource)
}
// "queryarray" datasources are arrays inside JSON responses
else if (type === "queryarray") {
const queries = get(queriesStores).list
table = queries.find(query => query._id === datasource.tableId)
let tableSchema = table?.schema
let nestedSchemaFields = table?.nestedSchemaFields
schema = JSONUtils.generateQueryArraySchemas(
tableSchema,
nestedSchemaFields
)
schema = JSONUtils.getJSONArrayDatasourceSchema(schema, datasource)
}
// Otherwise we assume we're targeting an internal table or a plus
// datasource, and we can treat it as a table with a schema
else {

View File

@ -84,7 +84,7 @@
// Fetches the form schema from this form's dataSource
const fetchSchema = async dataSource => {
if (dataSource?.tableId && dataSource?.type !== "query") {
if (dataSource?.tableId && !dataSource?.type?.startsWith("query")) {
try {
table = await API.fetchTableDefinition(dataSource.tableId)
} catch (error) {

View File

@ -7,6 +7,7 @@ import NestedProviderFetch from "@budibase/frontend-core/src/fetch/NestedProvide
import FieldFetch from "@budibase/frontend-core/src/fetch/FieldFetch.js"
import JSONArrayFetch from "@budibase/frontend-core/src/fetch/JSONArrayFetch.js"
import ViewV2Fetch from "@budibase/frontend-core/src/fetch/ViewV2Fetch.js"
import QueryArrayFetch from "@budibase/frontend-core/src/fetch/QueryArrayFetch"
/**
* Fetches the schema of any kind of datasource.
@ -28,6 +29,7 @@ export const fetchDatasourceSchema = async (
provider: NestedProviderFetch,
field: FieldFetch,
jsonarray: JSONArrayFetch,
queryarray: QueryArrayFetch,
}[datasource?.type]
if (!handler) {
return null
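As a hedged usage sketch of the handler map above (the datasource literal is made up):

// a query-backed nested array, as emitted by the builder's binding code
const datasource = { type: "queryarray", tableId: "query_abc123" }
// the lookup by datasource?.type now resolves QueryArrayFetch,
// so the schema fetch no longer falls through and returns null
const schema = await fetchDatasourceSchema(datasource)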

View File

@ -0,0 +1,25 @@
import FieldFetch from "./FieldFetch.js"
import {
getJSONArrayDatasourceSchema,
generateQueryArraySchemas,
} from "../utils/json"
export default class QueryArrayFetch extends FieldFetch {
async getDefinition(datasource) {
if (!datasource?.tableId) {
return null
}
// JSON arrays need their table definitions fetched.
// We can then extract their schema as a subset of the table schema.
try {
const table = await this.API.fetchQueryDefinition(datasource.tableId)
const schema = generateQueryArraySchemas(
table?.schema,
table?.nestedSchemaFields
)
return { schema: getJSONArrayDatasourceSchema(schema, datasource) }
} catch (error) {
return null
}
}
}

View File

@ -9,6 +9,7 @@ import JSONArrayFetch from "./JSONArrayFetch.js"
import UserFetch from "./UserFetch.js"
import GroupUserFetch from "./GroupUserFetch.js"
import CustomFetch from "./CustomFetch.js"
import QueryArrayFetch from "./QueryArrayFetch.js"
const DataFetchMap = {
table: TableFetch,
@ -24,6 +25,7 @@ const DataFetchMap = {
provider: NestedProviderFetch,
field: FieldFetch,
jsonarray: JSONArrayFetch,
queryarray: QueryArrayFetch,
}
// Constructs a new fetch model for a certain datasource

View File

@ -1,3 +1,5 @@
import { utils } from "@budibase/shared-core"
/**
* Gets the schema for a datasource which is targeting a JSON array, including
* nested JSON arrays. The returned schema is a squashed, table-like schema
@ -119,3 +121,33 @@ const extractJSONSchemaKeys = (jsonSchema, squashObjects = false) => {
})
return keys
}
export const generateQueryArraySchemas = (schema, nestedSchemaFields) => {
for (let key in schema) {
if (
schema[key]?.type === "json" &&
schema[key]?.subtype === "array" &&
utils.hasSchema(nestedSchemaFields[key])
) {
schema[key] = {
schema: {
schema: Object.entries(nestedSchemaFields[key] || {}).reduce(
(acc, [nestedKey, fieldSchema]) => {
acc[nestedKey] = {
name: nestedKey,
type: fieldSchema.type,
subtype: fieldSchema.subtype,
}
return acc
},
{}
),
type: "json",
},
type: "json",
subtype: "array",
}
}
}
return schema
}
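A worked example of generateQueryArraySchemas with made-up field names, assuming utils.hasSchema accepts the sampled nested fields; note the doubly nested schema wrapper that QueryArrayFetch later unwraps via getJSONArrayDatasourceSchema:

const schema = { tags: { type: "json", subtype: "array" } }
const nestedSchemaFields = {
tags: { label: { type: "string" }, count: { type: "number" } },
}
const result = generateQueryArraySchemas(schema, nestedSchemaFields)
// result.tags.schema.schema.label -> { name: "label", type: "string", subtype: undefined }
// result.tags.schema.schema.count -> { name: "count", type: "number", subtype: undefined }
// result.tags.type -> "json", result.tags.subtype -> "array"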

@ -1 +1 @@
Subproject commit 336bf2184cf632fdc2bffbad5628e8b15dd381bd
Subproject commit 60e47a8249fd6291a6bc20fe3fe6776b11938fa1

View File

@ -13,8 +13,8 @@
"build": "node ./scripts/build.js", "build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/", "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null", "check:types": "tsc -p tsconfig.json --noEmit --paths null",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=esm --external:handlebars", "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=esm", "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson", "build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js", "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
@ -114,7 +114,6 @@
"undici-types": "^6.0.1", "undici-types": "^6.0.1",
"uuid": "^8.3.2", "uuid": "^8.3.2",
"validate.js": "0.13.1", "validate.js": "0.13.1",
"vm2": "^3.9.19",
"worker-farm": "1.7.0", "worker-farm": "1.7.0",
"xml2js": "0.5.0" "xml2js": "0.5.0"
}, },
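The format switch from esm to iife (plus --global-name) matters because these bundles are evaluated inside isolated-vm as plain scripts, where ESM export syntax is unavailable; an iife assigns its exports to a named global instead. A minimal sketch, with a stub bundle standing in for the real esbuild output:

import ivm from "isolated-vm"

const isolate = new ivm.Isolate()
const context = isolate.createContextSync()
// stand-in for a bundle built with --format=iife --global-name=helpers
const bundle = `var helpers = (function () { return { default: { ping: () => "pong" } } })()`
// mirrors the runner's later "helpers = helpers.default" unwrapping
const script = isolate.compileScriptSync(`${bundle}; helpers = helpers.default`)
script.runSync(context)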

View File

@ -1,5 +1,4 @@
import { generateQueryID } from "../../../db/utils"
import { BaseQueryVerbs } from "../../../constants"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
@ -7,36 +6,27 @@ import { invalidateDynamicVariables } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { QueryEvent, QueryResponse } from "../../../threads/definitions"
import { QueryEvent } from "../../../threads/definitions"
import {
ConfigType,
Query,
UserCtx,
SessionCookie,
JsonFieldSubType,
QueryResponse,
QueryPreview,
QuerySchema,
FieldType,
type ExecuteQueryRequest,
type ExecuteQueryResponse,
type Row,
} from "@budibase/types"
import { ValidQueryNameRegex } from "@budibase/shared-core"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT,
})
// simple function to append "readable" to all read queries
function enrichQueries(input: any) {
const wasArray = Array.isArray(input)
const queries = wasArray ? input : [input]
for (let query of queries) {
if (query.queryVerb === BaseQueryVerbs.READ) {
query.readable = true
}
}
return wasArray ? queries : queries[0]
}
export async function fetch(ctx: UserCtx) {
ctx.body = await sdk.queries.fetch()
}
@ -84,7 +74,7 @@ export { _import as import }
export async function save(ctx: UserCtx) {
const db = context.getAppDB()
const query = ctx.request.body
const query: Query = ctx.request.body
// Validate query name
if (!query?.name.match(ValidQueryNameRegex)) {
@ -100,7 +90,6 @@ export async function save(ctx: UserCtx) {
} else {
eventFn = () => events.query.updated(datasource, query)
}
const response = await db.put(query)
await eventFn()
query._rev = response.rev
@ -133,7 +122,7 @@ export async function preview(ctx: UserCtx) {
const { datasource, envVars } = await sdk.datasources.getWithEnvVars(
ctx.request.body.datasourceId
)
const query = ctx.request.body
const query: QueryPreview = ctx.request.body
// preview may not have a queryId as it hasn't been saved, but if it does
// this stops dynamic variables from calling the same query
const { fields, parameters, queryVerb, transformer, queryId, schema } = query
@ -153,6 +142,69 @@ export async function preview(ctx: UserCtx) {
const authConfigCtx: any = getAuthConfig(ctx)
function getSchemaFields(
rows: any[],
keys: string[]
): {
previewSchema: Record<string, string | QuerySchema>
nestedSchemaFields: {
[key: string]: Record<string, string | QuerySchema>
}
} {
const previewSchema: Record<string, string | QuerySchema> = {}
const nestedSchemaFields: {
[key: string]: Record<string, string | QuerySchema>
} = {}
const makeQuerySchema = (
type: FieldType,
name: string,
subtype?: string
): QuerySchema => ({
type,
name,
subtype,
})
if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) {
const field = rows[0][key]
let type = typeof field,
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
if (field)
switch (type) {
case "boolean":
fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
break
case "object":
if (field instanceof Date) {
fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
} else if (Array.isArray(field)) {
if (JsonUtils.hasSchema(field[0])) {
fieldMetadata = makeQuerySchema(
FieldType.JSON,
key,
JsonFieldSubType.ARRAY
)
} else {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
}
nestedSchemaFields[key] = getSchemaFields(
field,
Object.keys(field[0])
).previewSchema
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}
break
case "number":
fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
break
}
previewSchema[key] = fieldMetadata
}
}
return { previewSchema, nestedSchemaFields }
}
try {
const inputs: QueryEvent = {
appId: ctx.appId,
@ -171,38 +223,11 @@ export async function preview(ctx: UserCtx) {
},
}
const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
const previewSchema: Record<string, QuerySchema> = {}
const makeQuerySchema = (type: FieldType, name: string): QuerySchema => ({
type,
name,
})
if (rows?.length > 0) {
for (let key of [...new Set(keys)] as string[]) {
const field = rows[0][key]
let type = typeof field,
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
if (field)
switch (type) {
case "boolean":
fieldMetadata = makeQuerySchema(FieldType.BOOLEAN, key)
break
case "object":
if (field instanceof Date) {
fieldMetadata = makeQuerySchema(FieldType.DATETIME, key)
} else if (Array.isArray(field)) {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}
break
case "number":
fieldMetadata = makeQuerySchema(FieldType.NUMBER, key)
break
}
previewSchema[key] = fieldMetadata
}
}
const { rows, keys, info, extra } = (await Runner.run(
inputs
)) as QueryResponse
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
// if existing schema, update to include any previous schema keys
if (existingSchema) {
for (let key of Object.keys(previewSchema)) {
@ -216,6 +241,7 @@ export async function preview(ctx: UserCtx) {
await events.query.previewed(datasource, query)
ctx.body = {
rows,
nestedSchemaFields,
schema: previewSchema,
info,
extra,
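A worked example of getSchemaFields on made-up preview rows, assuming JsonUtils.hasSchema reports true for the object elements of tags:

const rows = [
{ name: "a", active: true, tags: [{ label: "x" }], scores: [1, 2] },
]
const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, [
"name",
"active",
"tags",
"scores",
])
// previewSchema.name -> { type: FieldType.STRING, name: "name", subtype: undefined }
// previewSchema.active -> { type: FieldType.BOOLEAN, name: "active", subtype: undefined }
// previewSchema.tags -> { type: FieldType.JSON, name: "tags", subtype: JsonFieldSubType.ARRAY }
// previewSchema.scores -> { type: FieldType.ARRAY, name: "scores", subtype: undefined }
// nestedSchemaFields.tags holds the schema sampled from tags[0]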

View File

@ -1,10 +1,13 @@
import ScriptRunner from "../../utilities/scriptRunner"
import { Ctx } from "@budibase/types"
import { IsolatedVM } from "../../jsRunner/vm"
export async function execute(ctx: Ctx) {
const { script, context } = ctx.request.body
const runner = new ScriptRunner(script, context)
ctx.body = runner.execute()
const vm = new IsolatedVM()
const result = vm.withContext(context, () =>
vm.execute(`(function(){\n${script}\n})();`)
)
ctx.body = result
}
export async function save(ctx: Ctx) {
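A minimal sketch of the withContext contract relied on above (its implementation lands later in this commit): context keys are injected into the isolate's globals only for the duration of the callback and removed afterwards. The values are illustrative:

const vm = new IsolatedVM()
const result = vm.withContext({ input: { a: 1, b: 2 } }, () =>
// inside the isolate, `input` resolves to the injected global
vm.execute(`(function(){\nreturn input.a + input.b\n})();`)
)
// result === 3, and `input` is gone from the isolate afterwards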

View File

@ -8,7 +8,6 @@ import { mocks } from "@budibase/backend-core/tests"
mocks.licenses.useBackups()
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
@ -59,10 +58,8 @@ describe("/backups", () => {
await config.createScreen()
const exportRes = await config.api.backup.createBackup(appId)
expect(exportRes.backupId).toBeDefined()
const importRes = await config.api.backup.importBackup(
appId,
exportRes.backupId
)
await config.api.backup.waitForBackupToComplete(appId, exportRes.backupId)
await config.api.backup.importBackup(appId, exportRes.backupId)
})
})
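A hedged sketch of what the new waitForBackupToComplete helper can look like; its real implementation is not shown in this diff, so the status helper and endpoint shape below are assumptions for illustration only:

// hypothetical polling loop against an assumed backup-status helper
async function waitForBackupToComplete(appId: string, backupId: string) {
for (let attempt = 0; attempt < 10; attempt++) {
await new Promise(resolve => setTimeout(resolve, 1000))
const status = await fetchBackupStatus(appId, backupId) // assumed helper
if (status === "complete") {
return
}
}
throw new Error(`Backup ${backupId} did not complete in time`)
}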

View File

@ -2135,5 +2135,48 @@ describe.each([
}
)
})
it("should not carry over context between formulas", async () => {
const js = Buffer.from(`return $("[text]");`).toString("base64")
const table = await config.createTable({
name: "table",
type: "table",
schema: {
text: {
name: "text",
type: FieldType.STRING,
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: `{{ js "${js}"}}`,
formulaType: FormulaType.DYNAMIC,
},
},
})
for (let i = 0; i < 10; i++) {
await config.api.row.save(table._id!, { text: `foo${i}` })
}
const { rows } = await config.api.row.search(table._id!)
expect(rows).toHaveLength(10)
const formulaValues = rows.map(r => r.formula)
expect(formulaValues).toEqual(
expect.arrayContaining([
"foo0",
"foo1",
"foo2",
"foo3",
"foo4",
"foo5",
"foo6",
"foo7",
"foo8",
"foo9",
])
)
})
})
})

View File

@ -368,10 +368,12 @@ describe("/tables", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
const fetchedTable = res.body[0]
expect(fetchedTable.name).toEqual(testTable.name)
expect(fetchedTable.type).toEqual("table")
expect(fetchedTable.sourceType).toEqual("internal")
const table = res.body.find((t: Table) => t._id === testTable._id)
expect(table).toBeDefined()
expect(table.name).toEqual(testTable.name)
expect(table.type).toEqual("table")
expect(table.sourceType).toEqual("internal")
})
it("should apply authorization to endpoint", async () => {

View File

@ -26,6 +26,7 @@ async function start() {
start().catch(err => {
console.error(`Failed server startup - ${err.message}`)
throw err
})
export function getServer() {

View File

@ -23,7 +23,7 @@ const DEFAULTS = {
AUTOMATION_THREAD_TIMEOUT: 12000,
AUTOMATION_SYNC_TIMEOUT: 120000,
AUTOMATION_MAX_ITERATIONS: 200,
JS_PER_EXECUTION_TIME_LIMIT_MS: 1000,
JS_PER_EXECUTION_TIME_LIMIT_MS: 1500,
TEMPLATE_REPOSITORY: "app",
PLUGINS_DIR: "/plugins",
FORKED_PROCESS_NAME: "main",
@ -113,6 +113,7 @@ const environment = {
process.env[key] = value
// @ts-ignore
environment[key] = value
cleanVariables()
},
isTest: coreEnv.isTest,
isJest: coreEnv.isJest,
@ -128,6 +129,7 @@
},
}
function cleanVariables() {
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable
@ -141,5 +143,8 @@
environment[key] = 0
}
}
}
cleanVariables()
export default environment
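A short illustration of why _set now re-runs cleanVariables, based on the "0" edge case handled above; the variable name is made up:

// "0" is the convention for disabling a variable, so after
environment._set("SOME_FEATURE_FLAG", "0")
// the cleanup pass rewrites the stored value to the number 0,
// which reads as falsy wherever the flag is checked:
// environment.SOME_FEATURE_FLAG === 0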

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,2 @@
import { EJSON } from "bson" export const deserialize = require("bson").deserialize
export const toJson = require("bson").EJSON.deserialize
export { deserialize } from "bson"
export const toJson = EJSON.deserialize

File diff suppressed because one or more lines are too long

View File

@ -2,9 +2,8 @@ const {
getJsHelperList,
} = require("../../../../string-templates/src/helpers/list.js")
const helpers = getJsHelperList()
export default {
...helpers,
...getJsHelperList(),
// pointing stripProtocol to a unexisting function to be able to declare it on isolated-vm
// @ts-ignore
// eslint-disable-next-line no-undef

View File

@ -7,22 +7,19 @@ export const enum BundleType {
BSON = "bson", BSON = "bson",
} }
const bundleSourceCode = { const bundleSourceFile: Record<BundleType, string> = {
[BundleType.HELPERS]: "../bundles/index-helpers.ivm.bundle.js", [BundleType.HELPERS]: "./index-helpers.ivm.bundle.js",
[BundleType.BSON]: "../bundles/bson.ivm.bundle.js", [BundleType.BSON]: "./bson.ivm.bundle.js",
} }
const bundleSourceCode: Partial<Record<BundleType, string>> = {}
export function loadBundle(type: BundleType) { export function loadBundle(type: BundleType) {
if (environment.isJest()) { let sourceCode = bundleSourceCode[type]
return fs.readFileSync(require.resolve(bundleSourceCode[type]), "utf-8") if (sourceCode) {
return sourceCode
} }
switch (type) { sourceCode = fs.readFileSync(require.resolve(bundleSourceFile[type]), "utf-8")
case BundleType.HELPERS: bundleSourceCode[type] = sourceCode
return require("../bundles/index-helpers.ivm.bundle.js") return sourceCode
case BundleType.BSON:
return require("../bundles/bson.ivm.bundle.js")
default:
utils.unreachable(type)
}
} }
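A small usage sketch of the memoised loader above; only the first call per bundle type touches the filesystem:

const first = loadBundle(BundleType.HELPERS) // reads and caches the bundle text
const second = loadBundle(BundleType.HELPERS) // served from bundleSourceCode
// first === second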

View File

@ -1,68 +1,49 @@
import vm from "vm"
import env from "../environment"
import { setJSRunner, setOnErrorLog } from "@budibase/string-templates"
import { context, logging, timers } from "@budibase/backend-core"
import tracer from "dd-trace"
import { serializeError } from "serialize-error" import { serializeError } from "serialize-error"
import env from "../environment"
import {
JsErrorTimeout,
setJSRunner,
setOnErrorLog,
} from "@budibase/string-templates"
import { context, logging } from "@budibase/backend-core"
import tracer from "dd-trace"
type TrackerFn = <T>(f: () => T) => T import { IsolatedVM } from "./vm"
export function init() { export function init() {
setJSRunner((js: string, ctx: vm.Context) => { setJSRunner((js: string, ctx: Record<string, any>) => {
return tracer.trace("runJS", {}, span => { return tracer.trace("runJS", {}, span => {
const perRequestLimit = env.JS_PER_REQUEST_TIMEOUT_MS try {
let track: TrackerFn = f => f() const bbCtx = context.getCurrentContext()
if (perRequestLimit) {
const bbCtx = tracer.trace("runJS.getCurrentContext", {}, span => const vm = bbCtx?.vm
context.getCurrentContext() ? bbCtx.vm
) : new IsolatedVM({
memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
}).withHelpers()
if (bbCtx) { if (bbCtx) {
if (!bbCtx.jsExecutionTracker) { // If we have a context, we want to persist it to reuse the isolate
span?.addTags({ bbCtx.vm = vm
createdExecutionTracker: true,
})
bbCtx.jsExecutionTracker = tracer.trace(
"runJS.createExecutionTimeTracker",
{},
span => timers.ExecutionTimeTracker.withLimit(perRequestLimit)
)
} }
span?.addTags({ const { helpers, ...rest } = ctx
js: { return vm.withContext(rest, () => vm.execute(js))
limitMS: bbCtx.jsExecutionTracker.limitMs, } catch (error: any) {
elapsedMS: bbCtx.jsExecutionTracker.elapsedMS, if (error.message === "Script execution timed out.") {
}, throw new JsErrorTimeout()
})
// We call checkLimit() here to prevent paying the cost of creating
// a new VM context below when we don't need to.
tracer.trace("runJS.checkLimitAndBind", {}, span => {
bbCtx.jsExecutionTracker!.checkLimit()
track = bbCtx.jsExecutionTracker!.track.bind(
bbCtx.jsExecutionTracker
)
})
} }
throw error
} }
ctx = {
...ctx,
alert: undefined,
setInterval: undefined,
setTimeout: undefined,
}
vm.createContext(ctx)
return track(() =>
vm.runInNewContext(js, ctx, {
timeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
})
)
}) })
}) })
if (env.LOG_JS_ERRORS) { if (env.LOG_JS_ERRORS) {
setOnErrorLog((error: Error) => { setOnErrorLog((error: Error) => {
logging.logWarn(JSON.stringify(serializeError(error))) logging.logWarn(
`Error while executing js: ${JSON.stringify(serializeError(error))}`
)
}) })
} }
} }
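A minimal sketch of the isolate-reuse pattern above, assuming both evaluations run inside the same Budibase context (e.g. via doInContext): the first JS binding pays for isolate creation and helper loading, and later bindings reuse bbCtx.vm:

// first evaluation: builds an IsolatedVM and stores it on the context
processStringSync(encodeJSBinding("return 1 + 1"), {})
// second evaluation in the same context: reuses bbCtx.vm, skipping
// isolate creation and helper injection
processStringSync(encodeJSBinding("return 2 + 2"), {})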

View File

@ -1,22 +1,4 @@
// import { validate as isValidUUID } from "uuid"
import { validate as isValidUUID } from "uuid"
jest.mock("@budibase/handlebars-helpers/lib/math", () => {
const actual = jest.requireActual("@budibase/handlebars-helpers/lib/math")
return {
...actual,
random: () => 10,
}
})
jest.mock("@budibase/handlebars-helpers/lib/uuid", () => {
const actual = jest.requireActual("@budibase/handlebars-helpers/lib/uuid")
return {
...actual,
uuid: () => "f34ebc66-93bd-4f7c-b79b-92b5569138bc",
}
})
import { processStringSync, encodeJSBinding } from "@budibase/string-templates"
const { runJsHelpersTests } = require("@budibase/string-templates/test/utils")
@ -27,7 +9,7 @@ import TestConfiguration from "../../tests/utilities/TestConfiguration"
tk.freeze("2021-01-21T12:00:00")
describe("jsRunner", () => {
describe("jsRunner (using isolated-vm)", () => {
const config = new TestConfiguration()
beforeAll(async () => {
@ -36,6 +18,10 @@ describe("jsRunner", () => {
await config.init()
})
afterAll(() => {
config.end()
})
const processJS = (js: string, context?: object) => {
return config.doInContext(config.getAppId(), async () =>
processStringSync(encodeJSBinding(js), context || {})
@ -47,10 +33,14 @@ describe("jsRunner", () => {
expect(output).toBe(3)
})
// TODO This should be reenabled when running on isolated-vm
it.skip("should prevent sandbox escape", async () => {
const output = await processJS(
`return this.constructor.constructor("return process")()`
)
expect(output).toBe("Error while executing JS")
})
it("it can execute sloppy javascript", async () => {
const output = await processJS(`a=2;b=3;return a + b`)
expect(output).toBe(5)
})
it("should prevent sandbox escape", async () => {
const output = await processJS(
`return this.constructor.constructor("return process.env")()`
)
expect(output).toBe("Error while executing JS")
})
@ -58,26 +48,26 @@ describe("jsRunner", () => {
describe("helpers", () => { describe("helpers", () => {
runJsHelpersTests({ runJsHelpersTests({
funcWrap: (func: any) => config.doInContext(config.getAppId(), func), funcWrap: (func: any) => config.doInContext(config.getAppId(), func),
// testsToSkip: ["random", "uuid"], testsToSkip: ["random", "uuid"],
}) })
// describe("uuid", () => { describe("uuid", () => {
// it("uuid helper returns a valid uuid", async () => { it("uuid helper returns a valid uuid", async () => {
// const result = await processJS("return helpers.uuid()") const result = await processJS("return helpers.uuid()")
// expect(result).toBeDefined() expect(result).toBeDefined()
// expect(isValidUUID(result)).toBe(true) expect(isValidUUID(result)).toBe(true)
// }) })
// }) })
// describe("random", () => { describe("random", () => {
// it("random helper returns a valid number", async () => { it("random helper returns a valid number", async () => {
// const min = 1 const min = 1
// const max = 8 const max = 8
// const result = await processJS(`return helpers.random(${min}, ${max})`) const result = await processJS(`return helpers.random(${min}, ${max})`)
// expect(result).toBeDefined() expect(result).toBeDefined()
// expect(result).toBeGreaterThanOrEqual(min) expect(result).toBeGreaterThanOrEqual(min)
// expect(result).toBeLessThanOrEqual(max) expect(result).toBeLessThanOrEqual(max)
// }) })
// }) })
}) })
}) })

View File

@ -1,270 +1 @@
import ivm from "isolated-vm" export * from "./isolated-vm"
import bson from "bson"
import url from "url"
import crypto from "crypto"
import querystring from "querystring"
import { BundleType, loadBundle } from "../bundles"
import { VM } from "@budibase/types"
class ExecutionTimeoutError extends Error {
constructor(message: string) {
super(message)
this.name = "ExecutionTimeoutError"
}
}
class ModuleHandler {
private modules: {
import: string
moduleKey: string
module: ivm.Module
}[] = []
private generateRandomKey = () => `i${crypto.randomUUID().replace(/-/g, "")}`
registerModule(module: ivm.Module, imports: string) {
this.modules.push({
moduleKey: this.generateRandomKey(),
import: imports,
module: module,
})
}
generateImports() {
return this.modules
.map(m => `import ${m.import} from "${m.moduleKey}"`)
.join(";")
}
getModule(key: string) {
const module = this.modules.find(m => m.moduleKey === key)
return module?.module
}
}
export class IsolatedVM implements VM {
private isolate: ivm.Isolate
private vm: ivm.Context
private jail: ivm.Reference
private invocationTimeout: number
private isolateAccumulatedTimeout?: number
// By default the wrapper returns itself
private codeWrapper: (code: string) => string = code => code
private moduleHandler = new ModuleHandler()
private readonly resultKey = "results"
constructor({
memoryLimit,
invocationTimeout,
isolateAccumulatedTimeout,
}: {
memoryLimit: number
invocationTimeout: number
isolateAccumulatedTimeout?: number
}) {
this.isolate = new ivm.Isolate({ memoryLimit })
this.vm = this.isolate.createContextSync()
this.jail = this.vm.global
this.jail.setSync("global", this.jail.derefInto())
this.addToContext({
[this.resultKey]: { out: "" },
})
this.invocationTimeout = invocationTimeout
this.isolateAccumulatedTimeout = isolateAccumulatedTimeout
}
withHelpers() {
const urlModule = this.registerCallbacks({
resolve: url.resolve,
parse: url.parse,
})
const querystringModule = this.registerCallbacks({
escape: querystring.escape,
})
this.addToContext({
helpersStripProtocol: new ivm.Callback((str: string) => {
var parsed = url.parse(str) as any
parsed.protocol = ""
return parsed.format()
}),
})
const injectedRequire = `const require=function req(val) {
switch (val) {
case "url": return ${urlModule};
case "querystring": return ${querystringModule};
}
}`
const helpersSource = loadBundle(BundleType.HELPERS)
const helpersModule = this.isolate.compileModuleSync(
`${injectedRequire};${helpersSource}`
)
helpersModule.instantiateSync(this.vm, specifier => {
if (specifier === "crypto") {
const cryptoModule = this.registerCallbacks({
randomUUID: crypto.randomUUID,
})
const module = this.isolate.compileModuleSync(
`export default ${cryptoModule}`
)
module.instantiateSync(this.vm, specifier => {
throw new Error(`No imports allowed. Required: ${specifier}`)
})
return module
}
throw new Error(`No imports allowed. Required: ${specifier}`)
})
this.moduleHandler.registerModule(helpersModule, "helpers")
return this
}
withContext(context: Record<string, any>) {
this.addToContext(context)
return this
}
withParsingBson(data: any) {
this.addToContext({
bsonData: bson.BSON.serialize({ data }),
})
// If we need to parse bson, we follow the next steps:
// 1. Serialise the data from potential BSON to buffer before passing it to the isolate
// 2. Deserialise the data within the isolate, to get the original data
// 3. Process script
// 4. Stringify the result in order to convert the result from BSON to json
this.codeWrapper = code =>
`(function(){
const data = deserialize(bsonData, { validation: { utf8: false } }).data;
const result = ${code}
return toJson(result);
})();`
const bsonSource = loadBundle(BundleType.BSON)
this.addToContext({
textDecoderCb: new ivm.Callback(
(args: {
constructorArgs: any
functionArgs: Parameters<InstanceType<typeof TextDecoder>["decode"]>
}) => {
const result = new TextDecoder(...args.constructorArgs).decode(
...args.functionArgs
)
return result
}
),
})
// "Polyfilling" text decoder. `bson.deserialize` requires decoding. We are creating a bridge function so we don't need to inject the full library
const textDecoderPolyfill = class TextDecoder {
constructorArgs
constructor(...constructorArgs: any) {
this.constructorArgs = constructorArgs
}
decode(...input: any) {
// @ts-ignore
return textDecoderCb({
constructorArgs: this.constructorArgs,
functionArgs: input,
})
}
}.toString()
const bsonModule = this.isolate.compileModuleSync(
`${textDecoderPolyfill};${bsonSource}`
)
bsonModule.instantiateSync(this.vm, specifier => {
throw new Error(`No imports allowed. Required: ${specifier}`)
})
this.moduleHandler.registerModule(bsonModule, "{deserialize, toJson}")
return this
}
execute(code: string): any {
if (this.isolateAccumulatedTimeout) {
const cpuMs = Number(this.isolate.cpuTime) / 1e6
if (cpuMs > this.isolateAccumulatedTimeout) {
throw new ExecutionTimeoutError(
`CPU time limit exceeded (${cpuMs}ms > ${this.isolateAccumulatedTimeout}ms)`
)
}
}
code = `${this.moduleHandler.generateImports()};results.out=${this.codeWrapper(
code
)};`
const script = this.isolate.compileModuleSync(code)
script.instantiateSync(this.vm, specifier => {
const module = this.moduleHandler.getModule(specifier)
if (module) {
return module
}
throw new Error(`"${specifier}" import not allowed`)
})
script.evaluateSync({ timeout: this.invocationTimeout })
const result = this.getFromContext(this.resultKey)
return result.out
}
private registerCallbacks(functions: Record<string, any>) {
const libId = crypto.randomUUID().replace(/-/g, "")
const x: Record<string, string> = {}
for (const [funcName, func] of Object.entries(functions)) {
const key = `f${libId}${funcName}cb`
x[funcName] = key
this.addToContext({
[key]: new ivm.Callback((...params: any[]) => (func as any)(...params)),
})
}
const mod =
`{` +
Object.entries(x)
.map(([key, func]) => `${key}: ${func}`)
.join() +
"}"
return mod
}
private addToContext(context: Record<string, any>) {
for (let key in context) {
const value = context[key]
this.jail.setSync(
key,
typeof value === "function"
? value
: new ivm.ExternalCopy(value).copyInto({ release: true })
)
}
}
private getFromContext(key: string) {
const ref = this.vm.global.getSync(key, { reference: true })
const result = ref.copySync()
ref.release()
return result
}
}

View File

@ -0,0 +1,246 @@
import ivm from "isolated-vm"
import bson from "bson"
import url from "url"
import crypto from "crypto"
import querystring from "querystring"
import { BundleType, loadBundle } from "../bundles"
import { VM } from "@budibase/types"
import environment from "../../environment"
class ExecutionTimeoutError extends Error {
constructor(message: string) {
super(message)
this.name = "ExecutionTimeoutError"
}
}
export class IsolatedVM implements VM {
private isolate: ivm.Isolate
private vm: ivm.Context
private jail: ivm.Reference
private invocationTimeout: number
private isolateAccumulatedTimeout?: number
// By default the wrapper returns itself
private codeWrapper: (code: string) => string = code => code
private readonly resultKey = "results"
private runResultKey: string
constructor({
memoryLimit,
invocationTimeout,
isolateAccumulatedTimeout,
}: {
memoryLimit?: number
invocationTimeout?: number
isolateAccumulatedTimeout?: number
} = {}) {
this.isolate = new ivm.Isolate({
memoryLimit: memoryLimit || environment.JS_RUNNER_MEMORY_LIMIT,
})
this.vm = this.isolate.createContextSync()
this.jail = this.vm.global
this.jail.setSync("global", this.jail.derefInto())
this.runResultKey = crypto.randomUUID()
this.addToContext({
[this.resultKey]: { [this.runResultKey]: "" },
})
this.invocationTimeout =
invocationTimeout || environment.JS_PER_INVOCATION_TIMEOUT_MS
this.isolateAccumulatedTimeout = isolateAccumulatedTimeout
}
withHelpers() {
const urlModule = this.registerCallbacks({
resolve: url.resolve,
parse: url.parse,
})
const querystringModule = this.registerCallbacks({
escape: querystring.escape,
})
const cryptoModule = this.registerCallbacks({
randomUUID: crypto.randomUUID,
})
this.addToContext({
helpersStripProtocol: new ivm.Callback((str: string) => {
var parsed = url.parse(str) as any
parsed.protocol = ""
return parsed.format()
}),
})
const injectedRequire = `require=function req(val) {
switch (val) {
case "url": return ${urlModule};
case "querystring": return ${querystringModule};
case "crypto": return ${cryptoModule};
}
}`
const helpersSource = loadBundle(BundleType.HELPERS)
const script = this.isolate.compileScriptSync(
`${injectedRequire};${helpersSource};helpers=helpers.default`
)
script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
new Promise(() => {
script.release()
})
return this
}
withContext<T>(context: Record<string, any>, executeWithContext: () => T) {
this.addToContext(context)
try {
return executeWithContext()
} finally {
this.removeFromContext(Object.keys(context))
}
}
withParsingBson(data: any) {
this.addToContext({
bsonData: bson.BSON.serialize({ data }),
})
// If we need to parse BSON, we follow these steps:
// 1. Serialise the data from potential BSON to a buffer before passing it to the isolate
// 2. Deserialise the data within the isolate to recover the original values
// 3. Run the script
// 4. Stringify the result to convert it from BSON back to JSON
this.codeWrapper = code =>
`(function(){
const data = bson.deserialize(bsonData, { validation: { utf8: false } }).data;
const result = ${code}
return bson.toJson(result);
})();`
const bsonSource = loadBundle(BundleType.BSON)
this.addToContext({
textDecoderCb: new ivm.Callback(
(args: {
constructorArgs: any
functionArgs: Parameters<InstanceType<typeof TextDecoder>["decode"]>
}) => {
const result = new TextDecoder(...args.constructorArgs).decode(
...args.functionArgs
)
return result
}
),
})
// "Polyfilling" text decoder. `bson.deserialize` requires decoding. We are creating a bridge function so we don't need to inject the full library
const textDecoderPolyfill = class TextDecoderMock {
constructorArgs
constructor(...constructorArgs: any) {
this.constructorArgs = constructorArgs
}
decode(...input: any) {
// @ts-ignore
return textDecoderCb({
constructorArgs: this.constructorArgs,
functionArgs: input,
})
}
}
.toString()
.replace(/TextDecoderMock/, "TextDecoder")
const script = this.isolate.compileScriptSync(
`${textDecoderPolyfill};${bsonSource}`
)
script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
new Promise(() => {
script.release()
})
return this
}
execute(code: string): any {
if (this.isolateAccumulatedTimeout) {
const cpuMs = Number(this.isolate.cpuTime) / 1e6
if (cpuMs > this.isolateAccumulatedTimeout) {
throw new ExecutionTimeoutError(
`CPU time limit exceeded (${cpuMs}ms > ${this.isolateAccumulatedTimeout}ms)`
)
}
}
code = `results['${this.runResultKey}']=${this.codeWrapper(code)}`
const script = this.isolate.compileScriptSync(code)
script.runSync(this.vm, { timeout: this.invocationTimeout, release: false })
new Promise(() => {
script.release()
})
// We can't rely on the script run result as it will not work for non-transferable values
const result = this.getFromContext(this.resultKey)
return result[this.runResultKey]
}
private registerCallbacks(functions: Record<string, any>) {
// Expose each host function to the isolate under a collision-proof key, then
// return an object-literal source string mapping the original names back to
// those callbacks.
const libId = crypto.randomUUID().replace(/-/g, "")
const x: Record<string, string> = {}
for (const [funcName, func] of Object.entries(functions)) {
const key = `f${libId}${funcName}cb`
x[funcName] = key
this.addToContext({
[key]: new ivm.Callback((...params: any[]) => (func as any)(...params)),
})
}
const mod =
`{` +
Object.entries(x)
.map(([key, func]) => `${key}: ${func}`)
.join() +
"}"
return mod
}
private addToContext(context: Record<string, any>) {
for (let key in context) {
const value = context[key]
this.jail.setSync(
key,
typeof value === "function"
? value
: new ivm.ExternalCopy(value).copyInto({ release: true })
)
}
}
private removeFromContext(keys: string[]) {
for (let key of keys) {
this.jail.deleteSync(key)
}
}
private getFromContext(key: string) {
const ref = this.vm.global.getSync(key, { reference: true })
const result = ref.copySync()
new Promise(() => {
ref.release()
})
return result
}
}
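One idiom in this variant deserves a note: compiled scripts and references are released inside `new Promise(() => ...)` rather than directly. The promise executor still runs synchronously, but anything `release()` throws only rejects a promise nobody awaits, instead of propagating to the caller. A minimal sketch of the pattern (names hypothetical):

function releaseQuietly(resource: { release(): void }) {
  // The executor runs synchronously; a throw here rejects this discarded
  // promise rather than reaching the caller.
  new Promise(() => {
    resource.release()
  })
}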


@ -84,7 +84,8 @@ describe("syncGlobalUsers", () => {
    await syncGlobalUsers()
    const metadata = await rawUserMetadata()
-     expect(metadata).toHaveLength(2)
+     expect(metadata).toHaveLength(2 + 1) // ADMIN user created in test bootstrap still in the application
    expect(metadata).toContainEqual(
      expect.objectContaining({
        _id: db.generateUserMetadataID(user1._id!),


@ -76,14 +76,6 @@ mocks.licenses.useUnlimited()
dbInit()
- type DefaultUserValues = {
-   globalUserId: string
-   email: string
-   firstName: string
-   lastName: string
-   csrfToken: string
- }
export interface TableToBuild extends Omit<Table, "sourceId" | "sourceType"> {
  sourceId?: string
  sourceType?: TableSourceType
@ -99,14 +91,17 @@ export default class TestConfiguration {
  prodApp: any
  prodAppId: any
  user: any
-   globalUserId: any
  userMetadataId: any
  table?: Table
  automation: any
  datasource?: Datasource
  tenantId?: string
-   defaultUserValues: DefaultUserValues
  api: API
+   csrfToken?: string
+   private get globalUserId() {
+     return this.user._id
+   }
  constructor(openServer = true) {
    if (openServer) {
@ -121,21 +116,10 @@ export default class TestConfiguration {
    }
    this.appId = null
    this.allApps = []
-     this.defaultUserValues = this.populateDefaultUserValues()
    this.api = new API(this)
  }
-   populateDefaultUserValues(): DefaultUserValues {
-     return {
-       globalUserId: `us_${newid()}`,
-       email: generator.email(),
-       firstName: generator.first(),
-       lastName: generator.last(),
-       csrfToken: generator.hash(),
-     }
-   }
  getRequest() {
    return this.request
  }
@ -162,10 +146,10 @@ export default class TestConfiguration {
  getUserDetails() {
    return {
-       globalId: this.defaultUserValues.globalUserId,
-       email: this.defaultUserValues.email,
-       firstName: this.defaultUserValues.firstName,
-       lastName: this.defaultUserValues.lastName,
+       globalId: this.globalUserId,
+       email: this.user.email,
+       firstName: this.user.firstName,
+       lastName: this.user.lastName,
    }
  }
@ -300,15 +284,27 @@ export default class TestConfiguration {
  }
  // USER / AUTH
-   async globalUser({
-     id = this.defaultUserValues.globalUserId,
-     firstName = this.defaultUserValues.firstName,
-     lastName = this.defaultUserValues.lastName,
-     builder = true,
-     admin = false,
-     email = this.defaultUserValues.email,
-     roles,
-   }: any = {}): Promise<User> {
+   async globalUser(
+     config: {
+       id?: string
+       firstName?: string
+       lastName?: string
+       builder?: boolean
+       admin?: boolean
+       email?: string
+       roles?: any
+     } = {}
+   ): Promise<User> {
+     const {
+       id = `us_${newid()}`,
+       firstName = generator.first(),
+       lastName = generator.last(),
+       builder = true,
+       admin = false,
+       email = generator.email(),
+       roles,
+     } = config
    const db = tenancy.getTenantDB(this.getTenantId())
    let existing
    try {
@ -327,7 +323,7 @@ export default class TestConfiguration {
    await sessions.createASession(id, {
      sessionId: "sessionid",
      tenantId: this.getTenantId(),
-       csrfToken: this.defaultUserValues.csrfToken,
+       csrfToken: this.csrfToken,
    })
    if (builder) {
      user.builder = { global: true }
@ -357,14 +353,16 @@ export default class TestConfiguration {
      roles?: UserRoles
    } = {}
  ): Promise<User> {
-     let { id, firstName, lastName, email, builder, admin, roles } = user
-     firstName = firstName || this.defaultUserValues.firstName
-     lastName = lastName || this.defaultUserValues.lastName
-     email = email || this.defaultUserValues.email
-     roles = roles || {}
-     if (builder == null) {
-       builder = true
-     }
+     const {
+       id,
+       firstName = generator.first(),
+       lastName = generator.last(),
+       email = generator.email(),
+       builder = true,
+       admin,
+       roles,
+     } = user
    const globalId = !id ? `us_${Math.random()}` : `us_${id}`
    const resp = await this.globalUser({
      id: globalId,
@ -373,7 +371,7 @@ export default class TestConfiguration {
      email,
      builder,
      admin,
-       roles,
+       roles: roles || {},
    })
    await cache.user.invalidateUser(globalId)
    return resp
@ -448,7 +446,7 @@ export default class TestConfiguration {
  defaultHeaders(extras = {}, prodApp = false) {
    const tenantId = this.getTenantId()
    const authObj: AuthToken = {
-       userId: this.defaultUserValues.globalUserId,
+       userId: this.globalUserId,
      sessionId: "sessionid",
      tenantId,
    }
@ -457,7 +455,7 @@ export default class TestConfiguration {
    const headers: any = {
      Accept: "application/json",
      Cookie: [`${constants.Cookie.Auth}=${authToken}`],
-       [constants.Header.CSRF_TOKEN]: this.defaultUserValues.csrfToken,
+       [constants.Header.CSRF_TOKEN]: this.csrfToken,
      Host: this.tenantHost(),
      ...extras,
    }
@ -487,7 +485,7 @@ export default class TestConfiguration {
  async basicRoleHeaders() {
    return await this.roleHeaders({
-       email: this.defaultUserValues.email,
+       email: generator.email(),
      builder: false,
      prodApp: true,
      roleId: roles.BUILTIN_ROLE_IDS.BASIC,
@ -495,7 +493,7 @@ export default class TestConfiguration {
  }
  async roleHeaders({
-     email = this.defaultUserValues.email,
+     email = generator.email(),
    roleId = roles.BUILTIN_ROLE_IDS.ADMIN,
    builder = false,
    prodApp = true,
@ -519,11 +517,12 @@ export default class TestConfiguration {
  }
  async newTenant(appName = newid()): Promise<App> {
-     this.defaultUserValues = this.populateDefaultUserValues()
+     this.csrfToken = generator.hash()
    this.tenantId = structures.tenant.id()
    this.user = await this.globalUser()
-     this.globalUserId = this.user._id
-     this.userMetadataId = generateUserMetadataID(this.globalUserId)
+     this.userMetadataId = generateUserMetadataID(this.user._id)
    return this.createApp(appName)
  }
@ -533,7 +532,7 @@ export default class TestConfiguration {
  // API
-   async generateApiKey(userId = this.defaultUserValues.globalUserId) {
+   async generateApiKey(userId = this.user._id) {
    const db = tenancy.getTenantDB(this.getTenantId())
    const id = dbCore.generateDevInfoID(userId)
    let devInfo: any
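The net effect of this refactor: identity fields are generated fresh per call instead of being fixed for the lifetime of the TestConfiguration. A hedged example of the new call shape (test scaffolding assumed):

// All fields are optional; unspecified ones get freshly generated values,
// so two users created in one test no longer collide on email or id.
const admin = await config.globalUser({ admin: true, builder: false })
const builderUser = await config.globalUser() // defaults: builder, random identity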


@ -31,6 +31,19 @@ export class BackupAPI extends TestAPI {
    return result.body as CreateAppBackupResponse
  }
+   waitForBackupToComplete = async (appId: string, backupId: string) => {
+     for (let i = 0; i < 10; i++) {
+       await new Promise(resolve => setTimeout(resolve, 1000))
+       const result = await this.request
+         .get(`/api/apps/${appId}/backups/${backupId}/file`)
+         .set(this.config.defaultHeaders())
+       if (result.status === 200) {
+         return
+       }
+     }
+     throw new Error("Backup did not complete")
+   }
  importBackup = async (
    appId: string,
    backupId: string
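A sketch of how a test might drive the new helper; the `config.api.backup` accessor and the creating method's name are assumptions for illustration, only `waitForBackupToComplete` comes from the diff:

// Poll for up to ~10 seconds until the backup file endpoint returns 200.
const { backupId } = await config.api.backup.createBackup(appId) // hypothetical creator
await config.api.backup.waitForBackupToComplete(appId, backupId)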


@ -7,7 +7,7 @@ import {
  QueryVariable,
  QueryResponse,
} from "./definitions"
- import ScriptRunner from "../utilities/scriptRunner"
+ import { IsolatedVM } from "../jsRunner/vm"
import { getIntegration } from "../integrations"
import { processStringSync } from "@budibase/string-templates"
import { context, cache, auth } from "@budibase/backend-core"
@ -26,7 +26,7 @@ class QueryRunner {
  fields: any
  parameters: any
  pagination: any
-   transformer: any
+   transformer: string
  cachedVariables: any[]
  ctx: any
  queryResponse: any
@ -127,11 +127,17 @@ class QueryRunner {
    // transform as required
    if (transformer) {
-       const runner = new ScriptRunner(transformer, {
-         data: rows,
-         params: enrichedParameters,
-       })
-       rows = runner.execute()
+       transformer = `(function(){\n${transformer}\n})();`
+       let vm = new IsolatedVM()
+       if (datasource.source === SourceName.MONGODB) {
+         vm = vm.withParsingBson(rows)
+       }
+       const ctx = {
+         data: rows,
+         params: enrichedParameters,
+       }
+       rows = vm.withContext(ctx, () => vm.execute(transformer))
    }
    // if the request fails we retry once, invalidating the cached value
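To make the wrapping concrete: a transformer is stored as the body of a function, so after the IIFE wrap it runs inside the isolate with `data` and `params` supplied through `withContext`. A sketch with an invented transformer:

// User-supplied transformer body, as stored on the query:
const transformer = `return data.filter(row => row.active).map(row => row.name)`
// What QueryRunner actually evaluates inside the isolate:
const wrapped = `(function(){\n${transformer}\n})();`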


@ -72,6 +72,9 @@ export class AttachmentCleanup {
        continue
      }
      rows.forEach(row => {
+         if (!Array.isArray(row[key])) {
+           return
+         }
        files = files.concat(
          row[key].map((attachment: any) => attachment.key)
        )
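The new guard makes cleanup tolerant of attachment columns that were never populated. Without it, a row like the one below would throw on `row[key].map(...)` (shape illustrative, mirroring the test added further down):

// `attach` is an attachment column in the schema, but this row has no
// value for it, so row[key] is undefined rather than an array.
const row = { attach: undefined }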


@ -103,6 +103,14 @@ describe("attachment cleanup", () => {
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })
+   it("should handle row deletion and not throw when attachments are undefined", async () => {
+     await AttachmentCleanup.rowDelete(table(), [
+       {
+         attach: undefined,
+       },
+     ])
+   })
  it("shouldn't cleanup attachments if row not updated", async () => {
    await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
    expect(mockedDeleteFiles).not.toBeCalled()


@ -1,29 +0,0 @@
import fetch from "node-fetch"
import { VM, VMScript } from "vm2"
const JS_TIMEOUT_MS = 1000
class ScriptRunner {
vm: VM
results: { out: string }
script: VMScript
constructor(script: string, context: any) {
const code = `let fn = () => {\n${script}\n}; results.out = fn();`
this.vm = new VM({
timeout: JS_TIMEOUT_MS,
})
this.results = { out: "" }
this.vm.setGlobals(context)
this.vm.setGlobal("fetch", fetch)
this.vm.setGlobal("results", this.results)
this.script = new VMScript(code)
}
execute() {
this.vm.run(this.script)
return this.results.out
}
}
export default ScriptRunner
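With this deletion the last vm2-based sandbox is gone; query transformers now run through IsolatedVM (see the QueryRunner hunk above). One behavioural difference worth noting: ScriptRunner injected `fetch` into the sandbox, and the isolated-vm path does not. A rough equivalence sketch (the declarations stand in for surrounding code):

declare const script: string // transformer source, as before
declare const context: Record<string, any> // { data, params }

// Before: new ScriptRunner(script, context).execute()
// After, roughly:
const vm = new IsolatedVM()
const out = vm.withContext(context, () =>
  vm.execute(`(function(){\n${script}\n})();`)
)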


@ -57,3 +57,13 @@ export function filterValueToLabel() {
    {}
  )
}
+ export function hasSchema(test: any) {
+   return (
+     typeof test === "object" &&
+     !Array.isArray(test) &&
+     test !== null &&
+     !(test instanceof Date) &&
+     Object.keys(test).length > 0
+   )
+ }
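`hasSchema` answers a single question: is this a plain, non-empty object? A few illustrative cases:

hasSchema({ name: { type: "string" } }) // true
hasSchema({}) // false: no keys
hasSchema([1, 2, 3]) // false: arrays excluded
hasSchema(new Date()) // false: Dates excluded
hasSchema(null) // false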


@ -1,4 +1,4 @@
- const { atob } = require("../utilities")
+ const { atob, isBackendService, isJSAllowed } = require("../utilities")
const cloneDeep = require("lodash.clonedeep")
const { LITERAL_MARKER } = require("../helpers/constants")
const { getJsHelperList } = require("./list")
@ -7,6 +7,9 @@ const { getJsHelperList } = require("./list")
// This setter is used in the entrypoint (either index.js or index.mjs).
let runJS
module.exports.setJSRunner = runner => (runJS = runner)
+ module.exports.removeJSRunner = () => {
+   runJS = undefined
+ }
let onErrorLog
module.exports.setOnErrorLog = delegate => (onErrorLog = delegate)
@ -39,7 +42,7 @@ const getContextValue = (path, context) => {
// Evaluates JS code against a certain context
module.exports.processJS = (handlebars, context) => {
-   if (process && process.env.NO_JS) {
+   if (!isJSAllowed() || (isBackendService() && !runJS)) {
    throw new Error("JS disabled in environment.")
  }
  try {


@ -2,7 +2,7 @@ const vm = require("vm")
const handlebars = require("handlebars")
const { registerAll, registerMinimum } = require("./helpers/index")
const processors = require("./processors")
- const { atob, btoa } = require("./utilities")
+ const { atob, btoa, isBackendService } = require("./utilities")
const manifest = require("../manifest.json")
const {
  FIND_HBS_REGEX,
@ -404,7 +404,8 @@ module.exports.JsErrorTimeout = errors.JsErrorTimeout
module.exports.helpersToRemoveForJs = helpersToRemoveForJs
- if (process && !process.env.NO_JS) {
+ function defaultJSSetup() {
+   if (!isBackendService()) {
  /**
   * Use polyfilled vm to run JS scripts in a browser Env
   */
@ -418,4 +419,10 @@ if (process && !process.env.NO_JS) {
    vm.createContext(context)
    return vm.runInNewContext(js, context, { timeout: 1000 })
  })
+   } else {
+     javascript.removeJSRunner()
  }
+ }
+ defaultJSSetup()
+ module.exports.defaultJSSetup = defaultJSSetup


@ -4,6 +4,14 @@ module.exports.FIND_HBS_REGEX = /{{([^{].*?)}}/g
module.exports.FIND_ANY_HBS_REGEX = /{?{{([^{].*?)}}}?/g
module.exports.FIND_TRIPLE_HBS_REGEX = /{{{([^{].*?)}}}/g
+ module.exports.isBackendService = () => {
+   return typeof window === "undefined"
+ }
+ module.exports.isJSAllowed = () => {
+   return process && !process.env.NO_JS
+ }
// originally this could be done with a single regex using look behinds
// but safari does not support this feature
// original regex: /(?<!{){{[^{}]+}}(?!})/g
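These two predicates drive the reworked `processJS` guard above: JS is refused whenever `NO_JS` is set, and in backend services (no `window` global) unless a runner has been registered via `setJSRunner`. A quick behavioural sketch:

isBackendService() // true in Node, false in the browser bundle
process.env.NO_JS = "1"
isJSAllowed() // falsy from here on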


@ -0,0 +1,27 @@
jest.mock("../src/utilities", () => {
const utilities = jest.requireActual("../src/utilities")
return {
...utilities,
isBackendService: jest.fn().mockReturnValue(true),
}
})
const { defaultJSSetup, processStringSync, encodeJSBinding } = require("../src")
const { isBackendService } = require("../src/utilities")
const mockedBackendService = jest.mocked(isBackendService)
const binding = encodeJSBinding("return 1")
describe("confirm VM is available when expected and when not", () => {
it("shouldn't have JS available in a backend service by default", () => {
defaultJSSetup()
const result = processStringSync(binding, {})
// shouldn't process at all
expect(result).toBe(binding)
})
it("should have JS available in frontend environments", () => {
mockedBackendService.mockReturnValue(false)
defaultJSSetup()
const result = processStringSync(binding, {})
expect(result).toBe(1)
})
})


@ -4,6 +4,7 @@ import type { Row } from "./row"
export interface QuerySchema {
  name?: string
  type: string
+   subtype?: string
}
export interface Query extends Document {
@ -17,11 +18,23 @@ export interface Query extends Document {
  queryVerb: string
}
+ export interface QueryPreview extends Omit<Query, "_id"> {
+   queryId: string
+ }
export interface QueryParameter {
  name: string
  default: string
}
+ export interface QueryResponse {
+   rows: any[]
+   keys: string[]
+   info: any
+   extra: any
+   pagination: any
+ }
export interface RestQueryFields {
  path: string
  queryString?: string
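Every field of `QueryResponse` is visible in this hunk, so a conforming value can be shown in full (data invented):

const response: QueryResponse = {
  rows: [{ name: "a" }],
  keys: ["name"],
  info: {},
  extra: {},
  pagination: null, // typed `any`; integrations without paging can pass null
}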


@ -16,6 +16,10 @@ export enum AutoFieldSubType {
AUTO_ID = "autoID", AUTO_ID = "autoID",
} }
export enum JsonFieldSubType {
ARRAY = "array",
}
export enum FormulaType { export enum FormulaType {
STATIC = "static", STATIC = "static",
DYNAMIC = "dynamic", DYNAMIC = "dynamic",


@ -5,6 +5,7 @@ import {
  AutoFieldSubType,
  AutoReason,
  FormulaType,
+   JsonFieldSubType,
  RelationshipType,
} from "./constants"
@ -81,6 +82,11 @@ export interface NumberFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
  }
}
+ export interface JsonFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
+   type: FieldType.JSON
+   subtype?: JsonFieldSubType.ARRAY
+ }
export interface DateFieldMetadata extends Omit<BaseFieldSchema, "subtype"> {
  type: FieldType.DATETIME
  ignoreTimezones?: boolean
@ -162,6 +168,7 @@ export type FieldSchema =
  | NumberFieldMetadata
  | LongFormFieldMetadata
  | BBReferenceFieldMetadata
+   | JsonFieldMetadata
export interface TableSchema {
  [key: string]: FieldSchema
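A JSON column holding an array can now be described like this; the remaining required BaseFieldSchema fields are elided, hence the cast (a sketch, not a complete object):

const tagsField = {
  name: "tags",
  type: FieldType.JSON,
  subtype: JsonFieldSubType.ARRAY,
} as JsonFieldMetadata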


@ -1,3 +1,4 @@
export interface VM {
  execute(code: string): any
+   withContext<T>(context: Record<string, any>, executeWithContext: () => T): T
}


@ -49,7 +49,6 @@ function runBuild(entry, outfile) {
    preserveSymlinks: true,
    loader: {
      ".svelte": "copy",
-       ".ivm.bundle.js": "text",
    },
    metafile: true,
    external: [
@ -70,7 +69,7 @@ function runBuild(entry, outfile) {
platform: "node", platform: "node",
outfile, outfile,
}).then(result => { }).then(result => {
glob(`${process.cwd()}/src/**/*.hbs`, {}, (err, files) => { glob(`${process.cwd()}/src/**/*.{hbs,ivm.bundle.js}`, {}, (err, files) => {
for (const file of files) { for (const file of files) {
fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`) fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
} }


@ -1,8 +0,0 @@
#!/bin/bash
apt-get install -y gnupg
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor | tee /usr/share/keyrings/nodesource.gpg > /dev/null
echo "deb [signed-by=/usr/share/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
apt-get update
echo "INSTALLING NODE $NODE_MAJOR"
apt-get install -y --no-install-recommends nodejs
npm install --global yarn pm2


@ -5572,9 +5572,9 @@
  integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==

"@types/node@^18.11.18":
-   version "18.19.10"
-   resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158"
-   integrity sha512-IZD8kAM02AW1HRDTPOlz3npFava678pr8Ie9Vp8uRhBROXAv8MXT2pCnGZZAKYdromsNQLHQcfWQ6EOatVLtqA==
+   version "18.19.13"
+   resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.13.tgz#c3e989ca967b862a1f6c8c4148fe31865eedaf1a"
+   integrity sha512-kgnbRDj8ioDyGxoiaXsiu1Ybm/K14ajCgMOkwiqpHrnF7d7QiYRoRqHIpglMMs3DwXinlK4qJ8TZGlj4hfleJg==
  dependencies:
    undici-types "~5.26.4"
@ -6463,7 +6463,7 @@ acorn@^7.1.1:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0: acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0:
version "8.11.3" version "8.11.3"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg== integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
@ -21633,14 +21633,6 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
vm2@^3.9.19:
version "3.9.19"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.19.tgz#be1e1d7a106122c6c492b4d51c2e8b93d3ed6a4a"
integrity sha512-J637XF0DHDMV57R6JyVsTak7nIL8gy5KH4r1HiwWLf/4GBbb5MKL5y7LpmF4A8E2nR6XmzpmMFQ7V7ppPTmUQg==
dependencies:
acorn "^8.7.0"
acorn-walk "^8.2.0"
vuvuzela@1.0.3: vuvuzela@1.0.3:
version "1.0.3" version "1.0.3"
resolved "https://registry.yarnpkg.com/vuvuzela/-/vuvuzela-1.0.3.tgz#3be145e58271c73ca55279dd851f12a682114b0b" resolved "https://registry.yarnpkg.com/vuvuzela/-/vuvuzela-1.0.3.tgz#3be145e58271c73ca55279dd851f12a682114b0b"