Merge branch 'master' into BUDI-7656/add-migration

commit c7276b9005

@@ -6,6 +6,7 @@ packages/server/coverage
 packages/worker/coverage
 packages/backend-core/coverage
 packages/server/client
 packages/server/coverage
 packages/builder/.routify
 packages/sdk/sdk
+packages/account-portal/packages/server/build
@@ -44,7 +44,8 @@
       "no-undef": "off",
       "no-prototype-builtins": "off",
       "local-rules/no-budibase-imports": "error",
-      "local-rules/no-test-com": "error"
+      "local-rules/no-test-com": "error",
+      "local-rules/email-domain-example-com": "error"
     }
   },
   {
@@ -107,9 +107,9 @@ jobs:
       - name: Test
         run: |
           if ${{ env.USE_NX_AFFECTED }}; then
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
+            yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/pro
+            yarn test --ignore=@budibase/worker --ignore=@budibase/server
           fi

   test-worker:
@@ -160,31 +160,6 @@ jobs:
           yarn test --scope=@budibase/server
         fi

-  test-pro:
-    runs-on: ubuntu-latest
-    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
-    steps:
-      - name: Checkout repo and submodules
-        uses: actions/checkout@v4
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-          fetch-depth: 0
-
-      - name: Use Node.js 20.x
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: yarn
-      - run: yarn --frozen-lockfile
-      - name: Test
-        run: |
-          if ${{ env.USE_NX_AFFECTED }}; then
-            yarn test --scope=@budibase/pro --since=${{ env.NX_BASE_BRANCH }}
-          else
-            yarn test --scope=@budibase/pro
-          fi
-
   integration-test:
     runs-on: ubuntu-latest
     steps:
@@ -7,11 +7,12 @@ module.exports = {

         if (
           /^@budibase\/[^/]+\/.*$/.test(importPath) &&
-          importPath !== "@budibase/backend-core/tests"
+          importPath !== "@budibase/backend-core/tests" &&
+          importPath !== "@budibase/string-templates/test/utils"
         ) {
           context.report({
             node,
-            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
+            message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests and @budibase/string-templates/test/utils.`,
           })
         }
       },
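Not part of the diff: a small sketch of what the relaxed no-budibase-imports rule now permits in test files. Only the two module specifiers come from the rule above; the named imports are illustrative.

```ts
// Deep imports into @budibase packages are still reported, e.g.:
// import { something } from "@budibase/backend-core/src/db"   // lint error

// The two allowed exceptions (the second is newly added by this change):
import { DBTestConfiguration } from "@budibase/backend-core/tests"
// The namespace import below is assumed for illustration; only the path is from the rule.
import * as templateTestUtils from "@budibase/string-templates/test/utils"

export { DBTestConfiguration, templateTestUtils }
```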
@@ -51,4 +52,41 @@ module.exports = {
       }
     },
   },
+  "email-domain-example-com": {
+    meta: {
+      type: "problem",
+      docs: {
+        description:
+          "enforce using the example.com domain for generator.email calls",
+        category: "Possible Errors",
+        recommended: false,
+      },
+      fixable: "code",
+      schema: [],
+    },
+    create: function (context) {
+      return {
+        CallExpression(node) {
+          if (
+            node.callee.type === "MemberExpression" &&
+            node.callee.object.name === "generator" &&
+            node.callee.property.name === "email" &&
+            node.arguments.length === 0
+          ) {
+            context.report({
+              node,
+              message:
+                "Prefer using generator.email with the domain \"{ domain: 'example.com' }\".",
+              fix: function (fixer) {
+                return fixer.replaceText(
+                  node,
+                  'generator.email({ domain: "example.com" })'
+                )
+              },
+            })
+          }
+        },
+      }
+    },
+  },
 }
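Not part of the diff: a before/after sketch of the new email-domain-example-com rule. It reports generator.email() called with no arguments and auto-fixes the call to pin the domain; the import path for generator is an assumption.

```ts
import { generator } from "@budibase/backend-core/tests" // assumed import path

// Reported by the rule (zero-argument call):
const before = generator.email()

// What `eslint --fix` rewrites the call to:
const after = generator.email({ domain: "example.com" })

export { before, after }
```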
@ -1,4 +1,101 @@
|
|||
FROM couchdb:3.2.1
|
||||
# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile
|
||||
#
|
||||
# Everything in this `base` image is adapted from the official `couchdb` image's
|
||||
# Dockerfile. Only modifications related to upgrading from Debian bullseye to
|
||||
# bookworm have been included. The `runner` image contains Budibase's
|
||||
# customisations to the image, e.g. adding Clouseau.
|
||||
FROM node:20-slim AS base
|
||||
|
||||
# Add CouchDB user account to make sure the IDs are assigned consistently
|
||||
RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb
|
||||
|
||||
# be sure GPG and apt-transport-https are available and functional
|
||||
RUN set -ex; \
|
||||
apt-get update; \
|
||||
apt-get install -y --no-install-recommends \
|
||||
apt-transport-https \
|
||||
ca-certificates \
|
||||
dirmngr \
|
||||
gnupg \
|
||||
; \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# grab tini for signal handling and zombie reaping
|
||||
# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install -y --no-install-recommends tini; \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
tini --version
|
||||
|
||||
# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages
|
||||
ENV GPG_COUCH_KEY \
|
||||
# gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) <root@apache.org>
|
||||
390EF70BB1EA12B2773962950EE62FB37A00258D
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install -y curl; \
|
||||
export GNUPGHOME="$(mktemp -d)"; \
|
||||
curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \
|
||||
gpg --batch --import keys.asc; \
|
||||
gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \
|
||||
command -v gpgconf && gpgconf --kill all || :; \
|
||||
rm -rf "$GNUPGHOME"; \
|
||||
apt-key list; \
|
||||
apt purge -y --autoremove curl; \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
ENV COUCHDB_VERSION 3.3.3
|
||||
|
||||
RUN . /etc/os-release; \
|
||||
echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \
|
||||
tee /etc/apt/sources.list.d/couchdb.list >/dev/null
|
||||
|
||||
# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
\
|
||||
echo "couchdb couchdb/mode select none" | debconf-set-selections; \
|
||||
# we DO want recommends this time
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \
|
||||
couchdb="$COUCHDB_VERSION"~bookworm \
|
||||
; \
|
||||
# Undo symlinks to /var/log and /var/lib
|
||||
rmdir /var/lib/couchdb /var/log/couchdb; \
|
||||
rm /opt/couchdb/data /opt/couchdb/var/log; \
|
||||
mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \
|
||||
chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \
|
||||
chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \
|
||||
# Remove file that sets logging to a file
|
||||
rm /opt/couchdb/etc/default.d/10-filelog.ini; \
|
||||
# Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh
|
||||
find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \
|
||||
# Setup directories and permissions for config. Technically these could be 555 and 444 respectively
|
||||
# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh.
|
||||
find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \
|
||||
find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \
|
||||
# only local.d needs to be writable for the docker_entrypoint.sh
|
||||
chmod -f 0777 /opt/couchdb/etc/local.d; \
|
||||
# apt clean-up
|
||||
rm -rf /var/lib/apt/lists/*;
|
||||
|
||||
# Add configuration
|
||||
COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/
|
||||
# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat
|
||||
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
|
||||
|
||||
VOLUME /opt/couchdb/data
|
||||
|
||||
# 5984: Main CouchDB endpoint
|
||||
# 4369: Erlang portmap daemon (epmd)
|
||||
# 9100: CouchDB cluster communication port
|
||||
EXPOSE 5984 4369 9100
|
||||
CMD ["/opt/couchdb/bin/couchdb"]
|
||||
|
||||
FROM base as runner
|
||||
|
||||
ENV COUCHDB_USER admin
|
||||
ENV COUCHDB_PASSWORD admin
|
||||
|
@ -6,9 +103,9 @@ EXPOSE 5984
|
|||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
|
||||
wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \
|
||||
apt-add-repository 'deb http://security.debian.org/debian-security bullseye-security/updates main' && \
|
||||
apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \
|
||||
apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \
|
||||
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bullseye main' && \
|
||||
apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \
|
||||
apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \
|
||||
rm -rf /var/lib/apt/lists/
|
||||
|
||||
|
|
|
@@ -4,7 +4,7 @@
 name=clouseau@127.0.0.1

 ; set this to the same distributed Erlang cookie used by the CouchDB nodes
-cookie=monster
+cookie=COUCHDB_ERLANG_COOKIE

 ; the path where you would like to store the search index files
 dir=DATA_DIR/search
@@ -0,0 +1,8 @@
+; CouchDB Configuration Settings
+
+; Custom settings should be made in this file. They will override settings
+; in default.ini, but unlike changes made to default.ini, this file won't be
+; overwritten on server upgrade.
+
+[chttpd]
+bind_address = any
@@ -12,7 +12,7 @@

 # erlang cookie for clouseau security
 -name couchdb@127.0.0.1
--setcookie monster
+-setcookie COUCHDB_ERLANG_COOKIE

 # Ensure that the Erlang VM listens on a known port
 -kernel inet_dist_listen_min 9100
@ -0,0 +1,122 @@
|
|||
#!/bin/bash
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
|
||||
# use this file except in compliance with the License. You may obtain a copy of
|
||||
# the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations under
|
||||
# the License.
|
||||
|
||||
set -e
|
||||
|
||||
# first arg is `-something` or `+something`
|
||||
if [ "${1#-}" != "$1" ] || [ "${1#+}" != "$1" ]; then
|
||||
set -- /opt/couchdb/bin/couchdb "$@"
|
||||
fi
|
||||
|
||||
# first arg is the bare word `couchdb`
|
||||
if [ "$1" = 'couchdb' ]; then
|
||||
shift
|
||||
set -- /opt/couchdb/bin/couchdb "$@"
|
||||
fi
|
||||
|
||||
if [ "$1" = '/opt/couchdb/bin/couchdb' ]; then
|
||||
# this is where runtime configuration changes will be written.
|
||||
# we need to explicitly touch it here in case /opt/couchdb/etc has
|
||||
# been mounted as an external volume, in which case it won't exist.
|
||||
# If running as the couchdb user (i.e. container starts as root),
|
||||
# write permissions will be granted below.
|
||||
touch /opt/couchdb/etc/local.d/docker.ini
|
||||
|
||||
# if user is root, assume running under the couchdb user (default)
|
||||
# and ensure it is able to access files and directories that may be mounted externally
|
||||
if [ "$(id -u)" = '0' ]; then
|
||||
# Check that we own everything in /opt/couchdb and fix if necessary. We also
|
||||
# add the `-f` flag in all the following invocations because there may be
|
||||
# cases where some of these ownership and permissions issues are non-fatal
|
||||
# (e.g. a config file owned by root with o+r is actually fine), and we don't
|
||||
# to be too aggressive about crashing here ...
|
||||
find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +
|
||||
|
||||
# Ensure that data files have the correct permissions. We were previously
|
||||
# preventing any access to these files outside of couchdb:couchdb, but it
|
||||
# turns out that CouchDB itself does not set such restrictive permissions
|
||||
# when it creates the files. The approach taken here ensures that the
|
||||
# contents of the datadir have the same permissions as they had when they
|
||||
# were initially created. This should minimize any startup delay.
|
||||
find /opt/couchdb/data -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
|
||||
find /opt/couchdb/data -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
|
||||
|
||||
# Do the same thing for configuration files and directories. Technically
|
||||
# CouchDB only needs read access to the configuration files as all online
|
||||
# changes will be applied to the "docker.ini" file below, but we set 644
|
||||
# for the sake of consistency.
|
||||
find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +
|
||||
find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +
|
||||
fi
|
||||
|
||||
if [ ! -z "$NODENAME" ] && ! grep "couchdb@" /opt/couchdb/etc/vm.args; then
|
||||
echo "-name couchdb@$NODENAME" >> /opt/couchdb/etc/vm.args
|
||||
fi
|
||||
|
||||
if [ "$COUCHDB_USER" ] && [ "$COUCHDB_PASSWORD" ]; then
|
||||
# Create admin only if not already present
|
||||
if ! grep -Pzoqr "\[admins\]\n$COUCHDB_USER =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
|
||||
printf "\n[admins]\n%s = %s\n" "$COUCHDB_USER" "$COUCHDB_PASSWORD" >> /opt/couchdb/etc/local.d/docker.ini
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$COUCHDB_SECRET" ]; then
|
||||
# Set secret only if not already present
|
||||
if ! grep -Pzoqr "\[chttpd_auth\]\nsecret =" /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
|
||||
printf "\n[chttpd_auth]\nsecret = %s\n" "$COUCHDB_SECRET" >> /opt/couchdb/etc/local.d/docker.ini
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$COUCHDB_ERLANG_COOKIE" ]; then
|
||||
cookieFile='/opt/couchdb/.erlang.cookie'
|
||||
if [ -e "$cookieFile" ]; then
|
||||
if [ "$(cat "$cookieFile" 2>/dev/null)" != "$COUCHDB_ERLANG_COOKIE" ]; then
|
||||
echo >&2
|
||||
echo >&2 "warning: $cookieFile contents do not match COUCHDB_ERLANG_COOKIE"
|
||||
echo >&2
|
||||
fi
|
||||
else
|
||||
echo "$COUCHDB_ERLANG_COOKIE" > "$cookieFile"
|
||||
fi
|
||||
chown couchdb:couchdb "$cookieFile"
|
||||
chmod 600 "$cookieFile"
|
||||
fi
|
||||
|
||||
if [ "$(id -u)" = '0' ]; then
|
||||
chown -f couchdb:couchdb /opt/couchdb/etc/local.d/docker.ini || true
|
||||
fi
|
||||
|
||||
# if we don't find an [admins] section followed by a non-comment, display a warning
|
||||
if ! grep -Pzoqr '\[admins\]\n[^;]\w+' /opt/couchdb/etc/default.d/*.ini /opt/couchdb/etc/local.d/*.ini /opt/couchdb/etc/local.ini; then
|
||||
# The - option suppresses leading tabs but *not* spaces. :)
|
||||
cat >&2 <<-'EOWARN'
|
||||
*************************************************************
|
||||
ERROR: CouchDB 3.0+ will no longer run in "Admin Party"
|
||||
mode. You *MUST* specify an admin user and
|
||||
password, either via your own .ini file mapped
|
||||
into the container at /opt/couchdb/etc/local.ini
|
||||
or inside /opt/couchdb/etc/local.d, or with
|
||||
"-e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password"
|
||||
to set it via "docker run".
|
||||
*************************************************************
|
||||
EOWARN
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$(id -u)" = '0' ]; then
|
||||
export HOME=$(echo ~couchdb)
|
||||
exec setpriv --reuid=couchdb --regid=couchdb --clear-groups "$@"
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$@"
|
|
@@ -1,6 +1,7 @@
 #!/bin/bash

 DATA_DIR=${DATA_DIR:-/data}
+COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7}

 mkdir -p ${DATA_DIR}
 mkdir -p ${DATA_DIR}/couch/{dbs,views}

@@ -60,6 +61,9 @@ else
   sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
 fi

+sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args
+sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini
+
 # Start Clouseau. Budibase won't function correctly without Clouseau running, it
 # powers the search API endpoints which are used to do all sorts, including
 # populating app grids.
@ -3,7 +3,6 @@ FROM node:20-slim as build
|
|||
# install node-gyp dependencies
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
|
||||
|
||||
|
||||
# copy and install dependencies
|
||||
WORKDIR /app
|
||||
COPY package.json .
|
||||
|
@ -13,8 +12,6 @@ COPY .yarnrc .
|
|||
|
||||
COPY packages/server/package.json packages/server/package.json
|
||||
COPY packages/worker/package.json packages/worker/package.json
|
||||
# string-templates does not get bundled during the esbuild process, so we want to use the local version
|
||||
COPY packages/string-templates/package.json packages/string-templates/package.json
|
||||
|
||||
|
||||
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
|
||||
|
@ -27,7 +24,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
|
|||
RUN echo '' > scripts/syncProPackage.js
|
||||
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
|
||||
RUN ./scripts/removeWorkspaceDependencies.sh package.json
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
|
||||
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production --frozen-lockfile
|
||||
|
||||
# copy the actual code
|
||||
COPY packages/server/dist packages/server/dist
|
||||
|
@ -36,13 +33,11 @@ COPY packages/server/client packages/server/client
|
|||
COPY packages/server/builder packages/server/builder
|
||||
COPY packages/worker/dist packages/worker/dist
|
||||
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
|
||||
COPY packages/string-templates packages/string-templates
|
||||
|
||||
|
||||
FROM budibase/couchdb as runner
|
||||
FROM budibase/couchdb:v3.3.3 as runner
|
||||
ARG TARGETARCH
|
||||
ENV TARGETARCH $TARGETARCH
|
||||
ENV NODE_MAJOR 20
|
||||
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
|
||||
# e.g. docker build --build-arg TARGETBUILD=aas ....
|
||||
ARG TARGETBUILD=single
|
||||
|
@ -54,16 +49,14 @@ RUN apt-get update && \
|
|||
|
||||
# Install postgres client for pg_dump utils
|
||||
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
|
||||
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
|
||||
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
|
||||
&& apt update -y \
|
||||
&& apt install postgresql-client-15 -y \
|
||||
&& apt remove software-properties-common apt-transport-https gpg -y
|
||||
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
|
||||
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
|
||||
&& apt update -y \
|
||||
&& apt install postgresql-client-15 -y \
|
||||
&& apt remove software-properties-common apt-transport-https gpg -y
|
||||
|
||||
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
|
||||
WORKDIR /nodejs
|
||||
COPY scripts/install-node.sh ./install.sh
|
||||
RUN chmod +x install.sh && ./install.sh
|
||||
# We use pm2 in order to run multiple node processes in a single container
|
||||
RUN npm install --global pm2
|
||||
|
||||
# setup nginx
|
||||
COPY hosting/single/nginx/nginx.conf /etc/nginx
|
||||
|
@ -104,9 +97,6 @@ COPY --from=build /app/node_modules /node_modules
|
|||
COPY --from=build /app/package.json /package.json
|
||||
COPY --from=build /app/packages/server /app
|
||||
COPY --from=build /app/packages/worker /worker
|
||||
COPY --from=build /app/packages/string-templates /string-templates
|
||||
|
||||
RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
|
||||
|
||||
|
||||
EXPOSE 80
|
||||
|
|
|
@@ -97,10 +97,12 @@
 sleep 10

 pushd app
-pm2 start -l /dev/stdout --name app "yarn run:docker"
+pm2 start --name app "yarn run:docker"
 popd
 pushd worker
-pm2 start -l /dev/stdout --name worker "yarn run:docker"
+pm2 start --name worker "yarn run:docker"
 popd
 echo "end of runner.sh, sleeping ..."
+
+tail -f $HOME/.pm2/logs/*.log
 sleep infinity
@ -0,0 +1,221 @@
|
|||
<p align="center">
|
||||
<a href="https://www.budibase.com">
|
||||
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
|
||||
</a>
|
||||
</p>
|
||||
<h1 align="center">
|
||||
Budibase
|
||||
</h1>
|
||||
<h3 align="center">
|
||||
자체 인프라에서 몇 분 만에 맞춤형 비즈니스 도구를 구축하세요.
|
||||
</h3>
|
||||
<p align="center">
|
||||
Budibase는 개발자와 IT 전문가가 몇 분 만에 맞춤형 애플리케이션을 구축하고 자동화할 수 있는 오픈 소스 로우코드 플랫폼입니다.
|
||||
</p>
|
||||
|
||||
<h3 align="center">
|
||||
🤖 🎨 🚀
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
<img alt="Budibase design ui" src="https://res.cloudinary.com/daog6scxm/image/upload/v1633524049/ui/design-ui-wide-mobile_gdaveq.jpg">
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/Budibase/budibase/releases">
|
||||
<img alt="GitHub all releases" src="https://img.shields.io/github/downloads/Budibase/budibase/total">
|
||||
</a>
|
||||
<a href="https://github.com/Budibase/budibase/releases">
|
||||
<img alt="GitHub release (latest by date)" src="https://img.shields.io/github/v/release/Budibase/budibase">
|
||||
</a>
|
||||
<a href="https://twitter.com/intent/follow?screen_name=budibase">
|
||||
<img src="https://img.shields.io/twitter/follow/budibase?style=social" alt="Follow @budibase" />
|
||||
</a>
|
||||
<img src="https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg" alt="Code of conduct" />
|
||||
<a href="https://codecov.io/gh/Budibase/budibase">
|
||||
<img src="https://codecov.io/gh/Budibase/budibase/graph/badge.svg?token=E8W2ZFXQOH"/>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<h3 align="center">
|
||||
<a href="https://docs.budibase.com/getting-started">소개</a>
|
||||
<span> · </span>
|
||||
<a href="https://docs.budibase.com">문서</a>
|
||||
<span> · </span>
|
||||
<a href="https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas">기능 요청</a>
|
||||
<span> · </span>
|
||||
<a href="https://github.com/Budibase/budibase/issues">버그 보고</a>
|
||||
<span> · </span>
|
||||
지원: <a href="https://github.com/Budibase/budibase/discussions">토론</a>
|
||||
</h3>
|
||||
|
||||
<br /><br />
|
||||
## ✨ 특징
|
||||
|
||||
### "실제" 소프트웨어를 구축할 수 있습니다.
|
||||
Budibase를 사용하면 고성능 단일 페이지 애플리케이션을 구축할 수 있습니다. 또한 반응형 디자인으로 제작하여 사용자에게 멋진 경험을 제공할 수 있습니다.
|
||||
<br /><br />
|
||||
|
||||
### 오픈 소스 및 확장성
|
||||
Budibase는 오픈소스이며, GPL v3 라이선스에 따라 공개되어 있습니다. 이는 Budibase가 항상 당신 곁에 있다는 안도감을 줄 것입니다. 그리고 우리는 개발자 친화적인 환경을 제공하고 있기 때문에, 당신은 원하는 만큼 소스 코드를 포크하여 수정하거나 Budibase에 직접 기여할 수 있습니다.
|
||||
<br /><br />
|
||||
|
||||
### 기존 데이터 또는 처음부터 시작
|
||||
Budibase를 사용하면 다음과 같은 여러 소스에서 데이터를 가져올 수 있습니다: MondoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB 또는 REST API.
|
||||
|
||||
또는 원하는 경우 외부 도구 없이도 Budibase를 사용하여 처음부터 시작하여 자체 애플리케이션을 구축할 수 있습니다.[데이터 소스 제안](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
|
||||
|
||||
<p align="center">
|
||||
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">
|
||||
</p>
|
||||
<br /><br />
|
||||
|
||||
### 강력한 내장 구성 요소로 애플리케이션을 설계하고 구축할 수 있습니다.
|
||||
|
||||
Budibase에는 아름답게 디자인된 강력한 컴포넌트들이 제공되며, 이를 사용하여 UI를 쉽게 구축할 수 있습니다. 또한, CSS를 통한 스타일링 옵션도 풍부하게 제공되어 보다 창의적인 표현도 가능하다.
|
||||
[Request new component](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
|
||||
|
||||
<p align="center">
|
||||
<img alt="Budibase design" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970243/Out%20of%20beta%20launch/design-like-a-pro_qhlfeu.gif">
|
||||
</p>
|
||||
<br /><br />
|
||||
|
||||
### 프로세스를 자동화하고, 다른 도구와 연동하고, 웹훅으로 연결하세요!
|
||||
워크플로우와 수동 프로세스를 자동화하여 시간을 절약하세요. 웹훅 이벤트 연결부터 이메일 자동화까지, Budibase에 수행할 작업을 지시하기만 하면 자동으로 처리됩니다. [새로운 자동화 만들기](https://github.com/Budibase/automations)또는[새로운 자동화를 요청할 수 있습니다](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
|
||||
|
||||
<p align="center">
|
||||
<img alt="Budibase automations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970486/Out%20of%20beta%20launch/automation_riro7u.png">
|
||||
</p>
|
||||
<br /><br />
|
||||
|
||||
### 선호하는 도구
|
||||
Budibase는 사용자의 선호도에 따라 애플리케이션을 구축할 수 있는 다양한 도구를 통합하고 있습니다.
|
||||
|
||||
<p align="center">
|
||||
<img alt="Budibase integrations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/integrations_kc7dqt.png">
|
||||
</p>
|
||||
<br /><br />
|
||||
|
||||
### 관리자의 천국
|
||||
Budibase는 어떤 규모의 프로젝트에도 유연하게 대응할 수 있으며, Budibase를 사용하면 개인 또는 조직의 서버에서 자체 호스팅하고 사용자, 온보딩, SMTP, 앱, 그룹, 테마 등을 한꺼번에 관리할 수 있습니다. 또한, 사용자나 그룹에 앱 포털을 제공하고 그룹 관리자에게 사용자 관리를 맡길 수도 있다.
|
||||
- 프로모션 비디오: https://youtu.be/xoljVpty_Kw
|
||||
|
||||
<br /><br /><br />
|
||||
|
||||
## 🏁 시작
|
||||
|
||||
Docker, Kubernetes 또는 Digital Ocean을 사용하여 자체 인프라에서 Budibase를 호스팅하거나, 걱정 없이 빠르게 애플리케이션을 구축하려는 경우 클라우드에서 Budibase를 사용할 수 있습니다.
|
||||
|
||||
### [Budibase 셀프 호스팅으로 시작하기](https://docs.budibase.com/docs/hosting-methods)
|
||||
|
||||
- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker)
|
||||
- [Docker Compose](https://docs.budibase.com/docs/docker-compose)
|
||||
- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s)
|
||||
- [Digital Ocean](https://docs.budibase.com/docs/digitalocean)
|
||||
- [Portainer](https://docs.budibase.com/docs/portainer)
|
||||
|
||||
|
||||
### [클라우드에서 Budibase 시작하기](https://budibase.com)
|
||||
|
||||
<br /><br />
|
||||
|
||||
## 🎓 Budibase 알아보기
|
||||
|
||||
문서 [documentacion de Budibase](https://docs.budibase.com/docs).
|
||||
<br />
|
||||
|
||||
|
||||
<br /><br />
|
||||
|
||||
## 💬 커뮤니티
|
||||
|
||||
질문하고, 다른 사람을 돕고, 다른 Budibase 사용자와 즐거운 대화를 나눌 수 있는 Budibase 커뮤니티에 여러분을 초대합니다.
|
||||
[깃허브 토론](https://github.com/Budibase/budibase/discussions)
|
||||
<br /><br /><br />
|
||||
|
||||
|
||||
## ❗ 행동강령
|
||||
|
||||
Budibase 는 모든 계층의 사람들을 환영하고 상호 존중하는 환경을 제공하는 데 특별한 주의를 기울이고 있습니다. 저희는 커뮤니티에도 같은 기대를 가지고 있습니다.
|
||||
[**행동 강령**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md).
|
||||
<br />
|
||||
|
||||
<br /><br />
|
||||
|
||||
|
||||
## 🙌 Contribuir en Budibase
|
||||
|
||||
버그 신고부터 코드의 버그 수정에 이르기까지 모든 기여를 감사하고 환영합니다. 새로운 기능을 구현하거나 API를 변경할 계획이 있다면 [여기에 새 메시지](https://github.com/Budibase/budibase/issues),
|
||||
이렇게 하면 여러분의 노력이 헛되지 않도록 보장할 수 있습니다.
|
||||
|
||||
여기에는 다음을 위해 Budibase 환경을 설정하는 방법에 대한 지침이 나와 있습니다. [여기를 클릭하세요](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md).
|
||||
|
||||
### 어디서부터 시작해야 할지 혼란스러우신가요?
|
||||
이곳은 기여를 시작하기에 최적의 장소입니다! [First time issues project](https://github.com/Budibase/budibase/projects/22).
|
||||
|
||||
### 리포지토리 구성
|
||||
|
||||
Budibase는 Lerna에서 관리하는 단일 리포지토리입니다. Lerna는 변경 사항이 있을 때마다 이를 동기화하여 Budibase 패키지를 빌드하고 게시합니다. 크게 보면 이러한 패키지가 Budibase를 구성하는 패키지입니다:
|
||||
|
||||
- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - budibase builder 클라이언트 측의 svelte 애플리케이션 코드가 포함되어 있습니다.
|
||||
|
||||
- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - budibase builder 클라이언트 측의 svelte 애플리케이션 코드가 포함되어 있습니다.
|
||||
|
||||
- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - Budibase의 서버 부분입니다. 이 Koa 애플리케이션은 빌더에게 Budibase 애플리케이션을 생성하는 데 필요한 것을 제공하는 역할을 합니다. 또한 데이터베이스 및 파일 저장소와 상호 작용할 수 있는 API를 제공합니다.
|
||||
|
||||
자세한 내용은 다음 문서를 참조하세요. [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md)
|
||||
|
||||
<br /><br />
|
||||
|
||||
|
||||
## 📝 라이선스
|
||||
|
||||
Budibase는 오픈 소스이며, 라이선스는 다음과 같습니다 [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html). 클라이언트 및 컴포넌트 라이브러리는 다음과 같이 라이선스가 부여됩니다. [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) - 이렇게 하면 빌드한 애플리케이션에 원하는 대로 라이선스를 부여할 수 있습니다.
|
||||
|
||||
<br /><br />
|
||||
|
||||
## ⭐ 스타 수의 역사
|
||||
|
||||
[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase)
|
||||
|
||||
빌더 업데이트 중 문제가 발생하는 경우 [여기](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) 를 참고하여 환경을 정리해 주세요.
|
||||
|
||||
<br /><br />
|
||||
|
||||
## Contributors ✨
|
||||
|
||||
훌륭한 여러분께 감사할 따름입니다. ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
|
||||
|
||||
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
|
||||
<!-- prettier-ignore-start -->
|
||||
<!-- markdownlint-disable -->
|
||||
<table>
|
||||
<tr>
|
||||
<td align="center"><a href="http://martinmck.com"><img src="https://avatars1.githubusercontent.com/u/11256663?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Martin McKeaveney</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=shogunpurple" title="Tests">⚠️</a> <a href="#infra-shogunpurple" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="http://www.michaeldrury.co.uk/"><img src="https://avatars2.githubusercontent.com/u/4407001?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Drury</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=mike12345567" title="Tests">⚠️</a> <a href="#infra-mike12345567" title="Infrastructure (Hosting, Build-Tools, etc)">🚇</a></td>
|
||||
<td align="center"><a href="https://github.com/aptkingston"><img src="https://avatars3.githubusercontent.com/u/9075550?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Andrew Kingston</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=aptkingston" title="Tests">⚠️</a> <a href="#design-aptkingston" title="Design">🎨</a></td>
|
||||
<td align="center"><a href="https://budibase.com/"><img src="https://avatars3.githubusercontent.com/u/3524181?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Michael Shanks</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=mjashanks" title="Tests">⚠️</a></td>
|
||||
<td align="center"><a href="https://github.com/kevmodrome"><img src="https://avatars3.githubusercontent.com/u/534488?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Kevin Åberg Kultalahti</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=kevmodrome" title="Tests">⚠️</a></td>
|
||||
<td align="center"><a href="https://www.budibase.com/"><img src="https://avatars2.githubusercontent.com/u/49767913?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Joe</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=joebudi" title="Code">💻</a> <a href="#content-joebudi" title="Content">🖋</a> <a href="#design-joebudi" title="Design">🎨</a></td>
|
||||
<td align="center"><a href="https://github.com/Rory-Powell"><img src="https://avatars.githubusercontent.com/u/8755148?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Rory Powell</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=Rory-Powell" title="Tests">⚠️</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/PClmnt"><img src="https://avatars.githubusercontent.com/u/5665926?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Peter Clement</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Documentation">📖</a> <a href="https://github.com/Budibase/budibase/commits?author=PClmnt" title="Tests">⚠️</a></td>
|
||||
<td align="center"><a href="https://github.com/Conor-Mack"><img src="https://avatars1.githubusercontent.com/u/36074859?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Conor_Mack</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=Conor-Mack" title="Tests">⚠️</a></td>
|
||||
<td align="center"><a href="https://github.com/pngwn"><img src="https://avatars1.githubusercontent.com/u/12937446?v=4?s=100" width="100px;" alt=""/><br /><sub><b>pngwn</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Code">💻</a> <a href="https://github.com/Budibase/budibase/commits?author=pngwn" title="Tests">⚠️</a></td>
|
||||
<td align="center"><a href="https://github.com/HugoLd"><img src="https://avatars0.githubusercontent.com/u/26521848?v=4?s=100" width="100px;" alt=""/><br /><sub><b>HugoLd</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=HugoLd" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/victoriasloan"><img src="https://avatars.githubusercontent.com/u/9913651?v=4?s=100" width="100px;" alt=""/><br /><sub><b>victoriasloan</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=victoriasloan" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/yashank09"><img src="https://avatars.githubusercontent.com/u/37672190?v=4?s=100" width="100px;" alt=""/><br /><sub><b>yashank09</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=yashank09" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/SOVLOOKUP"><img src="https://avatars.githubusercontent.com/u/53158137?v=4?s=100" width="100px;" alt=""/><br /><sub><b>SOVLOOKUP</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=SOVLOOKUP" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/seoulaja"><img src="https://avatars.githubusercontent.com/u/15101654?v=4?s=100" width="100px;" alt=""/><br /><sub><b>seoulaja</b></sub></a><br /><a href="#translation-seoulaja" title="Translation">🌍</a></td>
|
||||
<td align="center"><a href="https://github.com/mslourens"><img src="https://avatars.githubusercontent.com/u/1907152?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Maurits Lourens</b></sub></a><br /><a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Tests">⚠️</a> <a href="https://github.com/Budibase/budibase/commits?author=mslourens" title="Code">💻</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<!-- markdownlint-restore -->
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
<!-- ALL-CONTRIBUTORS-LIST:END -->
|
||||
|
||||
이 프로젝트는 다음 사양을 따릅니다. [all-contributors](https://github.com/all-contributors/all-contributors).
|
||||
모든 종류의 기여를 환영합니다!
|
|
@@ -1,5 +1,5 @@
 {
-  "version": "2.19.6",
+  "version": "2.22.1",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -22,7 +22,7 @@
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
-    "svelte": "3.49.0",
+    "svelte": "^4.2.10",
     "svelte-eslint-parser": "^0.33.1",
     "typescript": "5.2.2",
     "yargs": "^17.7.2"
@@ -1 +1 @@
-Subproject commit 8c446c4ba385592127fa31755d3b64467b291882
+Subproject commit 23a1219732bd778654c0bcc4f49910c511e2d51f
@@ -67,7 +67,7 @@
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.0",
-    "@types/redlock": "4.0.3",
+    "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",

@@ -78,6 +78,7 @@
     "jest-serial-runner": "1.2.1",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
+    "testcontainers": "^10.7.2",
     "timekeeper": "2.2.0",
     "typescript": "5.2.2"
   },
@@ -23,6 +23,18 @@ export default class BaseCache {
     return client.keys(pattern)
   }

+  async exists(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.exists(key)
+  }
+
+  async scan(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.scan(key)
+  }
+
   /**
    * Read only from the cache.
    */

@@ -32,6 +44,15 @@ export default class BaseCache {
     return client.get(key)
   }

+  /**
+   * Read only from the cache.
+   */
+  async bulkGet<T>(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkGet<T>(keys)
+  }
+
   /**
    * Write to the cache.
    */

@@ -46,6 +67,25 @@ export default class BaseCache {
     await client.store(key, value, ttl)
   }

+  /**
+   * Bulk write to the cache.
+   */
+  async bulkStore(
+    data: Record<string, any>,
+    ttl: number | null = null,
+    opts = { useTenancy: true }
+  ) {
+    if (opts.useTenancy) {
+      data = Object.entries(data).reduce((acc, [key, value]) => {
+        acc[generateTenantKey(key)] = value
+        return acc
+      }, {} as Record<string, any>)
+    }
+
+    const client = await this.getClient()
+    await client.bulkStore(data, ttl)
+  }
+
   /**
    * Remove from cache.
    */

@@ -55,15 +95,24 @@ export default class BaseCache {
     return client.delete(key)
   }

+  /**
+   * Remove from cache.
+   */
+  async bulkDelete(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkDelete(keys)
+  }
+
   /**
    * Read from the cache. Write to the cache if not exists.
    */
-  async withCache(
+  async withCache<T>(
     key: string,
-    ttl: number,
-    fetchFn: any,
+    ttl: number | null = null,
+    fetchFn: () => Promise<T> | T,
     opts = { useTenancy: true }
-  ) {
+  ): Promise<T> {
     const cachedValue = await this.get(key, opts)
     if (cachedValue) {
       return cachedValue

@@ -89,4 +138,13 @@ export default class BaseCache {
       throw err
     }
   }
+
+  /**
+   * Delete the entry if the provided value matches the stored one.
+   */
+  async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    await client.deleteIfValue(key, value)
+  }
 }
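Not part of the diff: a sketch of how the new BaseCache methods are meant to be called. `cache` stands in for an existing instance; the TTL unit (seconds), the import path and the value types are assumptions.

```ts
import BaseCache from "./base" // illustrative path to the class changed above

declare const cache: BaseCache // any existing BaseCache instance

interface Session {
  userId: string
}

export async function cacheExample() {
  // Bulk write and read added in this change; TTL assumed to be in seconds.
  await cache.bulkStore(
    { "session:1": { userId: "u1" }, "session:2": { userId: "u2" } },
    3600
  )
  const sessions = await cache.bulkGet<Session>(["session:1", "session:2"])

  // withCache is now generic and takes a nullable TTL: the fetch function only
  // runs on a miss, and the caller gets a typed value back.
  const current = await cache.withCache<Session>("session:1", null, async () => ({
    userId: "u1",
  }))

  // Existence check and conditional delete, also new in this change.
  const stillThere = await cache.exists("session:2")
  await cache.deleteIfValue("session:1", current)

  return { sessions, current, stillThere }
}
```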
@@ -0,0 +1,97 @@
+import { AnyDocument, Database } from "@budibase/types"
+
+import { JobQueue, createQueue } from "../queue"
+import * as dbUtils from "../db"
+
+interface ProcessDocMessage {
+  dbName: string
+  docId: string
+  data: Record<string, any>
+}
+
+const PERSIST_MAX_ATTEMPTS = 100
+let processor: DocWritethroughProcessor | undefined
+
+export const docWritethroughProcessorQueue = createQueue<ProcessDocMessage>(
+  JobQueue.DOC_WRITETHROUGH_QUEUE,
+  {
+    jobOptions: {
+      attempts: PERSIST_MAX_ATTEMPTS,
+    },
+  }
+)
+
+class DocWritethroughProcessor {
+  init() {
+    docWritethroughProcessorQueue.process(async message => {
+      try {
+        await this.persistToDb(message.data)
+      } catch (err: any) {
+        if (err.status === 409) {
+          // If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
+          throw new Error(
+            `Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
+          )
+        }
+
+        throw err
+      }
+    })
+    return this
+  }
+
+  private async persistToDb({
+    dbName,
+    docId,
+    data,
+  }: {
+    dbName: string
+    docId: string
+    data: Record<string, any>
+  }) {
+    const db = dbUtils.getDB(dbName)
+    let doc: AnyDocument | undefined
+    try {
+      doc = await db.get(docId)
+    } catch {
+      doc = { _id: docId }
+    }
+
+    doc = { ...doc, ...data }
+    await db.put(doc)
+  }
+}
+
+export class DocWritethrough {
+  private db: Database
+  private _docId: string
+
+  constructor(db: Database, docId: string) {
+    this.db = db
+    this._docId = docId
+  }
+
+  get docId() {
+    return this._docId
+  }
+
+  async patch(data: Record<string, any>) {
+    await docWritethroughProcessorQueue.add({
+      dbName: this.db.name,
+      docId: this.docId,
+      data,
+    })
+  }
+}
+
+export function init(): DocWritethroughProcessor {
+  processor = new DocWritethroughProcessor().init()
+  return processor
+}
+
+export function getProcessor(): DocWritethroughProcessor {
+  if (!processor) {
+    return init()
+  }
+  return processor
+}
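Not part of the diff: the intended usage of DocWritethrough, pieced together from the file above. The database name and document id are examples; `getDB` is the helper the module itself pulls from `../db`.

```ts
import { getDB } from "../db"
import { DocWritethrough, init } from "./docWritethrough"

// Start the queue consumer once per process (getProcessor() would do this lazily).
init()

export async function recordAppAccess() {
  const db = getDB("app_dev_example") // example database name
  const writethrough = new DocWritethrough(db, "app_metadata_example") // example doc id

  // Each patch only enqueues a message; the processor later loads the current
  // document, shallow-merges the patch data over it and writes it back,
  // retrying on 409 conflicts up to PERSIST_MAX_ATTEMPTS times.
  await writethrough.patch({ lastAccessedAt: new Date().toISOString() })
  await writethrough.patch({ accessedBy: "user_example" })
}
```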
@@ -26,7 +26,8 @@ export const store = (...args: Parameters<typeof GENERIC.store>) =>
   GENERIC.store(...args)
 export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
   GENERIC.delete(...args)
-export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
-  GENERIC.withCache(...args)
+export const withCache = <T>(
+  ...args: Parameters<typeof GENERIC.withCache<T>>
+) => GENERIC.withCache(...args)
 export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
   GENERIC.bustCache(...args)

@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
 export * as invite from "./invite"
 export * as passwordReset from "./passwordReset"
 export * from "./generic"
+export * as docWritethrough from "./docWritethrough"
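Not part of the diff: a sketch of the now-generic `withCache` wrapper changed above; the cached value type and TTL are illustrative.

```ts
import { withCache } from "./generic" // the wrapper changed in the hunk above

interface FeatureFlags {
  betaEnabled: boolean
}

// The type parameter now flows through Parameters<typeof GENERIC.withCache<T>>,
// so callers get FeatureFlags back instead of any.
export function loadFlags(): Promise<FeatureFlags> {
  return withCache<FeatureFlags>("feature-flags", 300, async () => ({
    betaEnabled: true,
  }))
}
```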
|
@ -0,0 +1,293 @@
|
|||
import tk from "timekeeper"
|
||||
|
||||
import _ from "lodash"
|
||||
import { DBTestConfiguration, generator, structures } from "../../../tests"
|
||||
import { getDB } from "../../db"
|
||||
|
||||
import {
|
||||
DocWritethrough,
|
||||
docWritethroughProcessorQueue,
|
||||
init,
|
||||
} from "../docWritethrough"
|
||||
|
||||
import InMemoryQueue from "../../queue/inMemoryQueue"
|
||||
|
||||
const initialTime = Date.now()
|
||||
|
||||
async function waitForQueueCompletion() {
|
||||
const queue: InMemoryQueue = docWritethroughProcessorQueue as never
|
||||
await queue.waitForCompletion()
|
||||
}
|
||||
|
||||
describe("docWritethrough", () => {
|
||||
beforeAll(() => {
|
||||
init()
|
||||
})
|
||||
|
||||
const config = new DBTestConfiguration()
|
||||
|
||||
const db = getDB(structures.db.id())
|
||||
let documentId: string
|
||||
let docWritethrough: DocWritethrough
|
||||
|
||||
describe("patch", () => {
|
||||
function generatePatchObject(fieldCount: number) {
|
||||
const keys = generator.unique(() => generator.guid(), fieldCount)
|
||||
return keys.reduce((acc, c) => {
|
||||
acc[c] = generator.word()
|
||||
return acc
|
||||
}, {} as Record<string, any>)
|
||||
}
|
||||
|
||||
beforeEach(async () => {
|
||||
jest.clearAllMocks()
|
||||
documentId = structures.uuid()
|
||||
docWritethrough = new DocWritethrough(db, documentId)
|
||||
})
|
||||
|
||||
it("patching will not persist until the messages are persisted", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
await docWritethrough.patch(generatePatchObject(2))
|
||||
await docWritethrough.patch(generatePatchObject(2))
|
||||
|
||||
expect(await db.exists(documentId)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
it("patching will persist when the messages are persisted", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const patch1 = generatePatchObject(2)
|
||||
const patch2 = generatePatchObject(2)
|
||||
await docWritethrough.patch(patch1)
|
||||
await docWritethrough.patch(patch2)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
|
||||
// This will not be persisted
|
||||
const patch3 = generatePatchObject(3)
|
||||
await docWritethrough.patch(patch3)
|
||||
|
||||
expect(await db.get(documentId)).toEqual({
|
||||
_id: documentId,
|
||||
...patch1,
|
||||
...patch2,
|
||||
_rev: expect.stringMatching(/2-.+/),
|
||||
createdAt: new Date(initialTime).toISOString(),
|
||||
updatedAt: new Date(initialTime).toISOString(),
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("patching will persist keeping the previous data", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const patch1 = generatePatchObject(2)
|
||||
const patch2 = generatePatchObject(2)
|
||||
await docWritethrough.patch(patch1)
|
||||
await docWritethrough.patch(patch2)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
|
||||
const patch3 = generatePatchObject(3)
|
||||
await docWritethrough.patch(patch3)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({
|
||||
_id: documentId,
|
||||
...patch1,
|
||||
...patch2,
|
||||
...patch3,
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it("date audit fields are set correctly when persisting", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const patch1 = generatePatchObject(2)
|
||||
const patch2 = generatePatchObject(2)
|
||||
await docWritethrough.patch(patch1)
|
||||
const date1 = new Date()
|
||||
await waitForQueueCompletion()
|
||||
await docWritethrough.patch(patch2)
|
||||
|
||||
tk.travel(Date.now() + 100)
|
||||
const date2 = new Date()
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(date1).not.toEqual(date2)
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({
|
||||
createdAt: date1.toISOString(),
|
||||
updatedAt: date2.toISOString(),
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it("concurrent patches will override keys", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const patch1 = generatePatchObject(2)
|
||||
await docWritethrough.patch(patch1)
|
||||
await waitForQueueCompletion()
|
||||
const patch2 = generatePatchObject(1)
|
||||
await docWritethrough.patch(patch2)
|
||||
|
||||
const keyToOverride = _.sample(Object.keys(patch1))!
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({
|
||||
[keyToOverride]: patch1[keyToOverride],
|
||||
})
|
||||
)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
|
||||
const patch3 = {
|
||||
...generatePatchObject(3),
|
||||
[keyToOverride]: generator.word(),
|
||||
}
|
||||
await docWritethrough.patch(patch3)
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({
|
||||
...patch1,
|
||||
...patch2,
|
||||
...patch3,
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it("concurrent patches to different docWritethrough will not pollute each other", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const secondDocWritethrough = new DocWritethrough(
|
||||
db,
|
||||
structures.db.id()
|
||||
)
|
||||
|
||||
const doc1Patch = generatePatchObject(2)
|
||||
await docWritethrough.patch(doc1Patch)
|
||||
const doc2Patch = generatePatchObject(1)
|
||||
await secondDocWritethrough.patch(doc2Patch)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
|
||||
const doc1Patch2 = generatePatchObject(3)
|
||||
await docWritethrough.patch(doc1Patch2)
|
||||
const doc2Patch2 = generatePatchObject(3)
|
||||
await secondDocWritethrough.patch(doc2Patch2)
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(await db.get(docWritethrough.docId)).toEqual(
|
||||
expect.objectContaining({
|
||||
...doc1Patch,
|
||||
...doc1Patch2,
|
||||
})
|
||||
)
|
||||
|
||||
expect(await db.get(secondDocWritethrough.docId)).toEqual(
|
||||
expect.objectContaining({
|
||||
...doc2Patch,
|
||||
...doc2Patch2,
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it("cached values are persisted only once", async () => {
|
||||
await config.doInTenant(async () => {
|
||||
const initialPatch = generatePatchObject(5)
|
||||
|
||||
await docWritethrough.patch(initialPatch)
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining(initialPatch)
|
||||
)
|
||||
|
||||
await db.remove(await db.get(documentId))
|
||||
|
||||
await waitForQueueCompletion()
|
||||
const extraPatch = generatePatchObject(5)
|
||||
await docWritethrough.patch(extraPatch)
|
||||
await waitForQueueCompletion()
|
||||
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining(extraPatch)
|
||||
)
|
||||
expect(await db.get(documentId)).not.toEqual(
|
||||
expect.objectContaining(initialPatch)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it("concurrent calls will not cause conflicts", async () => {
|
||||
async function parallelPatch(count: number) {
|
||||
const patches = Array.from({ length: count }).map(() =>
|
||||
generatePatchObject(1)
|
||||
)
|
||||
await Promise.all(patches.map(p => docWritethrough.patch(p)))
|
||||
|
||||
return patches.reduce((acc, c) => {
|
||||
acc = { ...acc, ...c }
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add")
|
||||
|
||||
await config.doInTenant(async () => {
|
||||
let patches = await parallelPatch(5)
|
||||
expect(queueMessageSpy).toBeCalledTimes(5)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining(patches)
|
||||
)
|
||||
|
||||
patches = { ...patches, ...(await parallelPatch(40)) }
|
||||
expect(queueMessageSpy).toBeCalledTimes(45)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining(patches)
|
||||
)
|
||||
|
||||
patches = { ...patches, ...(await parallelPatch(10)) }
|
||||
expect(queueMessageSpy).toBeCalledTimes(55)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining(patches)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
// This is not yet supported
|
||||
it.skip("patches will execute in order", async () => {
|
||||
let incrementalValue = 0
|
||||
const keyToOverride = generator.word()
|
||||
async function incrementalPatches(count: number) {
|
||||
for (let i = 0; i < count; i++) {
|
||||
await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
|
||||
}
|
||||
}
|
||||
|
||||
await config.doInTenant(async () => {
|
||||
await incrementalPatches(5)
|
||||
|
||||
await waitForQueueCompletion()
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({ [keyToOverride]: 5 })
|
||||
)
|
||||
|
||||
await incrementalPatches(40)
|
||||
await waitForQueueCompletion()
|
||||
expect(await db.get(documentId)).toEqual(
|
||||
expect.objectContaining({ [keyToOverride]: 45 })
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -6,7 +6,7 @@ import env from "../environment"
 import * as accounts from "../accounts"
 import { UserDB } from "../users"
 import { sdk } from "@budibase/shared-core"
-import { User } from "@budibase/types"
+import { User, UserMetadata } from "@budibase/types"

 const EXPIRY_SECONDS = 3600

@@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
  */
 async function populateFromDB(userId: string, tenantId: string) {
   const db = tenancy.getTenantDB(tenantId)
-  const user = await db.get<any>(userId)
+  const user = await db.get<UserMetadata>(userId)
   user.budibaseAccess = true
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
     const account = await accounts.getAccount(user.email)
@@ -57,6 +57,9 @@ export const StaticDatabases = {
   AUDIT_LOGS: {
     name: "audit-logs",
   },
+  SCIM_LOGS: {
+    name: "scim-logs",
+  },
 }

 export const APP_PREFIX = prefixed(DocumentType.APP)
@@ -10,7 +10,7 @@ import {
   StaticDatabases,
   DEFAULT_TENANT_ID,
 } from "../constants"
-import { Database, IdentityContext } from "@budibase/types"
+import { Database, IdentityContext, Snippet, App } from "@budibase/types"
 import { ContextMap } from "./types"

 let TEST_APP_ID: string | null = null

@@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) {
   }
 }

+export function getScimDBName(tenantId?: string) {
+  if (!tenantId) {
+    tenantId = getTenantId()
+  }
+  if (tenantId === DEFAULT_TENANT_ID) {
+    return StaticDatabases.SCIM_LOGS.name
+  } else {
+    return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}`
+  }
+}
+
 export function baseGlobalDBName(tenantId: string | undefined | null) {
   if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
     return StaticDatabases.GLOBAL.name

@@ -111,10 +122,10 @@ export async function doInAutomationContext<T>(params: {
   automationId: string
   task: () => T
 }): Promise<T> {
-  const tenantId = getTenantIDFromAppID(params.appId)
+  await ensureSnippetContext()
   return newContext(
     {
-      tenantId,
+      tenantId: getTenantIDFromAppID(params.appId),
       appId: params.appId,
       automationId: params.automationId,
     },

@@ -270,6 +281,27 @@ export function doInScimContext(task: any) {
   return newContext(updates, task)
 }

+export async function ensureSnippetContext() {
+  const ctx = getCurrentContext()
+
+  // If we've already added snippets to context, continue
+  if (!ctx || ctx.snippets) {
+    return
+  }
+
+  // Otherwise get snippets for this app and update context
+  let snippets: Snippet[] | undefined
+  const db = getAppDB()
+  if (db && !env.isTest()) {
+    const app = await db.get<App>(DocumentType.APP_METADATA)
+    snippets = app.snippets
+  }
+
+  // Always set snippets to a non-null value so that we can tell we've attempted
+  // to load snippets
+  ctx.snippets = snippets || []
+}
+
 export function getEnvironmentVariables() {
   const context = Context.get()
   if (!context.environmentVariables) {
@ -1,5 +1,4 @@
|
|||
import { IdentityContext, VM } from "@budibase/types"
|
||||
import { ExecutionTimeTracker } from "../timers"
|
||||
import { IdentityContext, Snippet, VM } from "@budibase/types"
|
||||
|
||||
// keep this out of Budibase types, don't want to expose context info
|
||||
export type ContextMap = {
|
||||
|
@ -10,6 +9,7 @@ export type ContextMap = {
|
|||
isScim?: boolean
|
||||
automationId?: string
|
||||
isMigrating?: boolean
|
||||
jsExecutionTracker?: ExecutionTimeTracker
|
||||
vm?: VM
|
||||
cleanup?: (() => void | Promise<void>)[]
|
||||
snippets?: Snippet[]
|
||||
}
|
||||
|
|
|
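For orientation on the snippet changes above: an automation run now gets snippets cached onto the context map before its task executes, because doInAutomationContext awaits ensureSnippetContext() first. A minimal usage sketch, with placeholder IDs and a hypothetical step-runner (neither is part of this diff):

  // assumption: appId/automationId come from the queued job being processed
  const result = await context.doInAutomationContext({
    appId: "app_dev_placeholder",
    automationId: "automation_placeholder",
    task: async () => {
      // ctx.snippets is now populated (or [] when running under tests)
      return runAutomationSteps() // hypothetical helper
    },
  })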
@@ -1,66 +1,57 @@
import PouchDB from "pouchdb"
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"

class Replication {
  source: any
  target: any
  replication: any
  source: PouchDB.Database
  target: PouchDB.Database

  /**
   *
   * @param source - the DB you want to replicate or rollback to
   * @param target - the DB you want to replicate to, or rollback from
   */
  constructor({ source, target }: any) {
  constructor({ source, target }: { source: string; target: string }) {
    this.source = getPouchDB(source)
    this.target = getPouchDB(target)
  }

  close() {
    return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
  async close() {
    await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
  }

  promisify(operation: any, opts = {}) {
    return new Promise(resolve => {
      operation(this.target, opts)
        .on("denied", function (err: any) {
  replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
    return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
      this.source.replicate
        .to(this.target, opts)
        .on("denied", function (err) {
          // a document failed to replicate (e.g. due to permissions)
          throw new Error(`Denied: Document failed to replicate ${err}`)
        })
        .on("complete", function (info: any) {
        .on("complete", function (info) {
          return resolve(info)
        })
        .on("error", function (err: any) {
        .on("error", function (err) {
          throw new Error(`Replication Error: ${err}`)
        })
    })
  }

  /**
   * Two way replication operation, intended to be promise based.
   * @param opts - PouchDB replication options
   */
  sync(opts = {}) {
    this.replication = this.promisify(this.source.sync, opts)
    return this.replication
  }
  appReplicateOpts(
    opts: PouchDB.Replication.ReplicateOptions = {}
  ): PouchDB.Replication.ReplicateOptions {
    if (typeof opts.filter === "string") {
      return opts
    }

  /**
   * One way replication operation, intended to be promise based.
   * @param opts - PouchDB replication options
   */
  replicate(opts = {}) {
    this.replication = this.promisify(this.source.replicate.to, opts)
    return this.replication
  }
    const filter = opts.filter
    delete opts.filter

  appReplicateOpts() {
    return {
      filter: (doc: any) => {
      ...opts,
      filter: (doc: any, params: any) => {
        if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
          return false
        }
        return doc._id !== DocumentType.APP_METADATA
        if (doc._id === DocumentType.APP_METADATA) {
          return false
        }
        return filter ? filter(doc, params) : true
      },
    }
  }

@@ -75,10 +66,6 @@ class Replication {
    // take the opportunity to remove deleted tombstones
    await this.replicate()
  }

  cancel() {
    this.replication.cancel()
  }
}

export default Replication
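For reference, the reworked Replication class is used roughly as follows; the database names are illustrative, and appReplicateOpts() now composes the built-in app filter with any caller-supplied filter function:

  const replication = new Replication({
    source: "app_dev_example", // hypothetical source DB name
    target: "app_example",     // hypothetical target DB name
  })
  try {
    await replication.replicate(replication.appReplicateOpts())
  } finally {
    await replication.close()
  }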
@@ -11,6 +11,7 @@ import {
  Document,
  isDocument,
  RowResponse,
  RowValue,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchUrlCall } from "./utils"

@@ -69,7 +70,15 @@ export class DatabaseImpl implements Database {
    DatabaseImpl.nano = buildNano(couchInfo)
  }

  async exists() {
  exists(docId?: string) {
    if (docId === undefined) {
      return this.dbExists()
    }

    return this.docExists(docId)
  }

  private async dbExists() {
    const response = await directCouchUrlCall({
      url: `${this.couchInfo.url}/${this.name}`,
      method: "HEAD",

@@ -78,6 +87,15 @@ export class DatabaseImpl implements Database {
    return response.status === 200
  }

  private async docExists(id: string): Promise<boolean> {
    try {
      await this.performCall(db => () => db.head(id))
      return true
    } catch {
      return false
    }
  }

  private nano() {
    return this.instanceNano || DatabaseImpl.nano
  }

@@ -221,7 +239,7 @@ export class DatabaseImpl implements Database {
    })
  }

  async allDocs<T extends Document>(
  async allDocs<T extends Document | RowValue>(
    params: DatabaseQueryOpts
  ): Promise<AllDocsResponse<T>> {
    return this.performCall(db => {
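A quick sketch of the overloaded exists(): with no argument it keeps the old behaviour of checking the database itself (HEAD on the DB URL), while passing an ID checks a single document (HEAD on the doc). The names below are illustrative:

  const db = new DatabaseImpl("app_example")      // hypothetical database name
  const dbExists = await db.exists()              // database-level check
  const docExists = await db.exists("doc_abc123") // document-level check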
@@ -1,5 +1,4 @@
import {
  DocumentScope,
  DocumentDestroyResponse,
  DocumentInsertResponse,
  DocumentBulkResponse,

@@ -13,6 +12,7 @@ import {
  DatabasePutOpts,
  DatabaseQueryOpts,
  Document,
  RowValue,
} from "@budibase/types"
import tracer from "dd-trace"
import { Writable } from "stream"

@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
    return this.db.name
  }

  exists(): Promise<boolean> {
  exists(docId?: string): Promise<boolean> {
    return tracer.trace("db.exists", span => {
      span?.addTags({ db_name: this.name })
      span?.addTags({ db_name: this.name, doc_id: docId })
      if (docId) {
        return this.db.exists(docId)
      }
      return this.db.exists()
    })
  }

@@ -79,7 +82,7 @@ export class DDInstrumentedDatabase implements Database {
    })
  }

  allDocs<T extends Document>(
  allDocs<T extends Document | RowValue>(
    params: DatabaseQueryOpts
  ): Promise<AllDocsResponse<T>> {
    return tracer.trace("db.allDocs", span => {
@@ -0,0 +1,55 @@
import _ from "lodash"
import { AnyDocument } from "@budibase/types"
import { generator } from "../../../tests"
import { DatabaseImpl } from "../couch"
import { newid } from "../../utils"

describe("DatabaseImpl", () => {
  const database = new DatabaseImpl(generator.word())
  const documents: AnyDocument[] = []

  beforeAll(async () => {
    const docsToCreate = Array.from({ length: 10 }).map(() => ({
      _id: newid(),
    }))
    const createdDocs = await database.bulkDocs(docsToCreate)

    documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
  })

  describe("document exists", () => {
    it("can check existing docs by id", async () => {
      const existingDoc = _.sample(documents)
      const result = await database.exists(existingDoc!._id!)

      expect(result).toBe(true)
    })

    it("can check non existing docs by id", async () => {
      const result = await database.exists(newid())

      expect(result).toBe(false)
    })

    it("can check an existing doc by id multiple times", async () => {
      const existingDoc = _.sample(documents)
      const id = existingDoc!._id!

      const results = []
      results.push(await database.exists(id))
      results.push(await database.exists(id))
      results.push(await database.exists(id))

      expect(results).toEqual([true, true, true])
    })

    it("returns false after the doc is deleted", async () => {
      const existingDoc = _.sample(documents)
      const id = existingDoc!._id!
      expect(await database.exists(id)).toBe(true)

      await database.remove(existingDoc!)
      expect(await database.exists(id)).toBe(false)
    })
  })
})
@@ -74,7 +74,7 @@ export function getGlobalIDFromUserMetadataID(id: string) {
 * Generates a template ID.
 * @param ownerId The owner/user of the template, this could be global or a workspace level.
 */
export function generateTemplateID(ownerId: any) {
export function generateTemplateID(ownerId: string) {
  return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}

@@ -105,7 +105,7 @@ export function prefixRoleID(name: string) {
 * Generates a new dev info document ID - this is scoped to a user.
 * @returns The new dev info ID which info for dev (like api key) can be stored under.
 */
export const generateDevInfoID = (userId: any) => {
export const generateDevInfoID = (userId: string) => {
  return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}

@@ -186,6 +186,7 @@ const environment = {
    environment[key] = value
  },
  ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
  DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
  SKIP_APP_MIGRATIONS: process.env.SKIP_APP_MIGRATIONS || false,
}
@@ -13,6 +13,7 @@ import {
  AppVersionRevertedEvent,
  AppRevertedEvent,
  AppExportedEvent,
  AppDuplicatedEvent,
} from "@budibase/types"

const created = async (app: App, timestamp?: string | number) => {

@@ -77,6 +78,17 @@ async function fileImported(app: App) {
  await publishEvent(Event.APP_FILE_IMPORTED, properties)
}

async function duplicated(app: App, duplicateAppId: string) {
  const properties: AppDuplicatedEvent = {
    duplicateAppId,
    appId: app.appId,
    audited: {
      name: app.name,
    },
  }
  await publishEvent(Event.APP_DUPLICATED, properties)
}

async function templateImported(app: App, templateKey: string) {
  const properties: AppTemplateImportedEvent = {
    appId: app.appId,

@@ -147,6 +159,7 @@ export default {
  published,
  unpublished,
  fileImported,
  duplicated,
  templateImported,
  versionUpdated,
  versionReverted,
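The new duplicated publisher sits alongside the other app events, so a caller that has just copied an app can emit it directly (the variables are illustrative):

  // assumption: `app` is the source App document and `newAppId` is the copy's appId
  await events.app.duplicated(app, newAppId)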
@@ -1,5 +1,6 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
import environment from "../environment"

export async function errorHandling(ctx: any, next: any) {
  try {

@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
      console.error(err)
    }

    const error = errors.getPublicError(err)
    const body: APIError = {
    let error: APIError = {
      message: err.message,
      status: status,
      validationErrors: err.validation,
      error,
      error: errors.getPublicError(err),
    }

    ctx.body = body
    if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
      // @ts-ignore
      error.stack = err.stack
    }

    ctx.body = error
  }
}
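One practical effect of the error-handler change: in test runs a request can opt into receiving the stack trace by sending the x-budibase-include-stacktrace header. A hedged sketch of how a test might rely on this, assuming a supertest-style request helper and a hypothetical failing route:

  const res = await request
    .get("/api/failing/route")                 // hypothetical route that throws
    .set("x-budibase-include-stacktrace", "1") // only honoured when environment.isTest()
  expect(res.body.stack).toBeDefined()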
@@ -1,12 +1,12 @@
import Joi, { ObjectSchema } from "joi"
import { BBContext } from "@budibase/types"
import Joi from "joi"
import { Ctx } from "@budibase/types"

function validate(
  schema: Joi.ObjectSchema | Joi.ArraySchema,
  property: string
) {
  // Return a Koa middleware function
  return (ctx: BBContext, next: any) => {
  return (ctx: Ctx, next: any) => {
    if (!schema) {
      return next()
    }

@@ -30,7 +30,6 @@ function validate(
    const { error } = schema.validate(params)
    if (error) {
      ctx.throw(400, `Invalid ${property} - ${error.message}`)
      return
    }
    return next()
  }

@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"

// URLS

export function enrichPluginURLs(plugins: Plugin[]) {
export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
  if (!plugins || !plugins.length) {
    return []
  }

@@ -4,4 +4,5 @@ export enum JobQueue {
  AUDIT_LOG = "auditLogQueue",
  SYSTEM_EVENT_QUEUE = "systemEventQueue",
  APP_MIGRATION = "appMigration",
  DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue",
}
@ -1,5 +1,14 @@
|
|||
import events from "events"
|
||||
import { timeout } from "../utils"
|
||||
import { newid, timeout } from "../utils"
|
||||
import { Queue, QueueOptions, JobOptions } from "./queue"
|
||||
|
||||
interface JobMessage {
|
||||
id: string
|
||||
timestamp: number
|
||||
queue: string
|
||||
data: any
|
||||
opts?: JobOptions
|
||||
}
|
||||
|
||||
/**
|
||||
* Bull works with a Job wrapper around all messages that contains a lot more information about
|
||||
|
@ -10,12 +19,13 @@ import { timeout } from "../utils"
|
|||
* @returns A new job which can now be put onto the queue, this is mostly an
|
||||
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
|
||||
*/
|
||||
function newJob(queue: string, message: any) {
|
||||
function newJob(queue: string, message: any, opts?: JobOptions): JobMessage {
|
||||
return {
|
||||
id: newid(),
|
||||
timestamp: Date.now(),
|
||||
queue: queue,
|
||||
data: message,
|
||||
opts: {},
|
||||
opts,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -24,26 +34,29 @@ function newJob(queue: string, message: any) {
|
|||
* It is relatively simple, using an event emitter internally to register when messages are available
|
||||
* to the consumers - in can support many inputs and many consumers.
|
||||
*/
|
||||
class InMemoryQueue {
|
||||
class InMemoryQueue implements Partial<Queue> {
|
||||
_name: string
|
||||
_opts?: any
|
||||
_messages: any[]
|
||||
_opts?: QueueOptions
|
||||
_messages: JobMessage[]
|
||||
_queuedJobIds: Set<string>
|
||||
_emitter: EventEmitter
|
||||
_runCount: number
|
||||
_addCount: number
|
||||
|
||||
/**
|
||||
* The constructor the queue, exactly the same as that of Bulls.
|
||||
* @param name The name of the queue which is being configured.
|
||||
* @param opts This is not used by the in memory queue as there is no real use
|
||||
* case when in memory, but is the same API as Bull
|
||||
*/
|
||||
constructor(name: string, opts?: any) {
|
||||
constructor(name: string, opts?: QueueOptions) {
|
||||
this._name = name
|
||||
this._opts = opts
|
||||
this._messages = []
|
||||
this._emitter = new events.EventEmitter()
|
||||
this._runCount = 0
|
||||
this._addCount = 0
|
||||
this._queuedJobIds = new Set<string>()
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -55,22 +68,42 @@ class InMemoryQueue {
|
|||
* note this is incredibly limited compared to Bull as in reality the Job would contain
|
||||
* a lot more information about the queue and current status of Bull cluster.
|
||||
*/
|
||||
process(func: any) {
|
||||
async process(func: any) {
|
||||
this._emitter.on("message", async () => {
|
||||
if (this._messages.length <= 0) {
|
||||
return
|
||||
}
|
||||
let msg = this._messages.shift()
|
||||
|
||||
let resp = func(msg)
|
||||
|
||||
async function retryFunc(fnc: any) {
|
||||
try {
|
||||
await fnc
|
||||
} catch (e: any) {
|
||||
await new Promise<void>(r => setTimeout(() => r(), 50))
|
||||
|
||||
await retryFunc(func(msg))
|
||||
}
|
||||
}
|
||||
|
||||
if (resp.then != null) {
|
||||
await resp
|
||||
try {
|
||||
await retryFunc(resp)
|
||||
} catch (e: any) {
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
this._runCount++
|
||||
const jobId = msg?.opts?.jobId?.toString()
|
||||
if (jobId && msg?.opts?.removeOnComplete) {
|
||||
this._queuedJobIds.delete(jobId)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async isReady() {
|
||||
return true
|
||||
return this as any
|
||||
}
|
||||
|
||||
// simply puts a message to the queue and emits to the queue for processing
|
||||
|
@ -83,27 +116,45 @@ class InMemoryQueue {
|
|||
* @param repeat serves no purpose for the import queue.
|
||||
*/
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
add(msg: any, repeat: boolean) {
|
||||
if (typeof msg !== "object") {
|
||||
async add(data: any, opts?: JobOptions) {
|
||||
const jobId = opts?.jobId?.toString()
|
||||
if (jobId && this._queuedJobIds.has(jobId)) {
|
||||
console.log(`Ignoring already queued job ${jobId}`)
|
||||
return
|
||||
}
|
||||
|
||||
if (typeof data !== "object") {
|
||||
throw "Queue only supports carrying JSON."
|
||||
}
|
||||
this._messages.push(newJob(this._name, msg))
|
||||
this._addCount++
|
||||
this._emitter.emit("message")
|
||||
if (jobId) {
|
||||
this._queuedJobIds.add(jobId)
|
||||
}
|
||||
|
||||
const pushMessage = () => {
|
||||
this._messages.push(newJob(this._name, data, opts))
|
||||
this._addCount++
|
||||
this._emitter.emit("message")
|
||||
}
|
||||
|
||||
const delay = opts?.delay
|
||||
if (delay) {
|
||||
setTimeout(pushMessage, delay)
|
||||
} else {
|
||||
pushMessage()
|
||||
}
|
||||
return {} as any
|
||||
}
|
||||
|
||||
/**
|
||||
* replicating the close function from bull, which waits for jobs to finish.
|
||||
*/
|
||||
async close() {
|
||||
return []
|
||||
}
|
||||
async close() {}
|
||||
|
||||
/**
|
||||
* This removes a cron which has been implemented, this is part of Bull API.
|
||||
* @param cronJobId The cron which is to be removed.
|
||||
*/
|
||||
removeRepeatableByKey(cronJobId: string) {
|
||||
async removeRepeatableByKey(cronJobId: string) {
|
||||
// TODO: implement for testing
|
||||
console.log(cronJobId)
|
||||
}
|
||||
|
@ -111,12 +162,12 @@ class InMemoryQueue {
|
|||
/**
|
||||
* Implemented for tests
|
||||
*/
|
||||
getRepeatableJobs() {
|
||||
async getRepeatableJobs() {
|
||||
return []
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
removeJobs(pattern: string) {
|
||||
async removeJobs(pattern: string) {
|
||||
// no-op
|
||||
}
|
||||
|
||||
|
@ -128,18 +179,22 @@ class InMemoryQueue {
|
|||
}
|
||||
|
||||
async getJob() {
|
||||
return {}
|
||||
return null
|
||||
}
|
||||
|
||||
on() {
|
||||
// do nothing
|
||||
return this
|
||||
return this as any
|
||||
}
|
||||
|
||||
async waitForCompletion() {
|
||||
do {
|
||||
await timeout(50)
|
||||
} while (this._addCount < this._runCount)
|
||||
} while (this.hasRunningJobs())
|
||||
}
|
||||
|
||||
hasRunningJobs() {
|
||||
return this._addCount > this._runCount
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -88,6 +88,7 @@ enum QueueEventType {
  AUDIT_LOG_EVENT = "audit-log-event",
  SYSTEM_EVENT = "system-event",
  APP_MIGRATION = "app-migration",
  DOC_WRITETHROUGH = "doc-writethrough",
}

const EventTypeMap: { [key in JobQueue]: QueueEventType } = {

@@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
  [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
  [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
  [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
  [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH,
}

function logging(queue: Queue, jobQueue: JobQueue) {

@@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"

export { QueueOptions, Queue, JobOptions } from "bull"

// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
@@ -9,7 +9,8 @@ let userClient: Client,
  lockClient: Client,
  socketClient: Client,
  inviteClient: Client,
  passwordResetClient: Client
  passwordResetClient: Client,
  docWritethroughClient: Client

export async function init() {
  userClient = await new Client(utils.Databases.USER_CACHE).init()

@@ -24,6 +25,9 @@ export async function init() {
    utils.Databases.SOCKET_IO,
    utils.SelectableDatabase.SOCKET_IO
  ).init()
  docWritethroughClient = await new Client(
    utils.Databases.DOC_WRITE_THROUGH
  ).init()
}

export async function shutdown() {

@@ -104,3 +108,10 @@ export async function getPasswordResetClient() {
  }
  return passwordResetClient
}

export async function getDocWritethroughClient() {
  if (!docWritethroughClient) {
    await init()
  }
  return docWritethroughClient
}
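The new client follows the same lazy-init pattern as the others, so a caller can fetch it and use the wrapper API directly; the key and value here are illustrative:

  const client = await getDocWritethroughClient()
  await client.store("doc-writethrough:example-key", { patched: true })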
@ -1,5 +1,5 @@
|
|||
import env from "../environment"
|
||||
import Redis from "ioredis"
|
||||
import Redis, { Cluster } from "ioredis"
|
||||
// mock-redis doesn't have any typing
|
||||
let MockRedis: any | undefined
|
||||
if (env.MOCK_REDIS) {
|
||||
|
@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
|
|||
|
||||
// for testing just generate the client once
|
||||
let CLOSED = false
|
||||
let CLIENTS: { [key: number]: any } = {}
|
||||
const CLIENTS: Record<number, Redis> = {}
|
||||
let CONNECTED = false
|
||||
|
||||
// mock redis always connected
|
||||
|
@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
|
|||
CONNECTED = true
|
||||
}
|
||||
|
||||
function pickClient(selectDb: number): any {
|
||||
function pickClient(selectDb: number) {
|
||||
return CLIENTS[selectDb]
|
||||
}
|
||||
|
||||
|
@ -201,12 +201,15 @@ class RedisWrapper {
|
|||
key = `${db}${SEPARATOR}${key}`
|
||||
let stream
|
||||
if (CLUSTERED) {
|
||||
let node = this.getClient().nodes("master")
|
||||
let node = (this.getClient() as never as Cluster).nodes("master")
|
||||
stream = node[0].scanStream({ match: key + "*", count: 100 })
|
||||
} else {
|
||||
stream = this.getClient().scanStream({ match: key + "*", count: 100 })
|
||||
stream = (this.getClient() as Redis).scanStream({
|
||||
match: key + "*",
|
||||
count: 100,
|
||||
})
|
||||
}
|
||||
return promisifyStream(stream, this.getClient())
|
||||
return promisifyStream(stream, this.getClient() as any)
|
||||
}
|
||||
|
||||
async keys(pattern: string) {
|
||||
|
@ -221,14 +224,16 @@ class RedisWrapper {
|
|||
|
||||
async get(key: string) {
|
||||
const db = this._db
|
||||
let response = await this.getClient().get(addDbPrefix(db, key))
|
||||
const response = await this.getClient().get(addDbPrefix(db, key))
|
||||
// overwrite the prefixed key
|
||||
// @ts-ignore
|
||||
if (response != null && response.key) {
|
||||
// @ts-ignore
|
||||
response.key = key
|
||||
}
|
||||
// if its not an object just return the response
|
||||
try {
|
||||
return JSON.parse(response)
|
||||
return JSON.parse(response!)
|
||||
} catch (err) {
|
||||
return response
|
||||
}
|
||||
|
@ -274,13 +279,37 @@ class RedisWrapper {
|
|||
}
|
||||
}
|
||||
|
||||
async bulkStore(
|
||||
data: Record<string, any>,
|
||||
expirySeconds: number | null = null
|
||||
) {
|
||||
const client = this.getClient()
|
||||
|
||||
const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
|
||||
acc[addDbPrefix(this._db, key)] =
|
||||
typeof value === "object" ? JSON.stringify(value) : value
|
||||
return acc
|
||||
}, {} as Record<string, any>)
|
||||
|
||||
const pipeline = client.pipeline()
|
||||
pipeline.mset(dataToStore)
|
||||
|
||||
if (expirySeconds !== null) {
|
||||
for (const key of Object.keys(dataToStore)) {
|
||||
pipeline.expire(key, expirySeconds)
|
||||
}
|
||||
}
|
||||
|
||||
await pipeline.exec()
|
||||
}
|
||||
|
||||
async getTTL(key: string) {
|
||||
const db = this._db
|
||||
const prefixedKey = addDbPrefix(db, key)
|
||||
return this.getClient().ttl(prefixedKey)
|
||||
}
|
||||
|
||||
async setExpiry(key: string, expirySeconds: number | null) {
|
||||
async setExpiry(key: string, expirySeconds: number) {
|
||||
const db = this._db
|
||||
const prefixedKey = addDbPrefix(db, key)
|
||||
await this.getClient().expire(prefixedKey, expirySeconds)
|
||||
|
@ -291,10 +320,35 @@ class RedisWrapper {
|
|||
await this.getClient().del(addDbPrefix(db, key))
|
||||
}
|
||||
|
||||
async bulkDelete(keys: string[]) {
|
||||
const db = this._db
|
||||
await this.getClient().del(keys.map(key => addDbPrefix(db, key)))
|
||||
}
|
||||
|
||||
async clear() {
|
||||
let items = await this.scan()
|
||||
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
|
||||
}
|
||||
|
||||
async increment(key: string) {
|
||||
const result = await this.getClient().incr(addDbPrefix(this._db, key))
|
||||
if (isNaN(result)) {
|
||||
throw new Error(`Redis ${key} does not contain a number`)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
async deleteIfValue(key: string, value: any) {
|
||||
const client = this.getClient()
|
||||
|
||||
const luaScript = `
|
||||
if redis.call('GET', KEYS[1]) == ARGV[1] then
|
||||
redis.call('DEL', KEYS[1])
|
||||
end
|
||||
`
|
||||
|
||||
await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
|
||||
}
|
||||
}
|
||||
|
||||
export default RedisWrapper
|
||||
|
|
|
@ -72,7 +72,7 @@ const OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {
|
|||
export async function newRedlock(opts: Redlock.Options = {}) {
|
||||
const options = { ...OPTIONS.DEFAULT, ...opts }
|
||||
const redisWrapper = await getLockClient()
|
||||
const client = redisWrapper.getClient()
|
||||
const client = redisWrapper.getClient() as any
|
||||
return new Redlock([client], options)
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,214 @@
|
|||
import { GenericContainer, StartedTestContainer } from "testcontainers"
|
||||
import { generator, structures } from "../../../tests"
|
||||
import RedisWrapper from "../redis"
|
||||
import { env } from "../.."
|
||||
|
||||
jest.setTimeout(30000)
|
||||
|
||||
describe("redis", () => {
|
||||
let redis: RedisWrapper
|
||||
let container: StartedTestContainer
|
||||
|
||||
beforeAll(async () => {
|
||||
const container = await new GenericContainer("redis")
|
||||
.withExposedPorts(6379)
|
||||
.start()
|
||||
|
||||
env._set(
|
||||
"REDIS_URL",
|
||||
`${container.getHost()}:${container.getMappedPort(6379)}`
|
||||
)
|
||||
env._set("MOCK_REDIS", 0)
|
||||
env._set("REDIS_PASSWORD", 0)
|
||||
})
|
||||
|
||||
afterAll(() => container?.stop())
|
||||
|
||||
beforeEach(async () => {
|
||||
redis = new RedisWrapper(structures.db.id())
|
||||
await redis.init()
|
||||
})
|
||||
|
||||
describe("store", () => {
|
||||
it("a basic value can be persisted", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
|
||||
await redis.store(key, value)
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
|
||||
it("objects can be persisted", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = { [generator.word()]: generator.word() }
|
||||
|
||||
await redis.store(key, value)
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
|
||||
describe("bulkStore", () => {
|
||||
function createRandomObject(
|
||||
keyLength: number,
|
||||
valueGenerator: () => any = () => generator.word()
|
||||
) {
|
||||
return generator
|
||||
.unique(() => generator.word(), keyLength)
|
||||
.reduce((acc, key) => {
|
||||
acc[key] = valueGenerator()
|
||||
return acc
|
||||
}, {} as Record<string, string>)
|
||||
}
|
||||
|
||||
it("a basic object can be persisted", async () => {
|
||||
const data = createRandomObject(10)
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(10)
|
||||
})
|
||||
|
||||
it("a complex object can be persisted", async () => {
|
||||
const data = {
|
||||
...createRandomObject(10, () => createRandomObject(5)),
|
||||
...createRandomObject(5),
|
||||
}
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(15)
|
||||
})
|
||||
|
||||
it("no TTL is set by default", async () => {
|
||||
const data = createRandomObject(10)
|
||||
|
||||
await redis.bulkStore(data)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
expect(await redis.getTTL(key)).toEqual(-1)
|
||||
}
|
||||
})
|
||||
|
||||
it("a bulk store can be persisted with TTL", async () => {
|
||||
const ttl = 500
|
||||
const data = createRandomObject(8)
|
||||
|
||||
await redis.bulkStore(data, ttl)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
expect(await redis.getTTL(key)).toEqual(ttl)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(8)
|
||||
})
|
||||
|
||||
it("setting a TTL of -1 will not persist the key", async () => {
|
||||
const ttl = -1
|
||||
const data = createRandomObject(5)
|
||||
|
||||
await redis.bulkStore(data, ttl)
|
||||
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
expect(await redis.get(key)).toBe(null)
|
||||
}
|
||||
|
||||
expect(await redis.keys("*")).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("increment", () => {
|
||||
it("can increment on a new key", async () => {
|
||||
const key = structures.uuid()
|
||||
const result = await redis.increment(key)
|
||||
expect(result).toBe(1)
|
||||
})
|
||||
|
||||
it("can increment multiple times", async () => {
|
||||
const key = structures.uuid()
|
||||
const results = [
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
await redis.increment(key),
|
||||
]
|
||||
expect(results).toEqual([1, 2, 3, 4, 5])
|
||||
})
|
||||
|
||||
it("can increment on a new key", async () => {
|
||||
const key1 = structures.uuid()
|
||||
const key2 = structures.uuid()
|
||||
|
||||
const result1 = await redis.increment(key1)
|
||||
expect(result1).toBe(1)
|
||||
|
||||
const result2 = await redis.increment(key2)
|
||||
expect(result2).toBe(1)
|
||||
})
|
||||
|
||||
it("can increment multiple times in parallel", async () => {
|
||||
const key = structures.uuid()
|
||||
const results = await Promise.all(
|
||||
Array.from({ length: 100 }).map(() => redis.increment(key))
|
||||
)
|
||||
expect(results).toHaveLength(100)
|
||||
expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1))
|
||||
})
|
||||
|
||||
it("can increment existing set keys", async () => {
|
||||
const key = structures.uuid()
|
||||
await redis.store(key, 70)
|
||||
await redis.increment(key)
|
||||
|
||||
const result = await redis.increment(key)
|
||||
expect(result).toBe(72)
|
||||
})
|
||||
|
||||
it.each([
|
||||
generator.word(),
|
||||
generator.bool(),
|
||||
{ [generator.word()]: generator.word() },
|
||||
])("cannot increment if the store value is not a number", async value => {
|
||||
const key = structures.uuid()
|
||||
await redis.store(key, value)
|
||||
|
||||
await expect(redis.increment(key)).rejects.toThrowError(
|
||||
"ERR value is not an integer or out of range"
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("deleteIfValue", () => {
|
||||
it("can delete if the value matches", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
await redis.store(key, value)
|
||||
|
||||
await redis.deleteIfValue(key, value)
|
||||
|
||||
expect(await redis.get(key)).toBeNull()
|
||||
})
|
||||
|
||||
it("will not delete if the value does not matches", async () => {
|
||||
const key = structures.uuid()
|
||||
const value = generator.word()
|
||||
await redis.store(key, value)
|
||||
|
||||
await redis.deleteIfValue(key, generator.word())
|
||||
|
||||
expect(await redis.get(key)).toEqual(value)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -29,6 +29,8 @@ export enum Databases {
|
|||
WRITE_THROUGH = "writeThrough",
|
||||
LOCKS = "locks",
|
||||
SOCKET_IO = "socket_io",
|
||||
BPM_EVENTS = "bpmEvents",
|
||||
DOC_WRITE_THROUGH = "docWriteThrough",
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
|
|||
return cloneDeep(BUILTIN_ROLES)
|
||||
}
|
||||
|
||||
export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
|
||||
role => role._id
|
||||
)
|
||||
export function isBuiltin(role: string) {
|
||||
return getBuiltinRole(role) !== undefined
|
||||
}
|
||||
|
||||
export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
|
||||
role => role.name
|
||||
)
|
||||
|
||||
export function isBuiltin(role?: string) {
|
||||
return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
|
||||
export function getBuiltinRole(roleId: string): Role | undefined {
|
||||
const role = Object.values(BUILTIN_ROLES).find(role =>
|
||||
roleId.includes(role._id)
|
||||
)
|
||||
if (!role) {
|
||||
return undefined
|
||||
}
|
||||
return cloneDeep(role)
|
||||
}
|
||||
|
||||
/**
|
||||
* Works through the inheritance ranks to see how far up the builtin stack this ID is.
|
||||
*/
|
||||
export function builtinRoleToNumber(id?: string) {
|
||||
if (!id) {
|
||||
return 0
|
||||
}
|
||||
export function builtinRoleToNumber(id: string) {
|
||||
const builtins = getBuiltinRoles()
|
||||
const MAX = Object.values(builtins).length + 1
|
||||
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
|
||||
|
@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) {
|
|||
/**
|
||||
* Converts any role to a number, but has to be async to get the roles from db.
|
||||
*/
|
||||
export async function roleToNumber(id?: string) {
|
||||
export async function roleToNumber(id: string) {
|
||||
if (isBuiltin(id)) {
|
||||
return builtinRoleToNumber(id)
|
||||
}
|
||||
|
@ -131,7 +130,7 @@ export async function roleToNumber(id?: string) {
|
|||
defaultPublic: true,
|
||||
})) as RoleDoc[]
|
||||
for (let role of hierarchy) {
|
||||
if (isBuiltin(role?.inherits)) {
|
||||
if (role?.inherits && isBuiltin(role.inherits)) {
|
||||
return builtinRoleToNumber(role.inherits) + 1
|
||||
}
|
||||
}
|
||||
|
@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
|
|||
* @returns The role object, which may contain an "inherits" property.
|
||||
*/
|
||||
export async function getRole(
|
||||
roleId?: string,
|
||||
roleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc | undefined> {
|
||||
if (!roleId) {
|
||||
return undefined
|
||||
}
|
||||
let role: any = {}
|
||||
): Promise<RoleDoc> {
|
||||
// built in roles mostly come from the in-code implementation,
|
||||
// but can be extended by a doc stored about them (e.g. permissions)
|
||||
if (isBuiltin(roleId)) {
|
||||
role = cloneDeep(
|
||||
Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
|
||||
)
|
||||
} else {
|
||||
let role: RoleDoc | undefined = getBuiltinRole(roleId)
|
||||
if (!role) {
|
||||
// make sure has the prefix (if it has it then it won't be added)
|
||||
roleId = prefixRoleID(roleId)
|
||||
}
|
||||
try {
|
||||
const db = getAppDB()
|
||||
const dbRole = await db.get(getDBRoleID(roleId))
|
||||
role = Object.assign(role, dbRole)
|
||||
const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
|
||||
role = Object.assign(role || {}, dbRole)
|
||||
// finalise the ID
|
||||
role._id = getExternalRoleID(role._id, role.version)
|
||||
role._id = getExternalRoleID(role._id!, role.version)
|
||||
} catch (err) {
|
||||
if (!isBuiltin(roleId) && opts?.defaultPublic) {
|
||||
return cloneDeep(BUILTIN_ROLES.PUBLIC)
|
||||
}
|
||||
// only throw an error if there is no role at all
|
||||
if (Object.keys(role).length === 0) {
|
||||
if (!role || Object.keys(role).length === 0) {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
@ -200,7 +192,7 @@ export async function getRole(
|
|||
* Simple function to get all the roles based on the top level user role ID.
|
||||
*/
|
||||
async function getAllUserRoles(
|
||||
userRoleId?: string,
|
||||
userRoleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
): Promise<RoleDoc[]> {
|
||||
// admins have access to all roles
|
||||
|
@ -226,7 +218,7 @@ async function getAllUserRoles(
|
|||
}
|
||||
|
||||
export async function getUserRoleIdHierarchy(
|
||||
userRoleId?: string
|
||||
userRoleId: string
|
||||
): Promise<string[]> {
|
||||
const roles = await getUserRoleHierarchy(userRoleId)
|
||||
return roles.map(role => role._id!)
|
||||
|
@ -241,7 +233,7 @@ export async function getUserRoleIdHierarchy(
|
|||
* highest level of access and the last being the lowest level.
|
||||
*/
|
||||
export async function getUserRoleHierarchy(
|
||||
userRoleId?: string,
|
||||
userRoleId: string,
|
||||
opts?: { defaultPublic?: boolean }
|
||||
) {
|
||||
// special case, if they don't have a role then they are a public user
|
||||
|
@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
|
|||
return rolePerms
|
||||
}
|
||||
|
||||
export async function getAllRoleIds(appId?: string) {
|
||||
export async function getAllRoleIds(appId: string): Promise<string[]> {
|
||||
const roles = await getAllRoles(appId)
|
||||
return roles.map(role => role._id)
|
||||
return roles.map(role => role._id!)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -20,41 +20,3 @@ export function cleanup() {
|
|||
}
|
||||
intervals = []
|
||||
}
|
||||
|
||||
export class ExecutionTimeoutError extends Error {
|
||||
public readonly name = "ExecutionTimeoutError"
|
||||
}
|
||||
|
||||
export class ExecutionTimeTracker {
|
||||
static withLimit(limitMs: number) {
|
||||
return new ExecutionTimeTracker(limitMs)
|
||||
}
|
||||
|
||||
constructor(readonly limitMs: number) {}
|
||||
|
||||
private totalTimeMs = 0
|
||||
|
||||
track<T>(f: () => T): T {
|
||||
this.checkLimit()
|
||||
const start = process.hrtime.bigint()
|
||||
try {
|
||||
return f()
|
||||
} finally {
|
||||
const end = process.hrtime.bigint()
|
||||
this.totalTimeMs += Number(end - start) / 1e6
|
||||
this.checkLimit()
|
||||
}
|
||||
}
|
||||
|
||||
get elapsedMS() {
|
||||
return this.totalTimeMs
|
||||
}
|
||||
|
||||
checkLimit() {
|
||||
if (this.totalTimeMs > this.limitMs) {
|
||||
throw new ExecutionTimeoutError(
|
||||
`Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@ beforeAll(async () => {
|
|||
|
||||
jest.spyOn(events.app, "created")
|
||||
jest.spyOn(events.app, "updated")
|
||||
jest.spyOn(events.app, "duplicated")
|
||||
jest.spyOn(events.app, "deleted")
|
||||
jest.spyOn(events.app, "published")
|
||||
jest.spyOn(events.app, "unpublished")
|
||||
|
|
|
@ -58,7 +58,7 @@ export const useCloudFree = () => {
|
|||
// FEATURES
|
||||
|
||||
const useFeature = (feature: Feature) => {
|
||||
const license = cloneDeep(UNLIMITED_LICENSE)
|
||||
const license = cloneDeep(getCachedLicense() || UNLIMITED_LICENSE)
|
||||
const opts: UseLicenseOpts = {
|
||||
features: [feature],
|
||||
}
|
||||
|
|
|
@@ -18,7 +18,7 @@ export const account = (partial: Partial<Account> = {}): Account => {
  return {
    accountId: uuid(),
    tenantId: generator.word(),
    email: generator.email(),
    email: generator.email({ domain: "example.com" }),
    tenantName: generator.word(),
    hosting: Hosting.SELF,
    createdAt: Date.now(),

@@ -13,7 +13,7 @@ interface CreateUserRequestFields {
export function createUserRequest(userData?: Partial<CreateUserRequestFields>) {
  const defaultValues = {
    externalId: uuid(),
    email: generator.email(),
    email: `${uuid()}@example.com`,
    firstName: generator.first(),
    lastName: generator.last(),
    username: generator.name(),
@ -24,8 +24,7 @@
|
|||
"rollup": "^2.45.2",
|
||||
"rollup-plugin-postcss": "^4.0.0",
|
||||
"rollup-plugin-svelte": "^7.1.0",
|
||||
"rollup-plugin-terser": "^7.0.2",
|
||||
"svelte": "3.49.0"
|
||||
"rollup-plugin-terser": "^7.0.2"
|
||||
},
|
||||
"keywords": [
|
||||
"svelte"
|
||||
|
|
|
@ -41,6 +41,7 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<span
|
||||
class="btn-wrap"
|
||||
on:mouseover={() => (showTooltip = true)}
|
||||
|
|
|
@ -33,10 +33,12 @@
|
|||
setContext("actionMenu", { show, hide })
|
||||
</script>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<div use:getAnchor on:click={openMenu}>
|
||||
<slot name="control" />
|
||||
</div>
|
||||
<Popover bind:this={dropdown} {anchor} {align} {portalTarget}>
|
||||
<Popover bind:this={dropdown} {anchor} {align} {portalTarget} on:open on:close>
|
||||
<Menu>
|
||||
<slot />
|
||||
</Menu>
|
||||
|
|
|
@ -32,6 +32,13 @@ const handleClick = event => {
|
|||
return
|
||||
}
|
||||
|
||||
// Ignore clicks for drawers, unless the handler is registered from a drawer
|
||||
const sourceInDrawer = handler.anchor.closest(".drawer-wrapper") != null
|
||||
const clickInDrawer = event.target.closest(".drawer-wrapper") != null
|
||||
if (clickInDrawer && !sourceInDrawer) {
|
||||
return
|
||||
}
|
||||
|
||||
handler.callback?.(event)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@ export default function positionDropdown(element, opts) {
|
|||
align,
|
||||
maxHeight,
|
||||
maxWidth,
|
||||
minWidth,
|
||||
useAnchorWidth,
|
||||
offset = 5,
|
||||
customUpdate,
|
||||
|
@ -28,18 +29,26 @@ export default function positionDropdown(element, opts) {
|
|||
const elementBounds = element.getBoundingClientRect()
|
||||
let styles = {
|
||||
maxHeight: null,
|
||||
minWidth: null,
|
||||
minWidth,
|
||||
maxWidth,
|
||||
left: null,
|
||||
top: null,
|
||||
}
|
||||
|
||||
if (typeof customUpdate === "function") {
|
||||
styles = customUpdate(anchorBounds, elementBounds, styles)
|
||||
styles = customUpdate(anchorBounds, elementBounds, {
|
||||
...styles,
|
||||
offset: opts.offset,
|
||||
})
|
||||
} else {
|
||||
// Determine vertical styles
|
||||
if (align === "right-outside") {
|
||||
styles.top = anchorBounds.top
|
||||
if (align === "right-outside" || align === "left-outside") {
|
||||
styles.top =
|
||||
anchorBounds.top + anchorBounds.height / 2 - elementBounds.height / 2
|
||||
styles.maxHeight = maxHeight
|
||||
if (styles.top + elementBounds.height > window.innerHeight) {
|
||||
styles.top = window.innerHeight - elementBounds.height
|
||||
}
|
||||
} else if (
|
||||
window.innerHeight - anchorBounds.bottom <
|
||||
(maxHeight || 100)
|
||||
|
|
|
@ -13,6 +13,8 @@
|
|||
export let hoverable = false
|
||||
</script>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<span
|
||||
on:click
|
||||
class="spectrum-Label"
|
||||
|
|
|
@ -123,6 +123,8 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<div
|
||||
bind:this={preview}
|
||||
class="preview size--{size || 'M'}"
|
||||
|
@ -137,6 +139,8 @@
|
|||
/>
|
||||
</div>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<Popover bind:this={dropdown} anchor={preview} maxHeight={320} {offset} {align}>
|
||||
<Layout paddingX="XL" paddingY="L">
|
||||
<div class="container">
|
||||
|
|
|
@ -15,6 +15,8 @@
|
|||
}
|
||||
</script>
|
||||
|
||||
<!-- svelte-ignore a11y-no-static-element-interactions -->
|
||||
<!-- svelte-ignore a11y-click-events-have-key-events -->
|
||||
<div class="property-group-container">
|
||||
{#if name}
|
||||
<div class="property-group-name" on:click={onHeaderClick}>
|
||||
|
|
|
@ -1,28 +1,111 @@
|
|||
<script context="module">
|
||||
import { writable, get } from "svelte/store"
|
||||
|
||||
// Observe this class name if possible in order to know how to size the
|
||||
// drawer. If this doesn't exist we'll use a fixed size.
|
||||
const drawerContainer = "drawer-container"
|
||||
|
||||
// Context level stores to keep drawers in sync
|
||||
const openDrawers = writable([])
|
||||
const modal = writable(false)
|
||||
const resizable = writable(true)
|
||||
const drawerLeft = writable(null)
|
||||
const drawerWidth = writable(null)
|
||||
|
||||
// Resize observer to keep track of size changes
|
||||
let observer
|
||||
|
||||
// Starts observing the target node to watching to size changes.
|
||||
// Invoked when the first drawer of a chain is rendered.
|
||||
const observe = () => {
|
||||
const target = document.getElementsByClassName(drawerContainer)[0]
|
||||
if (observer || !target) {
|
||||
return
|
||||
}
|
||||
observer = new ResizeObserver(entries => {
|
||||
if (!entries?.[0]) {
|
||||
return
|
||||
}
|
||||
const bounds = entries[0].target.getBoundingClientRect()
|
||||
drawerLeft.set(bounds.left)
|
||||
drawerWidth.set(bounds.width)
|
||||
})
|
||||
observer.observe(target)
|
||||
|
||||
// Manually measure once to ensure that we have dimensions for the initial
|
||||
// paint
|
||||
const bounds = target.getBoundingClientRect()
|
||||
drawerLeft.set(bounds.left)
|
||||
drawerWidth.set(bounds.width)
|
||||
}
|
||||
|
||||
// Stops observing the target node.
|
||||
// Invoked when the last drawer of a chain is removed.
|
||||
const unobserve = () => {
|
||||
if (get(openDrawers).length) {
|
||||
return
|
||||
}
|
||||
observer?.disconnect()
|
||||
|
||||
// Reset state
|
||||
observer = null
|
||||
modal.set(false)
|
||||
resizable.set(true)
|
||||
drawerLeft.set(null)
|
||||
drawerWidth.set(null)
|
||||
}
|
||||
</script>
|
||||
|
||||
<script>
|
||||
import Portal from "svelte-portal"
|
||||
import Button from "../Button/Button.svelte"
|
||||
import Body from "../Typography/Body.svelte"
|
||||
import Heading from "../Typography/Heading.svelte"
|
||||
import { setContext, createEventDispatcher } from "svelte"
|
||||
import Icon from "../Icon/Icon.svelte"
|
||||
import ActionButton from "../ActionButton/ActionButton.svelte"
|
||||
import Portal from "svelte-portal"
|
||||
import { setContext, createEventDispatcher, onDestroy } from "svelte"
|
||||
import { generate } from "shortid"
|
||||
|
||||
export let title
|
||||
export let fillWidth
|
||||
export let left = "314px"
|
||||
export let width = "calc(100% - 626px)"
|
||||
export let headless = false
|
||||
export let forceModal = false
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const spacing = 11
|
||||
|
||||
let visible = false
|
||||
let drawerId = generate()
|
||||
|
||||
$: depth = $openDrawers.length - $openDrawers.indexOf(drawerId) - 1
|
||||
$: style = getStyle(depth, $drawerLeft, $drawerWidth, $modal)
|
||||
|
||||
const getStyle = (depth, left, width, modal) => {
|
||||
let style = `
|
||||
--scale-factor: ${getScaleFactor(depth)};
|
||||
--spacing: ${spacing}px;
|
||||
`
|
||||
// Most modal styles are handled by class names
|
||||
if (modal || left == null || width == null) {
|
||||
return style
|
||||
}
|
||||
|
||||
// Drawers observing another dom node need custom position styles
|
||||
return `
|
||||
${style}
|
||||
left: ${left + spacing}px;
|
||||
width: ${width - 2 * spacing}px;
|
||||
`
|
||||
}
|
||||
|
||||
export function show() {
|
||||
if (visible) {
|
||||
return
|
||||
}
|
||||
if (forceModal) {
|
||||
modal.set(true)
|
||||
resizable.set(false)
|
||||
}
|
||||
observe()
|
||||
visible = true
|
||||
dispatch("drawerShow", drawerId)
|
||||
openDrawers.update(state => [...state, drawerId])
|
||||
}
|
||||
|
||||
export function hide() {
|
||||
|
@ -31,12 +114,15 @@
|
|||
}
|
||||
visible = false
|
||||
dispatch("drawerHide", drawerId)
|
||||
openDrawers.update(state => state.filter(id => id !== drawerId))
|
||||
unobserve()
|
||||
}
|
||||
|
||||
setContext("drawer-actions", {
|
||||
setContext("drawer", {
|
||||
hide,
|
||||
show,
|
||||
headless,
|
||||
modal,
|
||||
resizable,
|
||||
})
|
||||
|
||||
const easeInOutQuad = x => {
|
||||
|
@ -45,66 +131,142 @@
|
|||
|
||||
// Use a custom svelte transition here because the built-in slide
|
||||
// transition has a horrible overshoot
|
||||
const slide = () => {
|
||||
const drawerSlide = () => {
|
||||
return {
|
||||
duration: 360,
|
||||
duration: 260,
|
||||
css: t => {
|
||||
const translation = 100 - Math.round(easeInOutQuad(t) * 100)
|
||||
return `transform: translateY(${translation}%);`
|
||||
const f = easeInOutQuad(t)
|
||||
const yOffset = (1 - f) * 200
|
||||
return `
|
||||
transform: translateY(calc(${yOffset}px - 800px * (1 - var(--scale-factor))));
|
||||
opacity: ${f};
|
||||
`
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Custom fade transition because the default svelte one doesn't work any more
|
||||
// with svelte 4
|
||||
const drawerFade = () => {
|
||||
return {
|
||||
duration: 260,
|
||||
css: t => {
|
||||
return `opacity: ${easeInOutQuad(t)};`
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
const getScaleFactor = depth => {
|
||||
// Quadratic function approaching a limit of 1 as depth tends to infinity
|
||||
const lim = 1 - 1 / (depth * depth + 1)
|
||||
// Scale drawers between 1 and 0.9 as depth approaches infinity
|
||||
return 1 - lim * 0.1
|
||||
}
|
||||
|
||||
onDestroy(() => {
|
||||
if (visible) {
|
||||
hide()
|
||||
}
|
||||
})
|
||||
</script>
|
||||
|
||||
{#if visible}
|
||||
<Portal>
|
||||
<section
|
||||
class:fillWidth
|
||||
class="drawer"
|
||||
class:headless
|
||||
transition:slide|local
|
||||
style={`width: ${width}; left: ${left};`}
|
||||
>
|
||||
{#if !headless}
|
||||
<Portal target=".modal-container">
|
||||
<!-- This class is unstyled, but needed by click_outside -->
|
||||
<div class="drawer-wrapper">
|
||||
<div
|
||||
class="underlay"
|
||||
class:hidden={!$modal}
|
||||
transition:drawerFade|local
|
||||
/>
|
||||
<div
|
||||
class="drawer"
|
||||
class:stacked={depth > 0}
|
||||
class:modal={$modal}
|
||||
transition:drawerSlide|local
|
||||
{style}
|
||||
>
|
||||
<header>
|
||||
<div class="text">
|
||||
<Heading size="XS">{title}</Heading>
|
||||
<Body size="S">
|
||||
<slot name="description" />
|
||||
</Body>
|
||||
</div>
|
||||
{#if $$slots.title}
|
||||
<slot name="title" />
|
||||
{:else}
|
||||
<div class="text">{title || "Bindings"}</div>
|
||||
{/if}
|
||||
<div class="buttons">
|
||||
<Button secondary quiet on:click={hide}>Cancel</Button>
|
||||
<slot name="buttons" />
|
||||
{#if $resizable}
|
||||
<ActionButton
|
||||
size="M"
|
||||
quiet
|
||||
selected={$modal}
|
||||
on:click={() => modal.set(!$modal)}
|
||||
>
|
||||
<Icon name={$modal ? "Minimize" : "Maximize"} size="S" />
|
||||
</ActionButton>
|
||||
{/if}
|
||||
</div>
|
||||
</header>
|
||||
{/if}
|
||||
<slot name="body" />
|
||||
</section>
|
||||
<slot name="body" />
|
||||
<div class="overlay" class:hidden={$modal || depth === 0} />
|
||||
</div>
|
||||
</div>
|
||||
</Portal>
|
||||
{/if}
|
||||
|
||||
<style>
|
||||
.drawer.headless :global(.drawer-contents) {
|
||||
height: calc(40vh + 75px);
|
||||
}
|
||||
|
||||
.buttons {
|
||||
display: flex;
|
||||
gap: var(--spacing-m);
|
||||
}
|
||||
|
||||
.drawer {
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
left: 25vw;
|
||||
width: 50vw;
|
||||
bottom: var(--spacing);
|
||||
height: 420px;
|
||||
background: var(--background);
|
||||
border-top: var(--border-light);
|
||||
z-index: 3;
|
||||
border: var(--border-light);
|
||||
z-index: 100;
|
||||
border-radius: 8px;
|
||||
overflow: hidden;
|
||||
box-sizing: border-box;
|
||||
transition: transform 260ms ease-out, bottom 260ms ease-out,
|
||||
left 260ms ease-out, width 260ms ease-out, height 260ms ease-out;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
}
|
||||
.drawer.modal {
|
||||
left: 15vw;
|
||||
width: 70vw;
|
||||
bottom: 15vh;
|
||||
height: 70vh;
|
||||
}
|
||||
.drawer.stacked {
|
||||
transform: translateY(calc(-1 * 1024px * (1 - var(--scale-factor))))
|
||||
scale(var(--scale-factor));
|
||||
}
|
||||
|
||||
.fillWidth {
|
||||
left: 260px !important;
|
||||
width: calc(100% - 260px) !important;
|
||||
.overlay,
|
||||
.underlay {
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 100;
|
||||
display: block;
|
||||
transition: opacity 260ms ease-out;
|
||||
}
|
||||
.overlay {
|
||||
position: absolute;
|
||||
background: var(--background);
|
||||
opacity: 0.5;
|
||||
}
|
||||
.underlay {
|
||||
position: fixed;
|
||||
background: rgba(0, 0, 0, 0.5);
|
||||
}
|
||||
.underlay.hidden,
|
||||
.overlay.hidden {
|
||||
opacity: 0 !important;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
header {
|
||||
|
@ -112,10 +274,9 @@
|
|||
justify-content: space-between;
|
||||
align-items: center;
|
||||
border-bottom: var(--border-light);
|
||||
padding: var(--spacing-l) var(--spacing-xl);
|
||||
padding: var(--spacing-m) var(--spacing-xl);
|
||||
gap: var(--spacing-xl);
|
||||
}
|
||||
|
||||
.text {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
|
@ -123,7 +284,6 @@
|
|||
align-items: flex-start;
|
||||
gap: var(--spacing-xs);
|
||||
}
|
||||
|
||||
.buttons {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
|
@ -131,4 +291,8 @@
|
|||
align-items: center;
|
||||
gap: var(--spacing-m);
|
||||
}
|
||||
.buttons :global(.icon) {
|
||||
width: 16px;
|
||||
display: flex;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
<div class="drawer-contents">
|
||||
<script>
|
||||
export let padding = true
|
||||
</script>
|
||||
|
||||
<div class="drawer-contents" class:padding>
|
||||
<div class:no-sidebar={!$$slots.sidebar} class="container">
|
||||
{#if $$slots.sidebar}
|
||||
<div class="sidebar">
|
||||
|
@ -13,8 +17,8 @@
|
|||
|
||||
<style>
|
||||
.drawer-contents {
|
||||
height: 40vh;
|
||||
overflow-y: auto;
|
||||
flex: 1 1 auto;
|
||||
}
|
||||
.container {
|
||||
height: 100%;
|
||||
|
@ -27,14 +31,22 @@
|
|||
.sidebar {
|
||||
border-right: var(--border-light);
|
||||
overflow: auto;
|
||||
padding: var(--spacing-xl);
|
||||
scrollbar-width: none;
|
||||
}
|
||||
.padding .sidebar {
|
||||
padding: var(--spacing-xl);
|
||||
}
|
||||
.sidebar::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
.main {
|
||||
height: 100%;
|
||||
overflow: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
.padding .main {
|
||||
padding: var(--spacing-xl);
|
||||
height: calc(100% - var(--spacing-xl) * 2);
|
||||
}
|
||||
.main :global(textarea) {
|
||||
min-height: 200px;
|
||||
|
|
|
@@ -36,6 +36,8 @@
})
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
bind:this={ref}
class="fancy-field"

@@ -35,6 +35,7 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="spectrum-InputGroup"
class:is-focused={open || focus}

@@ -193,6 +193,8 @@
aria-required="false"
aria-haspopup="true"
>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
on:click={flatpickr?.open}
class="spectrum-Textfield spectrum-InputGroup-textfield"
@@ -230,6 +232,7 @@
</Flatpickr>
{/key}
{#if open}
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class="overlay" on:mousedown|self={flatpickr?.close} />
{/if}

@@ -137,6 +137,9 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-noninteractive-tabindex -->
<div class="container" class:compact>
{#if selectedImage}
{#if gallery}
@@ -194,7 +197,9 @@
>
<Icon name="ChevronRight" />
</div>
<div class="footer">File {selectedImageIdx + 1} of {fileCount}</div>
{#if maximum !== 1}
<div class="footer">File {selectedImageIdx + 1} of {fileCount}</div>
{/if}
</div>
{:else if value?.length}
{#each value as file}

@@ -96,6 +96,8 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="spectrum-InputGroup">
<div
class:is-disabled={disabled || hbsValue.length}

@@ -50,6 +50,8 @@
on:change={handleFile}
/>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="field">
{#if value}
<div class="file-view">

@@ -110,6 +110,7 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="spectrum-InputGroup" class:is-disabled={disabled}>
<div
class="spectrum-Textfield spectrum-InputGroup-textfield"

@@ -146,6 +146,7 @@
<use xlink:href="#spectrum-css-icon-Chevron100" />
</svg>
</button>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<Popover
anchor={customAnchor ? customAnchor : button}
align={align || "left"}

@@ -104,6 +104,7 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="spectrum-InputGroup" class:is-disabled={disabled}>
<div
class="spectrum-Textfield spectrum-InputGroup-textfield"

@@ -1,56 +1,54 @@
<script context="module">
export const directions = ["n", "ne", "e", "se", "s", "sw", "w", "nw"]
</script>

<script>
import Tooltip from "../Tooltip/Tooltip.svelte"
import { fade } from "svelte/transition"
import {
default as AbsTooltip,
TooltipPosition,
TooltipType,
} from "../Tooltip/AbsTooltip.svelte"

export let direction = "n"
export let name = "Add"
export let hidden = false
export let size = "M"
export let hoverable = false
export let disabled = false
export let color
export let hoverColor
export let tooltip

$: rotation = getRotation(direction)

let showTooltip = false

const getRotation = direction => {
return directions.indexOf(direction) * 45
}
export let tooltipPosition = TooltipPosition.Bottom
export let tooltipType = TooltipType.Default
export let tooltipColor
export let tooltipWrap = true
export let newStyles = false
</script>

<div
class="icon"
on:mouseover={() => (showTooltip = true)}
on:focus={() => (showTooltip = true)}
on:mouseleave={() => (showTooltip = false)}
on:click={() => (showTooltip = false)}
<AbsTooltip
text={tooltip}
type={tooltipType}
position={tooltipPosition}
color={tooltipColor}
noWrap={tooltipWrap}
>
<svg
on:click
class:hoverable
class:disabled
class="spectrum-Icon spectrum-Icon--size{size}"
focusable="false"
aria-hidden={hidden}
aria-label={name}
style={`transform: rotate(${rotation}deg); ${
color ? `color: ${color};` : ""
}`}
>
<use style="pointer-events: none;" xlink:href="#spectrum-icon-18-{name}" />
</svg>
{#if tooltip && showTooltip}
<div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
<Tooltip textWrapping direction="top" text={tooltip} />
</div>
{/if}
</div>
<div class="icon" class:newStyles>
<svg
on:click
class:hoverable
class:disabled
class="spectrum-Icon spectrum-Icon--size{size}"
focusable="false"
aria-hidden={hidden}
aria-label={name}
style={`${color ? `color: ${color};` : ""} ${
hoverColor
? `--hover-color: ${hoverColor}`
: "--hover-color: var(--spectrum-alias-icon-color-selected-hover)"
}`}
>
<use
style="pointer-events: none;"
xlink:href="#spectrum-icon-18-{name}"
/>
</svg>
</div>
</AbsTooltip>

<style>
.icon {
@@ -58,19 +56,25 @@
display: grid;
place-items: center;
}
.newStyles {
color: var(--spectrum-global-color-gray-700);
}

svg.hoverable {
pointer-events: all;
transition: color var(--spectrum-global-animation-duration-100, 130ms);
}
svg.hoverable:hover {
color: var(--spectrum-alias-icon-color-selected-hover) !important;
color: var(--hover-color) !important;
cursor: pointer;
}
svg.hoverable:active {
color: var(--spectrum-global-color-blue-400) !important;
}

.newStyles svg.hoverable:hover,
.newStyles svg.hoverable:active {
color: var(--spectrum-global-color-gray-900) !important;
}
svg.disabled {
color: var(--spectrum-global-color-gray-500) !important;
pointer-events: none !important;

@@ -1,22 +1,41 @@
<script>
import Icon from "./Icon.svelte"

import Tooltip from "../Tooltip/Tooltip.svelte"
import { fade } from "svelte/transition"

export let icon
export let background
export let color
export let size = "M"
export let tooltip

let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="icon size--{size}"
style="background: {background || `transparent`};"
class:filled={!!background}
on:mouseover={() => (showTooltip = true)}
on:mouseleave={() => (showTooltip = false)}
on:focus={() => (showTooltip = true)}
on:blur={() => (showTooltip = false)}
on:click={() => (showTooltip = false)}
>
<Icon name={icon} color={background ? "white" : color} />
{#if tooltip && showTooltip}
<div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
<Tooltip textWrapping direction="right" text={tooltip} />
</div>
{/if}
</div>

<style>
.icon {
position: relative;
width: 28px;
height: 28px;
flex: 0 0 28px;
@@ -32,6 +51,15 @@
width: 16px;
height: 16px;
}
.icon.size--XS {
width: 18px;
height: 18px;
flex: 0 0 18px;
}
.icon.size--XS :global(.spectrum-Icon) {
width: 10px;
height: 10px;
}
.icon.size--S {
width: 22px;
height: 22px;
@@ -58,4 +86,14 @@
width: 22px;
height: 22px;
}

.tooltip {
position: absolute;
pointer-events: none;
left: calc(50% + 8px);
bottom: calc(-50% + 6px);
/* transform: translateY(-50%); */
text-align: center;
z-index: 1;
}
</style>

@@ -58,6 +58,8 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="container">
<div class="preview size--{size || 'M'}" on:click={() => (open = true)}>
<div

@@ -10,6 +10,8 @@
let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="icon-side-nav-item"
class:active

@@ -17,6 +17,8 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div>
<Input readonly {value} {label} />
<div class="icon" on:click={() => copyToClipboard(value)}>

@@ -43,6 +43,7 @@
flex-direction: row;
justify-content: flex-start;
align-items: stretch;
overflow-y: scroll !important;
flex: 1 1 auto;
overflow-x: hidden;
}

@@ -15,6 +15,8 @@
$: initials = avatar ? title?.[0] : null
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div class="list-item" class:hoverable on:click>
<div class="left">
{#if icon}

@@ -33,6 +33,7 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<li
on:click|preventDefault={disabled ? null : onClick}
class="spectrum-Menu-item"

@@ -14,6 +14,8 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div on:click={increment}>
Click me
{remaining}

@@ -10,6 +10,7 @@
export let inline = false
export let disableCancel = false
export let autoFocus = true
export let zIndex = 999

const dispatch = createEventDispatcher()
let visible = fixed || inline
@@ -100,7 +101,12 @@
-->
<Portal target=".modal-container">
{#if visible}
<div class="spectrum-Underlay is-open" on:mousedown|self={cancel}>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
class="spectrum-Underlay is-open"
on:mousedown|self={cancel}
style="z-index:{zIndex || 999}"
>
<div
class="background"
in:fade={{ duration: 200 }}
@@ -131,7 +137,6 @@
flex-direction: row;
justify-content: center;
align-items: center;
z-index: 999;
overflow: auto;
overflow-x: hidden;
background: transparent;

@@ -81,6 +81,8 @@
})
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div>
<div
class="actions"

@@ -10,6 +10,8 @@
export let hasNextPage = true
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<nav class="spectrum-Pagination spectrum-Pagination--explicit">
<div
href="#"

@@ -12,6 +12,7 @@
export let anchor
export let align = "right"
export let portalTarget
export let minWidth
export let maxWidth
export let maxHeight
export let open = false
@@ -21,7 +22,6 @@
export let customHeight
export let animate = true
export let customZindex

export let handlePostionUpdate
export let showPopover = true
export let clickOutsideOverride = false
@@ -78,6 +78,7 @@
</script>

{#if open}
<!-- svelte-ignore a11y-no-noninteractive-tabindex -->
<Portal {target}>
<div
tabindex="0"
@@ -86,6 +87,7 @@
align,
maxHeight,
maxWidth,
minWidth,
useAnchorWidth,
offset,
customUpdate: handlePostionUpdate,
@@ -101,6 +103,8 @@
role="presentation"
style="height: {customHeight}; --customZindex: {customZindex};"
transition:fly|local={{ y: -20, duration: animate ? 200 : 0 }}
on:mouseenter
on:mouseleave
>
<slot />
</div>

@@ -40,6 +40,8 @@
export let overBackground
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
on:click
class:spectrum-ProgressCircle--indeterminate={value == null}

@@ -13,6 +13,8 @@
export let badge = ""
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-noninteractive-element-interactions -->
<li
class="spectrum-SideNav-item"
class:is-selected={selected}

@@ -22,6 +22,8 @@
export let hoverable = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
on:click
class="spectrum-StatusLight spectrum-StatusLight--size{size}"

@@ -19,6 +19,8 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div on:click|stopPropagation={onClick}>
<Icon size="S" name="Copy" />
</div>

@@ -303,6 +303,8 @@
</script>

{#key fields?.length}
<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
class="wrapper"
class:wrapper--quiet={quiet}
@@ -468,6 +470,7 @@
--table-border: 1px solid var(--spectrum-alias-border-color-mid);
--cell-padding: var(--spectrum-global-dimension-size-250);
overflow: auto;
display: contents;
}
.wrapper--quiet {
--table-bg: var(--spectrum-alias-background-color-transparent);

@@ -48,6 +48,9 @@
}
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-noninteractive-tabindex -->
<div
{id}
bind:this={tab_internal}

@@ -24,6 +24,7 @@
export let text = ""
export let fixed = false
export let color = null
export let noWrap = false

let wrapper
let hovered = false
@@ -90,6 +91,7 @@
onDestroy(hide)
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
bind:this={wrapper}
class="abs-tooltip"
@@ -104,6 +106,7 @@
<Portal target=".spectrum">
<span
class="spectrum-Tooltip spectrum-Tooltip--{type} spectrum-Tooltip--{position} is-open"
class:noWrap
style={`left:${left}px;top:${top}px;${tooltipStyle}`}
transition:fade|local={{ duration: 130 }}
>
@@ -117,6 +120,9 @@
.abs-tooltip {
display: contents;
}
.spectrum-Tooltip.noWrap .spectrum-Tooltip-label {
width: max-content;
}
.spectrum-Tooltip {
position: absolute;
z-index: 9999;

@@ -9,6 +9,7 @@
let showTooltip = false
</script>

<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class:container={!!tooltip}>
<slot />
{#if tooltip}

@@ -20,3 +20,9 @@
>
<slot />
</p>

<style>
p {
text-wrap: pretty;
}
</style>

@@ -21,4 +21,8 @@
h1 {
font-family: var(--font-accent);
}

h1 {
text-wrap: balance;
}
</style>

@@ -19,7 +19,7 @@ export { default as ActionMenu } from "./ActionMenu/ActionMenu.svelte"
export { default as Button } from "./Button/Button.svelte"
export { default as ButtonGroup } from "./ButtonGroup/ButtonGroup.svelte"
export { default as ClearButton } from "./ClearButton/ClearButton.svelte"
export { default as Icon, directions } from "./Icon/Icon.svelte"
export { default as Icon } from "./Icon/Icon.svelte"
export { default as IconAvatar } from "./Icon/IconAvatar.svelte"
export { default as Toggle } from "./Form/Toggle.svelte"
export { default as RadioGroup } from "./Form/RadioGroup.svelte"

@@ -66,10 +66,11 @@
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
"@zerodevx/svelte-json-view": "^1.0.7",
"codemirror": "^5.59.0",
"codemirror": "^5.65.16",
"dayjs": "^1.10.8",
"downloadjs": "1.4.7",
"fast-json-patch": "^3.1.1",
"json-format-highlight": "^1.0.4",
"lodash": "4.17.21",
"posthog-js": "^1.36.0",
"remixicon": "2.5.0",
@@ -86,14 +87,13 @@
"@rollup/plugin-replace": "^5.0.3",
"@roxi/routify": "2.18.12",
"@sveltejs/vite-plugin-svelte": "1.4.0",
"@testing-library/jest-dom": "5.17.0",
"@testing-library/svelte": "^3.2.2",
"@testing-library/jest-dom": "6.4.2",
"@testing-library/svelte": "^4.1.0",
"babel-jest": "^29.6.2",
"identity-obj-proxy": "^3.0.0",
"jest": "29.7.0",
"jsdom": "^21.1.1",
"ncp": "^2.0.0",
"svelte": "^3.49.0",
"svelte-jester": "^1.3.2",
"vite": "^4.5.0",
"vite-plugin-static-copy": "^0.17.0",

@@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
class AnalyticsHub {
constructor() {
this.clients = [posthog, intercom]
this.initialised = false
}

async activate() {
// Check analytics are enabled
const analyticsStatus = await API.getAnalyticsStatus()
if (analyticsStatus.enabled) {
this.clients.forEach(client => client.init())
if (analyticsStatus.enabled && !this.initialised) {
this.clients.forEach(client => {
client.init()
})
this.initialised = true
}
}

@@ -110,6 +110,8 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<ModalContent
title="Add automation step"
confirmText="Save"

@@ -46,6 +46,8 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class="header" class:scrolling>
<div class="header-left">
<UndoRedoControl store={automationHistoryStore} />
@@ -130,6 +132,7 @@
flex-grow: 1;
padding: 23px 23px 80px;
box-sizing: border-box;
overflow-x: hidden;
}

.header.scrolling {

@@ -103,6 +103,8 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div class={`block ${block.type} hoverable`} class:selected on:click={() => {}}>
{#if loopBlock}
<div class="blockSection">

@@ -93,6 +93,8 @@
}
</script>

<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
class:typing={typing && !automationNameError}
class:typing-error={automationNameError}

Some files were not shown because too many files have changed in this diff.