Merge branch 'develop' of github.com:Budibase/budibase into fix/cli-memory-issues

mike12345567 2023-02-07 09:56:49 +00:00
commit 464ab7a1bb
135 changed files with 2412 additions and 959 deletions

@@ -2,7 +2,7 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: bug
+labels: ["bug", "linear"]
 assignees: ''
 ---

.github/ISSUE_TEMPLATE/config.yml (vendored, new file)
@@ -0,0 +1 @@
blank_issues_enabled: false

@@ -2,7 +2,7 @@
 name: Feature Request
 about: Request a new budibase feature or enhancement
 title: ''
-labels: enhancement
+labels: ["enhancement", "linear"]
 assignees: ''
 ---

@@ -19,10 +19,11 @@ COUCH_DB_PORT=4005
 REDIS_PORT=6379
 WATCHTOWER_PORT=6161
 BUDIBASE_ENVIRONMENT=PRODUCTION
+SQL_MAX_ROWS=
 # An admin user can be automatically created initially if these are set
 BB_ADMIN_USER_EMAIL=
 BB_ADMIN_USER_PASSWORD=
 # A path that is watched for plugin bundles. Any bundles found are imported automatically/
 PLUGINS_DIR=

@@ -0,0 +1,32 @@
FROM couchdb:3.2.1
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update && apt-get install -y --no-install-recommends openjdk-8-jre && \
rm -rf /var/lib/apt/lists/
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./
# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/
WORKDIR /
ADD build-target-paths.sh .
ADD runner.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau ./build-target-paths.sh
CMD ["./bbcouch-runner.sh"]

@@ -0,0 +1,24 @@
#!/bin/bash
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi

hosting/couchdb/runner.sh (new file)
@@ -0,0 +1,14 @@
#!/bin/bash
DATA_DIR=${DATA_DIR:-/data}
mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
/build-target-paths.sh
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
sleep 10
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
sleep infinity

@@ -0,0 +1,23 @@
FROM budibase/couchdb
ENV DATA_DIR /data
RUN mkdir /data
RUN apt-get update && \
apt-get install -y --no-install-recommends redis-server
WORKDIR /minio
ADD scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
WORKDIR /
ADD dependencies/runner.sh .
RUN chmod +x ./runner.sh
EXPOSE 5984
EXPOSE 9000
EXPOSE 9001
EXPOSE 6379
CMD ["./runner.sh"]

@@ -0,0 +1,57 @@
# Docker Image for Running Budibase Tests
## Overview
This image contains the basic setup for running Budibase tests.
## Usage
- Build the Image
- Run the Container
### Build the Image
The guidance below is based on building the Budibase single image on Debian 11 and AlmaLinux 8. If you use another distro or OS you will need to amend the commands to suit.
#### Install Node
Budibase requires a more recent version of Node (14+) than is available in the base Debian repos, so install it from NodeSource:
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt install -y nodejs
node -v
```
Install yarn, jest and lerna globally:
```
npm install -g yarn jest lerna
```
#### Install Docker
```
apt install -y docker.io
```
Check the version of each installed tool. This process was tested with the version numbers below, so YMMV using anything else (the check commands are sketched after the list):
- Docker: 20.10.5
- node: 16.15.1
- yarn: 1.22.19
- lerna: 5.1.4
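The versions above can be confirmed with the standard version flags of each CLI:
```
docker --version
node -v
yarn --version
lerna --version
```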
#### Get the Code
Clone the Budibase repo
```
git clone https://github.com/Budibase/budibase.git
cd budibase
```
#### Setup Node
Node setup:
```
node ./hosting/scripts/setup.js
yarn
yarn bootstrap
yarn build
```
#### Build Image
The following yarn command does some prep and then runs the docker build command:
```
yarn build:docker:dependencies
```
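To run the resulting container — a minimal sketch, assuming the build above produced the `budibase/dependencies:latest` tag and publishing the ports exposed by the Dockerfile (CouchDB 5984, MinIO 9000/9001, Redis 6379); the Redis password here is only an example value:
```
# runner.sh starts Redis with --requirepass $REDIS_PASSWORD, so pass one in
docker run --rm \
  -e REDIS_PASSWORD=budibase \
  -p 5984:5984 -p 9000:9000 -p 9001:9001 -p 6379:6379 \
  budibase/dependencies:latest
```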

@@ -0,0 +1,8 @@
#!/bin/bash
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server ${DATA_DIR}/minio --console-address ":9001" > /dev/stdout 2>&1 &
echo "Budibase dependencies started..."
sleep infinity

@@ -42,25 +42,16 @@ services:
   couchdb-service:
     # platform: linux/amd64
-    container_name: budi-couchdb-dev
+    container_name: budi-couchdb3-dev
     restart: on-failure
-    image: ibmcom/couchdb3
+    image: budibase/couchdb
     environment:
       - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
       - COUCHDB_USER=${COUCH_DB_USER}
     ports:
       - "${COUCH_DB_PORT}:5984"
     volumes:
-      - couchdb3_data:/opt/couchdb/data
+      - couchdb_data:/data
-  couch-init:
-    container_name: budi-couchdb-init-dev
-    image: curlimages/curl
-    environment:
-      PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
-    depends_on:
-      - couchdb-service
-    command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
   redis-service:
     container_name: budi-redis-dev

@@ -73,7 +64,7 @@ services:
       - redis_data:/data
 volumes:
-  couchdb3_data:
+  couchdb_data:
     driver: local
   minio_data:
     driver: local

@@ -0,0 +1,47 @@
version: "3"
# optional ports are specified throughout for more advanced use cases.
services:
  minio-service:
    restart: on-failure
    # Last version that supports the "fs" backend
    image: minio/minio:RELEASE.2022-10-24T18-35-07Z
    ports:
      - 9000
      - 9001
    environment:
      MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
      MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

  couchdb-service:
    # platform: linux/amd64
    restart: on-failure
    image: budibase/couchdb
    environment:
      - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
      - COUCHDB_USER=${COUCH_DB_USER}
    ports:
      - 5984
      - 4369
      - 9100
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:5984/_up"]
      interval: 30s
      timeout: 20s
      retries: 3

  redis-service:
    restart: on-failure
    image: redis
    command: redis-server --requirepass ${REDIS_PASSWORD}
    ports:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]

@@ -0,0 +1,10 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

@@ -0,0 +1,15 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag budibase-couchdb budibase/couchdb:$tag
docker push --all-tags budibase/couchdb

@@ -18,7 +18,7 @@ WORKDIR /worker
 ADD packages/worker .
 RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
-FROM couchdb:3.2.1
+FROM budibase/couchdb
 ARG TARGETARCH
 ENV TARGETARCH $TARGETARCH
 #TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)

@@ -29,23 +29,9 @@ ENV TARGETBUILD $TARGETBUILD
 COPY --from=build /app /app
 COPY --from=build /worker /worker
-# ENV CUSTOM_DOMAIN=budi001.custom.com \
+# See runner.sh for Env Vars
+# These secret env variables are generated by the runner at startup
+# their values can be overriden by the user, they will be written
+# to the .env file in the /data directory for use later on
-# REDIS_PASSWORD=budibase \
-# COUCHDB_PASSWORD=budibase \
-# COUCHDB_USER=budibase \
-# COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
-# INTERNAL_API_KEY=budibase \
-# JWT_SECRET=testsecret \
-# MINIO_ACCESS_KEY=budibase \
-# MINIO_SECRET_KEY=budibase \
 # install base dependencies
 RUN apt-get update && \
-  apt-get install -y software-properties-common wget nginx uuid-runtime && \
+  apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server && \
   apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
   apt-get update

@@ -53,7 +39,7 @@ RUN apt-get update && \
 WORKDIR /nodejs
 RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \
   bash /tmp/nodesource_setup.sh && \
-  apt-get install -y libaio1 nodejs nginx openjdk-8-jdk redis-server unzip && \
+  apt-get install -y --no-install-recommends libaio1 nodejs && \
   npm install --global yarn pm2
 # setup nginx

@@ -69,23 +55,6 @@ RUN mkdir -p scripts/integrations/oracle
 ADD packages/server/scripts/integrations/oracle scripts/integrations/oracle
 RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
-# setup clouseau
-WORKDIR /
-RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
-  unzip clouseau-2.21.0-dist.zip && \
-  mv clouseau-2.21.0 /opt/clouseau && \
-  rm clouseau-2.21.0-dist.zip
-WORKDIR /opt/clouseau
-RUN mkdir ./bin
-ADD hosting/single/clouseau/clouseau ./bin/
-ADD hosting/single/clouseau/log4j.properties hosting/single/clouseau/clouseau.ini ./
-RUN chmod +x ./bin/clouseau
-# setup CouchDB
-WORKDIR /opt/couchdb
-ADD hosting/single/couch/vm.args hosting/single/couch/local.ini ./etc/
 # setup minio
 WORKDIR /minio
 ADD scripts/install-minio.sh ./install.sh

@@ -98,9 +67,6 @@ RUN chmod +x ./runner.sh
 ADD hosting/single/healthcheck.sh .
 RUN chmod +x ./healthcheck.sh
-ADD hosting/scripts/build-target-paths.sh .
-RUN chmod +x ./build-target-paths.sh
 # Script below sets the path for storing data based on $DATA_DIR
 # For Azure App Service install SSH & point data locations to /home
 ADD hosting/single/ssh/sshd_config /etc/

@@ -72,14 +72,11 @@ for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
 ln -s ${DATA_DIR}/.env /app/.env
 ln -s ${DATA_DIR}/.env /worker/.env
 # make these directories in runner, incase of mount
-mkdir -p ${DATA_DIR}/couch/{dbs,views}
 mkdir -p ${DATA_DIR}/minio
-mkdir -p ${DATA_DIR}/search
 chown -R couchdb:couchdb ${DATA_DIR}/couch
 redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
-/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
+/bbcouch-runner.sh &
 /minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
-/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
 /etc/init.d/nginx restart
 if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
   # Add monthly cron job to renew certbot certificate

@@ -90,15 +87,14 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
   /etc/init.d/nginx restart
 fi
+# wait for backend services to start
+sleep 10
 pushd app
 pm2 start -l /dev/stdout --name app "yarn run:docker"
 popd
 pushd worker
 pm2 start -l /dev/stdout --name worker "yarn run:docker"
 popd
-sleep 10
-echo "curl to couchdb endpoints"
-curl -X PUT ${COUCH_DB_URL}/_users
-curl -X PUT ${COUCH_DB_URL}/_replicator
 echo "end of runner.sh, sleeping ..."
 sleep infinity

@@ -0,0 +1,9 @@
module.exports = () => {
return {
dockerCompose: {
composeFilePath: "../../hosting",
composeFile: "docker-compose.test.yaml",
startupTimeout: 10000,
},
}
}

@@ -1,5 +1,5 @@
 {
-  "version": "2.3.0",
+  "version": "2.3.1",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -3,6 +3,7 @@
   "private": true,
   "devDependencies": {
     "@rollup/plugin-json": "^4.0.2",
+    "@types/supertest": "^2.0.12",
     "@typescript-eslint/parser": "5.45.0",
     "babel-eslint": "^10.0.3",
     "eslint": "^7.28.0",

@@ -62,6 +63,9 @@
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
     "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
     "build:docker:single": "npm run build:docker:pre && npm run build:docker:single:image",
+    "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
+    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
+    "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
     "build:docs": "lerna run build:docs",
     "release:helm": "node scripts/releaseHelmChart",
     "env:multi:enable": "lerna run env:multi:enable",

@@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

@@ -1,11 +1,11 @@
 import { Config } from "@jest/types"
+const preset = require("ts-jest/jest-preset")
-const config: Config.InitialOptions = {
-  preset: "ts-jest",
-  testEnvironment: "node",
-  setupFiles: ["./tests/jestSetup.ts"],
-  collectCoverageFrom: ["src/**/*.{js,ts}"],
-  coverageReporters: ["lcov", "json", "clover"],
+const baseConfig: Config.InitialProjectOptions = {
+  ...preset,
+  preset: "@trendyol/jest-testcontainers",
+  setupFiles: ["./tests/jestEnv.ts"],
+  setupFilesAfterEnv: ["./tests/jestSetup.ts"],
   transform: {
     "^.+\\.ts?$": "@swc/jest",
   },

@@ -13,12 +13,28 @@ const config: Config.InitialOptions = {
 if (!process.env.CI) {
   // use sources when not in CI
-  config.moduleNameMapper = {
+  baseConfig.moduleNameMapper = {
     "@budibase/types": "<rootDir>/../types/src",
+    "^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
   }
 } else {
   console.log("Running tests with compiled dependency sources")
 }
+const config: Config.InitialOptions = {
+  projects: [
+    {
+      ...baseConfig,
+      displayName: "sequential test",
+      testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
+      runner: "jest-serial-runner",
+    },
+    {
+      ...baseConfig,
+      testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
+    },
+  ],
+  collectCoverageFrom: ["src/**/*.{js,ts}"],
+  coverageReporters: ["lcov", "json", "clover"],
+}
 export default config

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",

@@ -23,7 +23,7 @@
   },
   "dependencies": {
     "@budibase/nano": "10.1.1",
-    "@budibase/types": "^2.3.0",
+    "@budibase/types": "^2.3.1",
     "@shopify/jest-koa-mocks": "5.0.1",
     "@techpass/passport-openidconnect": "0.3.2",
     "aws-cloudfront-sign": "2.2.0",

@@ -59,6 +59,7 @@
   "devDependencies": {
     "@swc/core": "^1.3.25",
     "@swc/jest": "^0.2.24",
+    "@trendyol/jest-testcontainers": "^2.1.1",
     "@types/chance": "1.1.3",
     "@types/ioredis": "4.28.0",
     "@types/jest": "27.5.1",

@@ -76,6 +77,7 @@
     "chance": "1.1.8",
     "ioredis-mock": "5.8.0",
     "jest": "28.1.1",
+    "jest-serial-runner": "^1.2.1",
     "koa": "2.13.4",
     "nodemon": "2.0.16",
     "pouchdb-adapter-memory": "7.2.2",

@@ -9,16 +9,8 @@ import {
   jwt as jwtPassport,
   local,
   authenticated,
-  auditLog,
   tenancy,
-  authError,
-  ssoCallbackUrl,
   csrf,
-  internalApi,
-  adminOnly,
-  builderOnly,
-  builderOrAdmin,
-  joiValidator,
   oidc,
   google,
 } from "../middleware"

@@ -2,14 +2,16 @@ require("../../../tests")
 const { Writethrough } = require("../writethrough")
 const { getDB } = require("../../db")
 const tk = require("timekeeper")
+const { structures } = require("../../../tests")
 const START_DATE = Date.now()
 tk.freeze(START_DATE)
 const DELAY = 5000
-const db = getDB("test")
-const db2 = getDB("test2")
+const db = getDB(structures.db.id())
+const db2 = getDB(structures.db.id())
 const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
 describe("writethrough", () => {

@@ -83,7 +83,14 @@ export class DatabaseImpl implements Database {
       throw new Error("DB does not exist")
     }
     if (!exists) {
-      await this.nano().db.create(this.name)
+      try {
+        await this.nano().db.create(this.name)
+      } catch (err: any) {
+        // Handling race conditions
+        if (err.statusCode !== 412) {
+          throw err
+        }
+      }
     }
     return this.nano().db.use(this.name)
   }

@@ -178,7 +185,7 @@ export class DatabaseImpl implements Database {
   async destroy() {
     try {
-      await this.nano().db.destroy(this.name)
+      return await this.nano().db.destroy(this.name)
     } catch (err: any) {
       // didn't exist, don't worry
       if (err.statusCode === 404) {

@@ -6,12 +6,6 @@ import { DatabaseImpl } from "../db"
 const dbList = new Set()
 export function getDB(dbName?: string, opts?: any): Database {
-  // TODO: once using the test image, need to remove this
-  if (env.isTest()) {
-    dbList.add(dbName)
-    // @ts-ignore
-    return getPouchDB(dbName, opts)
-  }
   return new DatabaseImpl(dbName, opts)
 }

@@ -1,19 +1,19 @@
 require("../../../tests")
-const { getDB } = require("../")
+const { structures } = require("../../../tests")
+const { getDB } = require("../db")
 describe("db", () => {
   describe("getDB", () => {
     it("returns a db", async () => {
-      const db = getDB("test")
+      const dbName = structures.db.id()
+      const db = getDB(dbName)
       expect(db).toBeDefined()
-      expect(db._adapter).toBe("memory")
-      expect(db.prefix).toBe("_pouch_")
-      expect(db.name).toBe("test")
+      expect(db.name).toBe(dbName)
     })
     it("uses the custom put function", async () => {
-      const db = getDB("test")
+      const db = getDB(structures.db.id())
       let doc = { _id: "test" }
       await db.put(doc)
       doc = await db.get(doc._id)

@@ -23,4 +23,3 @@ describe("db", () => {
     })
   })
 })

@@ -8,6 +8,7 @@ const {
 const { generateAppID, getPlatformUrl, getScopedConfig } = require("../utils")
 const tenancy = require("../../tenancy")
 const { Config, DEFAULT_TENANT_ID } = require("../../constants")
+import { generator } from "../../../tests"
 import env from "../../environment"
 describe("utils", () => {

@@ -66,17 +67,16 @@
   })
 })
-const DB_URL = "http://dburl.com"
 const DEFAULT_URL = "http://localhost:10000"
 const ENV_URL = "http://env.com"
-const setDbPlatformUrl = async () => {
+const setDbPlatformUrl = async (dbUrl: string) => {
   const db = tenancy.getGlobalDB()
-  db.put({
+  await db.put({
     _id: "config_settings",
     type: Config.SETTINGS,
     config: {
-      platformUrl: DB_URL,
+      platformUrl: dbUrl,
     },
   })
 }

@@ -119,9 +119,10 @@ describe("getPlatformUrl", () => {
   it("gets the platform url from the database", async () => {
     await tenancy.doInTenant(null, async () => {
-      await setDbPlatformUrl()
+      const dbUrl = generator.url()
+      await setDbPlatformUrl(dbUrl)
       const url = await getPlatformUrl()
-      expect(url).toBe(DB_URL)
+      expect(url).toBe(dbUrl)
     })
   })
 })

@@ -152,7 +153,7 @@ describe("getPlatformUrl", () => {
   it("never gets the platform url from the database", async () => {
     await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
-      await setDbPlatformUrl()
+      await setDbPlatformUrl(generator.url())
       const url = await getPlatformUrl()
       expect(url).toBe(TENANT_AWARE_URL)
     })

@@ -170,10 +171,11 @@ describe("getScopedConfig", () => {
   it("returns the platform url with an existing config", async () => {
     await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
-      await setDbPlatformUrl()
+      const dbUrl = generator.url()
+      await setDbPlatformUrl(dbUrl)
       const db = tenancy.getGlobalDB()
       const config = await getScopedConfig(db, { type: Config.SETTINGS })
-      expect(config.platformUrl).toBe(DB_URL)
+      expect(config.platformUrl).toBe(dbUrl)
     })
   })

@@ -10,7 +10,7 @@ import {
   APP_PREFIX,
 } from "../constants"
 import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
-import { doWithDB, allDbs, directCouchAllDbs } from "./db"
+import { doWithDB, directCouchAllDbs } from "./db"
 import { getAppMetadata } from "../cache/appMetadata"
 import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
 import * as events from "../events"

@@ -262,10 +262,7 @@ export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
  */
 export async function getAllDbs(opts = { efficient: false }) {
   const efficient = opts && opts.efficient
-  // specifically for testing we use the pouch package for this
-  if (env.isTest()) {
-    return allDbs()
-  }
   let dbs: any[] = []
   async function addDbs(queryString?: string) {
     const json = await directCouchAllDbs(queryString)

@@ -2,7 +2,7 @@ import { newid } from "./utils"
 import * as events from "./events"
 import { StaticDatabases } from "./db"
 import { doWithDB } from "./db"
-import { Installation, IdentityType } from "@budibase/types"
+import { Installation, IdentityType, Database } from "@budibase/types"
 import * as context from "./context"
 import semver from "semver"
 import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"

@@ -14,6 +14,24 @@ export const getInstall = async (): Promise<Installation> => {
     useTenancy: false,
   })
 }
+async function createInstallDoc(platformDb: Database) {
+  const install: Installation = {
+    _id: StaticDatabases.PLATFORM_INFO.docs.install,
+    installId: newid(),
+    version: pkg.version,
+  }
+  try {
+    const resp = await platformDb.put(install)
+    install._rev = resp.rev
+    return install
+  } catch (err: any) {
+    if (err.status === 409) {
+      return getInstallFromDB()
+    } else {
+      throw err
+    }
+  }
+}
 const getInstallFromDB = async (): Promise<Installation> => {
   return doWithDB(

@@ -26,13 +44,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
       )
     } catch (e: any) {
       if (e.status === 404) {
-        install = {
-          _id: StaticDatabases.PLATFORM_INFO.docs.install,
-          installId: newid(),
-          version: pkg.version,
-        }
-        const resp = await platformDb.put(install)
-        install._rev = resp.rev
+        install = await createInstallDoc(platformDb)
       } else {
         throw e
       }

@@ -64,7 +64,9 @@ const print = (fn: any, data: any[]) => {
     message = message + ` [identityId=${identityId}]`
   }
-  fn(message, data)
+  if (!process.env.CI) {
+    fn(message, data)
+  }
 }
 const logging = (ctx: any, next: any) => {

@@ -3,7 +3,7 @@
 exports[`migrations should match snapshot 1`] = `
 Object {
   "_id": "migrations",
-  "_rev": "1-a32b0b708e59eeb006ed5e063cfeb36a",
+  "_rev": "1-2f64479842a0513aa8b97f356b0b9127",
   "createdAt": "2020-01-01T00:00:00.000Z",
   "test": 1577836800000,
   "updatedAt": "2020-01-01T00:00:00.000Z",

@@ -1,9 +1,9 @@
 require("../../../tests")
 const { runMigrations, getMigrationsDoc } = require("../index")
-const { getDB } = require("../../db")
-const {
-  StaticDatabases,
-} = require("../../constants")
+const { getGlobalDBName, getDB } = require("../../db")
+const { structures, testEnv } = require("../../../tests")
+testEnv.multiTenant()
 let db

@@ -17,8 +17,11 @@ describe("migrations", () => {
     fn: migrationFunction
   }]
+  let tenantId
   beforeEach(() => {
-    db = getDB(StaticDatabases.GLOBAL.name)
+    tenantId = structures.tenant.id()
+    db = getDB(getGlobalDBName(tenantId))
   })
   afterEach(async () => {

@@ -27,7 +30,7 @@ describe("migrations", () => {
   })
   const migrate = () => {
-    return runMigrations(MIGRATIONS)
+    return runMigrations(MIGRATIONS, { tenantIds: [tenantId]})
   }
   it("should run a new migration", async () => {

@@ -361,8 +361,8 @@ export const deleteFolder = async (
     Prefix: folder,
   }
-  let response: any = await client.listObjects(listParams).promise()
-  if (response.Contents.length === 0) {
+  const existingObjectsResponse = await client.listObjects(listParams).promise()
+  if (existingObjectsResponse.Contents?.length === 0) {
     return
   }
   const deleteParams: any = {

@@ -372,13 +372,13 @@
     },
   }
-  response.Contents.forEach((content: any) => {
+  existingObjectsResponse.Contents?.forEach((content: any) => {
     deleteParams.Delete.Objects.push({ Key: content.Key })
   })
-  response = await client.deleteObjects(deleteParams).promise()
+  const deleteResponse = await client.deleteObjects(deleteParams).promise()
   // can only empty 1000 items at once
-  if (response.Deleted.length === 1000) {
+  if (deleteResponse.Deleted?.length === 1000) {
     return deleteFolder(bucketName, folder)
   }
 }

@@ -2,13 +2,14 @@ import { structures } from "../../../tests"
 import * as utils from "../../utils"
 import * as events from "../../events"
 import * as db from "../../db"
-import { DEFAULT_TENANT_ID, Header } from "../../constants"
+import { Header } from "../../constants"
 import { doInTenant } from "../../context"
+import { newid } from "../../utils"
 describe("utils", () => {
   describe("platformLogout", () => {
     it("should call platform logout", async () => {
-      await doInTenant(DEFAULT_TENANT_ID, async () => {
+      await doInTenant(structures.tenant.id(), async () => {
         const ctx = structures.koa.newContext()
         await utils.platformLogout({ ctx, userId: "test" })
         expect(events.auth.logout).toBeCalledTimes(1)

@@ -54,7 +55,7 @@ describe("utils", () => {
     const app = structures.apps.app(expected)
     // set custom url
-    const appUrl = "custom-url"
+    const appUrl = newid()
     app.url = `/${appUrl}`
     ctx.path = `/app/${appUrl}`

@@ -0,0 +1,23 @@
import env from "../src/environment"
import { mocks } from "./utilities"
// must explicitly enable fetch mock
mocks.fetch.enable()
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}

@@ -1,28 +1,4 @@
 import env from "../src/environment"
-import { mocks } from "./utilities"
+import { testContainerUtils } from "./utilities"
-// must explicitly enable fetch mock
+testContainerUtils.setupEnv(env)
-mocks.fetch.enable()
-// mock all dates to 2020-01-01T00:00:00.000Z
-// use tk.reset() to use real dates in individual tests
-import tk from "timekeeper"
-tk.freeze(mocks.date.MOCK_DATE)
-env._set("SELF_HOSTED", "1")
-env._set("NODE_ENV", "jest")
-env._set("JWT_SECRET", "test-jwtsecret")
-env._set("LOG_LEVEL", "silent")
-env._set("MINIO_URL", "http://localhost")
-env._set("MINIO_ACCESS_KEY", "test")
-env._set("MINIO_SECRET_KEY", "test")
-if (!process.env.DEBUG) {
-  global.console.log = jest.fn() // console.log are ignored in tests
-}
-if (!process.env.CI) {
-  // set a longer timeout in dev for debugging
-  // 100 seconds
-  jest.setTimeout(100000)
-}

@@ -2,6 +2,7 @@ export * as mocks from "./mocks"
 export * as structures from "./structures"
 export { generator } from "./structures"
 export * as testEnv from "./testEnv"
+export * as testContainerUtils from "./testContainerUtils"
 import * as dbConfig from "./db"
 dbConfig.init()

@@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `db_${newid()}`
}

@@ -8,3 +8,5 @@ export * as apps from "./apps"
 export * as koa from "./koa"
 export * as licenses from "./licenses"
 export * as plugins from "./plugins"
+export * as tenant from "./tenants"
+export * as db from "./db"

@@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `tenant-${newid()}`
}

@@ -0,0 +1,42 @@
function getTestContainerSettings(serverName: string, key: string) {
const entry = Object.entries(global).find(
([k]) =>
k.includes(`_${serverName.toUpperCase()}`) &&
k.includes(`_${key.toUpperCase()}__`)
)
if (!entry) {
return null
}
return entry[1]
}
function getCouchConfig() {
const port = getTestContainerSettings("COUCHDB-SERVICE", "PORT_5984")
return {
port,
url: `http://${getTestContainerSettings("COUCHDB-SERVICE", "IP")}:${port}`,
}
}
function getMinioConfig() {
const port = getTestContainerSettings("MINIO-SERVICE", "PORT_9000")
return {
port,
url: `http://${getTestContainerSettings("MINIO-SERVICE", "IP")}:${port}`,
}
}
export function setupEnv(...envs: any[]) {
const configs = [
{ key: "COUCH_DB_PORT", value: getCouchConfig().port },
{ key: "COUCH_DB_URL", value: getCouchConfig().url },
{ key: "MINIO_PORT", value: getMinioConfig().port },
{ key: "MINIO_URL", value: getMinioConfig().url },
]
for (const config of configs.filter(x => x.value !== null)) {
for (const env of envs) {
env._set(config.key, config.value)
}
}
}

File diff suppressed because it is too large.

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",

@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "1.2.1",
-    "@budibase/string-templates": "^2.3.0",
+    "@budibase/string-templates": "^2.3.1",
     "@spectrum-css/accordion": "3.0.24",
     "@spectrum-css/actionbutton": "1.0.1",
     "@spectrum-css/actiongroup": "1.0.1",

@@ -25,7 +25,7 @@
   let open = false
   //eslint-disable-next-line
-  const STRIP_NAME_REGEX = /(?<=\.)(.*?)(?=\ })/g
+  const STRIP_NAME_REGEX = /(\w+?)(?=\ })/g
   // Strips the name out of the value which is {{ env.Variable }} resulting in an array like ["Variable"]
   $: hbsValue = String(value)?.match(STRIP_NAME_REGEX) || []

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {

@@ -58,10 +58,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^2.3.0",
-    "@budibase/client": "^2.3.0",
-    "@budibase/frontend-core": "^2.3.0",
-    "@budibase/string-templates": "^2.3.0",
+    "@budibase/bbui": "^2.3.1",
+    "@budibase/client": "^2.3.1",
+    "@budibase/frontend-core": "^2.3.1",
+    "@budibase/string-templates": "^2.3.1",
     "@fortawesome/fontawesome-svg-core": "^6.2.1",
     "@fortawesome/free-brands-svg-icons": "^6.2.1",
     "@fortawesome/free-solid-svg-icons": "^6.2.1",

@@ -70,7 +70,7 @@
     return Number(value)
   }
   if (type === "options") {
-    return [value]
+    return value
   }
   if (type === "array") {
     if (Array.isArray(value)) {

@@ -1,6 +1,7 @@
-import { writable } from "svelte/store"
+import { writable, get } from "svelte/store"
 import { API } from "api"
 import { Constants } from "@budibase/frontend-core"
+import { licensing } from "stores/portal"
 export function createEnvironmentStore() {
   const { subscribe, update } = writable({

@@ -17,12 +18,14 @@ export function createEnvironmentStore() {
   }
   async function loadVariables() {
-    const envVars = await API.fetchEnvironmentVariables()
-    const mappedVars = envVars.variables.map(name => ({ name }))
-    update(store => {
-      store.variables = mappedVars
-      return store
-    })
+    if (get(licensing).environmentVariablesEnabled) {
+      const envVars = await API.fetchEnvironmentVariables()
+      const mappedVars = envVars.variables.map(name => ({ name }))
+      update(store => {
+        store.variables = mappedVars
+        return store
+      })
+    }
   }
   async function createVariable(data) {

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

@@ -26,9 +26,9 @@
     "outputPath": "build"
   },
   "dependencies": {
-    "@budibase/backend-core": "^2.3.0",
-    "@budibase/string-templates": "^2.3.0",
-    "@budibase/types": "^2.3.0",
+    "@budibase/backend-core": "^2.3.1",
+    "@budibase/string-templates": "^2.3.1",
+    "@budibase/types": "^2.3.1",
     "axios": "0.21.2",
     "chalk": "4.1.0",
     "cli-progress": "3.11.2",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",

@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^2.3.0",
-    "@budibase/frontend-core": "^2.3.0",
-    "@budibase/string-templates": "^2.3.0",
+    "@budibase/bbui": "^2.3.1",
+    "@budibase/frontend-core": "^2.3.1",
+    "@budibase/string-templates": "^2.3.1",
     "@spectrum-css/button": "^3.0.3",
     "@spectrum-css/card": "^3.0.3",
     "@spectrum-css/divider": "^1.0.3",

@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "^2.3.0",
+    "@budibase/bbui": "^2.3.1",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/sdk",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "description": "Budibase Public API SDK",
   "author": "Budibase",
   "license": "MPL-2.0",

@@ -34,7 +34,7 @@ module AwsMock {
     // @ts-ignore
     this.listObjects = jest.fn(
       response({
-        Contents: {},
+        Contents: [],
       })
     )

@@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

@@ -1,16 +1,13 @@
 import { Config } from "@jest/types"
-import * as fs from "fs"
-const config: Config.InitialOptions = {
-  testEnvironment: "node",
+import * as fs from "fs"
+const preset = require("ts-jest/jest-preset")
+const baseConfig: Config.InitialProjectOptions = {
+  ...preset,
+  preset: "@trendyol/jest-testcontainers",
   setupFiles: ["./src/tests/jestEnv.ts"],
   setupFilesAfterEnv: ["./src/tests/jestSetup.ts"],
-  collectCoverageFrom: [
-    "src/**/*.{js,ts}",
-    // The use of coverage with couchdb view functions breaks tests
-    "!src/db/views/staticViews.*",
-  ],
-  coverageReporters: ["lcov", "json", "clover"],
   transform: {
     "^.+\\.ts?$": "@swc/jest",
   },

@@ -18,19 +15,39 @@ const config: Config.InitialOptions = {
 if (!process.env.CI) {
   // use sources when not in CI
-  config.moduleNameMapper = {
+  baseConfig.moduleNameMapper = {
     "@budibase/backend-core/(.*)": "<rootDir>/../backend-core/$1",
     "@budibase/backend-core": "<rootDir>/../backend-core/src",
     "@budibase/types": "<rootDir>/../types/src",
+    "^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
   }
   // add pro sources if they exist
   if (fs.existsSync("../../../budibase-pro")) {
-    config.moduleNameMapper["@budibase/pro"] =
+    baseConfig.moduleNameMapper["@budibase/pro"] =
       "<rootDir>/../../../budibase-pro/packages/pro/src"
   }
 } else {
   console.log("Running tests with compiled dependency sources")
 }
+const config: Config.InitialOptions = {
+  projects: [
+    {
+      ...baseConfig,
+      displayName: "sequential test",
+      testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
+      runner: "jest-serial-runner",
+    },
+    {
+      ...baseConfig,
+      testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
+    },
+  ],
+  collectCoverageFrom: [
+    "src/**/*.{js,ts}",
+    // The use of coverage with couchdb view functions breaks tests
+    "!src/db/views/staticViews.*",
+  ],
+  coverageReporters: ["lcov", "json", "clover"],
+}
 export default config

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "2.3.0",
+  "version": "2.3.1",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {

@@ -43,11 +43,11 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "10.0.3",
-    "@budibase/backend-core": "^2.3.0",
-    "@budibase/client": "^2.3.0",
-    "@budibase/pro": "2.3.0",
-    "@budibase/string-templates": "^2.3.0",
-    "@budibase/types": "^2.3.0",
+    "@budibase/backend-core": "^2.3.1",
+    "@budibase/client": "^2.3.1",
+    "@budibase/pro": "2.3.1",
+    "@budibase/string-templates": "^2.3.1",
+    "@budibase/types": "^2.3.1",
     "@bull-board/api": "3.7.0",
     "@bull-board/koa": "3.9.4",
     "@elastic/elasticsearch": "7.10.0",

@@ -125,6 +125,7 @@
     "@jest/test-sequencer": "24.9.0",
     "@swc/core": "^1.3.25",
     "@swc/jest": "^0.2.24",
+    "@trendyol/jest-testcontainers": "^2.1.1",
     "@types/apidoc": "0.50.0",
     "@types/bson": "4.2.0",
     "@types/global-agent": "2.1.1",

@@ -151,6 +152,7 @@
     "is-wsl": "2.2.0",
     "jest": "28.1.1",
    "jest-openapi": "0.14.2",
+    "jest-serial-runner": "^1.2.1",
     "nodemon": "2.0.15",
     "openapi-types": "9.3.1",
     "openapi-typescript": "5.2.0",

@@ -41,7 +41,7 @@ const datasets = {
 describe("Rest Importer", () => {
   const config = new TestConfig(false)
-  beforeEach(async () => {
+  beforeAll(async () => {
     await config.init()
   })

@@ -7,7 +7,7 @@ Array [
   "entities": Array [
     Object {
       "_id": "ta_users",
-      "_rev": "1-6f4013e796887f1771bf7837598d87e7",
+      "_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44",
       "createdAt": "2020-01-01T00:00:00.000Z",
       "name": "Users",
       "primaryDisplay": "email",

@@ -10,8 +10,11 @@ describe("/static", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  beforeAll(async () => {
     app = await config.init()
+  })
+
+  beforeEach(()=>{
     jest.clearAllMocks()
   })

@@ -7,7 +7,7 @@ describe("/api/keys", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  beforeAll(async () => {
     await config.init()
   })

@@ -14,18 +14,22 @@ jest.mock("../../../utilities/redis", () => ({
 import { clearAllApps, checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 import { AppStatus } from "../../../db/utils"
-import { events } from "@budibase/backend-core"
+import { events, utils } from "@budibase/backend-core"
 import env from "../../../environment"
+jest.setTimeout(15000)
 describe("/applications", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
   afterAll(setup.afterAll)
-  beforeEach(async () => {
-    await clearAllApps()
+  beforeAll(async () => {
     await config.init()
+  })
+
+  beforeEach(async () => {
     jest.clearAllMocks()
   })

@@ -33,7 +37,7 @@ describe("/applications", () => {
   it("creates empty app", async () => {
     const res = await request
       .post("/api/applications")
-      .field("name", "My App")
+      .field("name", utils.newid())
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)

@@ -44,7 +48,7 @@ describe("/applications", () => {
   it("creates app from template", async () => {
     const res = await request
       .post("/api/applications")
-      .field("name", "My App")
+      .field("name", utils.newid())
       .field("useTemplate", "true")
       .field("templateKey", "test")
       .field("templateString", "{}") // override the file download

@@ -59,7 +63,7 @@ describe("/applications", () => {
   it("creates app from file", async () => {
     const res = await request
       .post("/api/applications")
-      .field("name", "My App")
+      .field("name", utils.newid())
       .field("useTemplate", "true")
       .set(config.defaultHeaders())
       .attach("templateFile", "src/api/routes/tests/data/export.txt")

@@ -106,6 +110,11 @@ describe("/applications", () => {
   })
   describe("fetch", () => {
+    beforeEach(async () => {
+      // Clean all apps but the onde from config
+      await clearAllApps(config.getTenantId(), [config.getAppId()!])
+    })
     it("lists all applications", async () => {
       await config.createApp("app1")
       await config.createApp("app2")

@@ -266,6 +275,11 @@ describe("/applications", () => {
   })
   describe("unpublish", () => {
+    beforeEach(async () => {
+      // We want to republish as the unpublish will delete the prod app
+      await config.publish()
+    })
     it("should unpublish app with dev app ID", async () => {
       const appId = config.getAppId()
       await request

@@ -7,7 +7,7 @@ describe("/authenticate", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  beforeAll(async () => {
     await config.init()
   })

@@ -18,7 +18,7 @@ describe("/authenticate", () => {
         .set(config.defaultHeaders())
         .expect("Content-Type", /json/)
         .expect(200)
-      expect(res.body._id).toEqual(generateUserMetadataID("us_uuid1"))
+      expect(res.body._id).toEqual(generateUserMetadataID(config.user._id))
     })
   })
 })

@@ -10,12 +10,16 @@ const MAX_RETRIES = 4
 const { TRIGGER_DEFINITIONS, ACTION_DEFINITIONS } = require("../../../automations")
 const { events } = require("@budibase/backend-core")
+jest.setTimeout(30000)
 describe("/automations", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
   afterAll(setup.afterAll)
+  // For some reason this cannot be a beforeAll or the test "tests the automation successfully" fail
   beforeEach(async () => {
     await config.init()
   })

@@ -305,7 +309,7 @@ describe("/automations", () => {
       .expect('Content-Type', /json/)
       .expect(200)
     expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
   })
   it("should apply authorization to endpoint", async () => {

@@ -1,17 +1,8 @@
-jest.mock("@budibase/backend-core", () => {
-  const core = jest.requireActual("@budibase/backend-core")
-  return {
-    ...core,
-    objectStore: {
-      budibaseTempDir: core.objectStore.budibaseTempDir,
-    },
-  }
-})
 import * as setup from "./utilities"
 import { events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 describe("/backups", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()

@@ -44,9 +35,9 @@ describe("/backups", () => {
   describe("calculateBackupStats", () => {
     it("should be able to calculate the backup statistics", async () => {
-      config.createAutomation()
-      config.createScreen()
-      let res = await sdk.backups.calculateBackupStats(config.getAppId())
+      await config.createAutomation()
+      await config.createScreen()
+      let res = await sdk.backups.calculateBackupStats(config.getAppId()!)
       expect(res.automations).toEqual(1)
       expect(res.datasources).toEqual(1)
       expect(res.screens).toEqual(1)

View File

@@ -1,14 +1,20 @@
-import { db as dbCore } from "@budibase/backend-core"
 import { AppStatus } from "../../../db/utils"
 import * as setup from "./utilities"
+import { wipeDb } from "./utilities/TestFunctions"
 describe("/cloud", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()
   afterAll(setup.afterAll)
+  beforeAll(() => {
+    // Importing is only allowed in self hosted environments
+    config.modeSelf()
+  })
   beforeEach(async () => {
     await config.init()
   })
@@ -22,19 +28,7 @@ describe("/cloud", () => {
   it("should be able to import apps", async () => {
     // first we need to delete any existing apps on the system so it looks clean otherwise the
     // import will not run
-    await request
-      .post(
-        `/api/applications/${dbCore.getProdAppID(
-          config.getAppId()
-        )}/unpublish`
-      )
-      .set(config.defaultHeaders())
-      .expect(204)
-    await request
-      .delete(`/api/applications/${config.getAppId()}`)
-      .set(config.defaultHeaders())
-      .expect("Content-Type", /json/)
-      .expect(200)
+    await wipeDb()
     // get a count of apps before the import
     const preImportApps = await request
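The import test used to unpublish and delete the current app through the applications API; it now clears every database with the wipeDb helper this commit adds to TestFunctions (shown later in this diff). Roughly, the test flow becomes the following; the count and import steps are paraphrased because they sit outside the hunk (a sketch, not the literal file):

import { wipeDb } from "./utilities/TestFunctions"

it("should be able to import apps", async () => {
  // the import endpoint only runs against a clean system, so wipe everything first
  await wipeDb()
  // then: count existing apps, POST the import, and assert the count has increased
})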

View File

@ -7,7 +7,7 @@ describe("/component", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })

View File

@@ -5,6 +5,10 @@ import { checkCacheForDynamicVariable } from "../../../threads/utils"
 import { context, events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
+import tk from "timekeeper"
+import { mocks } from "@budibase/backend-core/tests"
+tk.freeze(mocks.date.MOCK_DATE)
 let { basicDatasource } = setup.structures
 const pg = require("pg")
@@ -15,11 +19,13 @@ describe("/datasources", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  async function setupTest() {
     await config.init()
     datasource = await config.createDatasource()
     jest.clearAllMocks()
-  })
+  }
+  beforeAll(setupTest)
   describe("create", () => {
     it("should create a new datasource", async () => {
@@ -56,7 +62,14 @@
     datasource: any,
     fields: { path: string; queryString: string }
   ) {
-    return config.previewQuery(request, config, datasource, fields)
+    return config.previewQuery(
+      request,
+      config,
+      datasource,
+      fields,
+      undefined,
+      ""
+    )
   }
   it("should invalidate changed or removed variables", async () => {
@@ -91,6 +104,8 @@
   })
   describe("fetch", () => {
+    beforeAll(setupTest)
     it("returns all the datasources from the server", async () => {
       const res = await request
         .get(`/api/datasources`)
@@ -159,6 +174,8 @@
   })
   describe("destroy", () => {
+    beforeAll(setupTest)
     it("deletes queries for the datasource after deletion and returns a success message", async () => {
       await config.createQuery()

View File

@ -8,7 +8,7 @@ describe("/dev", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
jest.clearAllMocks() jest.clearAllMocks()
}) })

View File

@ -7,7 +7,7 @@ describe("/integrations", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })

View File

@ -10,7 +10,7 @@ describe("/layouts", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
layout = await config.createLayout() layout = await config.createLayout()
jest.clearAllMocks() jest.clearAllMocks()

View File

@ -9,7 +9,7 @@ describe("/metadata", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
automation = await config.createAutomation() automation = await config.createAutomation()
}) })

View File

@ -7,7 +7,7 @@ describe("run misc tests", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })

View File

@ -15,8 +15,11 @@ describe("/permission", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
})
beforeEach(async () => {
table = await config.createTable() table = await config.createTable()
row = await config.createRow() row = await config.createRow()
perms = await config.addPermission(STD_ROLE_ID, table._id) perms = await config.addPermission(STD_ROLE_ID, table._id)

View File

@@ -1,3 +1,6 @@
+const tk = require( "timekeeper")
+tk.freeze(Date.now())
 // Mock out postgres for this
 jest.mock("pg")
 jest.mock("node-fetch")
@@ -27,10 +30,15 @@ describe("/queries", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  const setupTest = async()=>{
     await config.init()
     datasource = await config.createDatasource()
     query = await config.createQuery()
+  }
+  beforeAll(async () => {
+    await setupTest()
   })
   async function createInvalidIntegration() {
@@ -101,6 +109,10 @@
   })
   describe("fetch", () => {
+    beforeEach(async() => {
+      await setupTest()
+    })
     it("returns all the queries from the server", async () => {
       const res = await request
         .get(`/api/queries`)
@@ -178,6 +190,10 @@
   })
   describe("destroy", () => {
+    beforeEach(async() => {
+      await setupTest()
+    })
     it("deletes a query and returns a success message", async () => {
       await request
         .delete(`/api/queries/${query._id}/${query._rev}`)
@@ -239,6 +255,10 @@
   })
   describe("execute", () => {
+    beforeEach(async() => {
+      await setupTest()
+    })
     it("should be able to execute the query", async () => {
       const res = await request
         .post(`/api/queries/${query._id}`)
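The queries suite extracts its fixture creation into a reusable setupTest function: it runs once via beforeAll for the read-only tests and again via beforeEach inside the suites that consume or delete those fixtures (fetch, destroy, execute). Condensed, the pattern looks like this (a sketch, with the surrounding declarations assumed):

const setup = require("./utilities")

let config = setup.getConfig()
let datasource, query

const setupTest = async () => {
  await config.init()
  datasource = await config.createDatasource()
  query = await config.createQuery()
}

beforeAll(setupTest) // baseline fixtures for the whole file

describe("destroy", () => {
  beforeEach(setupTest) // these tests delete the query, so rebuild it each time
  // ...
})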

View File

@ -10,7 +10,7 @@ describe("/roles", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })
@ -60,6 +60,11 @@ describe("/roles", () => {
}) })
describe("fetch", () => { describe("fetch", () => {
beforeAll(async () => {
// Recreate the app
await config.init()
})
it("should list custom roles, plus 2 default roles", async () => { it("should list custom roles, plus 2 default roles", async () => {
const customRole = await config.createRole() const customRole = await config.createRole()

View File

@ -16,7 +16,7 @@ describe("/routing", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
screen = basicScreen() screen = basicScreen()
screen.routing.route = route screen.routing.route = route

View File

@@ -1,3 +1,8 @@
+const tk = require( "timekeeper")
+const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
+tk.freeze(timestamp)
 const { outputProcessing } = require("../../../utilities/rowProcessor")
 const setup = require("./utilities")
 const { basicRow } = setup.structures
@@ -20,8 +25,11 @@ describe("/rows", () => {
   afterAll(setup.afterAll)
-  beforeEach(async () => {
+  beforeAll(async () => {
     await config.init()
+  })
+  beforeEach(async()=>{
     table = await config.createTable()
     row = basicRow(table._id)
   })
@@ -111,8 +119,8 @@
       _id: existing._id,
       _rev: existing._rev,
       type: "row",
-      createdAt: "2020-01-01T00:00:00.000Z",
-      updatedAt: "2020-01-01T00:00:00.000Z",
+      createdAt: timestamp,
+      updatedAt: timestamp,
     })
     await assertQueryUsage(queryUsage + 1)
   })
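Several suites (rows, queries, datasources) now freeze the clock with timekeeper at module load, so any createdAt/updatedAt values written during the test are deterministic and the assertions can reference the same constant rather than a hard-coded date. The essence of the pattern (a sketch; savedRow stands in for whatever row the test just wrote):

const tk = require("timekeeper")

const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp) // Date.now() and new Date() return this instant from here on

// later, inside a test:
expect(savedRow.createdAt).toEqual(timestamp)
expect(savedRow.updatedAt).toEqual(timestamp)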

View File

@ -10,7 +10,7 @@ describe("/screens", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
screen = await config.createScreen() screen = await config.createScreen()
}) })

View File

@ -8,7 +8,8 @@ describe("/static", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
config.modeSelf()
app = await config.init() app = await config.init()
}) })

View File

@ -10,7 +10,7 @@ describe("/tables", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
const app = await config.init() const app = await config.init()
appId = app.appId appId = app.appId
}) })

View File

@ -6,7 +6,7 @@ describe("/templates", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })

View File

@@ -3,6 +3,8 @@ const { checkPermissionsEndpoint } = require("./utilities/TestFunctions")
 const setup = require("./utilities")
 const { BUILTIN_ROLE_IDS } = roles
+jest.setTimeout(30000)
 jest.mock("../../../utilities/workerRequests", () => ({
   getGlobalUsers: jest.fn(() => {
     return {}
@@ -19,6 +21,7 @@ describe("/users", () => {
   afterAll(setup.afterAll)
+  // For some reason this cannot be a beforeAll or the test "should be able to update the user" fail
   beforeEach(async () => {
     await config.init()
   })

View File

@@ -4,6 +4,8 @@ import { AppStatus } from "../../../../db/utils"
 import { roles, tenancy, context } from "@budibase/backend-core"
 import { TENANT_ID } from "../../../../tests/utilities/structures"
 import env from "../../../../environment"
+import { db } from "@budibase/backend-core"
+import Nano from "@budibase/nano"
 class Request {
   appId: any
@@ -30,7 +32,10 @@ export const getAllTableRows = async (config: any) => {
   return req.body
 }
-export const clearAllApps = async (tenantId = TENANT_ID) => {
+export const clearAllApps = async (
+  tenantId = TENANT_ID,
+  exceptions: Array<string> = []
+) => {
   await tenancy.doInTenant(tenantId, async () => {
     const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
     await appController.fetch(req)
@@ -38,7 +43,7 @@ export const clearAllApps = async (tenantId = TENANT_ID) => {
     if (!apps || apps.length <= 0) {
       return
     }
-    for (let app of apps) {
+    for (let app of apps.filter((x: any) => !exceptions.includes(x.appId))) {
       const { appId } = app
       const req = new Request(null, { appId })
       await runRequest(appId, appController.destroy, req)
@@ -55,6 +60,24 @@ export const clearAllAutomations = async (config: any) => {
   }
 }
+export const wipeDb = async () => {
+  const couchInfo = db.getCouchInfo()
+  const nano = Nano({
+    url: couchInfo.url,
+    requestDefaults: {
+      headers: {
+        Authorization: couchInfo.cookie,
+      },
+    },
+    parseUrl: false,
+  })
+  let dbs
+  do {
+    dbs = await nano.db.list()
+    await Promise.all(dbs.map(x => nano.db.destroy(x)))
+  } while (dbs.length)
+}
 export const createRequest = (
   request: any,
   method: any,
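Two utility changes stand out here: clearAllApps gains an exceptions list so callers can keep selected apps alive, and wipeDb drops every CouchDB database by looping over nano.db.list() until nothing is left, which guards against databases that appear while the wipe is in flight. Typical call sites would look roughly like this (a sketch, not taken from the diff):

import { clearAllApps, wipeDb } from "./utilities/TestFunctions"
import { TENANT_ID } from "../../../../tests/utilities/structures"

// keep the current app while clearing everything else in the tenant
await clearAllApps(TENANT_ID, [config.getAppId()])

// or drop the whole CouchDB instance, e.g. before an app import test
await wipeDb()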

View File

@@ -1,5 +1,6 @@
 import TestConfig from "../../../../tests/utilities/TestConfiguration"
 import env from "../../../../environment"
+import supertest from "supertest"
 export * as structures from "../../../../tests/utilities/structures"
 function user() {
@@ -44,7 +45,8 @@ export function delay(ms: number) {
   return new Promise(resolve => setTimeout(resolve, ms))
 }
-let request: any, config: any
+let request: supertest.SuperTest<supertest.Test> | undefined | null,
+  config: TestConfig | null
 export function beforeAll() {
   config = new TestConfig()
@@ -65,14 +67,14 @@ export function getRequest() {
   if (!request) {
     beforeAll()
   }
-  return request
+  return request!
 }
 export function getConfig() {
   if (!config) {
     beforeAll()
   }
-  return config
+  return config!
 }
 export async function switchToSelfHosted(func: any) {
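Typing request as supertest.SuperTest<supertest.Test> | undefined | null and config as TestConfig | null means the getters must convince TypeScript the values exist after the lazy beforeAll() call, hence the non-null assertions (request!, config!). Callers keep the same shape as before but now get real types; a sketch of a consuming test (the /api/tables endpoint is used only as an example):

import * as setup from "./utilities"

const request = setup.getRequest() // supertest.SuperTest<supertest.Test>
const config = setup.getConfig()   // TestConfig

it("uses the typed helpers", async () => {
  await request
    .get("/api/tables")
    .set(config.defaultHeaders())
    .expect(200)
})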

View File

@ -28,8 +28,11 @@ describe("/views", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
})
beforeEach(async() => {
table = await config.createTable(priceTable()) table = await config.createTable(priceTable())
}) })

View File

@ -9,7 +9,8 @@ describe("/webhooks", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
beforeEach(async () => { const setupTest = async () => {
config.modeSelf()
await config.init() await config.init()
const autoConfig = basicAutomation() const autoConfig = basicAutomation()
autoConfig.definition.trigger = { autoConfig.definition.trigger = {
@ -18,7 +19,9 @@ describe("/webhooks", () => {
} }
await config.createAutomation(autoConfig) await config.createAutomation(autoConfig)
webhook = await config.createWebhook() webhook = await config.createWebhook()
}) }
beforeAll(setupTest)
describe("create", () => { describe("create", () => {
it("should create a webhook successfully", async () => { it("should create a webhook successfully", async () => {
@ -44,6 +47,8 @@ describe("/webhooks", () => {
}) })
describe("fetch", () => { describe("fetch", () => {
beforeAll(setupTest)
it("returns the correct routing for basic user", async () => { it("returns the correct routing for basic user", async () => {
const res = await request const res = await request
.get(`/api/webhooks`) .get(`/api/webhooks`)
@ -64,6 +69,8 @@ describe("/webhooks", () => {
}) })
describe("delete", () => { describe("delete", () => {
beforeAll(setupTest)
it("should successfully delete", async () => { it("should successfully delete", async () => {
const res = await request const res = await request
.delete(`/api/webhooks/${webhook._id}/${webhook._rev}`) .delete(`/api/webhooks/${webhook._id}/${webhook._rev}`)
@ -84,6 +91,8 @@ describe("/webhooks", () => {
}) })
describe("build schema", () => { describe("build schema", () => {
beforeAll(setupTest)
it("should allow building a schema", async () => { it("should allow building a schema", async () => {
const res = await request const res = await request
.post(`/api/webhooks/schema/${config.getAppId()}/${webhook._id}`) .post(`/api/webhooks/schema/${config.getAppId()}/${webhook._id}`)
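Like the /static and /cloud suites above, the webhook tests flip the test configuration into self hosted mode before creating anything, presumably because parts of the webhook flow assume a self hosted install; the shared setupTest is then re-run by the suites that delete or mutate the webhook. A compressed view of the lifecycle (a sketch; the automation wiring is omitted and the declarations are assumed):

let webhook: any
const config = setup.getConfig()

const setupTest = async () => {
  config.modeSelf() // behave as a self hosted install for the rest of the file
  await config.init()
  webhook = await config.createWebhook()
}

beforeAll(setupTest)

describe("delete", () => {
  beforeAll(setupTest) // rebuild the webhook this suite is about to remove
  // ...
})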

View File

@@ -62,6 +62,7 @@ initialiseWebsockets(server)
 let shuttingDown = false,
   errCode = 0
 server.on("close", async () => {
   // already in process
   if (shuttingDown) {
@@ -71,7 +72,7 @@ server.on("close", async () => {
   console.log("Server Closed")
   await automations.shutdown()
   await redis.shutdown()
-  await events.shutdown()
+  events.shutdown()
   await Thread.shutdown()
   api.shutdown()
   if (!env.isTest()) {

View File

@ -21,7 +21,7 @@ const setup = require("./utilities")
describe("Run through some parts of the automations system", () => { describe("Run through some parts of the automations system", () => {
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await automation.init() await automation.init()
await config.init() await config.init()
}) })
@ -29,9 +29,9 @@ describe("Run through some parts of the automations system", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
it("should be able to init in builder", async () => { it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: "app_123" }) await triggers.externalTrigger(basicAutomation(), { a: 1, appId: config.appId })
await wait(100) await wait(100)
expect(thread.execute).toHaveBeenCalled() expect(thread.execute).toHaveBeenCalled()
}) })
it("should check coercion", async () => { it("should check coercion", async () => {

View File

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the bash action", () => { describe("test the bash action", () => {
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })
afterAll(setup.afterAll) afterAll(setup.afterAll)

View File

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs let inputs
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
inputs = { inputs = {
username: "joe_bloggs", username: "joe_bloggs",

View File

@@ -1,10 +1,9 @@
 const setup = require("./utilities")
 describe("test the execute query action", () => {
-  let datasource
   let config = setup.getConfig()
-  beforeEach(async () => {
+  beforeAll(async () => {
     await config.init()
     await config.createDatasource()
@@ -41,7 +40,7 @@ describe("test the execute query action", () => {
       query: { queryId: "wrong_id" }
     }
   )
-    expect(res.response).toEqual('{"status":404,"name":"not_found","message":"missing","reason":"missing"}')
+    expect(res.response).toEqual('Error: missing')
     expect(res.success).toEqual(false)
   })
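The expected failure message changes from the raw CouchDB error document to Error: missing, which is what a stringified not_found error produces, suggesting the automation step now reports the error object itself rather than the raw response body. Illustrative only, this is not the commit's implementation and executeQuery is a hypothetical helper:

let response: string
let success = true
try {
  response = await executeQuery("wrong_id") // hypothetical helper for the step
} catch (err: any) {
  // new Error("missing").toString() === "Error: missing"
  response = err instanceof Error ? err.toString() : String(err)
  success = false
}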

View File

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the execute script action", () => { describe("test the execute script action", () => {
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })
afterAll(setup.afterAll) afterAll(setup.afterAll)

View File

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs let inputs
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
inputs = { inputs = {
requestMethod: "POST", requestMethod: "POST",

View File

@ -36,7 +36,7 @@ const setup = require("./utilities")
describe("test the outgoing webhook action", () => { describe("test the outgoing webhook action", () => {
let inputs let inputs
let config = setup.getConfig() let config = setup.getConfig()
beforeEach(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })

Some files were not shown because too many files have changed in this diff.