Merge pull request #9419 from Budibase/feature/dependencies-image

Chore - use testcontainers
Adria Navarro 2023-02-06 09:09:07 +00:00 committed by GitHub
commit 5ee9073d9b
118 changed files with 2332 additions and 884 deletions

@ -0,0 +1,32 @@
FROM couchdb:3.2.1
ENV COUCHDB_USER admin
ENV COUCHDB_PASSWORD admin
EXPOSE 5984
RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update && apt-get install -y --no-install-recommends openjdk-8-jre && \
rm -rf /var/lib/apt/lists/
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD clouseau/clouseau ./bin/
ADD clouseau/log4j.properties clouseau/clouseau.ini ./
# setup CouchDB
WORKDIR /opt/couchdb
ADD couch/vm.args couch/local.ini ./etc/
WORKDIR /
ADD build-target-paths.sh .
ADD runner.sh ./bbcouch-runner.sh
RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau ./build-target-paths.sh
CMD ["./bbcouch-runner.sh"]

@ -0,0 +1,24 @@
#!/bin/bash
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR=/home
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}
chown -R couchdb:couchdb $DATA_DIR/couch/
apt update
apt-get install -y openssh-server
echo "root:Docker!" | chpasswd
mkdir -p /tmp
chmod +x /tmp/ssh_setup.sh \
&& (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null)
cp /etc/sshd_config /etc/ssh/sshd_config
/etc/init.d/ssh restart
sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini
else
sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini
sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini
fi
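
To make the sed templating concrete, a sketch of the substitution, assuming a DATA_DIR placeholder inside the two ini files (their contents are not part of this diff, so the exact key is hypothetical):

```
# hypothetical line in /opt/clouseau/clouseau.ini before this script runs:
#   dir=DATA_DIR/search
# after the default branch (sed "s#DATA_DIR#/data#g"):
#   dir=/data/search
# after the TARGETBUILD=aas branch:
#   dir=/home/search
```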

hosting/couchdb/runner.sh

@ -0,0 +1,14 @@
#!/bin/bash
DATA_DIR=${DATA_DIR:-/data}
mkdir -p ${DATA_DIR}
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
/build-target-paths.sh
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
sleep 10
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_users
curl -X PUT http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@localhost:5984/_replicator
sleep infinity
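
Once the container is up, a quick check that the PUT calls above created the two system databases CouchDB needs (credentials are whatever COUCHDB_USER/COUCHDB_PASSWORD resolve to; admin/admin in this image):

```
curl -s http://admin:admin@localhost:5984/_all_dbs
# expected to include "_users" and "_replicator"
```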

@ -0,0 +1,23 @@
FROM budibase/couchdb
ENV DATA_DIR /data
RUN mkdir /data
RUN apt-get update && \
apt-get install -y --no-install-recommends redis-server
WORKDIR /minio
ADD scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
WORKDIR /
ADD dependencies/runner.sh .
RUN chmod +x ./runner.sh
EXPOSE 5984
EXPOSE 9000
EXPOSE 9001
EXPOSE 6379
CMD ["./runner.sh"]

@ -0,0 +1,57 @@
# Docker Image for Running Budibase Tests
## Overview
This image contains the basic setup for running Budibase tests: CouchDB, Redis, and MinIO in a single container.
## Usage
- Build the Image
- Run the Container
### Build the Image
The guidance below is based on building the Budibase single image on Debian 11 and AlmaLinux 8. If you use another distro or OS, you will need to amend the commands to suit.
#### Install Node
Budibase requires a more recent version of node (14+) than is available in the base Debian repos, so:
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
apt install -y nodejs
node -v
```
Install yarn, jest and lerna:
```
npm install -g yarn jest lerna
```
#### Install Docker
```
apt install -y docker.io
```
Check the version of each installed tool. This process was tested with the version numbers below, so YMMV using anything else:
- Docker: 20.10.5
- node: 16.15.1
- yarn: 1.22.19
- lerna: 5.1.4
#### Get the Code
Clone the Budibase repo
```
git clone https://github.com/Budibase/budibase.git
cd budibase
```
#### Setup Node
Node setup:
```
node ./hosting/scripts/setup.js
yarn
yarn bootstrap
yarn build
```
#### Build Image
The following yarn command does some prep and then runs the docker build command:
```
yarn build:docker:dependencies
```
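
The README lists "Run the Container" but stops at the build step; a sketch of that step, using the ports the Dockerfile exposes (the REDIS_PASSWORD value is illustrative; runner.sh expects one to be set):

```
docker run --rm \
  -p 5984:5984 -p 9000:9000 -p 9001:9001 -p 6379:6379 \
  -e REDIS_PASSWORD=budibase \
  budibase/dependencies:latest
```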

@ -0,0 +1,8 @@
#!/bin/bash
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server ${DATA_DIR}/minio --console-address ":9001" > /dev/stdout 2>&1 &
echo "Budibase dependencies started..."
sleep infinity
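
A rough smoke test once all three services report started, reusing the endpoints the compose healthchecks below rely on (the redis password is whatever REDIS_PASSWORD was set to):

```
redis-cli -a "$REDIS_PASSWORD" ping              # expect PONG
curl -f http://localhost:9000/minio/health/live  # minio liveness
curl -f http://localhost:5984/_up                # couchdb up check
```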

@ -42,25 +42,16 @@ services:
couchdb-service:
# platform: linux/amd64
container_name: budi-couchdb-dev
container_name: budi-couchdb3-dev
restart: on-failure
image: ibmcom/couchdb3
image: budibase/couchdb
environment:
- COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
- COUCHDB_USER=${COUCH_DB_USER}
ports:
- "${COUCH_DB_PORT}:5984"
volumes:
- couchdb3_data:/opt/couchdb/data
couch-init:
container_name: budi-couchdb-init-dev
image: curlimages/curl
environment:
PUT_CALL: "curl -u ${COUCH_DB_USER}:${COUCH_DB_PASSWORD} -X PUT couchdb-service:5984"
depends_on:
- couchdb-service
command: ["sh","-c","sleep 10 && $${PUT_CALL}/_users && $${PUT_CALL}/_replicator; fg;"]
- couchdb_data:/data
redis-service:
container_name: budi-redis-dev
@ -73,7 +64,7 @@ services:
- redis_data:/data
volumes:
couchdb3_data:
couchdb_data:
driver: local
minio_data:
driver: local

@ -0,0 +1,47 @@
version: "3"
# optional ports are specified throughout for more advanced use cases.
services:
minio-service:
restart: on-failure
# Last version that supports the "fs" backend
image: minio/minio:RELEASE.2022-10-24T18-35-07Z
ports:
- 9000
- 9001
environment:
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
couchdb-service:
# platform: linux/amd64
restart: on-failure
image: budibase/couchdb
environment:
- COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
- COUCHDB_USER=${COUCH_DB_USER}
ports:
- 5984
- 4369
- 9100
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5984/_up"]
interval: 30s
timeout: 20s
retries: 3
redis-service:
restart: on-failure
image: redis
command: redis-server --requirepass ${REDIS_PASSWORD}
ports:
- 6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]

@ -0,0 +1,10 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio
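
TARGETARCH is normally injected by docker buildx for each target platform; running the script by hand is only useful for testing, e.g.:

```
# hypothetical manual invocations (the Dockerfile adds this file as install.sh)
TARGETARCH=arm64 bash install.sh   # downloads the linux-arm64 minio binary
TARGETARCH=amd64 bash install.sh   # downloads the linux-amd64 minio binary
```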

@ -0,0 +1,15 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag budibase-couchdb budibase/couchdb:$tag
docker push --all-tags budibase/couchdb
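
An example invocation (the script's filename is not shown in this diff, so the name used here is hypothetical):

```
# tags the local budibase-couchdb image as budibase/couchdb:v3.2.1 and pushes all tags
./tag-and-push-couchdb.sh v3.2.1
```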

@ -18,7 +18,7 @@ WORKDIR /worker
ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
FROM budibase/couchdb
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
@ -29,23 +29,9 @@ ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
# ENV CUSTOM_DOMAIN=budi001.custom.com \
# See runner.sh for Env Vars
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user; they will be written
# to the .env file in the /data directory for use later on
# REDIS_PASSWORD=budibase \
# COUCHDB_PASSWORD=budibase \
# COUCHDB_USER=budibase \
# COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
# INTERNAL_API_KEY=budibase \
# JWT_SECRET=testsecret \
# MINIO_ACCESS_KEY=budibase \
# MINIO_SECRET_KEY=budibase \
# install base dependencies
RUN apt-get update && \
apt-get install -y software-properties-common wget nginx uuid-runtime && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update
@ -53,7 +39,7 @@ RUN apt-get update && \
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y libaio1 nodejs nginx openjdk-8-jdk redis-server unzip && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
# setup nginx
@ -69,23 +55,6 @@ RUN mkdir -p scripts/integrations/oracle
ADD packages/server/scripts/integrations/oracle scripts/integrations/oracle
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
# setup clouseau
WORKDIR /
RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \
unzip clouseau-2.21.0-dist.zip && \
mv clouseau-2.21.0 /opt/clouseau && \
rm clouseau-2.21.0-dist.zip
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD hosting/single/clouseau/clouseau ./bin/
ADD hosting/single/clouseau/log4j.properties hosting/single/clouseau/clouseau.ini ./
RUN chmod +x ./bin/clouseau
# setup CouchDB
WORKDIR /opt/couchdb
ADD hosting/single/couch/vm.args hosting/single/couch/local.ini ./etc/
# setup minio
WORKDIR /minio
ADD scripts/install-minio.sh ./install.sh
@ -98,9 +67,6 @@ RUN chmod +x ./runner.sh
ADD hosting/single/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
ADD hosting/single/ssh/sshd_config /etc/

@ -72,14 +72,11 @@ for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
ln -s ${DATA_DIR}/.env /app/.env
ln -s ${DATA_DIR}/.env /worker/.env
# make these directories in runner, in case of mount
mkdir -p ${DATA_DIR}/couch/{dbs,views}
mkdir -p ${DATA_DIR}/minio
mkdir -p ${DATA_DIR}/search
chown -R couchdb:couchdb ${DATA_DIR}/couch
redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 &
/bbcouch-runner.sh &
/minio/minio server --console-address ":9001" ${DATA_DIR}/minio > /dev/stdout 2>&1 &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
# Add monthly cron job to renew certbot certificate
@ -90,15 +87,14 @@ if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then
/etc/init.d/nginx restart
fi
# wait for backend services to start
sleep 10
pushd app
pm2 start -l /dev/stdout --name app "yarn run:docker"
popd
pushd worker
pm2 start -l /dev/stdout --name worker "yarn run:docker"
popd
sleep 10
echo "curl to couchdb endpoints"
curl -X PUT ${COUCH_DB_URL}/_users
curl -X PUT ${COUCH_DB_URL}/_replicator
echo "end of runner.sh, sleeping ..."
sleep infinity

@ -0,0 +1,9 @@
module.exports = () => {
return {
dockerCompose: {
composeFilePath: "../../hosting",
composeFile: "docker-compose.test.yaml",
startupTimeout: 10000,
},
}
}

@ -3,6 +3,7 @@
"private": true,
"devDependencies": {
"@rollup/plugin-json": "^4.0.2",
"@types/supertest": "^2.0.12",
"@typescript-eslint/parser": "5.45.0",
"babel-eslint": "^10.0.3",
"eslint": "^7.28.0",
@ -62,6 +63,9 @@
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single": "npm run build:docker:pre && npm run build:docker:single:image",
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
"build:docs": "lerna run build:docs",
"release:helm": "node scripts/releaseHelmChart",
"env:multi:enable": "lerna run env:multi:enable",

@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

@ -1,11 +1,11 @@
import { Config } from "@jest/types"
const preset = require("ts-jest/jest-preset")
const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
setupFiles: ["./tests/jestSetup.ts"],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
const baseConfig: Config.InitialProjectOptions = {
...preset,
preset: "@trendyol/jest-testcontainers",
setupFiles: ["./tests/jestEnv.ts"],
setupFilesAfterEnv: ["./tests/jestSetup.ts"],
transform: {
"^.+\\.ts?$": "@swc/jest",
},
@ -13,12 +13,28 @@ const config: Config.InitialOptions = {
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
baseConfig.moduleNameMapper = {
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
} else {
console.log("Running tests with compiled dependency sources")
}
const config: Config.InitialOptions = {
projects: [
{
...baseConfig,
displayName: "sequential test",
testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
runner: "jest-serial-runner",
},
{
...baseConfig,
testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
},
],
collectCoverageFrom: ["src/**/*.{js,ts}"],
coverageReporters: ["lcov", "json", "clover"],
}
export default config

@ -59,6 +59,7 @@
"devDependencies": {
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "27.5.1",
@ -76,6 +77,7 @@
"chance": "1.1.8",
"ioredis-mock": "5.8.0",
"jest": "28.1.1",
"jest-serial-runner": "^1.2.1",
"koa": "2.13.4",
"nodemon": "2.0.16",
"pouchdb-adapter-memory": "7.2.2",
@ -86,4 +88,4 @@
"typescript": "4.7.3"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

@ -9,16 +9,8 @@ import {
jwt as jwtPassport,
local,
authenticated,
auditLog,
tenancy,
authError,
ssoCallbackUrl,
csrf,
internalApi,
adminOnly,
builderOnly,
builderOrAdmin,
joiValidator,
oidc,
google,
} from "../middleware"

@ -2,14 +2,16 @@ require("../../../tests")
const { Writethrough } = require("../writethrough")
const { getDB } = require("../../db")
const tk = require("timekeeper")
const { structures } = require("../../../tests")
const START_DATE = Date.now()
tk.freeze(START_DATE)
const DELAY = 5000
const db = getDB("test")
const db2 = getDB("test2")
const db = getDB(structures.db.id())
const db2 = getDB(structures.db.id())
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
describe("writethrough", () => {

@ -83,7 +83,14 @@ export class DatabaseImpl implements Database {
throw new Error("DB does not exist")
}
if (!exists) {
await this.nano().db.create(this.name)
try {
await this.nano().db.create(this.name)
} catch (err: any) {
// Handling race conditions
if (err.statusCode !== 412) {
throw err
}
}
}
return this.nano().db.use(this.name)
}
@ -178,7 +185,7 @@ export class DatabaseImpl implements Database {
async destroy() {
try {
await this.nano().db.destroy(this.name)
return await this.nano().db.destroy(this.name)
} catch (err: any) {
// didn't exist, don't worry
if (err.statusCode === 404) {

@ -6,12 +6,6 @@ import { DatabaseImpl } from "../db"
const dbList = new Set()
export function getDB(dbName?: string, opts?: any): Database {
// TODO: once using the test image, need to remove this
if (env.isTest()) {
dbList.add(dbName)
// @ts-ignore
return getPouchDB(dbName, opts)
}
return new DatabaseImpl(dbName, opts)
}

@ -1,19 +1,19 @@
require("../../../tests")
const { getDB } = require("../")
const { structures } = require("../../../tests")
const { getDB } = require("../db")
describe("db", () => {
describe("db", () => {
describe("getDB", () => {
it("returns a db", async () => {
const db = getDB("test")
const dbName = structures.db.id()
const db = getDB(dbName)
expect(db).toBeDefined()
expect(db._adapter).toBe("memory")
expect(db.prefix).toBe("_pouch_")
expect(db.name).toBe("test")
expect(db.name).toBe(dbName)
})
it("uses the custom put function", async () => {
const db = getDB("test")
const db = getDB(structures.db.id())
let doc = { _id: "test" }
await db.put(doc)
doc = await db.get(doc._id)
@ -23,4 +23,3 @@ describe("db", () => {
})
})
})

@ -8,6 +8,7 @@ const {
const { generateAppID, getPlatformUrl, getScopedConfig } = require("../utils")
const tenancy = require("../../tenancy")
const { Config, DEFAULT_TENANT_ID } = require("../../constants")
import { generator } from "../../../tests"
import env from "../../environment"
describe("utils", () => {
@ -66,17 +67,16 @@ describe("utils", () => {
})
})
const DB_URL = "http://dburl.com"
const DEFAULT_URL = "http://localhost:10000"
const ENV_URL = "http://env.com"
const setDbPlatformUrl = async () => {
const setDbPlatformUrl = async (dbUrl: string) => {
const db = tenancy.getGlobalDB()
db.put({
await db.put({
_id: "config_settings",
type: Config.SETTINGS,
config: {
platformUrl: DB_URL,
platformUrl: dbUrl,
},
})
}
@ -119,9 +119,10 @@ describe("getPlatformUrl", () => {
it("gets the platform url from the database", async () => {
await tenancy.doInTenant(null, async () => {
await setDbPlatformUrl()
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const url = await getPlatformUrl()
expect(url).toBe(DB_URL)
expect(url).toBe(dbUrl)
})
})
})
@ -152,7 +153,7 @@ describe("getPlatformUrl", () => {
it("never gets the platform url from the database", async () => {
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
await setDbPlatformUrl()
await setDbPlatformUrl(generator.url())
const url = await getPlatformUrl()
expect(url).toBe(TENANT_AWARE_URL)
})
@ -170,10 +171,11 @@ describe("getScopedConfig", () => {
it("returns the platform url with an existing config", async () => {
await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => {
await setDbPlatformUrl()
const dbUrl = generator.url()
await setDbPlatformUrl(dbUrl)
const db = tenancy.getGlobalDB()
const config = await getScopedConfig(db, { type: Config.SETTINGS })
expect(config.platformUrl).toBe(DB_URL)
expect(config.platformUrl).toBe(dbUrl)
})
})

@ -10,7 +10,7 @@ import {
APP_PREFIX,
} from "../constants"
import { getTenantId, getGlobalDB, getGlobalDBName } from "../context"
import { doWithDB, allDbs, directCouchAllDbs } from "./db"
import { doWithDB, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events"
@ -262,10 +262,7 @@ export function getStartEndKeyURL(baseKey: any, tenantId?: string) {
*/
export async function getAllDbs(opts = { efficient: false }) {
const efficient = opts && opts.efficient
// specifically for testing we use the pouch package for this
if (env.isTest()) {
return allDbs()
}
let dbs: any[] = []
async function addDbs(queryString?: string) {
const json = await directCouchAllDbs(queryString)

@ -2,7 +2,7 @@ import { newid } from "./utils"
import * as events from "./events"
import { StaticDatabases } from "./db"
import { doWithDB } from "./db"
import { Installation, IdentityType } from "@budibase/types"
import { Installation, IdentityType, Database } from "@budibase/types"
import * as context from "./context"
import semver from "semver"
import { bustCache, withCache, TTL, CacheKey } from "./cache/generic"
@ -14,6 +14,24 @@ export const getInstall = async (): Promise<Installation> => {
useTenancy: false,
})
}
async function createInstallDoc(platformDb: Database) {
const install: Installation = {
_id: StaticDatabases.PLATFORM_INFO.docs.install,
installId: newid(),
version: pkg.version,
}
try {
const resp = await platformDb.put(install)
install._rev = resp.rev
return install
} catch (err: any) {
if (err.status === 409) {
return getInstallFromDB()
} else {
throw err
}
}
}
const getInstallFromDB = async (): Promise<Installation> => {
return doWithDB(
@ -26,13 +44,7 @@ const getInstallFromDB = async (): Promise<Installation> => {
)
} catch (e: any) {
if (e.status === 404) {
install = {
_id: StaticDatabases.PLATFORM_INFO.docs.install,
installId: newid(),
version: pkg.version,
}
const resp = await platformDb.put(install)
install._rev = resp.rev
install = await createInstallDoc(platformDb)
} else {
throw e
}

@ -64,7 +64,9 @@ const print = (fn: any, data: any[]) => {
message = message + ` [identityId=${identityId}]`
}
fn(message, data)
if (!process.env.CI) {
fn(message, data)
}
}
const logging = (ctx: any, next: any) => {

@ -3,7 +3,7 @@
exports[`migrations should match snapshot 1`] = `
Object {
"_id": "migrations",
"_rev": "1-a32b0b708e59eeb006ed5e063cfeb36a",
"_rev": "1-2f64479842a0513aa8b97f356b0b9127",
"createdAt": "2020-01-01T00:00:00.000Z",
"test": 1577836800000,
"updatedAt": "2020-01-01T00:00:00.000Z",

@ -1,9 +1,9 @@
require("../../../tests")
const { runMigrations, getMigrationsDoc } = require("../index")
const { getDB } = require("../../db")
const {
StaticDatabases,
} = require("../../constants")
const { getGlobalDBName, getDB } = require("../../db")
const { structures, testEnv } = require("../../../tests")
testEnv.multiTenant()
let db
@ -17,8 +17,11 @@ describe("migrations", () => {
fn: migrationFunction
}]
let tenantId
beforeEach(() => {
db = getDB(StaticDatabases.GLOBAL.name)
tenantId = structures.tenant.id()
db = getDB(getGlobalDBName(tenantId))
})
afterEach(async () => {
@ -27,7 +30,7 @@ describe("migrations", () => {
})
const migrate = () => {
return runMigrations(MIGRATIONS)
return runMigrations(MIGRATIONS, { tenantIds: [tenantId]})
}
it("should run a new migration", async () => {

@ -361,8 +361,8 @@ export const deleteFolder = async (
Prefix: folder,
}
let response: any = await client.listObjects(listParams).promise()
if (response.Contents.length === 0) {
const existingObjectsResponse = await client.listObjects(listParams).promise()
if (existingObjectsResponse.Contents?.length === 0) {
return
}
const deleteParams: any = {
@ -372,13 +372,13 @@ export const deleteFolder = async (
},
}
response.Contents.forEach((content: any) => {
existingObjectsResponse.Contents?.forEach((content: any) => {
deleteParams.Delete.Objects.push({ Key: content.Key })
})
response = await client.deleteObjects(deleteParams).promise()
const deleteResponse = await client.deleteObjects(deleteParams).promise()
// can only empty 1000 items at once
if (response.Deleted.length === 1000) {
if (deleteResponse.Deleted?.length === 1000) {
return deleteFolder(bucketName, folder)
}
}

@ -2,13 +2,14 @@ import { structures } from "../../../tests"
import * as utils from "../../utils"
import * as events from "../../events"
import * as db from "../../db"
import { DEFAULT_TENANT_ID, Header } from "../../constants"
import { Header } from "../../constants"
import { doInTenant } from "../../context"
import { newid } from "../../utils"
describe("utils", () => {
describe("platformLogout", () => {
it("should call platform logout", async () => {
await doInTenant(DEFAULT_TENANT_ID, async () => {
await doInTenant(structures.tenant.id(), async () => {
const ctx = structures.koa.newContext()
await utils.platformLogout({ ctx, userId: "test" })
expect(events.auth.logout).toBeCalledTimes(1)
@ -54,7 +55,7 @@ describe("utils", () => {
const app = structures.apps.app(expected)
// set custom url
const appUrl = "custom-url"
const appUrl = newid()
app.url = `/${appUrl}`
ctx.path = `/app/${appUrl}`

@ -0,0 +1,23 @@
import env from "../src/environment"
import { mocks } from "./utilities"
// must explicitly enable fetch mock
mocks.fetch.enable()
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log calls are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}

@ -1,28 +1,4 @@
import env from "../src/environment"
import { mocks } from "./utilities"
import { testContainerUtils } from "./utilities"
// must explicitly enable fetch mock
mocks.fetch.enable()
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
import tk from "timekeeper"
tk.freeze(mocks.date.MOCK_DATE)
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")
env._set("MINIO_URL", "http://localhost")
env._set("MINIO_ACCESS_KEY", "test")
env._set("MINIO_SECRET_KEY", "test")
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log calls are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging
// 100 seconds
jest.setTimeout(100000)
}
testContainerUtils.setupEnv(env)

@ -2,6 +2,7 @@ export * as mocks from "./mocks"
export * as structures from "./structures"
export { generator } from "./structures"
export * as testEnv from "./testEnv"
export * as testContainerUtils from "./testContainerUtils"
import * as dbConfig from "./db"
dbConfig.init()

@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `db_${newid()}`
}

@ -8,3 +8,5 @@ export * as apps from "./apps"
export * as koa from "./koa"
export * as licenses from "./licenses"
export * as plugins from "./plugins"
export * as tenant from "./tenants"
export * as db from "./db"

@ -0,0 +1,5 @@
import { newid } from "../../../src/newid"
export function id() {
return `tenant-${newid()}`
}

@ -0,0 +1,42 @@
function getTestContainerSettings(serverName: string, key: string) {
const entry = Object.entries(global).find(
([k]) =>
k.includes(`_${serverName.toUpperCase()}`) &&
k.includes(`_${key.toUpperCase()}__`)
)
if (!entry) {
return null
}
return entry[1]
}
function getCouchConfig() {
const port = getTestContainerSettings("COUCHDB-SERVICE", "PORT_5984")
return {
port,
url: `http://${getTestContainerSettings("COUCHDB-SERVICE", "IP")}:${port}`,
}
}
function getMinioConfig() {
const port = getTestContainerSettings("MINIO-SERVICE", "PORT_9000")
return {
port,
url: `http://${getTestContainerSettings("MINIO-SERVICE", "IP")}:${port}`,
}
}
export function setupEnv(...envs: any[]) {
const configs = [
{ key: "COUCH_DB_PORT", value: getCouchConfig().port },
{ key: "COUCH_DB_URL", value: getCouchConfig().url },
{ key: "MINIO_PORT", value: getMinioConfig().port },
{ key: "MINIO_URL", value: getMinioConfig().url },
]
for (const config of configs.filter(x => x.value !== null)) {
for (const env of envs) {
env._set(config.key, config.value)
}
}
}

File diff suppressed because it is too large

@ -34,7 +34,7 @@ module AwsMock {
// @ts-ignore
this.listObjects = jest.fn(
response({
Contents: {},
Contents: [],
})
)

@ -0,0 +1,8 @@
const { join } = require("path")
require("dotenv").config({
path: join(__dirname, "..", "..", "hosting", ".env"),
})
const jestTestcontainersConfigGenerator = require("../../jestTestcontainersConfigGenerator")
module.exports = jestTestcontainersConfigGenerator()

@ -1,16 +1,13 @@
import { Config } from "@jest/types"
import * as fs from "fs"
const config: Config.InitialOptions = {
testEnvironment: "node",
import * as fs from "fs"
const preset = require("ts-jest/jest-preset")
const baseConfig: Config.InitialProjectOptions = {
...preset,
preset: "@trendyol/jest-testcontainers",
setupFiles: ["./src/tests/jestEnv.ts"],
setupFilesAfterEnv: ["./src/tests/jestSetup.ts"],
collectCoverageFrom: [
"src/**/*.{js,ts}",
// The use of coverage with couchdb view functions breaks tests
"!src/db/views/staticViews.*",
],
coverageReporters: ["lcov", "json", "clover"],
transform: {
"^.+\\.ts?$": "@swc/jest",
},
@ -18,19 +15,39 @@ const config: Config.InitialOptions = {
if (!process.env.CI) {
// use sources when not in CI
config.moduleNameMapper = {
baseConfig.moduleNameMapper = {
"@budibase/backend-core/(.*)": "<rootDir>/../backend-core/$1",
"@budibase/backend-core": "<rootDir>/../backend-core/src",
"@budibase/types": "<rootDir>/../types/src",
"^axios.*$": "<rootDir>/node_modules/axios/lib/axios.js",
}
// add pro sources if they exist
if (fs.existsSync("../../../budibase-pro")) {
config.moduleNameMapper["@budibase/pro"] =
baseConfig.moduleNameMapper["@budibase/pro"] =
"<rootDir>/../../../budibase-pro/packages/pro/src"
}
} else {
console.log("Running tests with compiled dependency sources")
}
const config: Config.InitialOptions = {
projects: [
{
...baseConfig,
displayName: "sequential test",
testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
runner: "jest-serial-runner",
},
{
...baseConfig,
testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
},
],
collectCoverageFrom: [
"src/**/*.{js,ts}",
// The use of coverage with couchdb view functions breaks tests
"!src/db/views/staticViews.*",
],
coverageReporters: ["lcov", "json", "clover"],
}
export default config

@ -125,6 +125,7 @@
"@jest/test-sequencer": "24.9.0",
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/apidoc": "0.50.0",
"@types/bson": "4.2.0",
"@types/global-agent": "2.1.1",
@ -151,6 +152,7 @@
"is-wsl": "2.2.0",
"jest": "28.1.1",
"jest-openapi": "0.14.2",
"jest-serial-runner": "^1.2.1",
"nodemon": "2.0.15",
"openapi-types": "9.3.1",
"openapi-typescript": "5.2.0",
@ -170,4 +172,4 @@
"oracledb": "5.3.0"
},
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc"
}
}

@ -41,7 +41,7 @@ const datasets = {
describe("Rest Importer", () => {
const config = new TestConfig(false)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -7,7 +7,7 @@ Array [
"entities": Array [
Object {
"_id": "ta_users",
"_rev": "1-6f4013e796887f1771bf7837598d87e7",
"_rev": "1-2375e1bc58aeec664dc1b1f04ad43e44",
"createdAt": "2020-01-01T00:00:00.000Z",
"name": "Users",
"primaryDisplay": "email",

@ -10,8 +10,11 @@ describe("/static", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
app = await config.init()
})
beforeEach(()=>{
jest.clearAllMocks()
})

@ -7,7 +7,7 @@ describe("/api/keys", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -14,18 +14,22 @@ jest.mock("../../../utilities/redis", () => ({
import { clearAllApps, checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import { events } from "@budibase/backend-core"
import { events, utils } from "@budibase/backend-core"
import env from "../../../environment"
jest.setTimeout(15000)
describe("/applications", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeEach(async () => {
await clearAllApps()
beforeAll(async () => {
await config.init()
})
beforeEach(async () => {
jest.clearAllMocks()
})
@ -33,7 +37,7 @@ describe("/applications", () => {
it("creates empty app", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
@ -44,7 +48,7 @@ describe("/applications", () => {
it("creates app from template", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.field("useTemplate", "true")
.field("templateKey", "test")
.field("templateString", "{}") // override the file download
@ -59,7 +63,7 @@ describe("/applications", () => {
it("creates app from file", async () => {
const res = await request
.post("/api/applications")
.field("name", "My App")
.field("name", utils.newid())
.field("useTemplate", "true")
.set(config.defaultHeaders())
.attach("templateFile", "src/api/routes/tests/data/export.txt")
@ -106,6 +110,11 @@ describe("/applications", () => {
})
describe("fetch", () => {
beforeEach(async () => {
// Clean all apps but the one from config
await clearAllApps(config.getTenantId(), [config.getAppId()!])
})
it("lists all applications", async () => {
await config.createApp("app1")
await config.createApp("app2")
@ -266,6 +275,11 @@ describe("/applications", () => {
})
describe("unpublish", () => {
beforeEach(async () => {
// We want to republish as the unpublish will delete the prod app
await config.publish()
})
it("should unpublish app with dev app ID", async () => {
const appId = config.getAppId()
await request

@ -7,7 +7,7 @@ describe("/authenticate", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
@ -18,7 +18,7 @@ describe("/authenticate", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body._id).toEqual(generateUserMetadataID("us_uuid1"))
expect(res.body._id).toEqual(generateUserMetadataID(config.user._id))
})
})
})

@ -10,12 +10,16 @@ const MAX_RETRIES = 4
const { TRIGGER_DEFINITIONS, ACTION_DEFINITIONS } = require("../../../automations")
const { events } = require("@budibase/backend-core")
jest.setTimeout(30000)
describe("/automations", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
// For some reason this cannot be a beforeAll or the test "tests the automation successfully" fails
beforeEach(async () => {
await config.init()
})
@ -305,7 +309,7 @@ describe("/automations", () => {
.expect('Content-Type', /json/)
.expect(200)
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
expect(res.body[0]).toEqual(expect.objectContaining(autoConfig))
})
it("should apply authorization to endpoint", async () => {

@ -1,17 +1,8 @@
jest.mock("@budibase/backend-core", () => {
const core = jest.requireActual("@budibase/backend-core")
return {
...core,
objectStore: {
budibaseTempDir: core.objectStore.budibaseTempDir,
},
}
})
import * as setup from "./utilities"
import { events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
describe("/backups", () => {
let request = setup.getRequest()
let config = setup.getConfig()
@ -44,9 +35,9 @@ describe("/backups", () => {
describe("calculateBackupStats", () => {
it("should be able to calculate the backup statistics", async () => {
config.createAutomation()
config.createScreen()
let res = await sdk.backups.calculateBackupStats(config.getAppId())
await config.createAutomation()
await config.createScreen()
let res = await sdk.backups.calculateBackupStats(config.getAppId()!)
expect(res.automations).toEqual(1)
expect(res.datasources).toEqual(1)
expect(res.screens).toEqual(1)

@ -1,14 +1,20 @@
import { db as dbCore } from "@budibase/backend-core"
import { AppStatus } from "../../../db/utils"
import * as setup from "./utilities"
import { wipeDb } from "./utilities/TestFunctions"
describe("/cloud", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeAll(() => {
// Importing is only allowed in self hosted environments
config.modeSelf()
})
beforeEach(async () => {
await config.init()
})
@ -22,19 +28,7 @@ describe("/cloud", () => {
it("should be able to import apps", async () => {
// first we need to delete any existing apps on the system so it looks clean, otherwise the
// import will not run
await request
.post(
`/api/applications/${dbCore.getProdAppID(
config.getAppId()
)}/unpublish`
)
.set(config.defaultHeaders())
.expect(204)
await request
.delete(`/api/applications/${config.getAppId()}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
await wipeDb()
// get a count of apps before the import
const preImportApps = await request

@ -7,7 +7,7 @@ describe("/component", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -5,6 +5,10 @@ import { checkCacheForDynamicVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
tk.freeze(mocks.date.MOCK_DATE)
let { basicDatasource } = setup.structures
const pg = require("pg")
@ -15,11 +19,13 @@ describe("/datasources", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
async function setupTest() {
await config.init()
datasource = await config.createDatasource()
jest.clearAllMocks()
})
}
beforeAll(setupTest)
describe("create", () => {
it("should create a new datasource", async () => {
@ -56,7 +62,14 @@ describe("/datasources", () => {
datasource: any,
fields: { path: string; queryString: string }
) {
return config.previewQuery(request, config, datasource, fields)
return config.previewQuery(
request,
config,
datasource,
fields,
undefined,
""
)
}
it("should invalidate changed or removed variables", async () => {
@ -91,6 +104,8 @@ describe("/datasources", () => {
})
describe("fetch", () => {
beforeAll(setupTest)
it("returns all the datasources from the server", async () => {
const res = await request
.get(`/api/datasources`)
@ -159,6 +174,8 @@ describe("/datasources", () => {
})
describe("destroy", () => {
beforeAll(setupTest)
it("deletes queries for the datasource after deletion and returns a success message", async () => {
await config.createQuery()

@ -8,7 +8,7 @@ describe("/dev", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
jest.clearAllMocks()
})

@ -7,7 +7,7 @@ describe("/integrations", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -10,7 +10,7 @@ describe("/layouts", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
layout = await config.createLayout()
jest.clearAllMocks()

@ -9,7 +9,7 @@ describe("/metadata", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
automation = await config.createAutomation()
})

@ -7,7 +7,7 @@ describe("run misc tests", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -15,8 +15,11 @@ describe("/permission", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async () => {
table = await config.createTable()
row = await config.createRow()
perms = await config.addPermission(STD_ROLE_ID, table._id)

@ -1,3 +1,6 @@
const tk = require( "timekeeper")
tk.freeze(Date.now())
// Mock out postgres for this
jest.mock("pg")
jest.mock("node-fetch")
@ -27,10 +30,15 @@ describe("/queries", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
const setupTest = async()=>{
await config.init()
datasource = await config.createDatasource()
query = await config.createQuery()
}
beforeAll(async () => {
await setupTest()
})
async function createInvalidIntegration() {
@ -101,6 +109,10 @@ describe("/queries", () => {
})
describe("fetch", () => {
beforeEach(async() => {
await setupTest()
})
it("returns all the queries from the server", async () => {
const res = await request
.get(`/api/queries`)
@ -178,6 +190,10 @@ describe("/queries", () => {
})
describe("destroy", () => {
beforeEach(async() => {
await setupTest()
})
it("deletes a query and returns a success message", async () => {
await request
.delete(`/api/queries/${query._id}/${query._rev}`)
@ -239,6 +255,10 @@ describe("/queries", () => {
})
describe("execute", () => {
beforeEach(async() => {
await setupTest()
})
it("should be able to execute the query", async () => {
const res = await request
.post(`/api/queries/${query._id}`)

@ -10,7 +10,7 @@ describe("/roles", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
@ -60,6 +60,11 @@ describe("/roles", () => {
})
describe("fetch", () => {
beforeAll(async () => {
// Recreate the app
await config.init()
})
it("should list custom roles, plus 2 default roles", async () => {
const customRole = await config.createRole()

@ -16,7 +16,7 @@ describe("/routing", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
screen = basicScreen()
screen.routing.route = route

@ -1,3 +1,8 @@
const tk = require( "timekeeper")
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
const { outputProcessing } = require("../../../utilities/rowProcessor")
const setup = require("./utilities")
const { basicRow } = setup.structures
@ -20,8 +25,11 @@ describe("/rows", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async()=>{
table = await config.createTable()
row = basicRow(table._id)
})
@ -111,8 +119,8 @@ describe("/rows", () => {
_id: existing._id,
_rev: existing._rev,
type: "row",
createdAt: "2020-01-01T00:00:00.000Z",
updatedAt: "2020-01-01T00:00:00.000Z",
createdAt: timestamp,
updatedAt: timestamp,
})
await assertQueryUsage(queryUsage + 1)
})

@ -10,7 +10,7 @@ describe("/screens", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
screen = await config.createScreen()
})

@ -8,7 +8,8 @@ describe("/static", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
config.modeSelf()
app = await config.init()
})

@ -10,7 +10,7 @@ describe("/tables", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
const app = await config.init()
appId = app.appId
})

@ -6,7 +6,7 @@ describe("/templates", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -3,6 +3,8 @@ const { checkPermissionsEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
const { BUILTIN_ROLE_IDS } = roles
jest.setTimeout(30000)
jest.mock("../../../utilities/workerRequests", () => ({
getGlobalUsers: jest.fn(() => {
return {}
@ -19,6 +21,7 @@ describe("/users", () => {
afterAll(setup.afterAll)
// For some reason this cannot be a beforeAll or the test "should be able to update the user" fails
beforeEach(async () => {
await config.init()
})

@ -4,6 +4,8 @@ import { AppStatus } from "../../../../db/utils"
import { roles, tenancy, context } from "@budibase/backend-core"
import { TENANT_ID } from "../../../../tests/utilities/structures"
import env from "../../../../environment"
import { db } from "@budibase/backend-core"
import Nano from "@budibase/nano"
class Request {
appId: any
@ -30,7 +32,10 @@ export const getAllTableRows = async (config: any) => {
return req.body
}
export const clearAllApps = async (tenantId = TENANT_ID) => {
export const clearAllApps = async (
tenantId = TENANT_ID,
exceptions: Array<string> = []
) => {
await tenancy.doInTenant(tenantId, async () => {
const req: any = { query: { status: AppStatus.DEV }, user: { tenantId } }
await appController.fetch(req)
@ -38,7 +43,7 @@ export const clearAllApps = async (tenantId = TENANT_ID) => {
if (!apps || apps.length <= 0) {
return
}
for (let app of apps) {
for (let app of apps.filter((x: any) => !exceptions.includes(x.appId))) {
const { appId } = app
const req = new Request(null, { appId })
await runRequest(appId, appController.destroy, req)
@ -55,6 +60,24 @@ export const clearAllAutomations = async (config: any) => {
}
}
export const wipeDb = async () => {
const couchInfo = db.getCouchInfo()
const nano = Nano({
url: couchInfo.url,
requestDefaults: {
headers: {
Authorization: couchInfo.cookie,
},
},
parseUrl: false,
})
let dbs
do {
dbs = await nano.db.list()
await Promise.all(dbs.map(x => nano.db.destroy(x)))
} while (dbs.length)
}
export const createRequest = (
request: any,
method: any,

@ -1,5 +1,6 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import env from "../../../../environment"
import supertest from "supertest"
export * as structures from "../../../../tests/utilities/structures"
function user() {
@ -44,7 +45,8 @@ export function delay(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms))
}
let request: any, config: any
let request: supertest.SuperTest<supertest.Test> | undefined | null,
config: TestConfig | null
export function beforeAll() {
config = new TestConfig()
@ -65,14 +67,14 @@ export function getRequest() {
if (!request) {
beforeAll()
}
return request
return request!
}
export function getConfig() {
if (!config) {
beforeAll()
}
return config
return config!
}
export async function switchToSelfHosted(func: any) {

@ -28,8 +28,11 @@ describe("/views", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
beforeEach(async() => {
table = await config.createTable(priceTable())
})

@ -9,7 +9,8 @@ describe("/webhooks", () => {
afterAll(setup.afterAll)
beforeEach(async () => {
const setupTest = async () => {
config.modeSelf()
await config.init()
const autoConfig = basicAutomation()
autoConfig.definition.trigger = {
@ -18,7 +19,9 @@ describe("/webhooks", () => {
}
await config.createAutomation(autoConfig)
webhook = await config.createWebhook()
})
}
beforeAll(setupTest)
describe("create", () => {
it("should create a webhook successfully", async () => {
@ -44,6 +47,8 @@ describe("/webhooks", () => {
})
describe("fetch", () => {
beforeAll(setupTest)
it("returns the correct routing for basic user", async () => {
const res = await request
.get(`/api/webhooks`)
@ -64,6 +69,8 @@ describe("/webhooks", () => {
})
describe("delete", () => {
beforeAll(setupTest)
it("should successfully delete", async () => {
const res = await request
.delete(`/api/webhooks/${webhook._id}/${webhook._rev}`)
@ -84,6 +91,8 @@ describe("/webhooks", () => {
})
describe("build schema", () => {
beforeAll(setupTest)
it("should allow building a schema", async () => {
const res = await request
.post(`/api/webhooks/schema/${config.getAppId()}/${webhook._id}`)

@ -62,6 +62,7 @@ initialiseWebsockets(server)
let shuttingDown = false,
errCode = 0
server.on("close", async () => {
// already in progress
if (shuttingDown) {
@ -71,7 +72,7 @@ server.on("close", async () => {
console.log("Server Closed")
await automations.shutdown()
await redis.shutdown()
await events.shutdown()
events.shutdown()
await Thread.shutdown()
api.shutdown()
if (!env.isTest()) {

@ -21,7 +21,7 @@ const setup = require("./utilities")
describe("Run through some parts of the automations system", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await automation.init()
await config.init()
})
@ -29,9 +29,9 @@ describe("Run through some parts of the automations system", () => {
afterAll(setup.afterAll)
it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: "app_123" })
await wait(100)
expect(thread.execute).toHaveBeenCalled()
await triggers.externalTrigger(basicAutomation(), { a: 1, appId: config.appId })
await wait(100)
expect(thread.execute).toHaveBeenCalled()
})
it("should check coercion", async () => {

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the bash action", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
username: "joe_bloggs",

@ -1,10 +1,9 @@
const setup = require("./utilities")
describe("test the execute query action", () => {
let datasource
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
await config.createDatasource()
@ -41,7 +40,7 @@ describe("test the execute query action", () => {
query: { queryId: "wrong_id" }
}
)
expect(res.response).toEqual('{"status":404,"name":"not_found","message":"missing","reason":"missing"}')
expect(res.response).toEqual('Error: missing')
expect(res.success).toEqual(false)
})

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the execute script action", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})
afterAll(setup.afterAll)

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
requestMethod: "POST",

@ -36,7 +36,7 @@ const setup = require("./utilities")
describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -3,7 +3,7 @@ const setup = require("./utilities")
describe("test the server log action", () => {
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
text: "log message",

@ -4,7 +4,7 @@ describe("test the update row action", () => {
let table, row, inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
table = await config.createTable()
row = await config.createRow()

@ -7,7 +7,7 @@ describe("test the outgoing webhook action", () => {
let inputs
let config = setup.getConfig()
beforeEach(async () => {
beforeAll(async () => {
await config.init()
inputs = {
value1: "test",

@ -9,9 +9,12 @@ describe("test the link controller", () => {
let config = new TestConfig(false)
let table1, table2, appId
beforeEach(async () => {
beforeAll(async () => {
const app = await config.init()
appId = app.appId
})
beforeEach(async () => {
const { _id } = await config.createTable()
table2 = await config.createLinkedTable(RelationshipTypes.MANY_TO_MANY, ["link", "link2"])
// update table after creating link

@ -9,7 +9,7 @@ describe("test link functionality", () => {
describe("getLinkedTable", () => {
let table
beforeEach(async () => {
beforeAll(async () => {
const app = await config.init()
appId = app.appId
table = await config.createTable()

@ -6,7 +6,7 @@ const migration = require("../appUrls")
describe("run", () => {
let config = new TestConfig(false)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -11,7 +11,7 @@ const migration = require("../syncQuotas")
describe("run", () => {
let config = new TestConfig(false)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -19,7 +19,7 @@ const migration = require("../userEmailViewCasing")
describe("run", () => {
let config = new TestConfig(false)
beforeEach(async () => {
beforeAll(async () => {
await config.init()
})

@ -10,8 +10,9 @@ import * as structures from "../../tests/utilities/structures"
import { MIGRATIONS } from "../"
import * as helpers from "./helpers"
const { mocks } = require("@budibase/backend-core/tests")
const timestamp = mocks.date.MOCK_DATE.toISOString()
import tk from "timekeeper"
const timestamp = new Date().toISOString()
tk.freeze(timestamp)
const clearMigrations = async () => {
const dbs = [context.getDevAppDB(), context.getProdAppDB()]

@ -3,11 +3,7 @@ import { tmpdir } from "os"
env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("CLIENT_ID", "test-client-id")
env._set("MULTI_TENANCY", "1")
// @ts-ignore
env._set("BUDIBASE_DIR", tmpdir("budibase-unittests"))
env._set("LOG_LEVEL", "silent")
env._set("PORT", 0)
env._set("MINIO_URL", "http://localhost")
env._set("MINIO_ACCESS_KEY", "test")
env._set("MINIO_SECRET_KEY", "test")

@ -1,9 +1,6 @@
import { mocks } from "@budibase/backend-core/tests"
// mock all dates to 2020-01-01T00:00:00.000Z
// use tk.reset() to use real dates in individual tests
const tk = require("timekeeper")
tk.freeze(mocks.date.MOCK_DATE)
import env from "../environment"
import { env as coreEnv } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log calls are ignored in tests
@ -15,3 +12,5 @@ if (!process.env.CI) {
// 100 seconds
jest.setTimeout(100000)
}
testContainerUtils.setupEnv(env, coreEnv)

@ -1,4 +1,4 @@
import { mocks } from "@budibase/backend-core/tests"
import { generator, mocks, structures } from "@budibase/backend-core/tests"
// init the licensing mock
import * as pro from "@budibase/pro"
@ -10,6 +10,7 @@ mocks.licenses.useUnlimited()
import { init as dbInit } from "../../db"
dbInit()
import env from "../../environment"
import { env as coreEnv } from "@budibase/backend-core"
import {
basicTable,
basicRow,
@ -38,13 +39,16 @@ import { cleanup } from "../../utilities/fileSystem"
import newid from "../../db/newid"
import { generateUserMetadataID } from "../../db/utils"
import { startup } from "../../startup"
import { AuthToken, Database } from "@budibase/types"
const supertest = require("supertest")
const GLOBAL_USER_ID = "us_uuid1"
const EMAIL = "babs@babs.com"
const FIRSTNAME = "Barbara"
const LASTNAME = "Barbington"
const CSRF_TOKEN = "e3727778-7af0-4226-b5eb-f43cbe60a306"
type DefaultUserValues = {
globalUserId: string
email: string
firstName: string
lastName: string
csrfToken: string
}
class TestConfiguration {
server: any
@ -62,6 +66,8 @@ class TestConfiguration {
linkedTable: any
automation: any
datasource: any
tenantId: string | null
defaultUserValues: DefaultUserValues
constructor(openServer = true) {
if (openServer) {
@ -76,6 +82,18 @@ class TestConfiguration {
}
this.appId = null
this.allApps = []
this.tenantId = null
this.defaultUserValues = this.populateDefaultUserValues()
}
populateDefaultUserValues(): DefaultUserValues {
return {
globalUserId: `us_${newid()}`,
email: generator.email(),
firstName: generator.first(),
lastName: generator.last(),
csrfToken: generator.hash(),
}
}
getRequest() {
@ -100,10 +118,10 @@ class TestConfiguration {
getUserDetails() {
return {
globalId: GLOBAL_USER_ID,
email: EMAIL,
firstName: FIRSTNAME,
lastName: LASTNAME,
globalId: this.defaultUserValues.globalUserId,
email: this.defaultUserValues.email,
firstName: this.defaultUserValues.firstName,
lastName: this.defaultUserValues.lastName,
}
}
@ -111,7 +129,9 @@ class TestConfiguration {
if (!appId) {
appId = this.appId
}
return tenancy.doInTenant(TENANT_ID, () => {
const tenant = this.getTenantId()
return tenancy.doInTenant(tenant, () => {
// check if already in a context
if (context.getAppId() == null && appId !== null) {
return context.doInAppContext(appId, async () => {
@ -127,6 +147,11 @@ class TestConfiguration {
// use a new id as the name to avoid name collisions
async init(appName = newid()) {
this.defaultUserValues = this.populateDefaultUserValues()
if (context.isMultiTenant()) {
this.tenantId = structures.tenant.id()
}
if (!this.started) {
await startup()
}
@ -149,6 +174,27 @@ class TestConfiguration {
}
}
// MODES
#setMultiTenancy = (value: boolean) => {
env._set("MULTI_TENANCY", value)
coreEnv._set("MULTI_TENANCY", value)
}
#setSelfHosted = (value: boolean) => {
env._set("SELF_HOSTED", value)
coreEnv._set("SELF_HOSTED", value)
}
modeCloud = () => {
this.#setSelfHosted(false)
this.#setMultiTenancy(true)
}
modeSelf = () => {
this.#setSelfHosted(true)
this.#setMultiTenancy(false)
}
// UTILS
async _req(body: any, params: any, controlFunc: any) {
@ -159,7 +205,7 @@ class TestConfiguration {
// fake cookies, we don't need them
request.cookies = { set: () => {}, get: () => {} }
request.config = { jwtSecret: env.JWT_SECRET }
request.user = { appId, tenantId: TENANT_ID }
request.user = { appId, tenantId: this.getTenantId() }
request.query = {}
request.request = {
body,
@ -175,15 +221,15 @@ class TestConfiguration {
// USER / AUTH
async globalUser({
id = GLOBAL_USER_ID,
firstName = FIRSTNAME,
lastName = LASTNAME,
id = this.defaultUserValues.globalUserId,
firstName = this.defaultUserValues.firstName,
lastName = this.defaultUserValues.lastName,
builder = true,
admin = false,
email = EMAIL,
email = this.defaultUserValues.email,
roles,
}: any = {}) {
return tenancy.doWithGlobalDB(TENANT_ID, async (db: any) => {
return tenancy.doWithGlobalDB(this.getTenantId(), async (db: Database) => {
let existing
try {
existing = await db.get(id)
@ -194,14 +240,14 @@ class TestConfiguration {
_id: id,
...existing,
roles: roles || {},
tenantId: TENANT_ID,
tenantId: this.getTenantId(),
firstName,
lastName,
}
await sessions.createASession(id, {
sessionId: "sessionid",
tenantId: TENANT_ID,
csrfToken: CSRF_TOKEN,
tenantId: this.getTenantId(),
csrfToken: this.defaultUserValues.csrfToken,
})
if (builder) {
user.builder = { global: true }
@ -215,7 +261,7 @@ class TestConfiguration {
}
const resp = await db.put(user)
return {
_rev: resp._rev,
_rev: resp.rev,
...user,
}
})
@ -223,9 +269,9 @@ class TestConfiguration {
async createUser(
id = null,
firstName = FIRSTNAME,
lastName = LASTNAME,
email = EMAIL,
firstName = this.defaultUserValues.firstName,
lastName = this.defaultUserValues.lastName,
email = this.defaultUserValues.email,
builder = true,
admin = false,
roles = {}
@ -264,13 +310,13 @@ class TestConfiguration {
}
await sessions.createASession(userId, {
sessionId: "sessionid",
tenantId: TENANT_ID,
tenantId: this.getTenantId(),
})
// have to fake this
const authObj = {
userId,
sessionId: "sessionid",
tenantId: TENANT_ID,
tenantId: this.getTenantId(),
}
const app = {
roleId: roleId,
@ -293,10 +339,11 @@ class TestConfiguration {
}
defaultHeaders(extras = {}) {
const authObj = {
userId: GLOBAL_USER_ID,
const tenantId = this.getTenantId()
const authObj: AuthToken = {
userId: this.defaultUserValues.globalUserId,
sessionId: "sessionid",
tenantId: TENANT_ID,
tenantId,
}
const app = {
roleId: roles.BUILTIN_ROLE_IDS.ADMIN,
@ -310,7 +357,7 @@ class TestConfiguration {
`${constants.Cookie.Auth}=${authToken}`,
`${constants.Cookie.CurrentApp}=${appToken}`,
],
[constants.Header.CSRF_TOKEN]: CSRF_TOKEN,
[constants.Header.CSRF_TOKEN]: this.defaultUserValues.csrfToken,
...extras,
}
if (this.appId) {
@ -319,6 +366,10 @@ class TestConfiguration {
return headers
}
getTenantId() {
return this.tenantId || TENANT_ID
}
publicHeaders({ prodApp = true } = {}) {
const appId = prodApp ? this.prodAppId : this.appId
@ -328,11 +379,16 @@ class TestConfiguration {
if (appId) {
headers[constants.Header.APP_ID] = appId
}
if (this.tenantId) {
headers[constants.Header.TENANT_ID] = this.tenantId
}
return headers
}
async roleHeaders({
email = EMAIL,
email = this.defaultUserValues.email,
roleId = roles.BUILTIN_ROLE_IDS.ADMIN,
builder = false,
prodApp = true,
@ -342,8 +398,8 @@ class TestConfiguration {
// API
async generateApiKey(userId = GLOBAL_USER_ID) {
return tenancy.doWithGlobalDB(TENANT_ID, async (db: any) => {
async generateApiKey(userId = this.defaultUserValues.globalUserId) {
return tenancy.doWithGlobalDB(this.getTenantId(), async (db: any) => {
const id = dbCore.generateDevInfoID(userId)
let devInfo
try {
@ -352,7 +408,7 @@ class TestConfiguration {
devInfo = { _id: id, userId }
}
devInfo.apiKey = encryption.encrypt(
`${TENANT_ID}${dbCore.SEPARATOR}${newid()}`
`${this.getTenantId()}${dbCore.SEPARATOR}${newid()}`
)
await db.put(devInfo)
return devInfo.apiKey

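For orientation, here is a minimal sketch (not part of this diff) of how the tenant-aware TestConfiguration above might be exercised. The spec path, the `/api/tables` route, and the `end()` cleanup hook are assumptions; `modeCloud`, `init`, `getRequest`, `defaultHeaders`, and `getTenantId` come from the changes shown.

```ts
// hypothetical spec file, e.g. src/api/routes/tests/multiTenancy.spec.ts
import TestConfiguration from "../../tests/utilities/TestConfiguration" // path is an assumption

describe("multi-tenant test config", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    config.modeCloud() // SELF_HOSTED=false, MULTI_TENANCY=true
    await config.init() // regenerates user values and picks a fresh tenant id
  })

  afterAll(() => config.end()) // assumed cleanup hook

  it("issues requests scoped to the generated tenant", async () => {
    expect(config.getTenantId()).toBeDefined()
    await config
      .getRequest()
      .get("/api/tables") // route is an assumption
      .set(config.defaultHeaders()) // carries the per-run cookie, CSRF token and tenant
      .expect(200)
  })
})
```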
View File

@ -0,0 +1,18 @@
export async function retry<T extends (...args: any[]) => any>(
  fn: T,
  maxTry: number = 5,
  retryCount = 1
): Promise<Awaited<ReturnType<T>>> {
  try {
    return await fn()
  } catch (e) {
    console.log(`Retry ${retryCount} failed.`)
    // maxTry counts total attempts, so stop once this attempt was the last one
    if (retryCount >= maxTry) {
      console.log(`All ${maxTry} retry attempts exhausted`)
      throw e
    }
    return retry(fn, maxTry, retryCount + 1)
  }
}
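
A short usage sketch for the retry helper above; `waitForCouch` and the `_up` health-check URL are illustrative assumptions (e.g. polling a freshly started CouchDB container), and global `fetch` assumes Node 18+.

```ts
import { retry } from "./retry" // import path is an assumption

// Hypothetical flaky operation: poll CouchDB until it reports healthy.
async function waitForCouch(): Promise<void> {
  const res = await fetch("http://localhost:5984/_up") // Node 18+ global fetch
  if (!res.ok) throw new Error(`CouchDB not ready: ${res.status}`)
}

async function main() {
  await retry(waitForCouch) // up to 5 attempts by default
  await retry(waitForCouch, 10) // or with an explicit attempt budget
}

main().catch(err => {
  console.error(err)
  process.exit(1)
})
```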

View File

@ -1268,6 +1268,11 @@
"@babel/helper-validator-identifier" "^7.19.1"
to-fast-properties "^2.0.0"
"@balena/dockerignore@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d"
integrity sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==
"@bcoe/v8-coverage@^0.2.3":
version "0.2.3"
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
@ -2945,6 +2950,15 @@
async "^3.2.1"
simple-lru-cache "^0.0.2"
"@trendyol/jest-testcontainers@^2.1.1":
version "2.1.1"
resolved "https://registry.yarnpkg.com/@trendyol/jest-testcontainers/-/jest-testcontainers-2.1.1.tgz#dced95cf9c37b75efe0a65db9b75ae8912f2f14a"
integrity sha512-4iAc2pMsev4BTUzoA7jO1VvbTOU2N3juQUYa8TwiSPXPuQtxKwV9WB9ZEP+JQ+Pj15YqfGOXp5H0WNMPtapjiA==
dependencies:
cwd "^0.10.0"
node-duration "^1.0.4"
testcontainers "4.7.0"
"@tsconfig/node10@^1.0.7":
version "1.0.8"
resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.8.tgz#c1e4e80d6f964fbecb3359c43bd48b40f7cadad9"
@ -3074,6 +3088,13 @@
dependencies:
"@types/ms" "*"
"@types/dockerode@^2.5.34":
version "2.5.34"
resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-2.5.34.tgz#9adb884f7cc6c012a6eb4b2ad794cc5d01439959"
integrity sha512-LcbLGcvcBwBAvjH9UrUI+4qotY+A5WCer5r43DR5XHv2ZIEByNXFdPLo1XxR+v/BjkGjlggW8qUiXuVEhqfkpA==
dependencies:
"@types/node" "*"
"@types/eslint-scope@^3.7.3":
version "3.7.3"
resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.3.tgz#125b88504b61e3c8bc6f870882003253005c3224"
@ -4037,7 +4058,7 @@ any-base@^1.1.0:
resolved "https://registry.yarnpkg.com/any-base/-/any-base-1.1.0.tgz#ae101a62bc08a597b4c9ab5b7089d456630549fe"
integrity sha512-uMgjozySS8adZZYePpaWs8cxB9/kdzmpX6SgJZ+wbz1K5eYk5QMYDVJaZKhxyIHUdnnJkfR7SVgStgH7LkGUyg==
any-promise@^1.0.0:
any-promise@^1.0.0, any-promise@^1.1.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f"
integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==
@ -4236,7 +4257,7 @@ asn1.js@^5.0.0, asn1.js@^5.4.1:
minimalistic-assert "^1.0.0"
safer-buffer "^2.1.0"
asn1@~0.2.3:
asn1@^0.2.4, asn1@~0.2.3:
version "0.2.6"
resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d"
integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==
@ -4634,7 +4655,7 @@ basic-auth@^2.0.1:
dependencies:
safe-buffer "5.1.2"
bcrypt-pbkdf@^1.0.0:
bcrypt-pbkdf@^1.0.0, bcrypt-pbkdf@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e"
integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==
@ -4935,6 +4956,11 @@ buffer@^5.1.0, buffer@^5.2.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0:
base64-js "^1.3.1"
ieee754 "^1.1.13"
buildcheck@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.3.tgz#70451897a95d80f7807e68fc412eb2e7e35ff4d5"
integrity sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA==
bull@4.10.1:
version "4.10.1"
resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f"
@ -4950,6 +4976,11 @@ bull@4.10.1:
semver "^7.3.2"
uuid "^8.3.0"
byline@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1"
integrity sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==
bytes@3.1.2, bytes@^3.0.0:
version "3.1.2"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
@ -5538,6 +5569,14 @@ cors@~2.8.5:
object-assign "^4"
vary "^1"
cpu-features@~0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.4.tgz#0023475bb4f4c525869c162e4108099e35bf19d8"
integrity sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A==
dependencies:
buildcheck "0.0.3"
nan "^2.15.0"
crc@^3.4.4:
version "3.8.0"
resolved "https://registry.yarnpkg.com/crc/-/crc-3.8.0.tgz#ad60269c2c856f8c299e2c4cc0de4556914056c6"
@ -5629,6 +5668,14 @@ curlconverter@3.21.0:
string.prototype.startswith "^1.0.0"
yamljs "^0.3.0"
cwd@^0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/cwd/-/cwd-0.10.0.tgz#172400694057c22a13b0cf16162c7e4b7a7fe567"
integrity sha512-YGZxdTTL9lmLkCUTpg4j0zQ7IhRB5ZmqNBbGCl3Tg6MP/d5/6sY7L5mmTjzbc6JKgVZYiqTQTNhPFsbXNGlRaA==
dependencies:
find-pkg "^0.1.2"
fs-exists-sync "^0.1.0"
dashdash@^1.12.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0"
@ -5944,13 +5991,32 @@ dir-glob@^3.0.1:
dependencies:
path-type "^4.0.0"
docker-compose@0.23.17:
docker-compose@0.23.17, docker-compose@^0.23.5:
version "0.23.17"
resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.17.tgz#8816bef82562d9417dc8c790aa4871350f93a2ba"
integrity sha512-YJV18YoYIcxOdJKeFcCFihE6F4M2NExWM/d4S1ITcS9samHKnNUihz9kjggr0dNtsrbpFNc7/Yzd19DWs+m1xg==
dependencies:
yaml "^1.10.2"
docker-modem@^3.0.0:
version "3.0.6"
resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.6.tgz#8c76338641679e28ec2323abb65b3276fb1ce597"
integrity sha512-h0Ow21gclbYsZ3mkHDfsYNDqtRhXS8fXr51bU0qr1dxgTMJj0XufbzX+jhNOvA8KuEEzn6JbvLVhXyv+fny9Uw==
dependencies:
debug "^4.1.1"
readable-stream "^3.5.0"
split-ca "^1.0.1"
ssh2 "^1.11.0"
dockerode@^3.2.1:
version "3.3.4"
resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.4.tgz#875de614a1be797279caa9fe27e5637cf0e40548"
integrity sha512-3EUwuXnCU+RUlQEheDjmBE0B7q66PV9Rw5NiH1sXwINq0M9c5ERP9fxgkw36ZHOtzf4AGEEYySnkx/sACC9EgQ==
dependencies:
"@balena/dockerignore" "^1.0.2"
docker-modem "^3.0.0"
tar-fs "~2.0.1"
doctrine@3.0.0, doctrine@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
@ -6737,6 +6803,13 @@ expand-brackets@^2.1.4:
snapdragon "^0.8.1"
to-regex "^3.0.1"
expand-tilde@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449"
integrity sha512-rtmc+cjLZqnu9dSYosX9EWmSJhTwpACgJQTfj4hgg2JjOD/6SIQalZrt4a3aQeh++oNxkazcaxrhPUj6+g5G/Q==
dependencies:
os-homedir "^1.0.1"
expand-tilde@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
@ -7071,6 +7144,21 @@ filter-obj@^1.1.0:
resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b"
integrity sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==
find-file-up@^0.1.2:
version "0.1.3"
resolved "https://registry.yarnpkg.com/find-file-up/-/find-file-up-0.1.3.tgz#cf68091bcf9f300a40da411b37da5cce5a2fbea0"
integrity sha512-mBxmNbVyjg1LQIIpgO8hN+ybWBgDQK8qjht+EbrTCGmmPV/sc7RF1i9stPTD6bpvXZywBdrwRYxhSdJv867L6A==
dependencies:
fs-exists-sync "^0.1.0"
resolve-dir "^0.1.0"
find-pkg@^0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/find-pkg/-/find-pkg-0.1.2.tgz#1bdc22c06e36365532e2a248046854b9788da557"
integrity sha512-0rnQWcFwZr7eO0513HahrWafsc3CTFioEB7DRiEYCUM/70QXSY8f3mCST17HXLcPvEhzH/Ty/Bxd72ZZsr/yvw==
dependencies:
find-file-up "^0.1.2"
find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
@ -7225,6 +7313,11 @@ fs-constants@^1.0.0:
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
fs-exists-sync@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add"
integrity sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg==
fs-extra@8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-8.1.0.tgz#49d43c45a88cd9677668cb7be1b46efdb8d2e1c0"
@ -7521,6 +7614,24 @@ global-dirs@^3.0.0:
dependencies:
ini "2.0.0"
global-modules@^0.2.3:
version "0.2.3"
resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-0.2.3.tgz#ea5a3bed42c6d6ce995a4f8a1269b5dae223828d"
integrity sha512-JeXuCbvYzYXcwE6acL9V2bAOeSIGl4dD+iwLY9iUx2VBJJ80R18HCn+JCwHM9Oegdfya3lEkGCdaRkSyc10hDA==
dependencies:
global-prefix "^0.1.4"
is-windows "^0.2.0"
global-prefix@^0.1.4:
version "0.1.5"
resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-0.1.5.tgz#8d3bc6b8da3ca8112a160d8d496ff0462bfef78f"
integrity sha512-gOPiyxcD9dJGCEArAhF4Hd0BAqvAe/JzERP7tYumE4yIkmIedPUVXcJFWbV3/p/ovIIvKjkrTk+f1UVkq7vvbw==
dependencies:
homedir-polyfill "^1.0.0"
ini "^1.3.4"
is-windows "^0.2.0"
which "^1.2.12"
global@~4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/global/-/global-4.4.0.tgz#3e7b105179006a323ed71aafca3e9c57a5cc6406"
@ -7823,7 +7934,7 @@ has@^1.0.3:
dependencies:
function-bind "^1.1.1"
homedir-polyfill@^1.0.1:
homedir-polyfill@^1.0.0, homedir-polyfill@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8"
integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
@ -8562,6 +8673,11 @@ is-whitespace@^0.3.0:
resolved "https://registry.yarnpkg.com/is-whitespace/-/is-whitespace-0.3.0.tgz#1639ecb1be036aec69a54cbb401cfbed7114ab7f"
integrity sha512-RydPhl4S6JwAyj0JJjshWJEFG6hNye3pZFBRZaTUfZFwGHxzppNaNOVgQuS/E/SlhrApuMXrpnK1EEIXfdo3Dg==
is-windows@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c"
integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q==
is-windows@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d"
@ -9273,6 +9389,11 @@ jest-runtime@^28.1.3:
slash "^3.0.0"
strip-bom "^4.0.0"
jest-serial-runner@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/jest-serial-runner/-/jest-serial-runner-1.2.1.tgz#0f5f8dbe6f077119bd1fdd7e8518f92353c194d5"
integrity sha512-d59fF+7HdjNvQEL7B4WyFE+f8q5tGzlNUqtOnxTrT1ofun7O6/Lgm/j255BBgCY2fmSue/34M7Xy9+VWRByP0Q==
jest-serializer@^24.9.0:
version "24.9.0"
resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-24.9.0.tgz#e6d7d7ef96d31e8b9079a714754c5d5c58288e73"
@ -10872,6 +10993,11 @@ nan@^2.12.1:
resolved "https://registry.yarnpkg.com/nan/-/nan-2.16.0.tgz#664f43e45460fb98faf00edca0bb0d7b8dce7916"
integrity sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==
nan@^2.15.0, nan@^2.16.0:
version "2.17.0"
resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb"
integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ==
nanomatch@^1.2.9:
version "1.2.13"
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@ -10944,6 +11070,11 @@ node-addon-api@^3.1.0:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161"
integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==
node-duration@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/node-duration/-/node-duration-1.0.4.tgz#3e94ecc0e473691c89c4560074503362071cecac"
integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA==
node-fetch@2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
@ -11364,6 +11495,11 @@ original-url@^1.2.3:
dependencies:
forwarded-parse "^2.1.0"
os-homedir@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==
os-locale@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
@ -12485,7 +12621,7 @@ readable-stream@1.1.14, readable-stream@^1.0.27-1:
isarray "0.0.1"
string_decoder "~0.10.x"
"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0:
"readable-stream@2 || 3", readable-stream@^3.0.0, readable-stream@^3.0.1, readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.5.0, readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
@ -12813,6 +12949,14 @@ resolve-cwd@^3.0.0:
dependencies:
resolve-from "^5.0.0"
resolve-dir@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-0.1.1.tgz#b219259a5602fac5c5c496ad894a6e8cc430261e"
integrity sha512-QxMPqI6le2u0dCLyiGzgy92kjkkL6zO0XyvHzjdTNH3zM6e5Hz3BwG6+aEyNgiQ5Xz6PwTwgQEj3U50dByPKIA==
dependencies:
expand-tilde "^1.2.2"
global-modules "^0.2.3"
resolve-from@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
@ -13502,6 +13646,11 @@ spdx-license-ids@^3.0.0:
resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz#50c0d8c40a14ec1bf449bae69a0ea4685a9d9f95"
integrity sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g==
split-ca@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/split-ca/-/split-ca-1.0.1.tgz#6c83aff3692fa61256e0cd197e05e9de157691a6"
integrity sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==
split-on-first@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/split-on-first/-/split-on-first-1.1.0.tgz#f610afeee3b12bce1d0c30425e76398b78249a5f"
@ -13553,6 +13702,17 @@ sqlstring@^2.3.2:
resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.3.tgz#2ddc21f03bce2c387ed60680e739922c65751d0c"
integrity sha512-qC9iz2FlN7DQl3+wjwn3802RTyjCx7sDvfQEXchwa6CWOx07/WVfh91gBmQ9fahw8snwGEWU3xGzOt4tFyHLxg==
ssh2@^1.11.0:
version "1.11.0"
resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.11.0.tgz#ce60186216971e12f6deb553dcf82322498fe2e4"
integrity sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw==
dependencies:
asn1 "^0.2.4"
bcrypt-pbkdf "^1.0.2"
optionalDependencies:
cpu-features "~0.0.4"
nan "^2.16.0"
sshpk@^1.7.0:
version "1.17.0"
resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5"
@ -13637,6 +13797,13 @@ stream-shift@^1.0.0:
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
stream-to-array@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/stream-to-array/-/stream-to-array-2.3.0.tgz#bbf6b39f5f43ec30bc71babcb37557acecf34353"
integrity sha512-UsZtOYEn4tWU2RGLOXr/o/xjRBftZRlG3dEWoaHr8j4GuypJ3isitGbVyjQKAuMu+xbiop8q224TjiZWc4XTZA==
dependencies:
any-promise "^1.1.0"
strict-uri-encode@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
@ -14000,7 +14167,7 @@ tapable@^2.1.1, tapable@^2.2.0:
resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0"
integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==
tar-fs@2.1.1:
tar-fs@2.1.1, tar-fs@^2.1.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784"
integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==
@ -14010,6 +14177,16 @@ tar-fs@2.1.1:
pump "^3.0.0"
tar-stream "^2.1.4"
tar-fs@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.1.tgz#e44086c1c60d31a4f0cf893b1c4e155dabfae9e2"
integrity sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==
dependencies:
chownr "^1.1.1"
mkdirp-classic "^0.5.2"
pump "^3.0.0"
tar-stream "^2.0.0"
tar-stream@^1.5.2:
version "1.6.2"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.6.2.tgz#8ea55dab37972253d9a9af90fdcd559ae435c555"
@ -14023,7 +14200,7 @@ tar-stream@^1.5.2:
to-buffer "^1.1.1"
xtend "^4.0.0"
tar-stream@^2.1.4:
tar-stream@^2.0.0, tar-stream@^2.1.4:
version "2.2.0"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287"
integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==
@ -14126,6 +14303,23 @@ test-exclude@^6.0.0:
glob "^7.1.4"
minimatch "^3.0.4"
testcontainers@4.7.0:
version "4.7.0"
resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-4.7.0.tgz#5a9a864b1b0cc86984086dcc737c2f5e73490cf3"
integrity sha512-5SrG9RMfDRRZig34fDZeMcGD5i3lHCOJzn0kjouyK4TiEWjZB3h7kCk8524lwNRHROFE1j6DGjceonv/5hl5ag==
dependencies:
"@types/dockerode" "^2.5.34"
byline "^5.0.0"
debug "^4.1.1"
docker-compose "^0.23.5"
dockerode "^3.2.1"
get-port "^5.1.1"
glob "^7.1.6"
node-duration "^1.0.4"
slash "^3.0.0"
stream-to-array "^2.3.0"
tar-fs "^2.1.0"
text-hex@1.0.x:
version "1.0.0"
resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5"
@ -15072,7 +15266,7 @@ which-typed-array@^1.1.2:
has-tostringtag "^1.0.0"
is-typed-array "^1.1.9"
which@^1.2.9:
which@^1.2.12, which@^1.2.9:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
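
The lockfile additions above (`@trendyol/jest-testcontainers`, `testcontainers`, `dockerode`, `jest-serial-runner`) are what let the test suite start real service containers. A rough sketch of how such a preset is typically wired follows; the file names, container definition, and credentials are assumptions based on the library's documented shape, not taken from this diff.

```ts
// jest.config.ts (sketch): opt the suite into the testcontainers preset
export default {
  preset: "@trendyol/jest-testcontainers",
  testEnvironment: "node",
}

// jest-testcontainers-config.js (sketch): containers to start before the run,
// e.g. the budibase/couchdb image defined earlier in this PR.
// module.exports = {
//   couchdb: {
//     image: "budibase/couchdb",
//     ports: [5984],
//     env: { COUCHDB_USER: "admin", COUCHDB_PASSWORD: "admin" },
//   },
// }
```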

Some files were not shown because too many files have changed in this diff.