diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index 4ae0766242..42d73ba8bb 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -91,6 +91,9 @@ jobs:
test-libraries:
runs-on: ubuntu-latest
+ env:
+ DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+ REUSE_CONTAINERS: true
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -104,6 +107,14 @@ jobs:
with:
node-version: 20.x
cache: yarn
+ - name: Pull testcontainers images
+ run: |
+ docker pull testcontainers/ryuk:0.5.1 &
+ docker pull budibase/couchdb &
+ docker pull redis &
+
+ wait $(jobs -p)
+
- run: yarn --frozen-lockfile
- name: Test
run: |
@@ -138,9 +149,10 @@ jobs:
fi
test-server:
- runs-on: ubuntu-latest
+ runs-on: budi-tubby-tornado-quad-core-150gb
env:
DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+ REUSE_CONTAINERS: true
steps:
- name: Checkout repo
uses: actions/checkout@v4
@@ -157,13 +169,16 @@ jobs:
- name: Pull testcontainers images
run: |
- docker pull mcr.microsoft.com/mssql/server:2022-latest
- docker pull mysql:8.3
- docker pull postgres:16.1-bullseye
- docker pull mongo:7.0-jammy
- docker pull mariadb:lts
- docker pull testcontainers/ryuk:0.5.1
- docker pull budibase/couchdb
+ docker pull mcr.microsoft.com/mssql/server:2022-latest &
+ docker pull mysql:8.3 &
+ docker pull postgres:16.1-bullseye &
+ docker pull mongo:7.0-jammy &
+ docker pull mariadb:lts &
+ docker pull testcontainers/ryuk:0.5.1 &
+ docker pull budibase/couchdb &
+ docker pull redis &
+
+ wait $(jobs -p)
- run: yarn --frozen-lockfile
diff --git a/globalSetup.ts b/globalSetup.ts
index 4cb542a3c3..7bf5e2152c 100644
--- a/globalSetup.ts
+++ b/globalSetup.ts
@@ -1,25 +1,47 @@
import { GenericContainer, Wait } from "testcontainers"
+import path from "path"
+import lockfile from "proper-lockfile"
export default async function setup() {
- await new GenericContainer("budibase/couchdb")
- .withExposedPorts(5984)
- .withEnvironment({
- COUCHDB_PASSWORD: "budibase",
- COUCHDB_USER: "budibase",
- })
- .withCopyContentToContainer([
- {
- content: `
+ const lockPath = path.resolve(__dirname, "globalSetup.ts")
+ if (process.env.REUSE_CONTAINERS) {
+ // If you run multiple tests at the same time, it's possible for the CouchDB
+ // shared container to get started multiple times despite having an
+ // identical reuse hash. To avoid that, we do a filesystem-based lock so
+ // that only one globalSetup.ts is running at a time.
+ lockfile.lockSync(lockPath)
+ }
+
+ try {
+ let couchdb = new GenericContainer("budibase/couchdb")
+ .withExposedPorts(5984)
+ .withEnvironment({
+ COUCHDB_PASSWORD: "budibase",
+ COUCHDB_USER: "budibase",
+ })
+ .withCopyContentToContainer([
+ {
+ content: `
[log]
level = warn
`,
- target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
- },
- ])
- .withWaitStrategy(
- Wait.forSuccessfulCommand(
- "curl http://budibase:budibase@localhost:5984/_up"
- ).withStartupTimeout(20000)
- )
- .start()
+ target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
+ },
+ ])
+ .withWaitStrategy(
+ Wait.forSuccessfulCommand(
+ "curl http://budibase:budibase@localhost:5984/_up"
+ ).withStartupTimeout(20000)
+ )
+
+ if (process.env.REUSE_CONTAINERS) {
+ couchdb = couchdb.withReuse()
+ }
+
+ await couchdb.start()
+ } finally {
+ if (process.env.REUSE_CONTAINERS) {
+ lockfile.unlockSync(lockPath)
+ }
+ }
}
diff --git a/lerna.json b/lerna.json
index 93b103ee00..7186c0ca17 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.22.15",
+ "version": "2.22.16",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/package.json b/package.json
index c927002c88..4b6716f7e7 100644
--- a/package.json
+++ b/package.json
@@ -7,6 +7,7 @@
"@babel/preset-env": "^7.22.5",
"@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@types/node": "20.10.0",
+ "@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0",
@@ -23,6 +24,7 @@
"nx-cloud": "16.0.5",
"prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0",
+ "proper-lockfile": "^4.1.2",
"svelte": "^4.2.10",
"svelte-eslint-parser": "^0.33.1",
"typescript": "5.2.2",
diff --git a/packages/account-portal b/packages/account-portal
index 360ad2dc29..011fa3c175 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
+Subproject commit 011fa3c175ae0a1bbbb0f6e1341ba0154bca5c76
diff --git a/packages/backend-core/tests/core/utilities/testContainerUtils.ts b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
index 5d4f5a3c11..951a6f0517 100644
--- a/packages/backend-core/tests/core/utilities/testContainerUtils.ts
+++ b/packages/backend-core/tests/core/utilities/testContainerUtils.ts
@@ -1,6 +1,7 @@
-import { DatabaseImpl } from "../../../src/db"
import { execSync } from "child_process"
+const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")
+
interface ContainerInfo {
Command: string
CreatedAt: string
@@ -19,7 +20,10 @@ interface ContainerInfo {
}
function getTestcontainers(): ContainerInfo[] {
- return execSync("docker ps --format json")
+ // We use --format json to make sure the output is nice and machine-readable,
+ // and we use --no-trunc so that the command returns full container IDs so we
+ // can filter on them correctly.
+ return execSync("docker ps --format json --no-trunc")
.toString()
.split("\n")
.filter(x => x.length > 0)
@@ -27,32 +31,55 @@ function getTestcontainers(): ContainerInfo[] {
.filter(x => x.Labels.includes("org.testcontainers=true"))
}
-function getContainerByImage(image: string) {
- return getTestcontainers().find(x => x.Image.startsWith(image))
+export function getContainerByImage(image: string) {
+ const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
+ if (containers.length > 1) {
+ let errorMessage = `Multiple containers found starting with image: "${image}"\n\n`
+ for (const container of containers) {
+ errorMessage += JSON.stringify(container, null, 2)
+ }
+ throw new Error(errorMessage)
+ }
+ return containers[0]
}
-function getExposedPort(container: ContainerInfo, port: number) {
- const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`))
- if (!match) {
- return undefined
+export function getContainerById(id: string) {
+ return getTestcontainers().find(x => x.ID === id)
+}
+
+export interface Port {
+ host: number
+ container: number
+}
+
+export function getExposedV4Ports(container: ContainerInfo): Port[] {
+ let ports: Port[] = []
+ for (const match of container.Ports.matchAll(IPV4_PORT_REGEX)) {
+ ports.push({ host: parseInt(match[1]), container: parseInt(match[2]) })
}
- return parseInt(match[1])
+ return ports
+}
+
+export function getExposedV4Port(container: ContainerInfo, port: number) {
+ return getExposedV4Ports(container).find(x => x.container === port)?.host
}
export function setupEnv(...envs: any[]) {
+ // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
+ // should be relatively safe to look for it by its image name.
const couch = getContainerByImage("budibase/couchdb")
if (!couch) {
throw new Error("CouchDB container not found")
}
- const couchPort = getExposedPort(couch, 5984)
+ const couchPort = getExposedV4Port(couch, 5984)
if (!couchPort) {
throw new Error("CouchDB port not found")
}
const configs = [
{ key: "COUCH_DB_PORT", value: `${couchPort}` },
- { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` },
+ { key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
]
for (const config of configs.filter(x => !!x.value)) {
@@ -60,7 +87,4 @@ export function setupEnv(...envs: any[]) {
env._set(config.key, config.value)
}
}
-
- // @ts-expect-error
- DatabaseImpl.nano = undefined
}
diff --git a/packages/builder/package.json b/packages/builder/package.json
index f61ac4fe26..253f5a0c14 100644
--- a/packages/builder/package.json
+++ b/packages/builder/package.json
@@ -72,7 +72,7 @@
"fast-json-patch": "^3.1.1",
"json-format-highlight": "^1.0.4",
"lodash": "4.17.21",
- "posthog-js": "^1.36.0",
+ "posthog-js": "^1.116.6",
"remixicon": "2.5.0",
"sanitize-html": "^2.7.0",
"shortid": "2.2.15",
diff --git a/packages/builder/src/components/backend/DataTable/formula.js b/packages/builder/src/components/backend/DataTable/formula.js
index f5ff3caec4..e3da4249bc 100644
--- a/packages/builder/src/components/backend/DataTable/formula.js
+++ b/packages/builder/src/components/backend/DataTable/formula.js
@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
import { FIELDS } from "constants/backend"
import { tables } from "stores/builder"
import { get as svelteGet } from "svelte/store"
@@ -5,14 +6,12 @@ import { get as svelteGet } from "svelte/store"
// currently supported level of relationship depth (server side)
const MAX_DEPTH = 1
-//https://github.com/Budibase/budibase/issues/3030
-const internalType = "internal"
-
const TYPES_TO_SKIP = [
- FIELDS.FORMULA.type,
- FIELDS.LONGFORM.type,
- FIELDS.ATTACHMENT.type,
- internalType,
+ FieldType.FORMULA,
+ FieldType.LONGFORM,
+ FieldType.ATTACHMENT,
+ //https://github.com/Budibase/budibase/issues/3030
+ FieldType.INTERNAL,
]
export function getBindings({
@@ -26,7 +25,7 @@ export function getBindings({
return bindings
}
for (let [column, schema] of Object.entries(table.schema)) {
- const isRelationship = schema.type === FIELDS.LINK.type
+ const isRelationship = schema.type === FieldType.LINK
// skip relationships after a certain depth and types which
// can't bind to
if (
diff --git a/packages/builder/src/components/backend/Datasources/relationshipErrors.js b/packages/builder/src/components/backend/Datasources/relationshipErrors.js
index 259484e9a9..610ff9f1fe 100644
--- a/packages/builder/src/components/backend/Datasources/relationshipErrors.js
+++ b/packages/builder/src/components/backend/Datasources/relationshipErrors.js
@@ -1,4 +1,4 @@
-import { RelationshipType } from "constants/backend"
+import { RelationshipType } from "@budibase/types"
const typeMismatch = "Column type of the foreign key must match the primary key"
const columnBeingUsed = "Column name cannot be an existing column"
diff --git a/packages/builder/src/components/backend/TableNavigator/utils.js b/packages/builder/src/components/backend/TableNavigator/utils.js
index b7e46042be..ae7aaa0f0a 100644
--- a/packages/builder/src/components/backend/TableNavigator/utils.js
+++ b/packages/builder/src/components/backend/TableNavigator/utils.js
@@ -12,7 +12,7 @@ const getDefaultSchema = rows => {
newSchema[column] = {
name: column,
type: "string",
- constraints: FIELDS["STRING"].constraints,
+ constraints: FIELDS.STRING.constraints,
}
})
})
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js b/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js
index c929263db1..18ebf57d98 100644
--- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/utils.js
@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
export const convertOldFieldFormat = fields => {
if (!fields) {
return []
@@ -31,17 +33,17 @@ export const getComponentForField = (field, schema) => {
}
export const FieldTypeToComponentMap = {
- string: "stringfield",
- number: "numberfield",
- bigint: "bigintfield",
- options: "optionsfield",
- array: "multifieldselect",
- boolean: "booleanfield",
- longform: "longformfield",
- datetime: "datetimefield",
- attachment: "attachmentfield",
- link: "relationshipfield",
- json: "jsonfield",
- barcodeqr: "codescanner",
- bb_reference: "bbreferencefield",
+ [FieldType.STRING]: "stringfield",
+ [FieldType.NUMBER]: "numberfield",
+ [FieldType.BIGINT]: "bigintfield",
+ [FieldType.OPTIONS]: "optionsfield",
+ [FieldType.ARRAY]: "multifieldselect",
+ [FieldType.BOOLEAN]: "booleanfield",
+ [FieldType.LONGFORM]: "longformfield",
+ [FieldType.DATETIME]: "datetimefield",
+ [FieldType.ATTACHMENT]: "attachmentfield",
+ [FieldType.LINK]: "relationshipfield",
+ [FieldType.JSON]: "jsonfield",
+ [FieldType.BARCODEQR]: "codescanner",
+ [FieldType.BB_REFERENCE]: "bbreferencefield",
}
diff --git a/packages/builder/src/constants/backend/index.js b/packages/builder/src/constants/backend/index.js
index f1e3e1e2c2..dd751d4e13 100644
--- a/packages/builder/src/constants/backend/index.js
+++ b/packages/builder/src/constants/backend/index.js
@@ -1,12 +1,14 @@
-import { FieldType, FieldSubtype } from "@budibase/types"
+import {
+ FieldType,
+ FieldSubtype,
+ INTERNAL_TABLE_SOURCE_ID,
+ AutoFieldSubType,
+ Hosting,
+} from "@budibase/types"
-export const AUTO_COLUMN_SUB_TYPES = {
- AUTO_ID: "autoID",
- CREATED_BY: "createdBy",
- CREATED_AT: "createdAt",
- UPDATED_BY: "updatedBy",
- UPDATED_AT: "updatedAt",
-}
+export { RelationshipType } from "@budibase/types"
+
+export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType
export const AUTO_COLUMN_DISPLAY_NAMES = {
AUTO_ID: "Auto ID",
@@ -167,10 +169,7 @@ export const FILE_TYPES = {
DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"],
}
-export const HostingTypes = {
- CLOUD: "cloud",
- SELF: "self",
-}
+export const HostingTypes = Hosting
export const Roles = {
ADMIN: "ADMIN",
@@ -187,12 +186,6 @@ export function isAutoColumnUserRelationship(subtype) {
)
}
-export const RelationshipType = {
- MANY_TO_MANY: "many-to-many",
- ONE_TO_MANY: "one-to-many",
- MANY_TO_ONE: "many-to-one",
-}
-
export const PrettyRelationshipDefinitions = {
MANY: "Many rows",
ONE: "One row",
@@ -218,7 +211,7 @@ export const SWITCHABLE_TYPES = [
...ALLOWABLE_NUMBER_TYPES,
]
-export const BUDIBASE_INTERNAL_DB_ID = "bb_internal"
+export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
export const BUDIBASE_DATASOURCE_TYPE = "budibase"
export const DB_TYPE_INTERNAL = "internal"
@@ -265,10 +258,10 @@ export const IntegrationNames = {
}
export const SchemaTypeOptions = [
- { label: "Text", value: "string" },
- { label: "Number", value: "number" },
- { label: "Boolean", value: "boolean" },
- { label: "Datetime", value: "datetime" },
+ { label: "Text", value: FieldType.STRING },
+ { label: "Number", value: FieldType.NUMBER },
+ { label: "Boolean", value: FieldType.BOOLEAN },
+ { label: "Datetime", value: FieldType.DATETIME },
]
export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({
@@ -305,10 +298,10 @@ export const PaginationLocations = [
]
export const BannedSearchTypes = [
- "link",
- "attachment",
- "formula",
- "json",
+ FieldType.LINK,
+ FieldType.ATTACHMENT,
+ FieldType.FORMULA,
+ FieldType.JSON,
"jsonarray",
"queryarray",
]
diff --git a/packages/builder/src/helpers/schemaGenerator.js b/packages/builder/src/helpers/schemaGenerator.js
index 33115fc997..eb044496f6 100644
--- a/packages/builder/src/helpers/schemaGenerator.js
+++ b/packages/builder/src/helpers/schemaGenerator.js
@@ -1,17 +1,17 @@
-import { FIELDS } from "constants/backend"
+import { FieldType } from "@budibase/types"
function baseConversion(type) {
if (type === "string") {
return {
- type: FIELDS.STRING.type,
+ type: FieldType.STRING,
}
} else if (type === "boolean") {
return {
- type: FIELDS.BOOLEAN.type,
+ type: FieldType.BOOLEAN,
}
} else if (type === "number") {
return {
- type: FIELDS.NUMBER.type,
+ type: FieldType.NUMBER,
}
}
}
@@ -31,7 +31,7 @@ function recurse(schemaLevel = {}, objectLevel) {
const schema = recurse(schemaLevel[key], value[0])
if (schema) {
schemaLevel[key] = {
- type: FIELDS.ARRAY.type,
+ type: FieldType.ARRAY,
schema,
}
}
@@ -45,7 +45,7 @@ function recurse(schemaLevel = {}, objectLevel) {
}
}
if (!schemaLevel.type) {
- return { type: FIELDS.JSON.type, schema: schemaLevel }
+ return { type: FieldType.JSON, schema: schemaLevel }
} else {
return schemaLevel
}
diff --git a/packages/builder/src/helpers/utils.js b/packages/builder/src/helpers/utils.js
index 6bb333f3c4..a1f9b34e3d 100644
--- a/packages/builder/src/helpers/utils.js
+++ b/packages/builder/src/helpers/utils.js
@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
import { ActionStepID } from "constants/backend/automations"
import { TableNames } from "constants"
import {
@@ -20,20 +21,20 @@ export function buildAutoColumn(tableName, name, subtype) {
switch (subtype) {
case AUTO_COLUMN_SUB_TYPES.UPDATED_BY:
case AUTO_COLUMN_SUB_TYPES.CREATED_BY:
- type = FIELDS.LINK.type
+ type = FieldType.LINK
constraints = FIELDS.LINK.constraints
break
case AUTO_COLUMN_SUB_TYPES.AUTO_ID:
- type = FIELDS.NUMBER.type
+ type = FieldType.NUMBER
constraints = FIELDS.NUMBER.constraints
break
case AUTO_COLUMN_SUB_TYPES.UPDATED_AT:
case AUTO_COLUMN_SUB_TYPES.CREATED_AT:
- type = FIELDS.DATETIME.type
+ type = FieldType.DATETIME
constraints = FIELDS.DATETIME.constraints
break
default:
- type = FIELDS.STRING.type
+ type = FieldType.STRING
constraints = FIELDS.STRING.constraints
break
}
diff --git a/packages/builder/src/stores/builder/tables.js b/packages/builder/src/stores/builder/tables.js
index f86b37ab85..0163281480 100644
--- a/packages/builder/src/stores/builder/tables.js
+++ b/packages/builder/src/stores/builder/tables.js
@@ -1,7 +1,8 @@
+import { FieldType } from "@budibase/types"
import { get, writable, derived } from "svelte/store"
import { cloneDeep } from "lodash/fp"
import { API } from "api"
-import { SWITCHABLE_TYPES, FIELDS } from "constants/backend"
+import { SWITCHABLE_TYPES } from "constants/backend"
export function createTablesStore() {
const store = writable({
@@ -83,14 +84,14 @@ export function createTablesStore() {
// make sure tables up to date (related)
let newTableIds = []
for (let column of Object.values(updatedTable?.schema || {})) {
- if (column.type === FIELDS.LINK.type) {
+ if (column.type === FieldType.LINK) {
newTableIds.push(column.tableId)
}
}
let oldTableIds = []
for (let column of Object.values(oldTable?.schema || {})) {
- if (column.type === FIELDS.LINK.type) {
+ if (column.type === FieldType.LINK) {
oldTableIds.push(column.tableId)
}
}
diff --git a/packages/builder/src/templates/commonComponents.js b/packages/builder/src/templates/commonComponents.js
index 1a953224a7..884419ae6c 100644
--- a/packages/builder/src/templates/commonComponents.js
+++ b/packages/builder/src/templates/commonComponents.js
@@ -1,21 +1,7 @@
+import { FieldTypeToComponentMap } from "components/design/settings/controls/FieldConfiguration/utils"
import { Component } from "./Component"
import { getSchemaForDatasource } from "dataBinding"
-const fieldTypeToComponentMap = {
- string: "stringfield",
- number: "numberfield",
- bigint: "bigintfield",
- options: "optionsfield",
- array: "multifieldselect",
- boolean: "booleanfield",
- longform: "longformfield",
- datetime: "datetimefield",
- attachment: "attachmentfield",
- link: "relationshipfield",
- json: "jsonfield",
- barcodeqr: "codescanner",
-}
-
export function makeDatasourceFormComponents(datasource) {
const { schema } = getSchemaForDatasource(null, datasource, {
formSchema: true,
@@ -30,7 +16,7 @@ export function makeDatasourceFormComponents(datasource) {
}
const fieldType =
typeof fieldSchema === "object" ? fieldSchema.type : fieldSchema
- const componentType = fieldTypeToComponentMap[fieldType]
+ const componentType = FieldTypeToComponentMap[fieldType]
const fullComponentType = `@budibase/standard-components/${componentType}`
if (componentType) {
const component = new Component(fullComponentType)
diff --git a/packages/client/src/components/app/blocks/FormBlockComponent.svelte b/packages/client/src/components/app/blocks/FormBlockComponent.svelte
index ea1c3b0a37..34168355c4 100644
--- a/packages/client/src/components/app/blocks/FormBlockComponent.svelte
+++ b/packages/client/src/components/app/blocks/FormBlockComponent.svelte
@@ -7,19 +7,19 @@
export let order
const FieldTypeToComponentMap = {
- string: "stringfield",
- number: "numberfield",
- bigint: "bigintfield",
- options: "optionsfield",
- array: "multifieldselect",
- boolean: "booleanfield",
- longform: "longformfield",
- datetime: "datetimefield",
- attachment: "attachmentfield",
- link: "relationshipfield",
- json: "jsonfield",
- barcodeqr: "codescanner",
- bb_reference: "bbreferencefield",
+ [FieldType.STRING]: "stringfield",
+ [FieldType.NUMBER]: "numberfield",
+ [FieldType.BIGINT]: "bigintfield",
+ [FieldType.OPTIONS]: "optionsfield",
+ [FieldType.ARRAY]: "multifieldselect",
+ [FieldType.BOOLEAN]: "booleanfield",
+ [FieldType.LONGFORM]: "longformfield",
+ [FieldType.DATETIME]: "datetimefield",
+ [FieldType.ATTACHMENT]: "attachmentfield",
+ [FieldType.LINK]: "relationshipfield",
+ [FieldType.JSON]: "jsonfield",
+ [FieldType.BARCODEQR]: "codescanner",
+ [FieldType.BB_REFERENCE]: "bbreferencefield",
}
const getFieldSchema = field => {
diff --git a/packages/client/src/components/devtools/DevToolsStatsTab.svelte b/packages/client/src/components/devtools/DevToolsStatsTab.svelte
index 24f587332c..bc0b1a562b 100644
--- a/packages/client/src/components/devtools/DevToolsStatsTab.svelte
+++ b/packages/client/src/components/devtools/DevToolsStatsTab.svelte
@@ -23,6 +23,6 @@
label="Components"
value={$componentStore.mountedComponentCount}
/>
-
-
+
+
diff --git a/packages/frontend-core/package.json b/packages/frontend-core/package.json
index fd37af63dc..4ca88de8f2 100644
--- a/packages/frontend-core/package.json
+++ b/packages/frontend-core/package.json
@@ -8,6 +8,7 @@
"dependencies": {
"@budibase/bbui": "0.0.0",
"@budibase/shared-core": "0.0.0",
+ "@budibase/types": "0.0.0",
"dayjs": "^1.10.8",
"lodash": "4.17.21",
"socket.io-client": "^4.6.1"
diff --git a/packages/frontend-core/src/components/grid/lib/renderers.js b/packages/frontend-core/src/components/grid/lib/renderers.js
index f5d4cfe297..19bf63312d 100644
--- a/packages/frontend-core/src/components/grid/lib/renderers.js
+++ b/packages/frontend-core/src/components/grid/lib/renderers.js
@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
import OptionsCell from "../cells/OptionsCell.svelte"
import DateCell from "../cells/DateCell.svelte"
import MultiSelectCell from "../cells/MultiSelectCell.svelte"
@@ -12,19 +14,19 @@ import AttachmentCell from "../cells/AttachmentCell.svelte"
import BBReferenceCell from "../cells/BBReferenceCell.svelte"
const TypeComponentMap = {
- text: TextCell,
- options: OptionsCell,
- datetime: DateCell,
- barcodeqr: TextCell,
- longform: LongFormCell,
- array: MultiSelectCell,
- number: NumberCell,
- boolean: BooleanCell,
- attachment: AttachmentCell,
- link: RelationshipCell,
- formula: FormulaCell,
- json: JSONCell,
- bb_reference: BBReferenceCell,
+ [FieldType.STRING]: TextCell,
+ [FieldType.OPTIONS]: OptionsCell,
+ [FieldType.DATETIME]: DateCell,
+ [FieldType.BARCODEQR]: TextCell,
+ [FieldType.LONGFORM]: LongFormCell,
+ [FieldType.ARRAY]: MultiSelectCell,
+ [FieldType.NUMBER]: NumberCell,
+ [FieldType.BOOLEAN]: BooleanCell,
+ [FieldType.ATTACHMENT]: AttachmentCell,
+ [FieldType.LINK]: RelationshipCell,
+ [FieldType.FORMULA]: FormulaCell,
+ [FieldType.JSON]: JSONCell,
+ [FieldType.BB_REFERENCE]: BBReferenceCell,
}
export const getCellRenderer = column => {
return TypeComponentMap[column?.schema?.type] || TextCell
diff --git a/packages/frontend-core/src/components/grid/lib/utils.js b/packages/frontend-core/src/components/grid/lib/utils.js
index 80bc3d9d67..8382bfece8 100644
--- a/packages/frontend-core/src/components/grid/lib/utils.js
+++ b/packages/frontend-core/src/components/grid/lib/utils.js
@@ -1,3 +1,5 @@
+import { FieldType, FieldTypeSubtypes } from "@budibase/types"
+
export const getColor = (idx, opacity = 0.3) => {
if (idx == null || idx === -1) {
idx = 0
@@ -6,22 +8,22 @@ export const getColor = (idx, opacity = 0.3) => {
}
const TypeIconMap = {
- text: "Text",
- options: "Dropdown",
- datetime: "Date",
- barcodeqr: "Camera",
- longform: "TextAlignLeft",
- array: "Dropdown",
- number: "123",
- boolean: "Boolean",
- attachment: "AppleFiles",
- link: "DataCorrelated",
- formula: "Calculator",
- json: "Brackets",
- bigint: "TagBold",
- bb_reference: {
- user: "User",
- users: "UserGroup",
+ [FieldType.STRING]: "Text",
+ [FieldType.OPTIONS]: "Dropdown",
+ [FieldType.DATETIME]: "Date",
+ [FieldType.BARCODEQR]: "Camera",
+ [FieldType.LONGFORM]: "TextAlignLeft",
+ [FieldType.ARRAY]: "Dropdown",
+ [FieldType.NUMBER]: "123",
+ [FieldType.BOOLEAN]: "Boolean",
+ [FieldType.ATTACHMENT]: "AppleFiles",
+ [FieldType.LINK]: "DataCorrelated",
+ [FieldType.FORMULA]: "Calculator",
+ [FieldType.JSON]: "Brackets",
+ [FieldType.BIGINT]: "TagBold",
+ [FieldType.BB_REFERENCE]: {
+ [FieldTypeSubtypes.BB_REFERENCE.USER]: "User",
+ [FieldTypeSubtypes.BB_REFERENCE.USERS]: "UserGroup",
},
}
diff --git a/packages/server/scripts/test.sh b/packages/server/scripts/test.sh
index 3ecf8bb794..48766026aa 100644
--- a/packages/server/scripts/test.sh
+++ b/packages/server/scripts/test.sh
@@ -4,8 +4,8 @@ set -e
if [[ -n $CI ]]
then
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
- echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
- jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
+ echo "jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
+ jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
# --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
diff --git a/packages/server/src/api/routes/public/tests/metrics.spec.js b/packages/server/src/api/routes/public/tests/metrics.spec.js
index 8231596d59..2fb5e91000 100644
--- a/packages/server/src/api/routes/public/tests/metrics.spec.js
+++ b/packages/server/src/api/routes/public/tests/metrics.spec.js
@@ -1,7 +1,5 @@
const setup = require("../../tests/utilities")
-jest.setTimeout(30000)
-
describe("/metrics", () => {
let request = setup.getRequest()
let config = setup.getConfig()
diff --git a/packages/server/src/api/routes/tests/appImport.spec.ts b/packages/server/src/api/routes/tests/appImport.spec.ts
index 75e9f91d63..bc211024d4 100644
--- a/packages/server/src/api/routes/tests/appImport.spec.ts
+++ b/packages/server/src/api/routes/tests/appImport.spec.ts
@@ -1,7 +1,6 @@
import * as setup from "./utilities"
import path from "path"
-jest.setTimeout(15000)
const PASSWORD = "testtest"
describe("/applications/:appId/import", () => {
diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts
index 322694df75..7885e97fbf 100644
--- a/packages/server/src/api/routes/tests/automation.spec.ts
+++ b/packages/server/src/api/routes/tests/automation.spec.ts
@@ -23,8 +23,6 @@ let {
collectAutomation,
} = setup.structures
-jest.setTimeout(30000)
-
describe("/automations", () => {
let request = setup.getRequest()
let config = setup.getConfig()
diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
index f9a3ac6e03..585288bc43 100644
--- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
+++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
@@ -1,9 +1,10 @@
import { Datasource, Query, SourceName } from "@budibase/types"
import * as setup from "../utilities"
-import { databaseTestProviders } from "../../../../integrations/tests/utils"
-import pg from "pg"
-import mysql from "mysql2/promise"
-import mssql from "mssql"
+import {
+ DatabaseName,
+ getDatasource,
+ rawQuery,
+} from "../../../../integrations/tests/utils"
jest.unmock("pg")
@@ -34,13 +35,16 @@ const createTableSQL: Record = {
const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
const dropTableSQL = `DROP TABLE test_table;`
-describe.each([
- ["postgres", databaseTestProviders.postgres],
- ["mysql", databaseTestProviders.mysql],
- ["mssql", databaseTestProviders.mssql],
- ["mariadb", databaseTestProviders.mariadb],
-])("queries (%s)", (dbName, dsProvider) => {
+describe.each(
+ [
+ DatabaseName.POSTGRES,
+ DatabaseName.MYSQL,
+ DatabaseName.SQL_SERVER,
+ DatabaseName.MARIADB,
+ ].map(name => [name, getDatasource(name)])
+)("queries (%s)", (dbName, dsProvider) => {
const config = setup.getConfig()
+ let rawDatasource: Datasource
let datasource: Datasource
async function createQuery(query: Partial): Promise {
@@ -57,62 +61,22 @@ describe.each([
return await config.api.query.save({ ...defaultQuery, ...query })
}
- async function rawQuery(sql: string): Promise {
- // We re-fetch the datasource here because the one returned by
- // config.api.datasource.create has the password field blanked out, and we
- // need the password to connect to the database.
- const ds = await dsProvider.datasource()
- switch (ds.source) {
- case SourceName.POSTGRES: {
- const client = new pg.Client(ds.config!)
- await client.connect()
- try {
- const { rows } = await client.query(sql)
- return rows
- } finally {
- await client.end()
- }
- }
- case SourceName.MYSQL: {
- const con = await mysql.createConnection(ds.config!)
- try {
- const [rows] = await con.query(sql)
- return rows
- } finally {
- con.end()
- }
- }
- case SourceName.SQL_SERVER: {
- const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
- const client = await pool.connect()
- try {
- const { recordset } = await client.query(sql)
- return recordset
- } finally {
- await pool.close()
- }
- }
- }
- }
-
beforeAll(async () => {
await config.init()
- datasource = await config.api.datasource.create(
- await dsProvider.datasource()
- )
+ rawDatasource = await dsProvider
+ datasource = await config.api.datasource.create(rawDatasource)
})
beforeEach(async () => {
- await rawQuery(createTableSQL[datasource.source])
- await rawQuery(insertSQL)
+ await rawQuery(rawDatasource, createTableSQL[datasource.source])
+ await rawQuery(rawDatasource, insertSQL)
})
afterEach(async () => {
- await rawQuery(dropTableSQL)
+ await rawQuery(rawDatasource, dropTableSQL)
})
afterAll(async () => {
- await dsProvider.stop()
setup.afterAll()
})
@@ -143,7 +107,10 @@ describe.each([
},
])
- const rows = await rawQuery("SELECT * FROM test_table WHERE name = 'baz'")
+ const rows = await rawQuery(
+ rawDatasource,
+ "SELECT * FROM test_table WHERE name = 'baz'"
+ )
expect(rows).toHaveLength(1)
})
@@ -171,6 +138,7 @@ describe.each([
expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery(
+ rawDatasource,
`SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
)
expect(rows).toHaveLength(1)
@@ -202,6 +170,7 @@ describe.each([
expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery(
+ rawDatasource,
`SELECT * FROM test_table WHERE name = '${notDateStr}'`
)
expect(rows).toHaveLength(1)
@@ -338,7 +307,10 @@ describe.each([
},
])
- const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
+ const rows = await rawQuery(
+ rawDatasource,
+ "SELECT * FROM test_table WHERE id = 1"
+ )
expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null },
])
@@ -406,7 +378,10 @@ describe.each([
},
])
- const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
+ const rows = await rawQuery(
+ rawDatasource,
+ "SELECT * FROM test_table WHERE id = 1"
+ )
expect(rows).toHaveLength(0)
})
})
@@ -443,7 +418,7 @@ describe.each([
} catch (err: any) {
error = err.message
}
- if (dbName === "mssql") {
+ if (dbName === DatabaseName.SQL_SERVER) {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
diff --git a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
index 492f24abf9..bdcfd85437 100644
--- a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
+++ b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
@@ -1,14 +1,17 @@
import { Datasource, Query } from "@budibase/types"
import * as setup from "../utilities"
-import { databaseTestProviders } from "../../../../integrations/tests/utils"
-import { MongoClient, type Collection, BSON } from "mongodb"
-
-const collection = "test_collection"
+import {
+ DatabaseName,
+ getDatasource,
+} from "../../../../integrations/tests/utils"
+import { MongoClient, type Collection, BSON, Db } from "mongodb"
+import { generator } from "@budibase/backend-core/tests"
const expectValidId = expect.stringMatching(/^\w{24}$/)
const expectValidBsonObjectId = expect.any(BSON.ObjectId)
describe("/queries", () => {
+ let collection: string
let config = setup.getConfig()
let datasource: Datasource
@@ -37,8 +40,7 @@ describe("/queries", () => {
async function withClient(
callback: (client: MongoClient) => Promise
): Promise {
- const ds = await databaseTestProviders.mongodb.datasource()
- const client = new MongoClient(ds.config!.connectionString)
+ const client = new MongoClient(datasource.config!.connectionString)
await client.connect()
try {
return await callback(client)
@@ -47,30 +49,33 @@ describe("/queries", () => {
}
}
+ async function withDb(callback: (db: Db) => Promise): Promise {
+ return await withClient(async client => {
+ return await callback(client.db(datasource.config!.db))
+ })
+ }
+
async function withCollection(
callback: (collection: Collection) => Promise
): Promise {
- return await withClient(async client => {
- const db = client.db(
- (await databaseTestProviders.mongodb.datasource()).config!.db
- )
+ return await withDb(async db => {
return await callback(db.collection(collection))
})
}
afterAll(async () => {
- await databaseTestProviders.mongodb.stop()
setup.afterAll()
})
beforeAll(async () => {
await config.init()
datasource = await config.api.datasource.create(
- await databaseTestProviders.mongodb.datasource()
+ await getDatasource(DatabaseName.MONGODB)
)
})
beforeEach(async () => {
+ collection = generator.guid()
await withCollection(async collection => {
await collection.insertMany([
{ name: "one" },
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index f638f2c4bf..8910522565 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -1,4 +1,4 @@
-import { databaseTestProviders } from "../../../integrations/tests/utils"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import tk from "timekeeper"
import { outputProcessing } from "../../../utilities/rowProcessor"
@@ -34,10 +34,10 @@ jest.unmock("pg")
describe.each([
["internal", undefined],
- ["postgres", databaseTestProviders.postgres],
- ["mysql", databaseTestProviders.mysql],
- ["mssql", databaseTestProviders.mssql],
- ["mariadb", databaseTestProviders.mariadb],
+ [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+ [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+ [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+ [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/rows (%s)", (__, dsProvider) => {
const isInternal = dsProvider === undefined
const config = setup.getConfig()
@@ -49,23 +49,23 @@ describe.each([
await config.init()
if (dsProvider) {
datasource = await config.createDatasource({
- datasource: await dsProvider.datasource(),
+ datasource: await dsProvider,
})
}
})
afterAll(async () => {
- if (dsProvider) {
- await dsProvider.stop()
- }
setup.afterAll()
})
function saveTableRequest(
- ...overrides: Partial[]
+ // We omit the name field here because it's generated in the function with a
+ // high likelihood of being unique. Tests should not have any reason to control
+ // the table name they're writing to.
+ ...overrides: Partial>[]
): SaveTableRequest {
const req: SaveTableRequest = {
- name: uuid.v4().substring(0, 16),
+ name: uuid.v4().substring(0, 10),
type: "table",
sourceType: datasource
? TableSourceType.EXTERNAL
@@ -87,7 +87,10 @@ describe.each([
}
function defaultTable(
- ...overrides: Partial[]
+ // We omit the name field here because it's generated in the function with a
+ // high likelihood of being unique. Tests should not have any reason to control
+ // the table name they're writing to.
+ ...overrides: Partial>[]
): SaveTableRequest {
return saveTableRequest(
{
@@ -194,7 +197,6 @@ describe.each([
const newTable = await config.api.table.save(
saveTableRequest({
- name: "TestTableAuto",
schema: {
"Row ID": {
name: "Row ID",
@@ -383,11 +385,9 @@ describe.each([
isInternal &&
it("doesn't allow creating in user table", async () => {
- const userTableId = InternalTable.USER_METADATA
const response = await config.api.row.save(
- userTableId,
+ InternalTable.USER_METADATA,
{
- tableId: userTableId,
firstName: "Joe",
lastName: "Joe",
email: "joe@joe.com",
@@ -462,7 +462,6 @@ describe.each([
table = await config.api.table.save(defaultTable())
otherTable = await config.api.table.save(
defaultTable({
- name: "a",
schema: {
relationship: {
name: "relationship",
@@ -898,8 +897,8 @@ describe.each([
let o2mTable: Table
let m2mTable: Table
beforeAll(async () => {
- o2mTable = await config.api.table.save(defaultTable({ name: "o2m" }))
- m2mTable = await config.api.table.save(defaultTable({ name: "m2m" }))
+ o2mTable = await config.api.table.save(defaultTable())
+ m2mTable = await config.api.table.save(defaultTable())
})
describe.each([
@@ -1256,7 +1255,6 @@ describe.each([
otherTable = await config.api.table.save(defaultTable())
table = await config.api.table.save(
saveTableRequest({
- name: "b",
schema: {
links: {
name: "links",
@@ -1354,7 +1352,6 @@ describe.each([
const table = await config.api.table.save(
saveTableRequest({
- name: "table",
schema: {
text: {
name: "text",
diff --git a/packages/server/src/api/routes/tests/user.spec.ts b/packages/server/src/api/routes/tests/user.spec.ts
index ff8c0d54b3..a46de8f3b3 100644
--- a/packages/server/src/api/routes/tests/user.spec.ts
+++ b/packages/server/src/api/routes/tests/user.spec.ts
@@ -3,8 +3,6 @@ import { checkPermissionsEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { UserMetadata } from "@budibase/types"
-jest.setTimeout(30000)
-
jest.mock("../../../utilities/workerRequests", () => ({
getGlobalUsers: jest.fn(() => {
return {}
diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts
index f9d213a26b..d3e38b0f23 100644
--- a/packages/server/src/api/routes/tests/viewV2.spec.ts
+++ b/packages/server/src/api/routes/tests/viewV2.spec.ts
@@ -19,8 +19,7 @@ import {
ViewV2,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
-import * as uuid from "uuid"
-import { databaseTestProviders } from "../../../integrations/tests/utils"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core"
@@ -30,10 +29,10 @@ jest.unmock("pg")
describe.each([
["internal", undefined],
- ["postgres", databaseTestProviders.postgres],
- ["mysql", databaseTestProviders.mysql],
- ["mssql", databaseTestProviders.mssql],
- ["mariadb", databaseTestProviders.mariadb],
+ [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+ [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+ [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+ [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => {
const config = setup.getConfig()
const isInternal = !dsProvider
@@ -42,10 +41,10 @@ describe.each([
let datasource: Datasource
function saveTableRequest(
- ...overrides: Partial[]
+ ...overrides: Partial>[]
): SaveTableRequest {
const req: SaveTableRequest = {
- name: uuid.v4().substring(0, 16),
+ name: generator.guid().replaceAll("-", "").substring(0, 16),
type: "table",
sourceType: datasource
? TableSourceType.EXTERNAL
@@ -90,16 +89,13 @@ describe.each([
if (dsProvider) {
datasource = await config.createDatasource({
- datasource: await dsProvider.datasource(),
+ datasource: await dsProvider,
})
}
table = await config.api.table.save(priceTable())
})
afterAll(async () => {
- if (dsProvider) {
- await dsProvider.stop()
- }
setup.afterAll()
})
@@ -231,7 +227,7 @@ describe.each([
view = await config.api.viewV2.create({
tableId: table._id!,
- name: "View A",
+ name: generator.guid(),
})
})
@@ -307,12 +303,13 @@ describe.each([
it("can update an existing view name", async () => {
const tableId = table._id!
- await config.api.viewV2.update({ ...view, name: "View B" })
+ const newName = generator.guid()
+ await config.api.viewV2.update({ ...view, name: newName })
expect(await config.api.table.get(tableId)).toEqual(
expect.objectContaining({
views: {
- "View B": { ...view, name: "View B", schema: expect.anything() },
+ [newName]: { ...view, name: newName, schema: expect.anything() },
},
})
)
@@ -507,7 +504,6 @@ describe.each([
it("views have extra data trimmed", async () => {
const table = await config.api.table.save(
saveTableRequest({
- name: "orders",
schema: {
Country: {
type: FieldType.STRING,
@@ -523,7 +519,7 @@ describe.each([
const view = await config.api.viewV2.create({
tableId: table._id!,
- name: uuid.v4(),
+ name: generator.guid(),
schema: {
Country: {
visible: true,
@@ -853,7 +849,6 @@ describe.each([
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
- name: `users_${uuid.v4()}`,
type: "table",
schema: {
name: {
diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts
index 92420fb336..7e54b53b15 100644
--- a/packages/server/src/integration-test/mysql.spec.ts
+++ b/packages/server/src/integration-test/mysql.spec.ts
@@ -3,7 +3,6 @@ import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
-import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
Datasource,
@@ -12,12 +11,23 @@ import {
TableRequest,
TableSourceType,
} from "@budibase/types"
-import { databaseTestProviders } from "../integrations/tests/utils"
-import mysql from "mysql2/promise"
+import {
+ DatabaseName,
+ getDatasource,
+ rawQuery,
+} from "../integrations/tests/utils"
import { builderSocket } from "../websockets"
+import { generator } from "@budibase/backend-core/tests"
// @ts-ignore
fetch.mockSearch()
+function uniqueTableName(length?: number): string {
+ return generator
+ .guid()
+ .replaceAll("-", "_")
+ .substring(0, length || 10)
+}
+
const config = setup.getConfig()!
jest.mock("../websockets", () => ({
@@ -37,7 +47,8 @@ jest.mock("../websockets", () => ({
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
- mysqlDatasource: Datasource,
+ rawDatasource: Datasource,
+ datasource: Datasource,
primaryMySqlTable: Table
beforeAll(async () => {
@@ -46,18 +57,13 @@ describe("mysql integrations", () => {
makeRequest = generateMakeRequest(apiKey, true)
- mysqlDatasource = await config.api.datasource.create(
- await databaseTestProviders.mysql.datasource()
- )
- })
-
- afterAll(async () => {
- await databaseTestProviders.mysql.stop()
+ rawDatasource = await getDatasource(DatabaseName.MYSQL)
+ datasource = await config.api.datasource.create(rawDatasource)
})
beforeEach(async () => {
primaryMySqlTable = await config.createTable({
- name: uuidv4(),
+ name: uniqueTableName(),
type: "table",
primary: ["id"],
schema: {
@@ -79,7 +85,7 @@ describe("mysql integrations", () => {
type: FieldType.NUMBER,
},
},
- sourceId: mysqlDatasource._id,
+ sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
@@ -87,18 +93,15 @@ describe("mysql integrations", () => {
afterAll(config.end)
it("validate table schema", async () => {
- const res = await makeRequest(
- "get",
- `/api/datasources/${mysqlDatasource._id}`
- )
+ const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
- database: "mysql",
- host: mysqlDatasource.config!.host,
+ database: expect.any(String),
+ host: datasource.config!.host,
password: "--secret-value--",
- port: mysqlDatasource.config!.port,
+ port: datasource.config!.port,
user: "root",
},
plus: true,
@@ -117,7 +120,7 @@ describe("mysql integrations", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
- datasource: await databaseTestProviders.mysql.datasource(),
+ datasource: rawDatasource,
},
{
body: {
@@ -128,13 +131,12 @@ describe("mysql integrations", () => {
})
it("should state an invalid datasource cannot connect", async () => {
- const dbConfig = await databaseTestProviders.mysql.datasource()
await config.api.datasource.verify(
{
datasource: {
- ...dbConfig,
+ ...rawDatasource,
config: {
- ...dbConfig.config,
+ ...rawDatasource.config,
password: "wrongpassword",
},
},
@@ -154,7 +156,7 @@ describe("mysql integrations", () => {
it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", {
- datasource: mysqlDatasource,
+ datasource: datasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
@@ -163,40 +165,38 @@ describe("mysql integrations", () => {
})
describe("Integration compatibility with mysql search_path", () => {
- let client: mysql.Connection, pathDatasource: Datasource
- const database = "test1"
- const database2 = "test-2"
+ let datasource: Datasource, rawDatasource: Datasource
+ const database = generator.guid()
+ const database2 = generator.guid()
beforeAll(async () => {
- const dsConfig = await databaseTestProviders.mysql.datasource()
- const dbConfig = dsConfig.config!
+ rawDatasource = await getDatasource(DatabaseName.MYSQL)
- client = await mysql.createConnection(dbConfig)
- await client.query(`CREATE DATABASE \`${database}\`;`)
- await client.query(`CREATE DATABASE \`${database2}\`;`)
+ await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
+ await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = {
- ...dsConfig,
+ ...rawDatasource,
config: {
- ...dbConfig,
+ ...rawDatasource.config!,
database,
},
}
- pathDatasource = await config.api.datasource.create(pathConfig)
+ datasource = await config.api.datasource.create(pathConfig)
})
afterAll(async () => {
- await client.query(`DROP DATABASE \`${database}\`;`)
- await client.query(`DROP DATABASE \`${database2}\`;`)
- await client.end()
+ await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
+ await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
})
it("discovers tables from any schema in search path", async () => {
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
- datasource: pathDatasource,
+ datasource: datasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
@@ -207,15 +207,17 @@ describe("mysql integrations", () => {
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
- `/api/datasources/${pathDatasource._id}/schema`,
+ `/api/datasources/${datasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
@@ -231,30 +233,14 @@ describe("mysql integrations", () => {
})
describe("POST /api/tables/", () => {
- let client: mysql.Connection
const emitDatasourceUpdateMock = jest.fn()
- beforeEach(async () => {
- client = await mysql.createConnection(
- (
- await databaseTestProviders.mysql.datasource()
- ).config!
- )
- mysqlDatasource = await config.api.datasource.create(
- await databaseTestProviders.mysql.datasource()
- )
- })
-
- afterEach(async () => {
- await client.end()
- })
-
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
- name: "table",
- sourceId: mysqlDatasource._id!,
+ name: uniqueTableName(),
+ sourceId: datasource._id!,
primary: ["id"],
schema: {
id: {
@@ -301,14 +287,16 @@ describe("mysql integrations", () => {
},
},
created: true,
- _id: `${mysqlDatasource._id}__table`,
+ _id: `${datasource._id}__${addColumnToTable.name}`,
}
delete expectedTable._add
expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1]
- expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
+ expect(emittedDatasource.entities![expectedTable.name]).toEqual(
+ expectedTable
+ )
})
it("will rename a column", async () => {
@@ -346,17 +334,18 @@ describe("mysql integrations", () => {
"/api/tables/",
renameColumnOnTable
)
- mysqlDatasource = (
- await makeRequest(
- "post",
- `/api/datasources/${mysqlDatasource._id}/schema`
- )
+
+ const ds = (
+ await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
).body.datasource
expect(response.status).toEqual(200)
- expect(
- Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
- ).toEqual(["id", "name", "description", "age"])
+ expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
+ "id",
+ "name",
+ "description",
+ "age",
+ ])
})
})
})
diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts
index 107c4ade1e..5ecc3ca3ef 100644
--- a/packages/server/src/integration-test/postgres.spec.ts
+++ b/packages/server/src/integration-test/postgres.spec.ts
@@ -16,8 +16,12 @@ import {
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
-import { databaseTestProviders } from "../integrations/tests/utils"
-import { Client } from "pg"
+import {
+ DatabaseName,
+ getDatasource,
+ rawQuery,
+} from "../integrations/tests/utils"
+
// @ts-ignore
fetch.mockSearch()
@@ -28,7 +32,8 @@ jest.mock("../websockets")
describe("postgres integrations", () => {
let makeRequest: MakeRequestResponse,
- postgresDatasource: Datasource,
+ rawDatasource: Datasource,
+ datasource: Datasource,
primaryPostgresTable: Table,
oneToManyRelationshipInfo: ForeignTableInfo,
manyToOneRelationshipInfo: ForeignTableInfo,
@@ -40,19 +45,17 @@ describe("postgres integrations", () => {
makeRequest = generateMakeRequest(apiKey, true)
- postgresDatasource = await config.api.datasource.create(
- await databaseTestProviders.postgres.datasource()
- )
- })
-
- afterAll(async () => {
- await databaseTestProviders.postgres.stop()
+ rawDatasource = await getDatasource(DatabaseName.POSTGRES)
+ datasource = await config.api.datasource.create(rawDatasource)
})
beforeEach(async () => {
async function createAuxTable(prefix: string) {
return await config.createTable({
- name: `${prefix}_${generator.word({ length: 6 })}`,
+ name: `${prefix}_${generator
+ .guid()
+ .replaceAll("-", "")
+ .substring(0, 6)}`,
type: "table",
primary: ["id"],
primaryDisplay: "title",
@@ -67,7 +70,7 @@ describe("postgres integrations", () => {
type: FieldType.STRING,
},
},
- sourceId: postgresDatasource._id,
+ sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
})
}
@@ -89,7 +92,7 @@ describe("postgres integrations", () => {
}
primaryPostgresTable = await config.createTable({
- name: `p_${generator.word({ length: 6 })}`,
+ name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`,
type: "table",
primary: ["id"],
schema: {
@@ -144,7 +147,7 @@ describe("postgres integrations", () => {
main: true,
},
},
- sourceId: postgresDatasource._id,
+ sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
})
})
@@ -251,7 +254,7 @@ describe("postgres integrations", () => {
async function createDefaultPgTable() {
return await config.createTable({
- name: generator.word({ length: 10 }),
+ name: generator.guid().replaceAll("-", "").substring(0, 10),
type: "table",
primary: ["id"],
schema: {
@@ -261,7 +264,7 @@ describe("postgres integrations", () => {
autocolumn: true,
},
},
- sourceId: postgresDatasource._id,
+ sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL,
})
}
@@ -299,19 +302,16 @@ describe("postgres integrations", () => {
}
it("validate table schema", async () => {
- const res = await makeRequest(
- "get",
- `/api/datasources/${postgresDatasource._id}`
- )
+ const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
ca: false,
- database: "postgres",
- host: postgresDatasource.config!.host,
+ database: expect.any(String),
+ host: datasource.config!.host,
password: "--secret-value--",
- port: postgresDatasource.config!.port,
+ port: datasource.config!.port,
rejectUnauthorized: false,
schema: "public",
ssl: false,
@@ -1043,7 +1043,7 @@ describe("postgres integrations", () => {
it("should be able to verify the connection", async () => {
await config.api.datasource.verify(
{
- datasource: await databaseTestProviders.postgres.datasource(),
+ datasource: await getDatasource(DatabaseName.POSTGRES),
},
{
body: {
@@ -1054,7 +1054,7 @@ describe("postgres integrations", () => {
})
it("should state an invalid datasource cannot connect", async () => {
- const dbConfig = await databaseTestProviders.postgres.datasource()
+ const dbConfig = await getDatasource(DatabaseName.POSTGRES)
await config.api.datasource.verify(
{
datasource: {
@@ -1079,7 +1079,7 @@ describe("postgres integrations", () => {
it("should fetch information about postgres datasource", async () => {
const primaryName = primaryPostgresTable.name
const response = await makeRequest("post", "/api/datasources/info", {
- datasource: postgresDatasource,
+ datasource: datasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
@@ -1088,86 +1088,88 @@ describe("postgres integrations", () => {
})
describe("POST /api/datasources/:datasourceId/schema", () => {
- let client: Client
+ let tableName: string
beforeEach(async () => {
- client = new Client(
- (await databaseTestProviders.postgres.datasource()).config!
- )
- await client.connect()
+ tableName = generator.guid().replaceAll("-", "").substring(0, 10)
})
afterEach(async () => {
- await client.query(`DROP TABLE IF EXISTS "table"`)
- await client.end()
+ await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
})
it("recognises when a table has no primary key", async () => {
- await client.query(`CREATE TABLE "table" (id SERIAL)`)
+ await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
const response = await makeRequest(
"post",
- `/api/datasources/${postgresDatasource._id}/schema`
+ `/api/datasources/${datasource._id}/schema`
)
expect(response.body.errors).toEqual({
- table: "Table must have a primary key.",
+ [tableName]: "Table must have a primary key.",
})
})
it("recognises when a table is using a reserved column name", async () => {
- await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
+ await rawQuery(
+ rawDatasource,
+ `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
+ )
const response = await makeRequest(
"post",
- `/api/datasources/${postgresDatasource._id}/schema`
+ `/api/datasources/${datasource._id}/schema`
)
expect(response.body.errors).toEqual({
- table: "Table contains invalid columns.",
+ [tableName]: "Table contains invalid columns.",
})
})
})
describe("Integration compatibility with postgres search_path", () => {
- let client: Client, pathDatasource: Datasource
- const schema1 = "test1",
- schema2 = "test-2"
+ let rawDatasource: Datasource,
+ datasource: Datasource,
+ schema1: string,
+ schema2: string
- beforeAll(async () => {
- const dsConfig = await databaseTestProviders.postgres.datasource()
- const dbConfig = dsConfig.config!
+ beforeEach(async () => {
+ schema1 = generator.guid().replaceAll("-", "")
+ schema2 = generator.guid().replaceAll("-", "")
- client = new Client(dbConfig)
- await client.connect()
- await client.query(`CREATE SCHEMA "${schema1}";`)
- await client.query(`CREATE SCHEMA "${schema2}";`)
+ rawDatasource = await getDatasource(DatabaseName.POSTGRES)
+ const dbConfig = rawDatasource.config!
+
+ await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
+ await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
const pathConfig: any = {
- ...dsConfig,
+ ...rawDatasource,
config: {
...dbConfig,
schema: `${schema1}, ${schema2}`,
},
}
- pathDatasource = await config.api.datasource.create(pathConfig)
+ datasource = await config.api.datasource.create(pathConfig)
})
- afterAll(async () => {
- await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
- await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
- await client.end()
+ afterEach(async () => {
+ await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
+ await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
})
it("discovers tables from any schema in search path", async () => {
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
)
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
- datasource: pathDatasource,
+ datasource: datasource,
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
@@ -1178,15 +1180,17 @@ describe("postgres integrations", () => {
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
- await client.query(
+ await rawQuery(
+ rawDatasource,
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
- `/api/datasources/${pathDatasource._id}/schema`,
+ `/api/datasources/${datasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
}
diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts
index b2be3df4e0..bbdb41b38a 100644
--- a/packages/server/src/integrations/tests/utils/index.ts
+++ b/packages/server/src/integrations/tests/utils/index.ts
@@ -1,25 +1,90 @@
jest.unmock("pg")
-import { Datasource } from "@budibase/types"
+import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
-import { StartedTestContainer } from "testcontainers"
+import { GenericContainer } from "testcontainers"
+import { testContainerUtils } from "@budibase/backend-core/tests"
-jest.setTimeout(30000)
+export type DatasourceProvider = () => Promise
-export interface DatabaseProvider {
- start(): Promise
- stop(): Promise
- datasource(): Promise
+export enum DatabaseName {
+ POSTGRES = "postgres",
+ MONGODB = "mongodb",
+ MYSQL = "mysql",
+ SQL_SERVER = "mssql",
+ MARIADB = "mariadb",
}
-export const databaseTestProviders = {
- postgres,
- mongodb,
- mysql,
- mssql,
- mariadb,
+const providers: Record = {
+ [DatabaseName.POSTGRES]: postgres.getDatasource,
+ [DatabaseName.MONGODB]: mongodb.getDatasource,
+ [DatabaseName.MYSQL]: mysql.getDatasource,
+ [DatabaseName.SQL_SERVER]: mssql.getDatasource,
+ [DatabaseName.MARIADB]: mariadb.getDatasource,
+}
+
+export function getDatasourceProviders(
+ ...sourceNames: DatabaseName[]
+): Promise[] {
+ return sourceNames.map(sourceName => providers[sourceName]())
+}
+
+export function getDatasourceProvider(
+ sourceName: DatabaseName
+): DatasourceProvider {
+ return providers[sourceName]
+}
+
+export function getDatasource(sourceName: DatabaseName): Promise {
+ return providers[sourceName]()
+}
+
+export async function getDatasources(
+ ...sourceNames: DatabaseName[]
+): Promise {
+ return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
+}
+
+export async function rawQuery(ds: Datasource, sql: string): Promise {
+ switch (ds.source) {
+ case SourceName.POSTGRES: {
+ return postgres.rawQuery(ds, sql)
+ }
+ case SourceName.MYSQL: {
+ return mysql.rawQuery(ds, sql)
+ }
+ case SourceName.SQL_SERVER: {
+ return mssql.rawQuery(ds, sql)
+ }
+ default: {
+ throw new Error(`Unsupported source: ${ds.source}`)
+ }
+ }
+}
+
+export async function startContainer(container: GenericContainer) {
+ if (process.env.REUSE_CONTAINERS) {
+ container = container.withReuse()
+ }
+
+ const startedContainer = await container.start()
+
+ const info = testContainerUtils.getContainerById(startedContainer.getId())
+ if (!info) {
+ throw new Error("Container not found")
+ }
+
+ // Some Docker runtimes, when you expose a port, will bind it to both
+ // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
+ // addresses are not shared, and testcontainers will sometimes give you back
+ // the ipv6 port. There's no way to know that this has happened, and if you
+ // try to then connect to `localhost:port` you may attempt to bind to the v4
+ // address which could be unbound or even an entirely different container. For
+ // that reason, we don't use testcontainers' `getExposedPort` function,
+ // preferring instead our own method that guarantees v4 ports.
+ return testContainerUtils.getExposedV4Ports(info)
}
diff --git a/packages/server/src/integrations/tests/utils/mariadb.ts b/packages/server/src/integrations/tests/utils/mariadb.ts
index a097e0aaa1..fcd79b8e56 100644
--- a/packages/server/src/integrations/tests/utils/mariadb.ts
+++ b/packages/server/src/integrations/tests/utils/mariadb.ts
@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
-import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
+import { rawQuery } from "./mysql"
+import { generator, testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
-let container: StartedTestContainer | undefined
+let ports: Promise
class MariaDBWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@@ -21,38 +24,38 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
}
}
-export async function start(): Promise {
- return await new GenericContainer("mariadb:lts")
- .withExposedPorts(3306)
- .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
- .withWaitStrategy(new MariaDBWaitStrategy())
- .start()
-}
-
-export async function datasource(): Promise {
- if (!container) {
- container = await start()
+export async function getDatasource(): Promise {
+ if (!ports) {
+ ports = startContainer(
+ new GenericContainer("mariadb:lts")
+ .withExposedPorts(3306)
+ .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
+ .withWaitStrategy(new MariaDBWaitStrategy())
+ )
}
- const host = container.getHost()
- const port = container.getMappedPort(3306)
- return {
+ const port = (await ports).find(x => x.container === 3306)?.host
+ if (!port) {
+ throw new Error("MariaDB port not found")
+ }
+
+ const config = {
+ host: "127.0.0.1",
+ port,
+ user: "root",
+ password: "password",
+ database: "mysql",
+ }
+
+ const datasource = {
type: "datasource_plus",
source: SourceName.MYSQL,
plus: true,
- config: {
- host,
- port,
- user: "root",
- password: "password",
- database: "mysql",
- },
+ config,
}
-}
-export async function stop() {
- if (container) {
- await container.stop()
- container = undefined
- }
+ const database = generator.guid().replaceAll("-", "")
+ await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
+ datasource.config.database = database
+ return datasource
}
diff --git a/packages/server/src/integrations/tests/utils/mongodb.ts b/packages/server/src/integrations/tests/utils/mongodb.ts
index 0baafc6276..0bdbb2808c 100644
--- a/packages/server/src/integrations/tests/utils/mongodb.ts
+++ b/packages/server/src/integrations/tests/utils/mongodb.ts
@@ -1,43 +1,39 @@
+import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { Datasource, SourceName } from "@budibase/types"
-import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { GenericContainer, Wait } from "testcontainers"
+import { startContainer } from "."
-let container: StartedTestContainer | undefined
+let ports: Promise
-export async function start(): Promise {
- return await new GenericContainer("mongo:7.0-jammy")
- .withExposedPorts(27017)
- .withEnvironment({
- MONGO_INITDB_ROOT_USERNAME: "mongo",
- MONGO_INITDB_ROOT_PASSWORD: "password",
- })
- .withWaitStrategy(
- Wait.forSuccessfulCommand(
- `mongosh --eval "db.version()"`
- ).withStartupTimeout(10000)
+export async function getDatasource(): Promise {
+ if (!ports) {
+ ports = startContainer(
+ new GenericContainer("mongo:7.0-jammy")
+ .withExposedPorts(27017)
+ .withEnvironment({
+ MONGO_INITDB_ROOT_USERNAME: "mongo",
+ MONGO_INITDB_ROOT_PASSWORD: "password",
+ })
+ .withWaitStrategy(
+ Wait.forSuccessfulCommand(
+ `mongosh --eval "db.version()"`
+ ).withStartupTimeout(10000)
+ )
)
- .start()
-}
-
-export async function datasource(): Promise {
- if (!container) {
- container = await start()
}
- const host = container.getHost()
- const port = container.getMappedPort(27017)
+
+ const port = (await ports).find(x => x.container === 27017)
+ if (!port) {
+ throw new Error("MongoDB port not found")
+ }
+
return {
type: "datasource",
source: SourceName.MONGODB,
plus: false,
config: {
- connectionString: `mongodb://mongo:password@${host}:${port}`,
- db: "mongo",
+ connectionString: `mongodb://mongo:password@127.0.0.1:${port.host}`,
+ db: generator.guid(),
},
}
}
-
-export async function stop() {
- if (container) {
- await container.stop()
- container = undefined
- }
-}
diff --git a/packages/server/src/integrations/tests/utils/mssql.ts b/packages/server/src/integrations/tests/utils/mssql.ts
index 6bd4290a90..647f461272 100644
--- a/packages/server/src/integrations/tests/utils/mssql.ts
+++ b/packages/server/src/integrations/tests/utils/mssql.ts
@@ -1,43 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
-import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { GenericContainer, Wait } from "testcontainers"
+import mssql from "mssql"
+import { generator, testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
-let container: StartedTestContainer | undefined
+let ports: Promise
-export async function start(): Promise {
- return await new GenericContainer(
- "mcr.microsoft.com/mssql/server:2022-latest"
- )
- .withExposedPorts(1433)
- .withEnvironment({
- ACCEPT_EULA: "Y",
- MSSQL_SA_PASSWORD: "Password_123",
- // This is important, as Microsoft allow us to use the "Developer" edition
- // of SQL Server for development and testing purposes. We can't use other
- // versions without a valid license, and we cannot use the Developer
- // version in production.
- MSSQL_PID: "Developer",
- })
- .withWaitStrategy(
- Wait.forSuccessfulCommand(
- "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
- )
+export async function getDatasource(): Promise {
+ if (!ports) {
+ ports = startContainer(
+ new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
+ .withExposedPorts(1433)
+ .withEnvironment({
+ ACCEPT_EULA: "Y",
+ MSSQL_SA_PASSWORD: "Password_123",
+        // This is important, as Microsoft allows us to use the "Developer" edition
+ // of SQL Server for development and testing purposes. We can't use other
+ // versions without a valid license, and we cannot use the Developer
+ // version in production.
+ MSSQL_PID: "Developer",
+ })
+ .withWaitStrategy(
+ Wait.forSuccessfulCommand(
+ "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
+ )
+ )
)
- .start()
-}
-
-export async function datasource(): Promise {
- if (!container) {
- container = await start()
}
- const host = container.getHost()
- const port = container.getMappedPort(1433)
- return {
+ const port = (await ports).find(x => x.container === 1433)?.host
+
+ const datasource: Datasource = {
type: "datasource_plus",
source: SourceName.SQL_SERVER,
plus: true,
config: {
- server: host,
+ server: "127.0.0.1",
port,
user: "sa",
password: "Password_123",
@@ -46,11 +44,28 @@ export async function datasource(): Promise {
},
},
}
+
+ const database = generator.guid().replaceAll("-", "")
+ await rawQuery(datasource, `CREATE DATABASE "${database}"`)
+ datasource.config!.database = database
+
+ return datasource
}
-export async function stop() {
- if (container) {
- await container.stop()
- container = undefined
+export async function rawQuery(ds: Datasource, sql: string) {
+ if (!ds.config) {
+ throw new Error("Datasource config is missing")
+ }
+ if (ds.source !== SourceName.SQL_SERVER) {
+ throw new Error("Datasource source is not SQL Server")
+ }
+
+ const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
+ const client = await pool.connect()
+ try {
+ const { recordset } = await client.query(sql)
+ return recordset
+ } finally {
+ await pool.close()
}
}
diff --git a/packages/server/src/integrations/tests/utils/mysql.ts b/packages/server/src/integrations/tests/utils/mysql.ts
index 5e51478998..a78833e1de 100644
--- a/packages/server/src/integrations/tests/utils/mysql.ts
+++ b/packages/server/src/integrations/tests/utils/mysql.ts
@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
-import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
+import mysql from "mysql2/promise"
+import { generator, testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
-let container: StartedTestContainer | undefined
+let ports: Promise
class MySQLWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@@ -24,38 +27,50 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
}
}
-export async function start(): Promise {
- return await new GenericContainer("mysql:8.3")
- .withExposedPorts(3306)
- .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
- .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
- .start()
-}
-
-export async function datasource(): Promise {
- if (!container) {
- container = await start()
+export async function getDatasource(): Promise {
+ if (!ports) {
+ ports = startContainer(
+ new GenericContainer("mysql:8.3")
+ .withExposedPorts(3306)
+ .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
+ .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
+ )
}
- const host = container.getHost()
- const port = container.getMappedPort(3306)
- return {
+ const port = (await ports).find(x => x.container === 3306)?.host
+
+ const datasource: Datasource = {
type: "datasource_plus",
source: SourceName.MYSQL,
plus: true,
config: {
- host,
+ host: "127.0.0.1",
port,
user: "root",
password: "password",
database: "mysql",
},
}
+
+ const database = generator.guid().replaceAll("-", "")
+ await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
+ datasource.config!.database = database
+ return datasource
}
-export async function stop() {
- if (container) {
- await container.stop()
- container = undefined
+export async function rawQuery(ds: Datasource, sql: string) {
+ if (!ds.config) {
+ throw new Error("Datasource config is missing")
+ }
+ if (ds.source !== SourceName.MYSQL) {
+ throw new Error("Datasource source is not MySQL")
+ }
+
+ const connection = await mysql.createConnection(ds.config)
+ try {
+ const [rows] = await connection.query(sql)
+ return rows
+ } finally {
+ connection.end()
}
}
diff --git a/packages/server/src/integrations/tests/utils/postgres.ts b/packages/server/src/integrations/tests/utils/postgres.ts
index 82a62e3916..4191b107e9 100644
--- a/packages/server/src/integrations/tests/utils/postgres.ts
+++ b/packages/server/src/integrations/tests/utils/postgres.ts
@@ -1,33 +1,33 @@
import { Datasource, SourceName } from "@budibase/types"
-import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+import { GenericContainer, Wait } from "testcontainers"
+import pg from "pg"
+import { generator, testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
-let container: StartedTestContainer | undefined
+let ports: Promise
-export async function start(): Promise {
- return await new GenericContainer("postgres:16.1-bullseye")
- .withExposedPorts(5432)
- .withEnvironment({ POSTGRES_PASSWORD: "password" })
- .withWaitStrategy(
- Wait.forSuccessfulCommand(
- "pg_isready -h localhost -p 5432"
- ).withStartupTimeout(10000)
+export async function getDatasource(): Promise {
+ if (!ports) {
+ ports = startContainer(
+ new GenericContainer("postgres:16.1-bullseye")
+ .withExposedPorts(5432)
+ .withEnvironment({ POSTGRES_PASSWORD: "password" })
+ .withWaitStrategy(
+ Wait.forSuccessfulCommand(
+ "pg_isready -h localhost -p 5432"
+ ).withStartupTimeout(10000)
+ )
)
- .start()
-}
-
-export async function datasource(): Promise {
- if (!container) {
- container = await start()
}
- const host = container.getHost()
- const port = container.getMappedPort(5432)
- return {
+ const port = (await ports).find(x => x.container === 5432)?.host
+
+ const datasource: Datasource = {
type: "datasource_plus",
source: SourceName.POSTGRES,
plus: true,
config: {
- host,
+ host: "127.0.0.1",
port,
database: "postgres",
user: "postgres",
@@ -38,11 +38,28 @@ export async function datasource(): Promise {
ca: false,
},
}
+
+ const database = generator.guid().replaceAll("-", "")
+ await rawQuery(datasource, `CREATE DATABASE "${database}"`)
+ datasource.config!.database = database
+
+ return datasource
}
-export async function stop() {
- if (container) {
- await container.stop()
- container = undefined
+export async function rawQuery(ds: Datasource, sql: string) {
+ if (!ds.config) {
+ throw new Error("Datasource config is missing")
+ }
+ if (ds.source !== SourceName.POSTGRES) {
+ throw new Error("Datasource source is not Postgres")
+ }
+
+ const client = new pg.Client(ds.config)
+ await client.connect()
+ try {
+ const { rows } = await client.query(sql)
+ return rows
+ } finally {
+ await client.end()
}
}
diff --git a/packages/server/src/migrations/tests/index.spec.ts b/packages/server/src/migrations/tests/index.spec.ts
index 8eb59b8a0e..d06cd37b69 100644
--- a/packages/server/src/migrations/tests/index.spec.ts
+++ b/packages/server/src/migrations/tests/index.spec.ts
@@ -25,8 +25,6 @@ const clearMigrations = async () => {
}
}
-jest.setTimeout(10000)
-
describe("migrations", () => {
const config = new TestConfig()
diff --git a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
index bae58d6a2c..596e41cece 100644
--- a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
+++ b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
@@ -17,8 +17,6 @@ import {
generator,
} from "@budibase/backend-core/tests"
-jest.setTimeout(30000)
-
describe("external search", () => {
const config = new TestConfiguration()
diff --git a/packages/server/src/tests/jestSetup.ts b/packages/server/src/tests/jestSetup.ts
index e233e7152e..c01f415f9e 100644
--- a/packages/server/src/tests/jestSetup.ts
+++ b/packages/server/src/tests/jestSetup.ts
@@ -2,17 +2,11 @@ import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"
-if (!process.env.DEBUG) {
- global.console.log = jest.fn() // console.log are ignored in tests
- global.console.warn = jest.fn() // console.warn are ignored in tests
-}
-
if (!process.env.CI) {
- // set a longer timeout in dev for debugging
- // 100 seconds
+  // set a longer timeout (100 seconds) in dev for debugging
jest.setTimeout(100 * 1000)
} else {
- jest.setTimeout(10 * 1000)
+ jest.setTimeout(30 * 1000)
}
testContainerUtils.setupEnv(env, coreEnv)
diff --git a/packages/server/src/tests/utilities/api/base.ts b/packages/server/src/tests/utilities/api/base.ts
index 4df58ff425..3a5f6529f8 100644
--- a/packages/server/src/tests/utilities/api/base.ts
+++ b/packages/server/src/tests/utilities/api/base.ts
@@ -1,6 +1,7 @@
import TestConfiguration from "../TestConfiguration"
-import { SuperTest, Test, Response } from "supertest"
+import request, { SuperTest, Test, Response } from "supertest"
import { ReadStream } from "fs"
+import { getServer } from "../../../app"
type Headers = Record
type Method = "get" | "post" | "put" | "patch" | "delete"
@@ -76,7 +77,8 @@ export abstract class TestAPI {
protected _requestRaw = async (
method: "get" | "post" | "put" | "patch" | "delete",
url: string,
- opts?: RequestOpts
+ opts?: RequestOpts,
+ attempt = 0
): Promise => {
const {
headers = {},
@@ -107,26 +109,29 @@ export abstract class TestAPI {
const headersFn = publicUser
? this.config.publicHeaders.bind(this.config)
: this.config.defaultHeaders.bind(this.config)
- let request = this.request[method](url).set(
+
+ const app = getServer()
+ let req = request(app)[method](url)
+ req = req.set(
headersFn({
"x-budibase-include-stacktrace": "true",
})
)
if (headers) {
- request = request.set(headers)
+ req = req.set(headers)
}
if (body) {
- request = request.send(body)
+ req = req.send(body)
}
for (const [key, value] of Object.entries(fields)) {
- request = request.field(key, value)
+ req = req.field(key, value)
}
for (const [key, value] of Object.entries(files)) {
if (isAttachedFile(value)) {
- request = request.attach(key, value.file, value.name)
+ req = req.attach(key, value.file, value.name)
} else {
- request = request.attach(key, value as any)
+ req = req.attach(key, value as any)
}
}
if (expectations?.headers) {
@@ -136,11 +141,25 @@ export abstract class TestAPI {
`Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
)
}
- request = request.expect(key, value as any)
+ req = req.expect(key, value as any)
}
}
- return await request
+ try {
+ return await req
+ } catch (e: any) {
+      // We've found that occasionally the connection between supertest and the
+      // server gets reset. We're not sure why, but retrying the request
+      // appears to work. I don't particularly like this, but it's better than
+      // flakiness.
+ if (e.code === "ECONNRESET") {
+ if (attempt > 2) {
+ throw e
+ }
+ return await this._requestRaw(method, url, opts, attempt + 1)
+ }
+ throw e
+ }
}
protected _checkResponse = (
@@ -170,7 +189,18 @@ export abstract class TestAPI {
}
}
- throw new Error(message)
+ if (response.error) {
+      // Sometimes the error occurs between supertest and the app, and when
+      // that happens response.error can be populated with `text` that
+      // gives more detail about the error. The `message` is almost always
+      // useless from what I've seen.
+ if (response.error.text) {
+ response.error.message = response.error.text
+ }
+ throw new Error(message, { cause: response.error })
+ } else {
+ throw new Error(message)
+ }
}
if (expectations?.headersNotPresent) {
diff --git a/yarn.lock b/yarn.lock
index 6acdcce3b6..4deda92484 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5875,6 +5875,13 @@
"@types/pouchdb-node" "*"
"@types/pouchdb-replication" "*"
+"@types/proper-lockfile@^4.1.4":
+ version "4.1.4"
+ resolved "https://registry.yarnpkg.com/@types/proper-lockfile/-/proper-lockfile-4.1.4.tgz#cd9fab92bdb04730c1ada542c356f03620f84008"
+ integrity sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==
+ dependencies:
+ "@types/retry" "*"
+
"@types/qs@*":
version "6.9.7"
resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"
@@ -5937,6 +5944,11 @@
dependencies:
"@types/node" "*"
+"@types/retry@*":
+ version "0.12.5"
+ resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.5.tgz#f090ff4bd8d2e5b940ff270ab39fd5ca1834a07e"
+ integrity sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==
+
"@types/rimraf@^3.0.2":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@types/rimraf/-/rimraf-3.0.2.tgz#a63d175b331748e5220ad48c901d7bbf1f44eef8"
@@ -7249,7 +7261,37 @@ axios-retry@^3.1.9:
"@babel/runtime" "^7.15.4"
is-retry-allowed "^2.2.0"
-axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^0.21.4, axios@^0.26.0, axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
+axios@0.24.0:
+ version "0.24.0"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
+ integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
+ dependencies:
+ follow-redirects "^1.14.4"
+
+axios@1.1.3:
+ version "1.1.3"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35"
+ integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA==
+ dependencies:
+ follow-redirects "^1.15.0"
+ form-data "^4.0.0"
+ proxy-from-env "^1.1.0"
+
+axios@^0.21.1, axios@^0.21.4:
+ version "0.21.4"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
+ integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
+ dependencies:
+ follow-redirects "^1.14.0"
+
+axios@^0.26.0:
+ version "0.26.1"
+ resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
+ integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
+ dependencies:
+ follow-redirects "^1.14.8"
+
+axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
version "1.6.3"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@@ -11022,7 +11064,7 @@ fetch-cookie@0.11.0:
dependencies:
tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0"
-fflate@^0.4.1, fflate@^0.4.8:
+fflate@^0.4.8:
version "0.4.8"
resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae"
integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==
@@ -11200,6 +11242,11 @@ fn.name@1.x.x:
resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc"
integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==
+follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.8:
+ version "1.15.6"
+ resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
+ integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
+
follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
@@ -12318,7 +12365,12 @@ http-assert@^1.3.0:
deep-equal "~1.0.1"
http-errors "~1.8.0"
-http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
+http-cache-semantics@3.8.1:
+ version "3.8.1"
+ resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2"
+ integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==
+
+http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
@@ -13263,11 +13315,6 @@ isobject@^3.0.1:
resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==
-isobject@^4.0.0:
- version "4.0.0"
- resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0"
- integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==
-
isolated-vm@^4.7.2:
version "4.7.2"
resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65"
@@ -15862,7 +15909,7 @@ msgpackr-extract@^3.0.2:
"@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2"
"@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2"
-msgpackr@1.10.1, msgpackr@^1.5.2:
+msgpackr@^1.5.2:
version "1.10.1"
resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555"
integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ==
@@ -16066,13 +16113,25 @@ node-addon-api@^6.1.0:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
-node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0:
+node-fetch@2.6.0:
+ version "2.6.0"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
+ integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
+
+node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies:
whatwg-url "^5.0.0"
+node-fetch@^2.6.9, node-fetch@^2.7.0:
+ version "2.7.0"
+ resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
+ integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
+ dependencies:
+ whatwg-url "^5.0.0"
+
node-forge@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
@@ -17217,7 +17276,15 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0:
resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4"
integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA==
-passport@0.6.0, passport@^0.4.0, passport@^0.6.0:
+passport@^0.4.0:
+ version "0.4.1"
+ resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270"
+ integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg==
+ dependencies:
+ passport-strategy "1.x.x"
+ pause "0.0.1"
+
+passport@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d"
integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==
@@ -17935,6 +18002,14 @@ postgres-interval@^1.1.0:
dependencies:
xtend "^4.0.0"
+posthog-js@^1.116.6:
+ version "1.116.6"
+ resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.116.6.tgz#9a5c9f49230a76642f4c44d93b96710f886c2880"
+ integrity sha512-rvt8HxzJD4c2B/xsUa4jle8ApdqljeBI2Qqjp4XJMohQf18DXRyM6b96H5/UMs8jxYuZG14Er0h/kEIWeU6Fmw==
+ dependencies:
+ fflate "^0.4.8"
+ preact "^10.19.3"
+
posthog-js@^1.13.4:
version "1.103.1"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.103.1.tgz#f846c413c28aca204dc1527f49d39f651348f3c4"
@@ -17943,13 +18018,6 @@ posthog-js@^1.13.4:
fflate "^0.4.8"
preact "^10.19.3"
-posthog-js@^1.36.0:
- version "1.96.1"
- resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.96.1.tgz#4f9719a24e4e14037b0e72d430194d7cdb576447"
- integrity sha512-kv1vQqYMt2BV3YHS+wxsbGuP+tz+M3y1AzNhz8TfkpY1HT8W/ONT0i0eQpeRr9Y+d4x/fZ6M4cXG5GMvi9lRCA==
- dependencies:
- fflate "^0.4.1"
-
posthog-node@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7"
@@ -18517,7 +18585,7 @@ pseudomap@^1.0.2:
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==
-psl@^1.1.33:
+psl@^1.1.28, psl@^1.1.33:
version "1.9.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7"
integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==
@@ -19534,6 +19602,11 @@ sax@1.2.1:
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==
+sax@>=0.1.1:
+ version "1.3.0"
+ resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0"
+ integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==
+
sax@>=0.6.0:
version "1.2.4"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
@@ -19615,13 +19688,40 @@ semver-diff@^3.1.1:
dependencies:
semver "^6.3.0"
-"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~2.3.1, semver@~7.0.0:
+"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
+ version "5.7.2"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
+ integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
+
+semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3:
version "7.5.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e"
integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==
dependencies:
lru-cache "^6.0.0"
+semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1:
+ version "6.3.1"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
+ integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
+
+semver@^7.5.4:
+ version "7.6.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d"
+ integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==
+ dependencies:
+ lru-cache "^6.0.0"
+
+semver@~2.3.1:
+ version "2.3.2"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52"
+ integrity sha512-abLdIKCosKfpnmhS52NCTjO4RiLspDfsn37prjzGrp9im5DPJOgh82Os92vtwGh6XdQryKI/7SREZnV+aqiXrA==
+
+semver@~7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
+ integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
+
seq-queue@^0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e"
@@ -21207,7 +21307,7 @@ touch@^3.1.0:
dependencies:
nopt "~1.0.10"
-tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0:
+"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2:
version "4.1.3"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf"
integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==
@@ -21217,6 +21317,14 @@ tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0
universalify "^0.2.0"
url-parse "^1.5.3"
+tough-cookie@~2.5.0:
+ version "2.5.0"
+ resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
+ integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
+ dependencies:
+ psl "^1.1.28"
+ punycode "^2.1.1"
+
tr46@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240"
@@ -21693,14 +21801,6 @@ unpipe@1.0.0:
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
-unset-value@2.0.1:
- version "2.0.1"
- resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3"
- integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg==
- dependencies:
- has-value "^2.0.2"
- isobject "^4.0.0"
-
untildify@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b"
@@ -22471,10 +22571,33 @@ xml-parse-from-string@^1.0.0:
resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28"
integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g==
-xml2js@0.1.x, xml2js@0.4.19, xml2js@0.5.0, xml2js@0.6.2, xml2js@^0.4.19, xml2js@^0.4.5:
- version "0.6.2"
- resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499"
- integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==
+xml2js@0.1.x:
+ version "0.1.14"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c"
+ integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA==
+ dependencies:
+ sax ">=0.1.1"
+
+xml2js@0.4.19:
+ version "0.4.19"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
+ integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
+ dependencies:
+ sax ">=0.6.0"
+ xmlbuilder "~9.0.1"
+
+xml2js@0.5.0:
+ version "0.5.0"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7"
+ integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==
+ dependencies:
+ sax ">=0.6.0"
+ xmlbuilder "~11.0.0"
+
+xml2js@^0.4.19, xml2js@^0.4.5:
+ version "0.4.23"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
+ integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
dependencies:
sax ">=0.6.0"
xmlbuilder "~11.0.0"
@@ -22484,6 +22607,11 @@ xmlbuilder@~11.0.0:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
+xmlbuilder@~9.0.1:
+ version "9.0.7"
+ resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
+ integrity sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==
+
xmlchars@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"