Merge master.

This commit is contained in:
Sam Rose 2024-04-03 15:17:38 +01:00
commit b2ef045ecf
No known key found for this signature in database
189 changed files with 1019 additions and 11597 deletions

View File

@ -34,7 +34,6 @@
}, },
{ {
"files": ["**/*.ts"], "files": ["**/*.ts"],
"excludedFiles": ["qa-core/**"],
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"], "plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended"], "extends": ["eslint:recommended"],
@ -49,7 +48,6 @@
}, },
{ {
"files": ["**/*.spec.ts"], "files": ["**/*.spec.ts"],
"excludedFiles": ["qa-core/**"],
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"plugins": ["jest", "@typescript-eslint"], "plugins": ["jest", "@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:jest/recommended"], "extends": ["eslint:recommended", "plugin:jest/recommended"],

View File

@ -66,7 +66,8 @@ jobs:
# Run build all the projects # Run build all the projects
- name: Build - name: Build
run: | run: |
yarn build yarn build:oss
yarn build:account-portal
# Check the types of the projects built via esbuild # Check the types of the projects built via esbuild
- name: Check types - name: Check types
run: | run: |
@ -90,6 +91,9 @@ jobs:
test-libraries: test-libraries:
runs-on: ubuntu-latest runs-on: ubuntu-latest
env:
DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
REUSE_CONTAINERS: true
steps: steps:
- name: Checkout repo - name: Checkout repo
uses: actions/checkout@v4 uses: actions/checkout@v4
@ -103,6 +107,14 @@ jobs:
with: with:
node-version: 20.x node-version: 20.x
cache: yarn cache: yarn
- name: Pull testcontainers images
run: |
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb &
docker pull redis &
wait $(jobs -p)
- run: yarn --frozen-lockfile - run: yarn --frozen-lockfile
- name: Test - name: Test
run: | run: |
@ -137,9 +149,10 @@ jobs:
fi fi
test-server: test-server:
runs-on: ubuntu-latest runs-on: budi-tubby-tornado-quad-core-150gb
env: env:
DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
REUSE_CONTAINERS: true
steps: steps:
- name: Checkout repo - name: Checkout repo
uses: actions/checkout@v4 uses: actions/checkout@v4
@ -156,13 +169,16 @@ jobs:
- name: Pull testcontainers images - name: Pull testcontainers images
run: | run: |
docker pull mcr.microsoft.com/mssql/server:2022-latest docker pull mcr.microsoft.com/mssql/server:2022-latest &
docker pull mysql:8.3 docker pull mysql:8.3 &
docker pull postgres:16.1-bullseye docker pull postgres:16.1-bullseye &
docker pull mongo:7.0-jammy docker pull mongo:7.0-jammy &
docker pull mariadb:lts docker pull mariadb:lts &
docker pull testcontainers/ryuk:0.5.1 docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb docker pull budibase/couchdb &
docker pull redis &
wait $(jobs -p)
- run: yarn --frozen-lockfile - run: yarn --frozen-lockfile
@ -174,35 +190,6 @@ jobs:
yarn test --scope=@budibase/server yarn test --scope=@budibase/server
fi fi
integration-test:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: yarn
- run: yarn --frozen-lockfile
- name: Build packages
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Build backend-core for OSS contributor (required for pro)
if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
run: yarn build --scope @budibase/backend-core
- name: Run tests
run: |
cd qa-core
yarn setup
yarn serve:test:self:ci
env:
BB_ADMIN_USER_EMAIL: admin
BB_ADMIN_USER_PASSWORD: admin
check-pro-submodule: check-pro-submodule:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase') if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@ -231,27 +218,34 @@ jobs:
echo "pro_commit=$pro_commit" echo "pro_commit=$pro_commit"
echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT" echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit" echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
base_commit_excluding_merges=$(git log --no-merges -n 1 --format=format:%H $base_commit)
echo "base_commit_excluding_merges=$base_commit_excluding_merges"
echo "base_commit_excluding_merges=$base_commit_excluding_merges" >> "$GITHUB_OUTPUT"
else else
echo "Nothing to do - branch to branch merge." echo "Nothing to do - branch to branch merge."
fi fi
- name: Check submodule merged to base branch - name: Check submodule merged and latest on base branch
if: ${{ steps.get_pro_commits.outputs.base_commit != '' }} if: ${{ steps.get_pro_commits.outputs.base_commit_excluding_merges != '' }}
uses: actions/github-script@v7 run: |
with: cd packages/pro
github-token: ${{ secrets.GITHUB_TOKEN }} base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}'
script: | pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}'
const submoduleCommit = '${{ steps.get_pro_commits.outputs.pro_commit }}';
const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) { any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit)
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md') if [ -n "$any_commit" ]; then
process.exit(1); echo $any_commit
} else {
console.log('All good, the submodule had been merged and setup correctly!') echo "An error occurred: <error_message>"
} echo 'Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.'
echo 'Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/master/docs/getting_started.md'
exit 1
else
echo 'All good, the submodule had been merged and setup correctly!'
fi
check-accountportal-submodule: check-accountportal-submodule:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -264,7 +258,15 @@ jobs:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }} token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0 fetch-depth: 0
- name: Check account portal commit - uses: dorny/paths-filter@v3
id: changes
with:
filters: |
src:
- packages/account-portal/**
- if: steps.changes.outputs.src == 'true'
name: Check account portal commit
id: get_accountportal_commits id: get_accountportal_commits
run: | run: |
cd packages/account-portal cd packages/account-portal

1
.gitignore vendored
View File

@ -69,7 +69,6 @@ typings/
# dotenv environment variables file # dotenv environment variables file
.env .env
!qa-core/.env
!hosting/.env !hosting/.env
# parcel-bundler cache (https://parceljs.org/) # parcel-bundler cache (https://parceljs.org/)

View File

@ -1,25 +1,47 @@
import { GenericContainer, Wait } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import path from "path"
import lockfile from "proper-lockfile"
export default async function setup() { export default async function setup() {
await new GenericContainer("budibase/couchdb") const lockPath = path.resolve(__dirname, "globalSetup.ts")
.withExposedPorts(5984) if (process.env.REUSE_CONTAINERS) {
.withEnvironment({ // If you run multiple tests at the same time, it's possible for the CouchDB
COUCHDB_PASSWORD: "budibase", // shared container to get started multiple times despite having an
COUCHDB_USER: "budibase", // identical reuse hash. To avoid that, we do a filesystem-based lock so
}) // that only one globalSetup.ts is running at a time.
.withCopyContentToContainer([ lockfile.lockSync(lockPath)
{ }
content: `
try {
let couchdb = new GenericContainer("budibase/couchdb")
.withExposedPorts(5984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",
COUCHDB_USER: "budibase",
})
.withCopyContentToContainer([
{
content: `
[log] [log]
level = warn level = warn
`, `,
target: "/opt/couchdb/etc/local.d/test-couchdb.ini", target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
}, },
]) ])
.withWaitStrategy( .withWaitStrategy(
Wait.forSuccessfulCommand( Wait.forSuccessfulCommand(
"curl http://budibase:budibase@localhost:5984/_up" "curl http://budibase:budibase@localhost:5984/_up"
).withStartupTimeout(20000) ).withStartupTimeout(20000)
) )
.start()
if (process.env.REUSE_CONTAINERS) {
couchdb = couchdb.withReuse()
}
await couchdb.start()
} finally {
if (process.env.REUSE_CONTAINERS) {
lockfile.unlockSync(lockPath)
}
}
} }

View File

@ -1,5 +1,5 @@
{ {
"version": "2.22.12", "version": "2.22.15",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

View File

@ -7,6 +7,7 @@
"@babel/preset-env": "^7.22.5", "@babel/preset-env": "^7.22.5",
"@esbuild-plugins/tsconfig-paths": "^0.1.2", "@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@types/node": "20.10.0", "@types/node": "20.10.0",
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0", "@typescript-eslint/parser": "6.9.0",
"esbuild": "^0.18.17", "esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0", "esbuild-node-externals": "^1.8.0",
@ -23,6 +24,7 @@
"nx-cloud": "16.0.5", "nx-cloud": "16.0.5",
"prettier": "2.8.8", "prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0", "prettier-plugin-svelte": "^2.3.0",
"proper-lockfile": "^4.1.2",
"svelte": "^4.2.10", "svelte": "^4.2.10",
"svelte-eslint-parser": "^0.33.1", "svelte-eslint-parser": "^0.33.1",
"typescript": "5.2.2", "typescript": "5.2.2",
@ -34,6 +36,8 @@
"get-past-client-version": "node scripts/getPastClientVersion.js", "get-past-client-version": "node scripts/getPastClientVersion.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev", "setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream", "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput", "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types", "check:types": "lerna run check:types",
"build:sdk": "lerna run --stream build:sdk", "build:sdk": "lerna run --stream build:sdk",
@ -56,11 +60,11 @@
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built", "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0", "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream", "test": "lerna run --stream test --stream",
"lint:eslint": "eslint packages qa-core --max-warnings=0", "lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"", "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier", "lint": "yarn run lint:eslint && yarn run lint:prettier",
"lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core", "lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"", "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier", "lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
"build:specs": "lerna run --stream specs", "build:specs": "lerna run --stream specs",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild", "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",

View File

@ -20,7 +20,7 @@ export async function lookupTenantId(userId: string) {
return user.tenantId return user.tenantId
} }
async function getUserDoc(emailOrId: string): Promise<PlatformUser> { export async function getUserDoc(emailOrId: string): Promise<PlatformUser> {
const db = getPlatformDB() const db = getPlatformDB()
return db.get(emailOrId) return db.get(emailOrId)
} }
@ -79,6 +79,17 @@ async function addUserDoc(emailOrId: string, newDocFn: () => PlatformUser) {
} }
} }
export async function addSsoUser(
ssoId: string,
email: string,
userId: string,
tenantId: string
) {
return addUserDoc(ssoId, () =>
newUserSsoIdDoc(ssoId, email, userId, tenantId)
)
}
export async function addUser( export async function addUser(
tenantId: string, tenantId: string,
userId: string, userId: string,
@ -91,9 +102,7 @@ export async function addUser(
] ]
if (ssoId) { if (ssoId) {
promises.push( promises.push(addSsoUser(ssoId, email, userId, tenantId))
addUserDoc(ssoId, () => newUserSsoIdDoc(ssoId, email, userId, tenantId))
)
} }
await Promise.all(promises) await Promise.all(promises)

View File

@ -14,16 +14,16 @@ import {
} from "../db" } from "../db"
import { import {
BulkDocsResponse, BulkDocsResponse,
ContextUser,
CouchFindOptions,
DatabaseQueryOpts,
SearchQuery, SearchQuery,
SearchQueryOperators, SearchQueryOperators,
SearchUsersRequest, SearchUsersRequest,
User, User,
ContextUser,
DatabaseQueryOpts,
CouchFindOptions,
} from "@budibase/types" } from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context" import * as context from "../context"
import { getGlobalDB } from "../context"
import { isCreator } from "./utils" import { isCreator } from "./utils"
import { UserDB } from "./db" import { UserDB } from "./db"
@ -48,6 +48,7 @@ export function isSupportedUserSearch(query: SearchQuery) {
const allowed = [ const allowed = [
{ op: SearchQueryOperators.STRING, key: "email" }, { op: SearchQueryOperators.STRING, key: "email" },
{ op: SearchQueryOperators.EQUAL, key: "_id" }, { op: SearchQueryOperators.EQUAL, key: "_id" },
{ op: SearchQueryOperators.ONE_OF, key: "_id" },
] ]
for (let [key, operation] of Object.entries(query)) { for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") { if (typeof operation !== "object") {
@ -285,6 +286,10 @@ export async function paginatedUsers({
} else if (query?.string?.email) { } else if (query?.string?.email) {
userList = await searchGlobalUsersByEmail(query?.string?.email, opts) userList = await searchGlobalUsersByEmail(query?.string?.email, opts)
property = "email" property = "email"
} else if (query?.oneOf?._id) {
userList = await bulkGetGlobalUsersById(query?.oneOf?._id, {
cleanup: true,
})
} else { } else {
// no search, query allDocs // no search, query allDocs
const response = await db.allDocs(getGlobalUserParams(null, opts)) const response = await db.allDocs(getGlobalUserParams(null, opts))

View File

@ -1,6 +1,7 @@
import { DatabaseImpl } from "../../../src/db"
import { execSync } from "child_process" import { execSync } from "child_process"
const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")
interface ContainerInfo { interface ContainerInfo {
Command: string Command: string
CreatedAt: string CreatedAt: string
@ -19,7 +20,10 @@ interface ContainerInfo {
} }
function getTestcontainers(): ContainerInfo[] { function getTestcontainers(): ContainerInfo[] {
return execSync("docker ps --format json") // We use --format json to make sure the output is nice and machine-readable,
// and we use --no-trunc so that the command returns full container IDs so we
// can filter on them correctly.
return execSync("docker ps --format json --no-trunc")
.toString() .toString()
.split("\n") .split("\n")
.filter(x => x.length > 0) .filter(x => x.length > 0)
@ -27,32 +31,55 @@ function getTestcontainers(): ContainerInfo[] {
.filter(x => x.Labels.includes("org.testcontainers=true")) .filter(x => x.Labels.includes("org.testcontainers=true"))
} }
function getContainerByImage(image: string) { export function getContainerByImage(image: string) {
return getTestcontainers().find(x => x.Image.startsWith(image)) const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
if (containers.length > 1) {
let errorMessage = `Multiple containers found starting with image: "${image}"\n\n`
for (const container of containers) {
errorMessage += JSON.stringify(container, null, 2)
}
throw new Error(errorMessage)
}
return containers[0]
} }
function getExposedPort(container: ContainerInfo, port: number) { export function getContainerById(id: string) {
const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`)) return getTestcontainers().find(x => x.ID === id)
if (!match) { }
return undefined
export interface Port {
host: number
container: number
}
export function getExposedV4Ports(container: ContainerInfo): Port[] {
let ports: Port[] = []
for (const match of container.Ports.matchAll(IPV4_PORT_REGEX)) {
ports.push({ host: parseInt(match[1]), container: parseInt(match[2]) })
} }
return parseInt(match[1]) return ports
}
export function getExposedV4Port(container: ContainerInfo, port: number) {
return getExposedV4Ports(container).find(x => x.container === port)?.host
} }
export function setupEnv(...envs: any[]) { export function setupEnv(...envs: any[]) {
// We start couchdb in globalSetup.ts, in the root of the monorepo, so it
// should be relatively safe to look for it by its image name.
const couch = getContainerByImage("budibase/couchdb") const couch = getContainerByImage("budibase/couchdb")
if (!couch) { if (!couch) {
throw new Error("CouchDB container not found") throw new Error("CouchDB container not found")
} }
const couchPort = getExposedPort(couch, 5984) const couchPort = getExposedV4Port(couch, 5984)
if (!couchPort) { if (!couchPort) {
throw new Error("CouchDB port not found") throw new Error("CouchDB port not found")
} }
const configs = [ const configs = [
{ key: "COUCH_DB_PORT", value: `${couchPort}` }, { key: "COUCH_DB_PORT", value: `${couchPort}` },
{ key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` }, { key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
] ]
for (const config of configs.filter(x => !!x.value)) { for (const config of configs.filter(x => !!x.value)) {
@ -60,7 +87,4 @@ export function setupEnv(...envs: any[]) {
env._set(config.key, config.value) env._set(config.key, config.value)
} }
} }
// @ts-expect-error
DatabaseImpl.nano = undefined
} }

View File

@ -12,6 +12,13 @@ export default {
format: "esm", format: "esm",
file: "dist/bbui.es.js", file: "dist/bbui.es.js",
}, },
onwarn(warning, warn) {
// suppress eval warnings
if (warning.code === "EVAL") {
return
}
warn(warning)
},
plugins: [ plugins: [
resolve(), resolve(),
commonjs(), commonjs(),

View File

@ -72,7 +72,7 @@
"fast-json-patch": "^3.1.1", "fast-json-patch": "^3.1.1",
"json-format-highlight": "^1.0.4", "json-format-highlight": "^1.0.4",
"lodash": "4.17.21", "lodash": "4.17.21",
"posthog-js": "^1.36.0", "posthog-js": "^1.116.6",
"remixicon": "2.5.0", "remixicon": "2.5.0",
"sanitize-html": "^2.7.0", "sanitize-html": "^2.7.0",
"shortid": "2.2.15", "shortid": "2.2.15",

View File

@ -31,7 +31,7 @@
import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte" import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte" import CodeEditor from "components/common/CodeEditor/CodeEditor.svelte"
import BindingSidePanel from "components/common/bindings/BindingSidePanel.svelte" import BindingSidePanel from "components/common/bindings/BindingSidePanel.svelte"
import { BindingHelpers } from "components/common/bindings/utils" import { BindingHelpers, BindingType } from "components/common/bindings/utils"
import { import {
bindingsToCompletions, bindingsToCompletions,
hbAutocomplete, hbAutocomplete,
@ -576,6 +576,7 @@
{ {
js: true, js: true,
dontDecode: true, dontDecode: true,
type: BindingType.RUNTIME,
} }
)} )}
mode="javascript" mode="javascript"

View File

@ -1,3 +1,4 @@
import { FieldType } from "@budibase/types"
import { FIELDS } from "constants/backend" import { FIELDS } from "constants/backend"
import { tables } from "stores/builder" import { tables } from "stores/builder"
import { get as svelteGet } from "svelte/store" import { get as svelteGet } from "svelte/store"
@ -5,14 +6,12 @@ import { get as svelteGet } from "svelte/store"
// currently supported level of relationship depth (server side) // currently supported level of relationship depth (server side)
const MAX_DEPTH = 1 const MAX_DEPTH = 1
//https://github.com/Budibase/budibase/issues/3030
const internalType = "internal"
const TYPES_TO_SKIP = [ const TYPES_TO_SKIP = [
FIELDS.FORMULA.type, FieldType.FORMULA,
FIELDS.LONGFORM.type, FieldType.LONGFORM,
FIELDS.ATTACHMENT.type, FieldType.ATTACHMENT,
internalType, //https://github.com/Budibase/budibase/issues/3030
FieldType.INTERNAL,
] ]
export function getBindings({ export function getBindings({
@ -26,7 +25,7 @@ export function getBindings({
return bindings return bindings
} }
for (let [column, schema] of Object.entries(table.schema)) { for (let [column, schema] of Object.entries(table.schema)) {
const isRelationship = schema.type === FIELDS.LINK.type const isRelationship = schema.type === FieldType.LINK
// skip relationships after a certain depth and types which // skip relationships after a certain depth and types which
// can't bind to // can't bind to
if ( if (

View File

@ -1,4 +1,4 @@
import { RelationshipType } from "constants/backend" import { RelationshipType } from "@budibase/types"
const typeMismatch = "Column type of the foreign key must match the primary key" const typeMismatch = "Column type of the foreign key must match the primary key"
const columnBeingUsed = "Column name cannot be an existing column" const columnBeingUsed = "Column name cannot be an existing column"

View File

@ -12,7 +12,7 @@ const getDefaultSchema = rows => {
newSchema[column] = { newSchema[column] = {
name: column, name: column,
type: "string", type: "string",
constraints: FIELDS["STRING"].constraints, constraints: FIELDS.STRING.constraints,
} }
}) })
}) })

View File

@ -5,7 +5,7 @@
import { licensing } from "stores/portal" import { licensing } from "stores/portal"
import { isPremiumOrAbove } from "helpers/planTitle" import { isPremiumOrAbove } from "helpers/planTitle"
$: premiumOrAboveLicense = isPremiumOrAbove($licensing?.license.plan.type) $: premiumOrAboveLicense = isPremiumOrAbove($licensing?.license?.plan?.type)
let show let show
let hide let hide

View File

@ -371,6 +371,7 @@
<style> <style>
.binding-panel { .binding-panel {
height: 100%; height: 100%;
overflow: hidden;
} }
.binding-panel, .binding-panel,
.tabs { .tabs {

View File

@ -1,6 +1,11 @@
import { decodeJSBinding } from "@budibase/string-templates" import { decodeJSBinding } from "@budibase/string-templates"
import { hbInsert, jsInsert } from "components/common/CodeEditor" import { hbInsert, jsInsert } from "components/common/CodeEditor"
export const BindingType = {
READABLE: "readableBinding",
RUNTIME: "runtimeBinding",
}
export class BindingHelpers { export class BindingHelpers {
constructor(getCaretPosition, insertAtPos, { disableWrapping } = {}) { constructor(getCaretPosition, insertAtPos, { disableWrapping } = {}) {
this.getCaretPosition = getCaretPosition this.getCaretPosition = getCaretPosition
@ -25,16 +30,20 @@ export class BindingHelpers {
} }
// Adds a data binding to the expression // Adds a data binding to the expression
onSelectBinding(value, binding, { js, dontDecode }) { onSelectBinding(
value,
binding,
{ js, dontDecode, type = BindingType.READABLE }
) {
const { start, end } = this.getCaretPosition() const { start, end } = this.getCaretPosition()
if (js) { if (js) {
const jsVal = dontDecode ? value : decodeJSBinding(value) const jsVal = dontDecode ? value : decodeJSBinding(value)
const insertVal = jsInsert(jsVal, start, end, binding.readableBinding, { const insertVal = jsInsert(jsVal, start, end, binding[type], {
disableWrapping: this.disableWrapping, disableWrapping: this.disableWrapping,
}) })
this.insertAtPos({ start, end, value: insertVal }) this.insertAtPos({ start, end, value: insertVal })
} else { } else {
const insertVal = hbInsert(value, start, end, binding.readableBinding) const insertVal = hbInsert(value, start, end, binding[type])
this.insertAtPos({ start, end, value: insertVal }) this.insertAtPos({ start, end, value: insertVal })
} }
} }

View File

@ -1,3 +1,5 @@
import { FieldType } from "@budibase/types"
export const convertOldFieldFormat = fields => { export const convertOldFieldFormat = fields => {
if (!fields) { if (!fields) {
return [] return []
@ -31,17 +33,17 @@ export const getComponentForField = (field, schema) => {
} }
export const FieldTypeToComponentMap = { export const FieldTypeToComponentMap = {
string: "stringfield", [FieldType.STRING]: "stringfield",
number: "numberfield", [FieldType.NUMBER]: "numberfield",
bigint: "bigintfield", [FieldType.BIGINT]: "bigintfield",
options: "optionsfield", [FieldType.OPTIONS]: "optionsfield",
array: "multifieldselect", [FieldType.ARRAY]: "multifieldselect",
boolean: "booleanfield", [FieldType.BOOLEAN]: "booleanfield",
longform: "longformfield", [FieldType.LONGFORM]: "longformfield",
datetime: "datetimefield", [FieldType.DATETIME]: "datetimefield",
attachment: "attachmentfield", [FieldType.ATTACHMENT]: "attachmentfield",
link: "relationshipfield", [FieldType.LINK]: "relationshipfield",
json: "jsonfield", [FieldType.JSON]: "jsonfield",
barcodeqr: "codescanner", [FieldType.BARCODEQR]: "codescanner",
bb_reference: "bbreferencefield", [FieldType.BB_REFERENCE]: "bbreferencefield",
} }

View File

@ -1,12 +1,14 @@
import { FieldType, FieldSubtype } from "@budibase/types" import {
FieldType,
FieldSubtype,
INTERNAL_TABLE_SOURCE_ID,
AutoFieldSubType,
Hosting,
} from "@budibase/types"
export const AUTO_COLUMN_SUB_TYPES = { export { RelationshipType } from "@budibase/types"
AUTO_ID: "autoID",
CREATED_BY: "createdBy", export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType
CREATED_AT: "createdAt",
UPDATED_BY: "updatedBy",
UPDATED_AT: "updatedAt",
}
export const AUTO_COLUMN_DISPLAY_NAMES = { export const AUTO_COLUMN_DISPLAY_NAMES = {
AUTO_ID: "Auto ID", AUTO_ID: "Auto ID",
@ -167,10 +169,7 @@ export const FILE_TYPES = {
DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"], DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"],
} }
export const HostingTypes = { export const HostingTypes = Hosting
CLOUD: "cloud",
SELF: "self",
}
export const Roles = { export const Roles = {
ADMIN: "ADMIN", ADMIN: "ADMIN",
@ -187,12 +186,6 @@ export function isAutoColumnUserRelationship(subtype) {
) )
} }
export const RelationshipType = {
MANY_TO_MANY: "many-to-many",
ONE_TO_MANY: "one-to-many",
MANY_TO_ONE: "many-to-one",
}
export const PrettyRelationshipDefinitions = { export const PrettyRelationshipDefinitions = {
MANY: "Many rows", MANY: "Many rows",
ONE: "One row", ONE: "One row",
@ -218,7 +211,7 @@ export const SWITCHABLE_TYPES = [
...ALLOWABLE_NUMBER_TYPES, ...ALLOWABLE_NUMBER_TYPES,
] ]
export const BUDIBASE_INTERNAL_DB_ID = "bb_internal" export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
export const BUDIBASE_DATASOURCE_TYPE = "budibase" export const BUDIBASE_DATASOURCE_TYPE = "budibase"
export const DB_TYPE_INTERNAL = "internal" export const DB_TYPE_INTERNAL = "internal"
@ -265,10 +258,10 @@ export const IntegrationNames = {
} }
export const SchemaTypeOptions = [ export const SchemaTypeOptions = [
{ label: "Text", value: "string" }, { label: "Text", value: FieldType.STRING },
{ label: "Number", value: "number" }, { label: "Number", value: FieldType.NUMBER },
{ label: "Boolean", value: "boolean" }, { label: "Boolean", value: FieldType.BOOLEAN },
{ label: "Datetime", value: "datetime" }, { label: "Datetime", value: FieldType.DATETIME },
] ]
export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({ export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({
@ -305,10 +298,10 @@ export const PaginationLocations = [
] ]
export const BannedSearchTypes = [ export const BannedSearchTypes = [
"link", FieldType.LINK,
"attachment", FieldType.ATTACHMENT,
"formula", FieldType.FORMULA,
"json", FieldType.JSON,
"jsonarray", "jsonarray",
"queryarray", "queryarray",
] ]

View File

@ -1,17 +1,17 @@
import { FIELDS } from "constants/backend" import { FieldType } from "@budibase/types"
function baseConversion(type) { function baseConversion(type) {
if (type === "string") { if (type === "string") {
return { return {
type: FIELDS.STRING.type, type: FieldType.STRING,
} }
} else if (type === "boolean") { } else if (type === "boolean") {
return { return {
type: FIELDS.BOOLEAN.type, type: FieldType.BOOLEAN,
} }
} else if (type === "number") { } else if (type === "number") {
return { return {
type: FIELDS.NUMBER.type, type: FieldType.NUMBER,
} }
} }
} }
@ -31,7 +31,7 @@ function recurse(schemaLevel = {}, objectLevel) {
const schema = recurse(schemaLevel[key], value[0]) const schema = recurse(schemaLevel[key], value[0])
if (schema) { if (schema) {
schemaLevel[key] = { schemaLevel[key] = {
type: FIELDS.ARRAY.type, type: FieldType.ARRAY,
schema, schema,
} }
} }
@ -45,7 +45,7 @@ function recurse(schemaLevel = {}, objectLevel) {
} }
} }
if (!schemaLevel.type) { if (!schemaLevel.type) {
return { type: FIELDS.JSON.type, schema: schemaLevel } return { type: FieldType.JSON, schema: schemaLevel }
} else { } else {
return schemaLevel return schemaLevel
} }

View File

@ -1,3 +1,4 @@
import { FieldType } from "@budibase/types"
import { ActionStepID } from "constants/backend/automations" import { ActionStepID } from "constants/backend/automations"
import { TableNames } from "constants" import { TableNames } from "constants"
import { import {
@ -20,20 +21,20 @@ export function buildAutoColumn(tableName, name, subtype) {
switch (subtype) { switch (subtype) {
case AUTO_COLUMN_SUB_TYPES.UPDATED_BY: case AUTO_COLUMN_SUB_TYPES.UPDATED_BY:
case AUTO_COLUMN_SUB_TYPES.CREATED_BY: case AUTO_COLUMN_SUB_TYPES.CREATED_BY:
type = FIELDS.LINK.type type = FieldType.LINK
constraints = FIELDS.LINK.constraints constraints = FIELDS.LINK.constraints
break break
case AUTO_COLUMN_SUB_TYPES.AUTO_ID: case AUTO_COLUMN_SUB_TYPES.AUTO_ID:
type = FIELDS.NUMBER.type type = FieldType.NUMBER
constraints = FIELDS.NUMBER.constraints constraints = FIELDS.NUMBER.constraints
break break
case AUTO_COLUMN_SUB_TYPES.UPDATED_AT: case AUTO_COLUMN_SUB_TYPES.UPDATED_AT:
case AUTO_COLUMN_SUB_TYPES.CREATED_AT: case AUTO_COLUMN_SUB_TYPES.CREATED_AT:
type = FIELDS.DATETIME.type type = FieldType.DATETIME
constraints = FIELDS.DATETIME.constraints constraints = FIELDS.DATETIME.constraints
break break
default: default:
type = FIELDS.STRING.type type = FieldType.STRING
constraints = FIELDS.STRING.constraints constraints = FIELDS.STRING.constraints
break break
} }

View File

@ -249,7 +249,7 @@
<div class="component-container"> <div class="component-container">
{#if loading} {#if loading}
<div <div
class={`loading ${$themeStore.theme}`} class={`loading ${$themeStore.baseTheme} ${$themeStore.theme}`}
class:tablet={$previewStore.previewDevice === "tablet"} class:tablet={$previewStore.previewDevice === "tablet"}
class:mobile={$previewStore.previewDevice === "mobile"} class:mobile={$previewStore.previewDevice === "mobile"}
> >

View File

@ -20,7 +20,7 @@
import { sdk } from "@budibase/shared-core" import { sdk } from "@budibase/shared-core"
import { API } from "api" import { API } from "api"
import ErrorSVG from "./ErrorSVG.svelte" import ErrorSVG from "./ErrorSVG.svelte"
import { ClientAppSkeleton } from "@budibase/frontend-core" import { getBaseTheme, ClientAppSkeleton } from "@budibase/frontend-core"
$: app = $enrichedApps.find(app => app.appId === $params.appId) $: app = $enrichedApps.find(app => app.appId === $params.appId)
$: iframeUrl = getIframeURL(app) $: iframeUrl = getIframeURL(app)
@ -137,7 +137,9 @@
class:hide={!loading || !app?.features?.skeletonLoader} class:hide={!loading || !app?.features?.skeletonLoader}
class="loading" class="loading"
> >
<div class={`loadingThemeWrapper ${app.theme}`}> <div
class={`loadingThemeWrapper ${getBaseTheme(app.theme)} ${app.theme}`}
>
<ClientAppSkeleton <ClientAppSkeleton
noAnimation noAnimation
hideDevTools={app?.status === "published"} hideDevTools={app?.status === "published"}

View File

@ -1,7 +1,8 @@
import { FieldType } from "@budibase/types"
import { get, writable, derived } from "svelte/store" import { get, writable, derived } from "svelte/store"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { API } from "api" import { API } from "api"
import { SWITCHABLE_TYPES, FIELDS } from "constants/backend" import { SWITCHABLE_TYPES } from "constants/backend"
export function createTablesStore() { export function createTablesStore() {
const store = writable({ const store = writable({
@ -83,14 +84,14 @@ export function createTablesStore() {
// make sure tables up to date (related) // make sure tables up to date (related)
let newTableIds = [] let newTableIds = []
for (let column of Object.values(updatedTable?.schema || {})) { for (let column of Object.values(updatedTable?.schema || {})) {
if (column.type === FIELDS.LINK.type) { if (column.type === FieldType.LINK) {
newTableIds.push(column.tableId) newTableIds.push(column.tableId)
} }
} }
let oldTableIds = [] let oldTableIds = []
for (let column of Object.values(oldTable?.schema || {})) { for (let column of Object.values(oldTable?.schema || {})) {
if (column.type === FIELDS.LINK.type) { if (column.type === FieldType.LINK) {
oldTableIds.push(column.tableId) oldTableIds.push(column.tableId)
} }
} }

View File

@ -1,5 +1,6 @@
import { writable, get } from "svelte/store" import { writable, get } from "svelte/store"
import { API } from "api" import { API } from "api"
import { getBaseTheme } from "@budibase/frontend-core"
const INITIAL_THEMES_STATE = { const INITIAL_THEMES_STATE = {
theme: "", theme: "",
@ -12,11 +13,15 @@ export const themes = () => {
}) })
const syncAppTheme = app => { const syncAppTheme = app => {
store.update(state => ({ store.update(state => {
...state, const theme = app.theme || "spectrum--light"
theme: app.theme || "spectrum--light", return {
customTheme: app.customTheme, ...state,
})) theme,
baseTheme: getBaseTheme(theme),
customTheme: app.customTheme,
}
})
} }
const save = async (theme, appId) => { const save = async (theme, appId) => {

View File

@ -148,7 +148,7 @@ export const enrichedApps = derived([appsStore, auth], ([$store, $auth]) => {
deployed: app.status === AppStatus.DEPLOYED, deployed: app.status === AppStatus.DEPLOYED,
lockedYou: app.lockedBy && app.lockedBy.email === $auth.user?.email, lockedYou: app.lockedBy && app.lockedBy.email === $auth.user?.email,
lockedOther: app.lockedBy && app.lockedBy.email !== $auth.user?.email, lockedOther: app.lockedBy && app.lockedBy.email !== $auth.user?.email,
favourite: $auth?.user.appFavourites?.includes(app.appId), favourite: $auth.user?.appFavourites?.includes(app.appId),
})) }))
: [] : []

View File

@ -1,21 +1,7 @@
import { FieldTypeToComponentMap } from "components/design/settings/controls/FieldConfiguration/utils"
import { Component } from "./Component" import { Component } from "./Component"
import { getSchemaForDatasource } from "dataBinding" import { getSchemaForDatasource } from "dataBinding"
const fieldTypeToComponentMap = {
string: "stringfield",
number: "numberfield",
bigint: "bigintfield",
options: "optionsfield",
array: "multifieldselect",
boolean: "booleanfield",
longform: "longformfield",
datetime: "datetimefield",
attachment: "attachmentfield",
link: "relationshipfield",
json: "jsonfield",
barcodeqr: "codescanner",
}
export function makeDatasourceFormComponents(datasource) { export function makeDatasourceFormComponents(datasource) {
const { schema } = getSchemaForDatasource(null, datasource, { const { schema } = getSchemaForDatasource(null, datasource, {
formSchema: true, formSchema: true,
@ -30,7 +16,7 @@ export function makeDatasourceFormComponents(datasource) {
} }
const fieldType = const fieldType =
typeof fieldSchema === "object" ? fieldSchema.type : fieldSchema typeof fieldSchema === "object" ? fieldSchema.type : fieldSchema
const componentType = fieldTypeToComponentMap[fieldType] const componentType = FieldTypeToComponentMap[fieldType]
const fullComponentType = `@budibase/standard-components/${componentType}` const fullComponentType = `@budibase/standard-components/${componentType}`
if (componentType) { if (componentType) {
const component = new Component(fullComponentType) const component = new Component(fullComponentType)

View File

@ -45,7 +45,8 @@ export default {
onwarn(warning, warn) { onwarn(warning, warn) {
if ( if (
warning.code === "THIS_IS_UNDEFINED" || warning.code === "THIS_IS_UNDEFINED" ||
warning.code === "CIRCULAR_DEPENDENCY" warning.code === "CIRCULAR_DEPENDENCY" ||
warning.code === "EVAL"
) { ) {
return return
} }

View File

@ -7,19 +7,19 @@
export let order export let order
const FieldTypeToComponentMap = { const FieldTypeToComponentMap = {
string: "stringfield", [FieldType.STRING]: "stringfield",
number: "numberfield", [FieldType.NUMBER]: "numberfield",
bigint: "bigintfield", [FieldType.BIGINT]: "bigintfield",
options: "optionsfield", [FieldType.OPTIONS]: "optionsfield",
array: "multifieldselect", [FieldType.ARRAY]: "multifieldselect",
boolean: "booleanfield", [FieldType.BOOLEAN]: "booleanfield",
longform: "longformfield", [FieldType.LONGFORM]: "longformfield",
datetime: "datetimefield", [FieldType.DATETIME]: "datetimefield",
attachment: "attachmentfield", [FieldType.ATTACHMENT]: "attachmentfield",
link: "relationshipfield", [FieldType.LINK]: "relationshipfield",
json: "jsonfield", [FieldType.JSON]: "jsonfield",
barcodeqr: "codescanner", [FieldType.BARCODEQR]: "codescanner",
bb_reference: "bbreferencefield", [FieldType.BB_REFERENCE]: "bbreferencefield",
} }
const getFieldSchema = field => { const getFieldSchema = field => {

View File

@ -1,7 +1,7 @@
import { derived } from "svelte/store" import { derived } from "svelte/store"
import { appStore } from "./app" import { appStore } from "./app"
import { builderStore } from "./builder" import { builderStore } from "./builder"
import { Constants } from "@budibase/frontend-core" import { getBaseTheme } from "@budibase/frontend-core"
// This is the good old acorn bug where having the word "g l o b a l" makes it // This is the good old acorn bug where having the word "g l o b a l" makes it
// think that this is not ES6 compatible and starts throwing errors when using // think that this is not ES6 compatible and starts throwing errors when using
@ -29,13 +29,6 @@ const createThemeStore = () => {
// Ensure theme is set // Ensure theme is set
theme = theme || defaultTheme theme = theme || defaultTheme
// Get base theme
let base =
Constants.Themes.find(x => `spectrum--${x.class}` === theme)?.base || ""
if (base) {
base = `spectrum--${base}`
}
// Delete and nullish keys from the custom theme // Delete and nullish keys from the custom theme
if (customTheme) { if (customTheme) {
Object.entries(customTheme).forEach(([key, value]) => { Object.entries(customTheme).forEach(([key, value]) => {
@ -59,7 +52,7 @@ const createThemeStore = () => {
return { return {
theme, theme,
baseTheme: base, baseTheme: getBaseTheme(theme),
customTheme, customTheme,
customThemeCss, customThemeCss,
} }

View File

@ -8,6 +8,7 @@
"dependencies": { "dependencies": {
"@budibase/bbui": "0.0.0", "@budibase/bbui": "0.0.0",
"@budibase/shared-core": "0.0.0", "@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"dayjs": "^1.10.8", "dayjs": "^1.10.8",
"lodash": "4.17.21", "lodash": "4.17.21",
"socket.io-client": "^4.6.1" "socket.io-client": "^4.6.1"

View File

@ -1,3 +1,5 @@
import { FieldType } from "@budibase/types"
import OptionsCell from "../cells/OptionsCell.svelte" import OptionsCell from "../cells/OptionsCell.svelte"
import DateCell from "../cells/DateCell.svelte" import DateCell from "../cells/DateCell.svelte"
import MultiSelectCell from "../cells/MultiSelectCell.svelte" import MultiSelectCell from "../cells/MultiSelectCell.svelte"
@ -12,19 +14,19 @@ import AttachmentCell from "../cells/AttachmentCell.svelte"
import BBReferenceCell from "../cells/BBReferenceCell.svelte" import BBReferenceCell from "../cells/BBReferenceCell.svelte"
const TypeComponentMap = { const TypeComponentMap = {
text: TextCell, [FieldType.STRING]: TextCell,
options: OptionsCell, [FieldType.OPTIONS]: OptionsCell,
datetime: DateCell, [FieldType.DATETIME]: DateCell,
barcodeqr: TextCell, [FieldType.BARCODEQR]: TextCell,
longform: LongFormCell, [FieldType.LONGFORM]: LongFormCell,
array: MultiSelectCell, [FieldType.ARRAY]: MultiSelectCell,
number: NumberCell, [FieldType.NUMBER]: NumberCell,
boolean: BooleanCell, [FieldType.BOOLEAN]: BooleanCell,
attachment: AttachmentCell, [FieldType.ATTACHMENT]: AttachmentCell,
link: RelationshipCell, [FieldType.LINK]: RelationshipCell,
formula: FormulaCell, [FieldType.FORMULA]: FormulaCell,
json: JSONCell, [FieldType.JSON]: JSONCell,
bb_reference: BBReferenceCell, [FieldType.BB_REFERENCE]: BBReferenceCell,
} }
export const getCellRenderer = column => { export const getCellRenderer = column => {
return TypeComponentMap[column?.schema?.type] || TextCell return TypeComponentMap[column?.schema?.type] || TextCell

View File

@ -1,3 +1,5 @@
import { FieldType, FieldTypeSubtypes } from "@budibase/types"
export const getColor = (idx, opacity = 0.3) => { export const getColor = (idx, opacity = 0.3) => {
if (idx == null || idx === -1) { if (idx == null || idx === -1) {
idx = 0 idx = 0
@ -6,22 +8,22 @@ export const getColor = (idx, opacity = 0.3) => {
} }
const TypeIconMap = { const TypeIconMap = {
text: "Text", [FieldType.STRING]: "Text",
options: "Dropdown", [FieldType.OPTIONS]: "Dropdown",
datetime: "Date", [FieldType.DATETIME]: "Date",
barcodeqr: "Camera", [FieldType.BARCODEQR]: "Camera",
longform: "TextAlignLeft", [FieldType.LONGFORM]: "TextAlignLeft",
array: "Dropdown", [FieldType.ARRAY]: "Dropdown",
number: "123", [FieldType.NUMBER]: "123",
boolean: "Boolean", [FieldType.BOOLEAN]: "Boolean",
attachment: "AppleFiles", [FieldType.ATTACHMENT]: "AppleFiles",
link: "DataCorrelated", [FieldType.LINK]: "DataCorrelated",
formula: "Calculator", [FieldType.FORMULA]: "Calculator",
json: "Brackets", [FieldType.JSON]: "Brackets",
bigint: "TagBold", [FieldType.BIGINT]: "TagBold",
bb_reference: { [FieldType.BB_REFERENCE]: {
user: "User", [FieldTypeSubtypes.BB_REFERENCE.USER]: "User",
users: "UserGroup", [FieldTypeSubtypes.BB_REFERENCE.USERS]: "UserGroup",
}, },
} }

View File

@ -7,3 +7,4 @@ export * as RowUtils from "./rows"
export { memo, derivedMemo } from "./memo" export { memo, derivedMemo } from "./memo"
export { createWebsocket } from "./websocket" export { createWebsocket } from "./websocket"
export * from "./download" export * from "./download"
export * from "./theme"

View File

@ -0,0 +1,12 @@
import { Themes } from "../constants.js"
export const getBaseTheme = theme => {
if (!theme) {
return ""
}
let base = Themes.find(x => `spectrum--${x.class}` === theme)?.base || ""
if (base) {
base = `spectrum--${base}`
}
return base
}

View File

@ -4,8 +4,8 @@ set -e
if [[ -n $CI ]] if [[ -n $CI ]]
then then
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS" export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@" echo "jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@ jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else else
# --maxWorkers performs better in development # --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS" export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"

View File

@ -1,7 +1,5 @@
const setup = require("../../tests/utilities") const setup = require("../../tests/utilities")
jest.setTimeout(30000)
describe("/metrics", () => { describe("/metrics", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()

View File

@ -1,7 +1,6 @@
import * as setup from "./utilities" import * as setup from "./utilities"
import path from "path" import path from "path"
jest.setTimeout(15000)
const PASSWORD = "testtest" const PASSWORD = "testtest"
describe("/applications/:appId/import", () => { describe("/applications/:appId/import", () => {

View File

@ -23,8 +23,6 @@ let {
collectAutomation, collectAutomation,
} = setup.structures } = setup.structures
jest.setTimeout(30000)
describe("/automations", () => { describe("/automations", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()

View File

@ -6,10 +6,11 @@ import {
SourceName, SourceName,
} from "@budibase/types" } from "@budibase/types"
import * as setup from "../utilities" import * as setup from "../utilities"
import { databaseTestProviders } from "../../../../integrations/tests/utils" import {
import pg from "pg" DatabaseName,
import mysql from "mysql2/promise" getDatasource,
import mssql from "mssql" rawQuery,
} from "../../../../integrations/tests/utils"
import { Expectations } from "src/tests/utilities/api/base" import { Expectations } from "src/tests/utilities/api/base"
import { events } from "@budibase/backend-core" import { events } from "@budibase/backend-core"
@ -40,13 +41,16 @@ const createTableSQL: Record<string, string> = {
const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')` const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
const dropTableSQL = `DROP TABLE test_table;` const dropTableSQL = `DROP TABLE test_table;`
describe.each([ describe.each(
["postgres", databaseTestProviders.postgres], [
["mysql", databaseTestProviders.mysql], DatabaseName.POSTGRES,
["mssql", databaseTestProviders.mssql], DatabaseName.MYSQL,
["mariadb", databaseTestProviders.mariadb], DatabaseName.SQL_SERVER,
])("queries (%s)", (dbName, dsProvider) => { DatabaseName.MARIADB,
].map(name => [name, getDatasource(name)])
)("queries (%s)", (dbName, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
let rawDatasource: Datasource
let datasource: Datasource let datasource: Datasource
async function createQuery( async function createQuery(
@ -69,57 +73,21 @@ describe.each([
) )
} }
async function rawQuery(sql: string): Promise<any> {
switch (datasource.source) {
case SourceName.POSTGRES: {
const client = new pg.Client(datasource.config!)
await client.connect()
try {
const { rows } = await client.query(sql)
return rows
} finally {
await client.end()
}
}
case SourceName.MYSQL: {
const con = await mysql.createConnection(datasource.config!)
try {
const [rows] = await con.query(sql)
return rows
} finally {
con.end()
}
}
case SourceName.SQL_SERVER: {
const pool = new mssql.ConnectionPool(
datasource.config! as mssql.config
)
const client = await pool.connect()
try {
const { recordset } = await client.query(sql)
return recordset
} finally {
await pool.close()
}
}
}
}
beforeAll(async () => { beforeAll(async () => {
await config.init() await config.init()
}) })
beforeEach(async () => { beforeEach(async () => {
const datasourceRequest = await dsProvider.datasource() rawDatasource = await dsProvider
datasource = await config.api.datasource.create(datasourceRequest) datasource = await config.api.datasource.create(rawDatasource)
// The Datasource API does not return the password, but we need // The Datasource API does not return the password, but we need
// it later to connect to the underlying database, so we fill it // it later to connect to the underlying database, so we fill it
// back in here. // back in here.
datasource.config!.password = datasourceRequest.config!.password datasource.config!.password = rawDatasource.config!.password
await rawQuery(createTableSQL[datasource.source]) await rawQuery(datasource, createTableSQL[datasource.source])
await rawQuery(insertSQL) await rawQuery(datasource, insertSQL)
jest.clearAllMocks() jest.clearAllMocks()
}) })
@ -127,11 +95,10 @@ describe.each([
afterEach(async () => { afterEach(async () => {
const ds = await config.api.datasource.get(datasource._id!) const ds = await config.api.datasource.get(datasource._id!)
config.api.datasource.delete(ds) config.api.datasource.delete(ds)
await rawQuery(dropTableSQL) await rawQuery(datasource, dropTableSQL)
}) })
afterAll(async () => { afterAll(async () => {
await dsProvider.stop()
setup.afterAll() setup.afterAll()
}) })
@ -462,6 +429,7 @@ describe.each([
]) ])
const rows = await rawQuery( const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE name = 'baz'" "SELECT * FROM test_table WHERE name = 'baz'"
) )
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
@ -522,6 +490,7 @@ describe.each([
expect(result.data).toEqual([{ created: true }]) expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery( const rows = await rawQuery(
datasource,
`SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'` `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
) )
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
@ -553,6 +522,7 @@ describe.each([
expect(result.data).toEqual([{ created: true }]) expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery( const rows = await rawQuery(
datasource,
`SELECT * FROM test_table WHERE name = '${notDateStr}'` `SELECT * FROM test_table WHERE name = '${notDateStr}'`
) )
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
@ -689,7 +659,10 @@ describe.each([
}, },
]) ])
const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1") const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE id = 1"
)
expect(rows).toEqual([ expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null }, { id: 1, name: "foo", birthday: null, number: null },
]) ])
@ -757,7 +730,10 @@ describe.each([
}, },
]) ])
const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1") const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE id = 1"
)
expect(rows).toHaveLength(0) expect(rows).toHaveLength(0)
}) })
}) })

View File

@ -1,15 +1,17 @@
import { Datasource, Query } from "@budibase/types" import { Datasource, Query } from "@budibase/types"
import * as setup from "../utilities" import * as setup from "../utilities"
import { databaseTestProviders } from "../../../../integrations/tests/utils" import {
import { MongoClient, type Collection, BSON } from "mongodb" DatabaseName,
getDatasource,
} from "../../../../integrations/tests/utils"
import { MongoClient, type Collection, BSON, Db } from "mongodb"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
const collection = "test_collection"
const expectValidId = expect.stringMatching(/^\w{24}$/) const expectValidId = expect.stringMatching(/^\w{24}$/)
const expectValidBsonObjectId = expect.any(BSON.ObjectId) const expectValidBsonObjectId = expect.any(BSON.ObjectId)
describe("/queries", () => { describe("/queries", () => {
let collection: string
let config = setup.getConfig() let config = setup.getConfig()
let datasource: Datasource let datasource: Datasource
@ -38,8 +40,7 @@ describe("/queries", () => {
async function withClient<T>( async function withClient<T>(
callback: (client: MongoClient) => Promise<T> callback: (client: MongoClient) => Promise<T>
): Promise<T> { ): Promise<T> {
const ds = await databaseTestProviders.mongodb.datasource() const client = new MongoClient(datasource.config!.connectionString)
const client = new MongoClient(ds.config!.connectionString)
await client.connect() await client.connect()
try { try {
return await callback(client) return await callback(client)
@ -48,30 +49,33 @@ describe("/queries", () => {
} }
} }
async function withDb<T>(callback: (db: Db) => Promise<T>): Promise<T> {
return await withClient(async client => {
return await callback(client.db(datasource.config!.db))
})
}
async function withCollection<T>( async function withCollection<T>(
callback: (collection: Collection) => Promise<T> callback: (collection: Collection) => Promise<T>
): Promise<T> { ): Promise<T> {
return await withClient(async client => { return await withDb(async db => {
const db = client.db(
(await databaseTestProviders.mongodb.datasource()).config!.db
)
return await callback(db.collection(collection)) return await callback(db.collection(collection))
}) })
} }
afterAll(async () => { afterAll(async () => {
await databaseTestProviders.mongodb.stop()
setup.afterAll() setup.afterAll()
}) })
beforeAll(async () => { beforeAll(async () => {
await config.init() await config.init()
datasource = await config.api.datasource.create( datasource = await config.api.datasource.create(
await databaseTestProviders.mongodb.datasource() await getDatasource(DatabaseName.MONGODB)
) )
}) })
beforeEach(async () => { beforeEach(async () => {
collection = generator.guid()
await withCollection(async collection => { await withCollection(async collection => {
await collection.insertMany([ await collection.insertMany([
{ name: "one" }, { name: "one" },
@ -87,7 +91,7 @@ describe("/queries", () => {
await withCollection(collection => collection.drop()) await withCollection(collection => collection.drop())
}) })
describe.only("preview", () => { describe("preview", () => {
it("should generate a nested schema with an empty array", async () => { it("should generate a nested schema with an empty array", async () => {
const name = generator.guid() const name = generator.guid()
await withCollection( await withCollection(

View File

@ -1,4 +1,4 @@
import { databaseTestProviders } from "../../../integrations/tests/utils" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import tk from "timekeeper" import tk from "timekeeper"
import { outputProcessing } from "../../../utilities/rowProcessor" import { outputProcessing } from "../../../utilities/rowProcessor"
@ -33,10 +33,10 @@ jest.unmock("mssql")
describe.each([ describe.each([
["internal", undefined], ["internal", undefined],
["postgres", databaseTestProviders.postgres], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
["mysql", databaseTestProviders.mysql], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
["mssql", databaseTestProviders.mssql], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
["mariadb", databaseTestProviders.mariadb], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/rows (%s)", (__, dsProvider) => { ])("/rows (%s)", (__, dsProvider) => {
const isInternal = dsProvider === undefined const isInternal = dsProvider === undefined
const config = setup.getConfig() const config = setup.getConfig()
@ -48,23 +48,23 @@ describe.each([
await config.init() await config.init()
if (dsProvider) { if (dsProvider) {
datasource = await config.createDatasource({ datasource = await config.createDatasource({
datasource: await dsProvider.datasource(), datasource: await dsProvider,
}) })
} }
}) })
afterAll(async () => { afterAll(async () => {
if (dsProvider) {
await dsProvider.stop()
}
setup.afterAll() setup.afterAll()
}) })
function saveTableRequest( function saveTableRequest(
...overrides: Partial<SaveTableRequest>[] // We omit the name field here because it's generated in the function with a
// high likelihood to be unique. Tests should not have any reason to control
// the table name they're writing to.
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest { ): SaveTableRequest {
const req: SaveTableRequest = { const req: SaveTableRequest = {
name: uuid.v4().substring(0, 16), name: uuid.v4().substring(0, 10),
type: "table", type: "table",
sourceType: datasource sourceType: datasource
? TableSourceType.EXTERNAL ? TableSourceType.EXTERNAL
@ -86,7 +86,10 @@ describe.each([
} }
function defaultTable( function defaultTable(
...overrides: Partial<SaveTableRequest>[] // We omit the name field here because it's generated in the function with a
// high likelihood to be unique. Tests should not have any reason to control
// the table name they're writing to.
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest { ): SaveTableRequest {
return saveTableRequest( return saveTableRequest(
{ {
@ -193,7 +196,6 @@ describe.each([
const newTable = await config.api.table.save( const newTable = await config.api.table.save(
saveTableRequest({ saveTableRequest({
name: "TestTableAuto",
schema: { schema: {
"Row ID": { "Row ID": {
name: "Row ID", name: "Row ID",
@ -382,11 +384,9 @@ describe.each([
isInternal && isInternal &&
it("doesn't allow creating in user table", async () => { it("doesn't allow creating in user table", async () => {
const userTableId = InternalTable.USER_METADATA
const response = await config.api.row.save( const response = await config.api.row.save(
userTableId, InternalTable.USER_METADATA,
{ {
tableId: userTableId,
firstName: "Joe", firstName: "Joe",
lastName: "Joe", lastName: "Joe",
email: "joe@joe.com", email: "joe@joe.com",
@ -461,7 +461,6 @@ describe.each([
table = await config.api.table.save(defaultTable()) table = await config.api.table.save(defaultTable())
otherTable = await config.api.table.save( otherTable = await config.api.table.save(
defaultTable({ defaultTable({
name: "a",
schema: { schema: {
relationship: { relationship: {
name: "relationship", name: "relationship",
@ -897,8 +896,8 @@ describe.each([
let o2mTable: Table let o2mTable: Table
let m2mTable: Table let m2mTable: Table
beforeAll(async () => { beforeAll(async () => {
o2mTable = await config.api.table.save(defaultTable({ name: "o2m" })) o2mTable = await config.api.table.save(defaultTable())
m2mTable = await config.api.table.save(defaultTable({ name: "m2m" })) m2mTable = await config.api.table.save(defaultTable())
}) })
describe.each([ describe.each([
@ -1255,7 +1254,6 @@ describe.each([
otherTable = await config.api.table.save(defaultTable()) otherTable = await config.api.table.save(defaultTable())
table = await config.api.table.save( table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
name: "b",
schema: { schema: {
links: { links: {
name: "links", name: "links",
@ -1353,7 +1351,6 @@ describe.each([
const table = await config.api.table.save( const table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
name: "table",
schema: { schema: {
text: { text: {
name: "text", name: "text",

View File

@ -3,8 +3,6 @@ import { checkPermissionsEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities" import * as setup from "./utilities"
import { UserMetadata } from "@budibase/types" import { UserMetadata } from "@budibase/types"
jest.setTimeout(30000)
jest.mock("../../../utilities/workerRequests", () => ({ jest.mock("../../../utilities/workerRequests", () => ({
getGlobalUsers: jest.fn(() => { getGlobalUsers: jest.fn(() => {
return {} return {}

View File

@ -19,8 +19,7 @@ import {
ViewV2, ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests" import { generator, mocks } from "@budibase/backend-core/tests"
import * as uuid from "uuid" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { databaseTestProviders } from "../../../integrations/tests/utils"
import merge from "lodash/merge" import merge from "lodash/merge"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core" import { roles } from "@budibase/backend-core"
@ -29,10 +28,10 @@ jest.unmock("mssql")
describe.each([ describe.each([
["internal", undefined], ["internal", undefined],
["postgres", databaseTestProviders.postgres], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
["mysql", databaseTestProviders.mysql], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
["mssql", databaseTestProviders.mssql], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
["mariadb", databaseTestProviders.mariadb], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => { ])("/v2/views (%s)", (_, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isInternal = !dsProvider const isInternal = !dsProvider
@ -41,10 +40,10 @@ describe.each([
let datasource: Datasource let datasource: Datasource
function saveTableRequest( function saveTableRequest(
...overrides: Partial<SaveTableRequest>[] ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest { ): SaveTableRequest {
const req: SaveTableRequest = { const req: SaveTableRequest = {
name: uuid.v4().substring(0, 16), name: generator.guid().replaceAll("-", "").substring(0, 16),
type: "table", type: "table",
sourceType: datasource sourceType: datasource
? TableSourceType.EXTERNAL ? TableSourceType.EXTERNAL
@ -89,16 +88,13 @@ describe.each([
if (dsProvider) { if (dsProvider) {
datasource = await config.createDatasource({ datasource = await config.createDatasource({
datasource: await dsProvider.datasource(), datasource: await dsProvider,
}) })
} }
table = await config.api.table.save(priceTable()) table = await config.api.table.save(priceTable())
}) })
afterAll(async () => { afterAll(async () => {
if (dsProvider) {
await dsProvider.stop()
}
setup.afterAll() setup.afterAll()
}) })
@ -230,7 +226,7 @@ describe.each([
view = await config.api.viewV2.create({ view = await config.api.viewV2.create({
tableId: table._id!, tableId: table._id!,
name: "View A", name: generator.guid(),
}) })
}) })
@ -306,12 +302,13 @@ describe.each([
it("can update an existing view name", async () => { it("can update an existing view name", async () => {
const tableId = table._id! const tableId = table._id!
await config.api.viewV2.update({ ...view, name: "View B" }) const newName = generator.guid()
await config.api.viewV2.update({ ...view, name: newName })
expect(await config.api.table.get(tableId)).toEqual( expect(await config.api.table.get(tableId)).toEqual(
expect.objectContaining({ expect.objectContaining({
views: { views: {
"View B": { ...view, name: "View B", schema: expect.anything() }, [newName]: { ...view, name: newName, schema: expect.anything() },
}, },
}) })
) )
@ -506,7 +503,6 @@ describe.each([
it("views have extra data trimmed", async () => { it("views have extra data trimmed", async () => {
const table = await config.api.table.save( const table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
name: "orders",
schema: { schema: {
Country: { Country: {
type: FieldType.STRING, type: FieldType.STRING,
@ -522,7 +518,7 @@ describe.each([
const view = await config.api.viewV2.create({ const view = await config.api.viewV2.create({
tableId: table._id!, tableId: table._id!,
name: uuid.v4(), name: generator.guid(),
schema: { schema: {
Country: { Country: {
visible: true, visible: true,
@ -852,7 +848,6 @@ describe.each([
beforeAll(async () => { beforeAll(async () => {
table = await config.api.table.save( table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
name: `users_${uuid.v4()}`,
type: "table", type: "table",
schema: { schema: {
name: { name: {

View File

@ -3,7 +3,6 @@ import {
generateMakeRequest, generateMakeRequest,
MakeRequestResponse, MakeRequestResponse,
} from "../api/routes/public/tests/utils" } from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities" import * as setup from "../api/routes/tests/utilities"
import { import {
Datasource, Datasource,
@ -12,12 +11,23 @@ import {
TableRequest, TableRequest,
TableSourceType, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { databaseTestProviders } from "../integrations/tests/utils" import {
import mysql from "mysql2/promise" DatabaseName,
getDatasource,
rawQuery,
} from "../integrations/tests/utils"
import { builderSocket } from "../websockets" import { builderSocket } from "../websockets"
import { generator } from "@budibase/backend-core/tests"
// @ts-ignore // @ts-ignore
fetch.mockSearch() fetch.mockSearch()
function uniqueTableName(length?: number): string {
return generator
.guid()
.replaceAll("-", "_")
.substring(0, length || 10)
}
const config = setup.getConfig()! const config = setup.getConfig()!
jest.mock("../websockets", () => ({ jest.mock("../websockets", () => ({
@ -37,7 +47,8 @@ jest.mock("../websockets", () => ({
describe("mysql integrations", () => { describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse, let makeRequest: MakeRequestResponse,
mysqlDatasource: Datasource, rawDatasource: Datasource,
datasource: Datasource,
primaryMySqlTable: Table primaryMySqlTable: Table
beforeAll(async () => { beforeAll(async () => {
@ -46,18 +57,13 @@ describe("mysql integrations", () => {
makeRequest = generateMakeRequest(apiKey, true) makeRequest = generateMakeRequest(apiKey, true)
mysqlDatasource = await config.api.datasource.create( rawDatasource = await getDatasource(DatabaseName.MYSQL)
await databaseTestProviders.mysql.datasource() datasource = await config.api.datasource.create(rawDatasource)
)
})
afterAll(async () => {
await databaseTestProviders.mysql.stop()
}) })
beforeEach(async () => { beforeEach(async () => {
primaryMySqlTable = await config.createTable({ primaryMySqlTable = await config.createTable({
name: uuidv4(), name: uniqueTableName(),
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -79,7 +85,7 @@ describe("mysql integrations", () => {
type: FieldType.NUMBER, type: FieldType.NUMBER,
}, },
}, },
sourceId: mysqlDatasource._id, sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL, sourceType: TableSourceType.EXTERNAL,
}) })
}) })
@ -87,18 +93,15 @@ describe("mysql integrations", () => {
afterAll(config.end) afterAll(config.end)
it("validate table schema", async () => { it("validate table schema", async () => {
const res = await makeRequest( const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
"get",
`/api/datasources/${mysqlDatasource._id}`
)
expect(res.status).toBe(200) expect(res.status).toBe(200)
expect(res.body).toEqual({ expect(res.body).toEqual({
config: { config: {
database: "mysql", database: expect.any(String),
host: mysqlDatasource.config!.host, host: datasource.config!.host,
password: "--secret-value--", password: "--secret-value--",
port: mysqlDatasource.config!.port, port: datasource.config!.port,
user: "root", user: "root",
}, },
plus: true, plus: true,
@ -117,7 +120,7 @@ describe("mysql integrations", () => {
it("should be able to verify the connection", async () => { it("should be able to verify the connection", async () => {
await config.api.datasource.verify( await config.api.datasource.verify(
{ {
datasource: await databaseTestProviders.mysql.datasource(), datasource: rawDatasource,
}, },
{ {
body: { body: {
@ -128,13 +131,12 @@ describe("mysql integrations", () => {
}) })
it("should state an invalid datasource cannot connect", async () => { it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.mysql.datasource()
await config.api.datasource.verify( await config.api.datasource.verify(
{ {
datasource: { datasource: {
...dbConfig, ...rawDatasource,
config: { config: {
...dbConfig.config, ...rawDatasource.config,
password: "wrongpassword", password: "wrongpassword",
}, },
}, },
@ -154,7 +156,7 @@ describe("mysql integrations", () => {
it("should fetch information about mysql datasource", async () => { it("should fetch information about mysql datasource", async () => {
const primaryName = primaryMySqlTable.name const primaryName = primaryMySqlTable.name
const response = await makeRequest("post", "/api/datasources/info", { const response = await makeRequest("post", "/api/datasources/info", {
datasource: mysqlDatasource, datasource: datasource,
}) })
expect(response.status).toBe(200) expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined() expect(response.body.tableNames).toBeDefined()
@ -163,40 +165,38 @@ describe("mysql integrations", () => {
}) })
describe("Integration compatibility with mysql search_path", () => { describe("Integration compatibility with mysql search_path", () => {
let client: mysql.Connection, pathDatasource: Datasource let datasource: Datasource, rawDatasource: Datasource
const database = "test1" const database = generator.guid()
const database2 = "test-2" const database2 = generator.guid()
beforeAll(async () => { beforeAll(async () => {
const dsConfig = await databaseTestProviders.mysql.datasource() rawDatasource = await getDatasource(DatabaseName.MYSQL)
const dbConfig = dsConfig.config!
client = await mysql.createConnection(dbConfig) await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
await client.query(`CREATE DATABASE \`${database}\`;`) await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
await client.query(`CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = { const pathConfig: any = {
...dsConfig, ...rawDatasource,
config: { config: {
...dbConfig, ...rawDatasource.config!,
database, database,
}, },
} }
pathDatasource = await config.api.datasource.create(pathConfig) datasource = await config.api.datasource.create(pathConfig)
}) })
afterAll(async () => { afterAll(async () => {
await client.query(`DROP DATABASE \`${database}\`;`) await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
await client.query(`DROP DATABASE \`${database2}\`;`) await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
await client.end()
}) })
it("discovers tables from any schema in search path", async () => { it("discovers tables from any schema in search path", async () => {
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);` `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
) )
const response = await makeRequest("post", "/api/datasources/info", { const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource, datasource: datasource,
}) })
expect(response.status).toBe(200) expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined() expect(response.body.tableNames).toBeDefined()
@ -207,15 +207,17 @@ describe("mysql integrations", () => {
it("does not mix columns from different tables", async () => { it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name" const repeated_table_name = "table_same_name"
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);` `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
) )
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);` `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
) )
const response = await makeRequest( const response = await makeRequest(
"post", "post",
`/api/datasources/${pathDatasource._id}/schema`, `/api/datasources/${datasource._id}/schema`,
{ {
tablesFilter: [repeated_table_name], tablesFilter: [repeated_table_name],
} }
@ -231,30 +233,14 @@ describe("mysql integrations", () => {
}) })
describe("POST /api/tables/", () => { describe("POST /api/tables/", () => {
let client: mysql.Connection
const emitDatasourceUpdateMock = jest.fn() const emitDatasourceUpdateMock = jest.fn()
beforeEach(async () => {
client = await mysql.createConnection(
(
await databaseTestProviders.mysql.datasource()
).config!
)
mysqlDatasource = await config.api.datasource.create(
await databaseTestProviders.mysql.datasource()
)
})
afterEach(async () => {
await client.end()
})
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => { it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
const addColumnToTable: TableRequest = { const addColumnToTable: TableRequest = {
type: "table", type: "table",
sourceType: TableSourceType.EXTERNAL, sourceType: TableSourceType.EXTERNAL,
name: "table", name: uniqueTableName(),
sourceId: mysqlDatasource._id!, sourceId: datasource._id!,
primary: ["id"], primary: ["id"],
schema: { schema: {
id: { id: {
@ -301,14 +287,16 @@ describe("mysql integrations", () => {
}, },
}, },
created: true, created: true,
_id: `${mysqlDatasource._id}__table`, _id: `${datasource._id}__${addColumnToTable.name}`,
} }
delete expectedTable._add delete expectedTable._add
expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1) expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
const emittedDatasource: Datasource = const emittedDatasource: Datasource =
emitDatasourceUpdateMock.mock.calls[0][1] emitDatasourceUpdateMock.mock.calls[0][1]
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable) expect(emittedDatasource.entities![expectedTable.name]).toEqual(
expectedTable
)
}) })
it("will rename a column", async () => { it("will rename a column", async () => {
@ -346,17 +334,18 @@ describe("mysql integrations", () => {
"/api/tables/", "/api/tables/",
renameColumnOnTable renameColumnOnTable
) )
mysqlDatasource = (
await makeRequest( const ds = (
"post", await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
`/api/datasources/${mysqlDatasource._id}/schema`
)
).body.datasource ).body.datasource
expect(response.status).toEqual(200) expect(response.status).toEqual(200)
expect( expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema) "id",
).toEqual(["id", "name", "description", "age"]) "name",
"description",
"age",
])
}) })
}) })
}) })

View File

@ -16,8 +16,12 @@ import {
import _ from "lodash" import _ from "lodash"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core" import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils" import {
import { Client } from "pg" DatabaseName,
getDatasource,
rawQuery,
} from "../integrations/tests/utils"
// @ts-ignore // @ts-ignore
fetch.mockSearch() fetch.mockSearch()
@ -27,7 +31,8 @@ jest.mock("../websockets")
describe("postgres integrations", () => { describe("postgres integrations", () => {
let makeRequest: MakeRequestResponse, let makeRequest: MakeRequestResponse,
postgresDatasource: Datasource, rawDatasource: Datasource,
datasource: Datasource,
primaryPostgresTable: Table, primaryPostgresTable: Table,
oneToManyRelationshipInfo: ForeignTableInfo, oneToManyRelationshipInfo: ForeignTableInfo,
manyToOneRelationshipInfo: ForeignTableInfo, manyToOneRelationshipInfo: ForeignTableInfo,
@ -39,19 +44,17 @@ describe("postgres integrations", () => {
makeRequest = generateMakeRequest(apiKey, true) makeRequest = generateMakeRequest(apiKey, true)
postgresDatasource = await config.api.datasource.create( rawDatasource = await getDatasource(DatabaseName.POSTGRES)
await databaseTestProviders.postgres.datasource() datasource = await config.api.datasource.create(rawDatasource)
)
})
afterAll(async () => {
await databaseTestProviders.postgres.stop()
}) })
beforeEach(async () => { beforeEach(async () => {
async function createAuxTable(prefix: string) { async function createAuxTable(prefix: string) {
return await config.createTable({ return await config.createTable({
name: `${prefix}_${generator.word({ length: 6 })}`, name: `${prefix}_${generator
.guid()
.replaceAll("-", "")
.substring(0, 6)}`,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
primaryDisplay: "title", primaryDisplay: "title",
@ -66,7 +69,7 @@ describe("postgres integrations", () => {
type: FieldType.STRING, type: FieldType.STRING,
}, },
}, },
sourceId: postgresDatasource._id, sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL, sourceType: TableSourceType.EXTERNAL,
}) })
} }
@ -88,7 +91,7 @@ describe("postgres integrations", () => {
} }
primaryPostgresTable = await config.createTable({ primaryPostgresTable = await config.createTable({
name: `p_${generator.word({ length: 6 })}`, name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`,
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -143,7 +146,7 @@ describe("postgres integrations", () => {
main: true, main: true,
}, },
}, },
sourceId: postgresDatasource._id, sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL, sourceType: TableSourceType.EXTERNAL,
}) })
}) })
@ -250,7 +253,7 @@ describe("postgres integrations", () => {
async function createDefaultPgTable() { async function createDefaultPgTable() {
return await config.createTable({ return await config.createTable({
name: generator.word({ length: 10 }), name: generator.guid().replaceAll("-", "").substring(0, 10),
type: "table", type: "table",
primary: ["id"], primary: ["id"],
schema: { schema: {
@ -260,7 +263,7 @@ describe("postgres integrations", () => {
autocolumn: true, autocolumn: true,
}, },
}, },
sourceId: postgresDatasource._id, sourceId: datasource._id,
sourceType: TableSourceType.EXTERNAL, sourceType: TableSourceType.EXTERNAL,
}) })
} }
@ -298,19 +301,16 @@ describe("postgres integrations", () => {
} }
it("validate table schema", async () => { it("validate table schema", async () => {
const res = await makeRequest( const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
"get",
`/api/datasources/${postgresDatasource._id}`
)
expect(res.status).toBe(200) expect(res.status).toBe(200)
expect(res.body).toEqual({ expect(res.body).toEqual({
config: { config: {
ca: false, ca: false,
database: "postgres", database: expect.any(String),
host: postgresDatasource.config!.host, host: datasource.config!.host,
password: "--secret-value--", password: "--secret-value--",
port: postgresDatasource.config!.port, port: datasource.config!.port,
rejectUnauthorized: false, rejectUnauthorized: false,
schema: "public", schema: "public",
ssl: false, ssl: false,
@ -1042,7 +1042,7 @@ describe("postgres integrations", () => {
it("should be able to verify the connection", async () => { it("should be able to verify the connection", async () => {
await config.api.datasource.verify( await config.api.datasource.verify(
{ {
datasource: await databaseTestProviders.postgres.datasource(), datasource: await getDatasource(DatabaseName.POSTGRES),
}, },
{ {
body: { body: {
@ -1053,7 +1053,7 @@ describe("postgres integrations", () => {
}) })
it("should state an invalid datasource cannot connect", async () => { it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.postgres.datasource() const dbConfig = await getDatasource(DatabaseName.POSTGRES)
await config.api.datasource.verify( await config.api.datasource.verify(
{ {
datasource: { datasource: {
@ -1078,7 +1078,7 @@ describe("postgres integrations", () => {
it("should fetch information about postgres datasource", async () => { it("should fetch information about postgres datasource", async () => {
const primaryName = primaryPostgresTable.name const primaryName = primaryPostgresTable.name
const response = await makeRequest("post", "/api/datasources/info", { const response = await makeRequest("post", "/api/datasources/info", {
datasource: postgresDatasource, datasource: datasource,
}) })
expect(response.status).toBe(200) expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined() expect(response.body.tableNames).toBeDefined()
@ -1087,86 +1087,88 @@ describe("postgres integrations", () => {
}) })
describe("POST /api/datasources/:datasourceId/schema", () => { describe("POST /api/datasources/:datasourceId/schema", () => {
let client: Client let tableName: string
beforeEach(async () => { beforeEach(async () => {
client = new Client( tableName = generator.guid().replaceAll("-", "").substring(0, 10)
(await databaseTestProviders.postgres.datasource()).config!
)
await client.connect()
}) })
afterEach(async () => { afterEach(async () => {
await client.query(`DROP TABLE IF EXISTS "table"`) await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
await client.end()
}) })
it("recognises when a table has no primary key", async () => { it("recognises when a table has no primary key", async () => {
await client.query(`CREATE TABLE "table" (id SERIAL)`) await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
const response = await makeRequest( const response = await makeRequest(
"post", "post",
`/api/datasources/${postgresDatasource._id}/schema` `/api/datasources/${datasource._id}/schema`
) )
expect(response.body.errors).toEqual({ expect(response.body.errors).toEqual({
table: "Table must have a primary key.", [tableName]: "Table must have a primary key.",
}) })
}) })
it("recognises when a table is using a reserved column name", async () => { it("recognises when a table is using a reserved column name", async () => {
await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `) await rawQuery(
rawDatasource,
`CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
)
const response = await makeRequest( const response = await makeRequest(
"post", "post",
`/api/datasources/${postgresDatasource._id}/schema` `/api/datasources/${datasource._id}/schema`
) )
expect(response.body.errors).toEqual({ expect(response.body.errors).toEqual({
table: "Table contains invalid columns.", [tableName]: "Table contains invalid columns.",
}) })
}) })
}) })
describe("Integration compatibility with postgres search_path", () => { describe("Integration compatibility with postgres search_path", () => {
let client: Client, pathDatasource: Datasource let rawDatasource: Datasource,
const schema1 = "test1", datasource: Datasource,
schema2 = "test-2" schema1: string,
schema2: string
beforeAll(async () => { beforeEach(async () => {
const dsConfig = await databaseTestProviders.postgres.datasource() schema1 = generator.guid().replaceAll("-", "")
const dbConfig = dsConfig.config! schema2 = generator.guid().replaceAll("-", "")
client = new Client(dbConfig) rawDatasource = await getDatasource(DatabaseName.POSTGRES)
await client.connect() const dbConfig = rawDatasource.config!
await client.query(`CREATE SCHEMA "${schema1}";`)
await client.query(`CREATE SCHEMA "${schema2}";`) await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
const pathConfig: any = { const pathConfig: any = {
...dsConfig, ...rawDatasource,
config: { config: {
...dbConfig, ...dbConfig,
schema: `${schema1}, ${schema2}`, schema: `${schema1}, ${schema2}`,
}, },
} }
pathDatasource = await config.api.datasource.create(pathConfig) datasource = await config.api.datasource.create(pathConfig)
}) })
afterAll(async () => { afterEach(async () => {
await client.query(`DROP SCHEMA "${schema1}" CASCADE;`) await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
await client.query(`DROP SCHEMA "${schema2}" CASCADE;`) await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
await client.end()
}) })
it("discovers tables from any schema in search path", async () => { it("discovers tables from any schema in search path", async () => {
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);` `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
) )
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);` `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
) )
const response = await makeRequest("post", "/api/datasources/info", { const response = await makeRequest("post", "/api/datasources/info", {
datasource: pathDatasource, datasource: datasource,
}) })
expect(response.status).toBe(200) expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined() expect(response.body.tableNames).toBeDefined()
@ -1177,15 +1179,17 @@ describe("postgres integrations", () => {
it("does not mix columns from different tables", async () => { it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name" const repeated_table_name = "table_same_name"
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);` `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
) )
await client.query( await rawQuery(
rawDatasource,
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);` `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
) )
const response = await makeRequest( const response = await makeRequest(
"post", "post",
`/api/datasources/${pathDatasource._id}/schema`, `/api/datasources/${datasource._id}/schema`,
{ {
tablesFilter: [repeated_table_name], tablesFilter: [repeated_table_name],
} }

View File

@ -9,6 +9,7 @@ import {
QueryType, QueryType,
} from "@budibase/types" } from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import { HOST_ADDRESS } from "./utils"
interface CouchDBConfig { interface CouchDBConfig {
url: string url: string
@ -28,7 +29,7 @@ const SCHEMA: Integration = {
url: { url: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
required: true, required: true,
default: "http://localhost:5984", default: `http://${HOST_ADDRESS}:5984`,
}, },
database: { database: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,

View File

@ -8,6 +8,7 @@ import {
} from "@budibase/types" } from "@budibase/types"
import { Client, ClientOptions } from "@elastic/elasticsearch" import { Client, ClientOptions } from "@elastic/elasticsearch"
import { HOST_ADDRESS } from "./utils"
interface ElasticsearchConfig { interface ElasticsearchConfig {
url: string url: string
@ -29,7 +30,7 @@ const SCHEMA: Integration = {
url: { url: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
required: true, required: true,
default: "http://localhost:9200", default: `http://${HOST_ADDRESS}:9200`,
}, },
ssl: { ssl: {
type: DatasourceFieldType.BOOLEAN, type: DatasourceFieldType.BOOLEAN,

View File

@ -22,6 +22,7 @@ import {
finaliseExternalTables, finaliseExternalTables,
SqlClient, SqlClient,
checkExternalTables, checkExternalTables,
HOST_ADDRESS,
} from "./utils" } from "./utils"
import Sql from "./base/sql" import Sql from "./base/sql"
import { MSSQLTablesResponse, MSSQLColumn } from "./base/types" import { MSSQLTablesResponse, MSSQLColumn } from "./base/types"
@ -88,7 +89,6 @@ const SCHEMA: Integration = {
user: { user: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
required: true, required: true,
default: "localhost",
}, },
password: { password: {
type: DatasourceFieldType.PASSWORD, type: DatasourceFieldType.PASSWORD,
@ -96,7 +96,7 @@ const SCHEMA: Integration = {
}, },
server: { server: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
default: "localhost", default: HOST_ADDRESS,
}, },
port: { port: {
type: DatasourceFieldType.NUMBER, type: DatasourceFieldType.NUMBER,

View File

@ -22,6 +22,7 @@ import {
InsertManyResult, InsertManyResult,
} from "mongodb" } from "mongodb"
import environment from "../environment" import environment from "../environment"
import { HOST_ADDRESS } from "./utils"
export interface MongoDBConfig { export interface MongoDBConfig {
connectionString: string connectionString: string
@ -51,7 +52,7 @@ const getSchema = () => {
connectionString: { connectionString: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
required: true, required: true,
default: "mongodb://localhost:27017", default: `mongodb://${HOST_ADDRESS}:27017`,
display: "Connection string", display: "Connection string",
}, },
db: { db: {

View File

@ -21,6 +21,7 @@ import {
generateColumnDefinition, generateColumnDefinition,
finaliseExternalTables, finaliseExternalTables,
checkExternalTables, checkExternalTables,
HOST_ADDRESS,
} from "./utils" } from "./utils"
import dayjs from "dayjs" import dayjs from "dayjs"
import { NUMBER_REGEX } from "../utilities" import { NUMBER_REGEX } from "../utilities"
@ -49,7 +50,7 @@ const SCHEMA: Integration = {
datasource: { datasource: {
host: { host: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
default: "localhost", default: HOST_ADDRESS,
required: true, required: true,
}, },
port: { port: {

View File

@ -22,6 +22,7 @@ import {
finaliseExternalTables, finaliseExternalTables,
getSqlQuery, getSqlQuery,
SqlClient, SqlClient,
HOST_ADDRESS,
} from "./utils" } from "./utils"
import Sql from "./base/sql" import Sql from "./base/sql"
import { import {
@ -63,7 +64,7 @@ const SCHEMA: Integration = {
datasource: { datasource: {
host: { host: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
default: "localhost", default: HOST_ADDRESS,
required: true, required: true,
}, },
port: { port: {

View File

@ -21,6 +21,7 @@ import {
finaliseExternalTables, finaliseExternalTables,
SqlClient, SqlClient,
checkExternalTables, checkExternalTables,
HOST_ADDRESS,
} from "./utils" } from "./utils"
import Sql from "./base/sql" import Sql from "./base/sql"
import { PostgresColumn } from "./base/types" import { PostgresColumn } from "./base/types"
@ -72,7 +73,7 @@ const SCHEMA: Integration = {
datasource: { datasource: {
host: { host: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
default: "localhost", default: HOST_ADDRESS,
required: true, required: true,
}, },
port: { port: {

View File

@ -6,6 +6,7 @@ import {
QueryType, QueryType,
} from "@budibase/types" } from "@budibase/types"
import Redis from "ioredis" import Redis from "ioredis"
import { HOST_ADDRESS } from "./utils"
interface RedisConfig { interface RedisConfig {
host: string host: string
@ -28,7 +29,7 @@ const SCHEMA: Integration = {
host: { host: {
type: DatasourceFieldType.STRING, type: DatasourceFieldType.STRING,
required: true, required: true,
default: "localhost", default: HOST_ADDRESS,
}, },
port: { port: {
type: DatasourceFieldType.NUMBER, type: DatasourceFieldType.NUMBER,

View File

@ -1,23 +1,88 @@
import { Datasource } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres" import * as postgres from "./postgres"
import * as mongodb from "./mongodb" import * as mongodb from "./mongodb"
import * as mysql from "./mysql" import * as mysql from "./mysql"
import * as mssql from "./mssql" import * as mssql from "./mssql"
import * as mariadb from "./mariadb" import * as mariadb from "./mariadb"
import { StartedTestContainer } from "testcontainers" import { GenericContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
jest.setTimeout(30000) export type DatasourceProvider = () => Promise<Datasource>
export interface DatabaseProvider { export enum DatabaseName {
start(): Promise<StartedTestContainer> POSTGRES = "postgres",
stop(): Promise<void> MONGODB = "mongodb",
datasource(): Promise<Datasource> MYSQL = "mysql",
SQL_SERVER = "mssql",
MARIADB = "mariadb",
} }
export const databaseTestProviders = { const providers: Record<DatabaseName, DatasourceProvider> = {
postgres, [DatabaseName.POSTGRES]: postgres.getDatasource,
mongodb, [DatabaseName.MONGODB]: mongodb.getDatasource,
mysql, [DatabaseName.MYSQL]: mysql.getDatasource,
mssql, [DatabaseName.SQL_SERVER]: mssql.getDatasource,
mariadb, [DatabaseName.MARIADB]: mariadb.getDatasource,
}
export function getDatasourceProviders(
...sourceNames: DatabaseName[]
): Promise<Datasource>[] {
return sourceNames.map(sourceName => providers[sourceName]())
}
export function getDatasourceProvider(
sourceName: DatabaseName
): DatasourceProvider {
return providers[sourceName]
}
export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
return providers[sourceName]()
}
export async function getDatasources(
...sourceNames: DatabaseName[]
): Promise<Datasource[]> {
return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
}
export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
switch (ds.source) {
case SourceName.POSTGRES: {
return postgres.rawQuery(ds, sql)
}
case SourceName.MYSQL: {
return mysql.rawQuery(ds, sql)
}
case SourceName.SQL_SERVER: {
return mssql.rawQuery(ds, sql)
}
default: {
throw new Error(`Unsupported source: ${ds.source}`)
}
}
}
export async function startContainer(container: GenericContainer) {
if (process.env.REUSE_CONTAINERS) {
container = container.withReuse()
}
const startedContainer = await container.start()
const info = testContainerUtils.getContainerById(startedContainer.getId())
if (!info) {
throw new Error("Container not found")
}
// Some Docker runtimes, when you expose a port, will bind it to both
// 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
// addresses are not shared, and testcontainers will sometimes give you back
// the ipv6 port. There's no way to know that this has happened, and if you
// try to then connect to `localhost:port` you may attempt to bind to the v4
// address which could be unbound or even an entirely different container. For
// that reason, we don't use testcontainers' `getExposedPort` function,
// preferring instead our own method that guaranteed v4 ports.
return testContainerUtils.getExposedV4Ports(info)
} }

View File

@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { rawQuery } from "./mysql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
let container: StartedTestContainer | undefined let ports: Promise<testContainerUtils.Port[]>
class MariaDBWaitStrategy extends AbstractWaitStrategy { class MariaDBWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) { async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@ -21,38 +24,38 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
} }
} }
export async function start(): Promise<StartedTestContainer> { export async function getDatasource(): Promise<Datasource> {
return await new GenericContainer("mariadb:lts") if (!ports) {
.withExposedPorts(3306) ports = startContainer(
.withEnvironment({ MARIADB_ROOT_PASSWORD: "password" }) new GenericContainer("mariadb:lts")
.withWaitStrategy(new MariaDBWaitStrategy()) .withExposedPorts(3306)
.start() .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
} .withWaitStrategy(new MariaDBWaitStrategy())
)
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
} }
const host = container.getHost()
const port = container.getMappedPort(3306)
return { const port = (await ports).find(x => x.container === 3306)?.host
if (!port) {
throw new Error("MariaDB port not found")
}
const config = {
host: "127.0.0.1",
port,
user: "root",
password: "password",
database: "mysql",
}
const datasource = {
type: "datasource_plus", type: "datasource_plus",
source: SourceName.MYSQL, source: SourceName.MYSQL,
plus: true, plus: true,
config: { config,
host,
port,
user: "root",
password: "password",
database: "mysql",
},
} }
}
export async function stop() { const database = generator.guid().replaceAll("-", "")
if (container) { await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
await container.stop() datasource.config.database = database
container = undefined return datasource
}
} }

View File

@ -1,43 +1,39 @@
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "."
let container: StartedTestContainer | undefined let ports: Promise<testContainerUtils.Port[]>
export async function start(): Promise<StartedTestContainer> { export async function getDatasource(): Promise<Datasource> {
return await new GenericContainer("mongo:7.0-jammy") if (!ports) {
.withExposedPorts(27017) ports = startContainer(
.withEnvironment({ new GenericContainer("mongo:7.0-jammy")
MONGO_INITDB_ROOT_USERNAME: "mongo", .withExposedPorts(27017)
MONGO_INITDB_ROOT_PASSWORD: "password", .withEnvironment({
}) MONGO_INITDB_ROOT_USERNAME: "mongo",
.withWaitStrategy( MONGO_INITDB_ROOT_PASSWORD: "password",
Wait.forSuccessfulCommand( })
`mongosh --eval "db.version()"` .withWaitStrategy(
).withStartupTimeout(10000) Wait.forSuccessfulCommand(
`mongosh --eval "db.version()"`
).withStartupTimeout(10000)
)
) )
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
} }
const host = container.getHost()
const port = container.getMappedPort(27017) const port = (await ports).find(x => x.container === 27017)
if (!port) {
throw new Error("MongoDB port not found")
}
return { return {
type: "datasource", type: "datasource",
source: SourceName.MONGODB, source: SourceName.MONGODB,
plus: false, plus: false,
config: { config: {
connectionString: `mongodb://mongo:password@${host}:${port}`, connectionString: `mongodb://mongo:password@127.0.0.1:${port.host}`,
db: "mongo", db: generator.guid(),
}, },
} }
} }
export async function stop() {
if (container) {
await container.stop()
container = undefined
}
}

View File

@ -1,43 +1,41 @@
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import mssql from "mssql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
let container: StartedTestContainer | undefined let ports: Promise<testContainerUtils.Port[]>
export async function start(): Promise<StartedTestContainer> { export async function getDatasource(): Promise<Datasource> {
return await new GenericContainer( if (!ports) {
"mcr.microsoft.com/mssql/server:2022-latest" ports = startContainer(
) new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
.withExposedPorts(1433) .withExposedPorts(1433)
.withEnvironment({ .withEnvironment({
ACCEPT_EULA: "Y", ACCEPT_EULA: "Y",
MSSQL_SA_PASSWORD: "Password_123", MSSQL_SA_PASSWORD: "Password_123",
// This is important, as Microsoft allow us to use the "Developer" edition // This is important, as Microsoft allow us to use the "Developer" edition
// of SQL Server for development and testing purposes. We can't use other // of SQL Server for development and testing purposes. We can't use other
// versions without a valid license, and we cannot use the Developer // versions without a valid license, and we cannot use the Developer
// version in production. // version in production.
MSSQL_PID: "Developer", MSSQL_PID: "Developer",
}) })
.withWaitStrategy( .withWaitStrategy(
Wait.forSuccessfulCommand( Wait.forSuccessfulCommand(
"/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'" "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
) )
)
) )
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
} }
const host = container.getHost()
const port = container.getMappedPort(1433)
return { const port = (await ports).find(x => x.container === 1433)?.host
const datasource: Datasource = {
type: "datasource_plus", type: "datasource_plus",
source: SourceName.SQL_SERVER, source: SourceName.SQL_SERVER,
plus: true, plus: true,
config: { config: {
server: host, server: "127.0.0.1",
port, port,
user: "sa", user: "sa",
password: "Password_123", password: "Password_123",
@ -46,11 +44,28 @@ export async function datasource(): Promise<Datasource> {
}, },
}, },
} }
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE "${database}"`)
datasource.config!.database = database
return datasource
} }
export async function stop() { export async function rawQuery(ds: Datasource, sql: string) {
if (container) { if (!ds.config) {
await container.stop() throw new Error("Datasource config is missing")
container = undefined }
if (ds.source !== SourceName.SQL_SERVER) {
throw new Error("Datasource source is not SQL Server")
}
const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
const client = await pool.connect()
try {
const { recordset } = await client.query(sql)
return recordset
} finally {
await pool.close()
} }
} }

View File

@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import mysql from "mysql2/promise"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
let container: StartedTestContainer | undefined let ports: Promise<testContainerUtils.Port[]>
class MySQLWaitStrategy extends AbstractWaitStrategy { class MySQLWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) { async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@ -24,38 +27,50 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
} }
} }
export async function start(): Promise<StartedTestContainer> { export async function getDatasource(): Promise<Datasource> {
return await new GenericContainer("mysql:8.3") if (!ports) {
.withExposedPorts(3306) ports = startContainer(
.withEnvironment({ MYSQL_ROOT_PASSWORD: "password" }) new GenericContainer("mysql:8.3")
.withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000)) .withExposedPorts(3306)
.start() .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
} .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
)
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
} }
const host = container.getHost()
const port = container.getMappedPort(3306)
return { const port = (await ports).find(x => x.container === 3306)?.host
const datasource: Datasource = {
type: "datasource_plus", type: "datasource_plus",
source: SourceName.MYSQL, source: SourceName.MYSQL,
plus: true, plus: true,
config: { config: {
host, host: "127.0.0.1",
port, port,
user: "root", user: "root",
password: "password", password: "password",
database: "mysql", database: "mysql",
}, },
} }
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
datasource.config!.database = database
return datasource
} }
export async function stop() { export async function rawQuery(ds: Datasource, sql: string) {
if (container) { if (!ds.config) {
await container.stop() throw new Error("Datasource config is missing")
container = undefined }
if (ds.source !== SourceName.MYSQL) {
throw new Error("Datasource source is not MySQL")
}
const connection = await mysql.createConnection(ds.config)
try {
const [rows] = await connection.query(sql)
return rows
} finally {
connection.end()
} }
} }

View File

@ -1,33 +1,33 @@
import { Datasource, SourceName } from "@budibase/types" import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers" import { GenericContainer, Wait } from "testcontainers"
import pg from "pg"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
let container: StartedTestContainer | undefined let ports: Promise<testContainerUtils.Port[]>
export async function start(): Promise<StartedTestContainer> { export async function getDatasource(): Promise<Datasource> {
return await new GenericContainer("postgres:16.1-bullseye") if (!ports) {
.withExposedPorts(5432) ports = startContainer(
.withEnvironment({ POSTGRES_PASSWORD: "password" }) new GenericContainer("postgres:16.1-bullseye")
.withWaitStrategy( .withExposedPorts(5432)
Wait.forSuccessfulCommand( .withEnvironment({ POSTGRES_PASSWORD: "password" })
"pg_isready -h localhost -p 5432" .withWaitStrategy(
).withStartupTimeout(10000) Wait.forSuccessfulCommand(
"pg_isready -h localhost -p 5432"
).withStartupTimeout(10000)
)
) )
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
} }
const host = container.getHost()
const port = container.getMappedPort(5432)
return { const port = (await ports).find(x => x.container === 5432)?.host
const datasource: Datasource = {
type: "datasource_plus", type: "datasource_plus",
source: SourceName.POSTGRES, source: SourceName.POSTGRES,
plus: true, plus: true,
config: { config: {
host, host: "127.0.0.1",
port, port,
database: "postgres", database: "postgres",
user: "postgres", user: "postgres",
@ -38,11 +38,28 @@ export async function datasource(): Promise<Datasource> {
ca: false, ca: false,
}, },
} }
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE "${database}"`)
datasource.config!.database = database
return datasource
} }
export async function stop() { export async function rawQuery(ds: Datasource, sql: string) {
if (container) { if (!ds.config) {
await container.stop() throw new Error("Datasource config is missing")
container = undefined }
if (ds.source !== SourceName.POSTGRES) {
throw new Error("Datasource source is not Postgres")
}
const client = new pg.Client(ds.config)
await client.connect()
try {
const { rows } = await client.query(sql)
return rows
} finally {
await client.end()
} }
} }

View File

@ -13,6 +13,7 @@ import {
DEFAULT_BB_DATASOURCE_ID, DEFAULT_BB_DATASOURCE_ID,
} from "../constants" } from "../constants"
import { helpers } from "@budibase/shared-core" import { helpers } from "@budibase/shared-core"
import env from "../environment"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g const ROW_ID_REGEX = /^\[.*]$/g
@ -92,6 +93,14 @@ export enum SqlClient {
ORACLE = "oracledb", ORACLE = "oracledb",
} }
const isCloud = env.isProd() && !env.SELF_HOSTED
const isSelfHost = env.isProd() && env.SELF_HOSTED
export const HOST_ADDRESS = isSelfHost
? "host.docker.internal"
: isCloud
? ""
: "localhost"
export function isExternalTableID(tableId: string) { export function isExternalTableID(tableId: string) {
return tableId.includes(DocumentType.DATASOURCE) return tableId.includes(DocumentType.DATASOURCE)
} }

View File

@ -25,8 +25,6 @@ const clearMigrations = async () => {
} }
} }
jest.setTimeout(10000)
describe("migrations", () => { describe("migrations", () => {
const config = new TestConfig() const config = new TestConfig()

View File

@ -17,8 +17,6 @@ import {
generator, generator,
} from "@budibase/backend-core/tests" } from "@budibase/backend-core/tests"
jest.setTimeout(30000)
describe("external search", () => { describe("external search", () => {
const config = new TestConfiguration() const config = new TestConfiguration()

View File

@ -2,17 +2,11 @@ import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core" import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests" import { testContainerUtils } from "@budibase/backend-core/tests"
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
global.console.warn = jest.fn() // console.warn are ignored in tests
}
if (!process.env.CI) { if (!process.env.CI) {
// set a longer timeout in dev for debugging // set a longer timeout in dev for debugging 100 seconds
// 100 seconds
jest.setTimeout(100 * 1000) jest.setTimeout(100 * 1000)
} else { } else {
jest.setTimeout(10 * 1000) jest.setTimeout(30 * 1000)
} }
testContainerUtils.setupEnv(env, coreEnv) testContainerUtils.setupEnv(env, coreEnv)

View File

@ -1,6 +1,7 @@
import TestConfiguration from "../TestConfiguration" import TestConfiguration from "../TestConfiguration"
import { SuperTest, Test, Response } from "supertest" import request, { SuperTest, Test, Response } from "supertest"
import { ReadStream } from "fs" import { ReadStream } from "fs"
import { getServer } from "../../../app"
type Headers = Record<string, string | string[] | undefined> type Headers = Record<string, string | string[] | undefined>
type Method = "get" | "post" | "put" | "patch" | "delete" type Method = "get" | "post" | "put" | "patch" | "delete"
@ -76,7 +77,8 @@ export abstract class TestAPI {
protected _requestRaw = async ( protected _requestRaw = async (
method: "get" | "post" | "put" | "patch" | "delete", method: "get" | "post" | "put" | "patch" | "delete",
url: string, url: string,
opts?: RequestOpts opts?: RequestOpts,
attempt = 0
): Promise<Response> => { ): Promise<Response> => {
const { const {
headers = {}, headers = {},
@ -107,26 +109,29 @@ export abstract class TestAPI {
const headersFn = publicUser const headersFn = publicUser
? this.config.publicHeaders.bind(this.config) ? this.config.publicHeaders.bind(this.config)
: this.config.defaultHeaders.bind(this.config) : this.config.defaultHeaders.bind(this.config)
let request = this.request[method](url).set(
const app = getServer()
let req = request(app)[method](url)
req = req.set(
headersFn({ headersFn({
"x-budibase-include-stacktrace": "true", "x-budibase-include-stacktrace": "true",
}) })
) )
if (headers) { if (headers) {
request = request.set(headers) req = req.set(headers)
} }
if (body) { if (body) {
request = request.send(body) req = req.send(body)
} }
for (const [key, value] of Object.entries(fields)) { for (const [key, value] of Object.entries(fields)) {
request = request.field(key, value) req = req.field(key, value)
} }
for (const [key, value] of Object.entries(files)) { for (const [key, value] of Object.entries(files)) {
if (isAttachedFile(value)) { if (isAttachedFile(value)) {
request = request.attach(key, value.file, value.name) req = req.attach(key, value.file, value.name)
} else { } else {
request = request.attach(key, value as any) req = req.attach(key, value as any)
} }
} }
if (expectations?.headers) { if (expectations?.headers) {
@ -136,11 +141,25 @@ export abstract class TestAPI {
`Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent` `Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
) )
} }
request = request.expect(key, value as any) req = req.expect(key, value as any)
} }
} }
return await request try {
return await req
} catch (e: any) {
// We've found that occasionally the connection between supertest and the
// server supertest starts gets reset. Not sure why, but retrying it
// appears to work. I don't particularly like this, but it's better than
// flakiness.
if (e.code === "ECONNRESET") {
if (attempt > 2) {
throw e
}
return await this._requestRaw(method, url, opts, attempt + 1)
}
throw e
}
} }
protected _checkResponse = ( protected _checkResponse = (
@ -170,7 +189,18 @@ export abstract class TestAPI {
} }
} }
throw new Error(message) if (response.error) {
// Sometimes the error can be between supertest and the app, and when
// that happens response.error is sometimes populated with `text` that
// gives more detail about the error. The `message` is almost always
// useless from what I've seen.
if (response.error.text) {
response.error.message = response.error.text
}
throw new Error(message, { cause: response.error })
} else {
throw new Error(message)
}
} }
if (expectations?.headersNotPresent) { if (expectations?.headersNotPresent) {

View File

@ -17,6 +17,12 @@ const config = (format, outputFile) => ({
format, format,
file: outputFile, file: outputFile,
}, },
onwarn(warning, warn) {
if (warning.code === "EVAL") {
return
}
warn(warning)
},
plugins: [ plugins: [
typescript(), typescript(),
resolve({ resolve({

View File

@ -68,6 +68,11 @@ export interface CreateAdminUserRequest {
ssoId?: string ssoId?: string
} }
export interface AddSSoUserRequest {
ssoId: string
email: string
}
export interface CreateAdminUserResponse { export interface CreateAdminUserResponse {
_id: string _id: string
_rev: string _rev: string

View File

@ -3,6 +3,7 @@ import env from "../../../environment"
import { import {
AcceptUserInviteRequest, AcceptUserInviteRequest,
AcceptUserInviteResponse, AcceptUserInviteResponse,
AddSSoUserRequest,
BulkUserRequest, BulkUserRequest,
BulkUserResponse, BulkUserResponse,
CloudAccount, CloudAccount,
@ -15,6 +16,7 @@ import {
LockName, LockName,
LockType, LockType,
MigrationType, MigrationType,
PlatformUserByEmail,
SaveUserResponse, SaveUserResponse,
SearchUsersRequest, SearchUsersRequest,
User, User,
@ -53,6 +55,25 @@ export const save = async (ctx: UserCtx<User, SaveUserResponse>) => {
} }
} }
export const addSsoSupport = async (ctx: Ctx<AddSSoUserRequest>) => {
const { email, ssoId } = ctx.request.body
try {
// Status is changed to 404 from getUserDoc if user is not found
let userByEmail = (await platform.users.getUserDoc(
email
)) as PlatformUserByEmail
await platform.users.addSsoUser(
ssoId,
email,
userByEmail.userId,
userByEmail.tenantId
)
ctx.status = 200
} catch (err: any) {
ctx.throw(err.status || 400, err)
}
}
const bulkDelete = async (userIds: string[], currentUserId: string) => { const bulkDelete = async (userIds: string[], currentUserId: string) => {
if (userIds?.indexOf(currentUserId) !== -1) { if (userIds?.indexOf(currentUserId) !== -1) {
throw new Error("Unable to delete self.") throw new Error("Unable to delete self.")
@ -208,7 +229,7 @@ export const search = async (ctx: Ctx<SearchUsersRequest>) => {
} }
// Validate we aren't trying to search on any illegal fields // Validate we aren't trying to search on any illegal fields
if (!userSdk.core.isSupportedUserSearch(body.query)) { if (!userSdk.core.isSupportedUserSearch(body.query)) {
ctx.throw(400, "Can only search by string.email or equal._id") ctx.throw(400, "Can only search by string.email, equal._id or oneOf._id")
} }
} }

View File

@ -41,6 +41,10 @@ const PUBLIC_ENDPOINTS = [
route: "/api/global/users/init", route: "/api/global/users/init",
method: "POST", method: "POST",
}, },
{
route: "/api/global/users/sso",
method: "POST",
},
{ {
route: "/api/global/users/invite/accept", route: "/api/global/users/invite/accept",
method: "POST", method: "POST",
@ -81,6 +85,11 @@ const NO_TENANCY_ENDPOINTS = [
route: "/api/global/users/init", route: "/api/global/users/init",
method: "POST", method: "POST",
}, },
// tenant is retrieved from the user found by the requested email
{
route: "/api/global/users/sso",
method: "POST",
},
// deprecated single tenant sso callback // deprecated single tenant sso callback
{ {
route: "/api/admin/auth/google/callback", route: "/api/admin/auth/google/callback",

View File

@ -520,10 +520,51 @@ describe("/api/global/users", () => {
}) })
} }
function createPasswordUser() {
return config.doInTenant(() => {
const user = structures.users.user()
return userSdk.db.save(user)
})
}
it("should be able to update an sso user that has no password", async () => { it("should be able to update an sso user that has no password", async () => {
const user = await createSSOUser() const user = await createSSOUser()
await config.api.users.saveUser(user) await config.api.users.saveUser(user)
}) })
it("sso support couldn't be used by admin. It is cloud restricted and needs internal key", async () => {
const user = await config.createUser()
const ssoId = "fake-ssoId"
await config.api.users
.addSsoSupportDefaultAuth(ssoId, user.email)
.expect("Content-Type", /json/)
.expect(403)
})
it("if user email doesn't exist, SSO support couldn't be added. Not found error returned", async () => {
const ssoId = "fake-ssoId"
const email = "fake-email@budibase.com"
await config.api.users
.addSsoSupportInternalAPIAuth(ssoId, email)
.expect("Content-Type", /json/)
.expect(404)
})
it("if user email exist, SSO support is added", async () => {
const user = await createPasswordUser()
const ssoId = "fakessoId"
await config.api.users
.addSsoSupportInternalAPIAuth(ssoId, user.email)
.expect(200)
})
it("if user ssoId is already assigned, no change will be applied", async () => {
const user = await createSSOUser()
user.ssoId = "testssoId"
await config.api.users
.addSsoSupportInternalAPIAuth(user.ssoId, user.email)
.expect(200)
})
}) })
}) })
@ -608,6 +649,24 @@ describe("/api/global/users", () => {
expect(response.body.data[0]._id).toBe(user._id) expect(response.body.data[0]._id).toBe(user._id)
}) })
it("should be able to search by oneOf _id", async () => {
const [user, user2, user3] = await Promise.all([
config.createUser(),
config.createUser(),
config.createUser(),
])
const response = await config.api.users.searchUsers({
query: { oneOf: { _id: [user._id, user2._id] } },
})
expect(response.body.data.length).toBe(2)
const foundUserIds = response.body.data.map((user: User) => user._id)
expect(foundUserIds).toContain(user._id)
expect(foundUserIds).toContain(user2._id)
expect(
response.body.data.find((user: User) => user._id === user3._id)
).toBeUndefined()
})
it("should be able to search by _id with numeric prefixing", async () => { it("should be able to search by _id with numeric prefixing", async () => {
const user = await config.createUser() const user = await config.createUser()
const response = await config.api.users.searchUsers({ const response = await config.api.users.searchUsers({

View File

@ -65,6 +65,12 @@ router
users.buildUserSaveValidation(), users.buildUserSaveValidation(),
controller.save controller.save
) )
.post(
"/api/global/users/sso",
cloudRestricted,
users.buildAddSsoSupport(),
controller.addSsoSupport
)
.post( .post(
"/api/global/users/bulk", "/api/global/users/bulk",
auth.adminOnly, auth.adminOnly,

View File

@ -41,6 +41,15 @@ export const buildUserSaveValidation = () => {
return auth.joiValidator.body(Joi.object(schema).required().unknown(true)) return auth.joiValidator.body(Joi.object(schema).required().unknown(true))
} }
export const buildAddSsoSupport = () => {
return auth.joiValidator.body(
Joi.object({
ssoId: Joi.string().required(),
email: Joi.string().required(),
}).required()
)
}
export const buildUserBulkUserValidation = (isSelf = false) => { export const buildUserBulkUserValidation = (isSelf = false) => {
if (!isSelf) { if (!isSelf) {
schema = { schema = {

View File

@ -127,6 +127,20 @@ export class UserAPI extends TestAPI {
.expect(status ? status : 200) .expect(status ? status : 200)
} }
addSsoSupportInternalAPIAuth = (ssoId: string, email: string) => {
return this.request
.post(`/api/global/users/sso`)
.send({ ssoId, email })
.set(this.config.internalAPIHeaders())
}
addSsoSupportDefaultAuth = (ssoId: string, email: string) => {
return this.request
.post(`/api/global/users/sso`)
.send({ ssoId, email })
.set(this.config.defaultHeaders())
}
deleteUser = (userId: string, status?: number) => { deleteUser = (userId: string, status?: number) => {
return this.request return this.request
.delete(`/api/global/users/${userId}`) .delete(`/api/global/users/${userId}`)

5
qa-core/.gitignore vendored
View File

@ -1,5 +0,0 @@
node_modules/
.env
watchtower-hook.json
dist/
testResults.json

View File

@ -1,28 +0,0 @@
# QA Core API Tests
The QA Core API tests are a jest suite that run directly against the budibase backend APIs.
## Auto Setup
You can run the whole test suite with one command, that spins up the budibase server and runs the jest tests:
`yarn test:ci`
## Setup Server
You can run the local development stack by following the instructions on the main readme.
## Run Tests
If you configured the server using the previous command, you can run the whole test suite by using:
`yarn test`
for watch mode, where the tests will run on every change:
`yarn test:watch`
To run tests locally against a cloud service you can update the configuration inside the `.env` file and run:
`yarn test`

View File

@ -1,21 +0,0 @@
import { Config } from "@jest/types"
const config: Config.InitialOptions = {
preset: "ts-jest",
setupFiles: ["./src/jest/jestSetup.ts"],
setupFilesAfterEnv: ["./src/jest/jest.extends.ts"],
testEnvironment: "node",
transform: {
"^.+\\.ts?$": "@swc/jest",
},
globalSetup: "./src/jest/globalSetup.ts",
globalTeardown: "./src/jest/globalTeardown.ts",
moduleNameMapper: {
"@budibase/types": "<rootDir>/../packages/types/src",
"@budibase/server": "<rootDir>/../packages/server/src",
"@budibase/backend-core": "<rootDir>/../packages/backend-core/src",
"@budibase/backend-core/(.*)": "<rootDir>/../packages/backend-core/$1",
},
}
export default config

View File

@ -1,49 +0,0 @@
{
"name": "@budibase/qa-core",
"email": "hi@budibase.com",
"version": "0.0.1",
"main": "index.js",
"description": "Budibase Integration Test Suite",
"repository": {
"type": "git",
"url": "https://github.com/Budibase/budibase.git"
},
"scripts": {
"setup": "yarn && node scripts/createEnv.js",
"user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
"test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
"test:watch": "yarn run test --watch",
"test:debug": "DEBUG=1 yarn run test",
"test:notify": "node scripts/testResultsWebhook",
"test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
"test:cloud:qa": "yarn run test",
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.licensing\\.",
"serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
"serve": "start-server-and-test dev:built http://localhost:4001/health",
"dev:built": "cd ../ && DISABLE_RATE_LIMITING=1 yarn dev:built"
},
"devDependencies": {
"@budibase/types": "^2.3.17",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@trendyol/jest-testcontainers": "2.1.1",
"@types/jest": "29.5.3",
"@types/node-fetch": "2.6.4",
"chance": "1.1.8",
"dotenv": "16.0.1",
"jest": "29.7.0",
"prettier": "2.7.1",
"start-server-and-test": "1.14.0",
"timekeeper": "2.2.0",
"ts-jest": "29.1.1",
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "5.2.2"
},
"dependencies": {
"@budibase/backend-core": "^2.3.17",
"form-data": "^4.0.0",
"node-fetch": "2.6.7",
"stripe": "^14.11.0"
}
}

View File

@ -1,26 +0,0 @@
#!/usr/bin/env node
const path = require("path")
const fs = require("fs")
function init() {
const envFilePath = path.join(process.cwd(), ".env")
if (!fs.existsSync(envFilePath)) {
const envFileJson = {
BUDIBASE_URL: "http://localhost:10000",
ACCOUNT_PORTAL_URL: "http://localhost:10001",
ACCOUNT_PORTAL_API_KEY: "budibase",
BB_ADMIN_USER_EMAIL: "admin",
BB_ADMIN_USER_PASSWORD: "admin",
LOG_LEVEL: "info",
JEST_TIMEOUT: "60000",
DISABLE_PINO_LOGGER: "1",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {
envFile += `${key}=${envFileJson[key]}\n`
})
fs.writeFileSync(envFilePath, envFile)
}
}
init()

View File

@ -1,49 +0,0 @@
const dotenv = require("dotenv")
const { join } = require("path")
const fs = require("fs")
const fetch = require("node-fetch")
function getVarFromDotEnv(path, varName) {
const parsed = dotenv.parse(fs.readFileSync(path))
return parsed[varName]
}
async function createUser() {
const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
const qaCorePath = join(__dirname, "..", ".env")
const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
const resp = await fetch(`${url}/api/public/v1/users`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"x-budibase-api-key": apiKey,
},
body: JSON.stringify({
email: username,
password,
builder: {
global: true,
},
admin: {
global: true,
},
roles: {},
}),
})
if (resp.status !== 200) {
throw new Error(await resp.text())
} else {
return await resp.json()
}
}
createUser()
.then(() => {
console.log("User created - ready to use")
})
.catch(err => {
console.error("Failed to create user - ", err)
})

View File

@ -1,130 +0,0 @@
#!/usr/bin/env node
const fetch = require("node-fetch")
const path = require("path")
const fs = require("fs")
const WEBHOOK_URL = process.env.WEBHOOK_URL
const GIT_SHA = process.env.GITHUB_SHA
const GITHUB_ACTIONS_RUN_URL = process.env.GITHUB_ACTIONS_RUN_URL
async function generateReport() {
// read the report file
const REPORT_PATH = path.resolve(__dirname, "..", "testResults.json")
const report = fs.readFileSync(REPORT_PATH, "utf-8")
return JSON.parse(report)
}
const env = process.argv.slice(2)[0]
if (!env) {
throw new Error("environment argument is required")
}
/**
 * Post a summary of the Jest run to the configured Discord webhook.
 * No-op when the run succeeded — only failures are reported.
 * @param {object} report - Parsed Jest results (testResults.json shape).
 */
async function discordResultsNotification(report) {
  const {
    numTotalTestSuites,
    numTotalTests,
    numPassedTests,
    numPendingTests,
    numFailedTests,
    success,
    startTime,
    endTime,
  } = report
  // Only post in discord when tests fail
  if (success) {
    return
  }
  const OUTCOME = success ? "success" : "failure"
  const options = {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Accept: "application/json",
    },
    body: JSON.stringify({
      content: `**Tests Status**: ${OUTCOME}`,
      embeds: [
        {
          title: `Budi QA Bot - ${env}`,
          description: `API Integration Tests`,
          url: GITHUB_ACTIONS_RUN_URL,
          color: OUTCOME === "success" ? 3066993 : 15548997,
          timestamp: new Date(),
          footer: {
            icon_url: "http://bbui.budibase.com/budibase-logo.png",
            text: "Budibase QA Bot",
          },
          thumbnail: {
            url: "http://bbui.budibase.com/budibase-logo.png",
          },
          author: {
            name: "Budibase QA Bot",
            url: "https://discordapp.com",
            icon_url: "http://bbui.budibase.com/budibase-logo.png",
          },
          // Discord requires embed field values to be strings; numeric
          // counts are stringified explicitly to avoid a 400 response.
          fields: [
            {
              name: "Commit",
              value: `https://github.com/Budibase/budibase/commit/${GIT_SHA}`,
            },
            {
              name: "Github Actions Run URL",
              value: GITHUB_ACTIONS_RUN_URL || "None Supplied",
            },
            {
              name: "Test Suites",
              value: String(numTotalTestSuites),
            },
            {
              name: "Tests",
              value: String(numTotalTests),
            },
            {
              name: "Passed",
              value: String(numPassedTests),
            },
            {
              name: "Pending",
              value: String(numPendingTests),
            },
            {
              name: "Failures",
              value: String(numFailedTests),
            },
            {
              name: "Duration",
              value: endTime
                ? `${(endTime - startTime) / 1000} Seconds`
                : "DNF",
            },
            {
              name: "Pass Percentage",
              value: String(Math.floor((numPassedTests / numTotalTests) * 100)),
            },
          ],
        },
      ],
    }),
  }
  const response = await fetch(WEBHOOK_URL, options)
  // BUGFIX: Discord replies 204 No Content on success; the previous
  // `status >= 201` check logged an error on every successful delivery.
  if (response.status >= 400) {
    const text = await response.text()
    console.error(
      `Error sending discord webhook. \nStatus: ${response.status}. \nResponse Body: ${text}. \nRequest Body: ${options.body}`
    )
  }
}
/** Entry point: build the report and send the Discord notification. */
async function run() {
  const report = await generateReport()
  await discordResultsNotification(report)
}
// Previously a rejection here was an unhandled promise; fail explicitly.
run().catch(err => {
  console.error("Failed to send test results notification - ", err)
  process.exitCode = 1
})

View File

@ -1,20 +0,0 @@
import AccountInternalAPIClient from "./AccountInternalAPIClient"
import { AccountAPI, LicenseAPI, AuthAPI, StripeAPI } from "./apis"
import { State } from "../../types"
/**
 * Aggregates the account-portal internal API wrappers behind one entry
 * point; all wrappers share a single HTTP client bound to the test state.
 */
export default class AccountInternalAPI {
  client: AccountInternalAPIClient
  auth: AuthAPI
  accounts: AccountAPI
  licenses: LicenseAPI
  stripe: StripeAPI
  constructor(state: State) {
    const client = new AccountInternalAPIClient(state)
    this.client = client
    this.auth = new AuthAPI(client)
    this.accounts = new AccountAPI(client)
    this.licenses = new LicenseAPI(client)
    this.stripe = new StripeAPI(client)
  }
}

View File

@ -1,89 +0,0 @@
import fetch, { Response, HeadersInit } from "node-fetch"
import env from "../../environment"
import { State } from "../../types"
import { Header } from "@budibase/backend-core"
// HTTP verbs supported by the client
type APIMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE"
// Per-request options accepted by the client's verb helpers
interface ApiOptions {
  method?: APIMethod
  body?: object
  headers?: HeadersInit | undefined
  // when true, authenticate with the internal API key instead of the
  // session cookie
  internal?: boolean
}
/**
 * Low-level HTTP client for the account portal's internal API.
 * Authenticates with the session cookie from state, or with the internal
 * API key when a request is marked `internal`.
 */
export default class AccountInternalAPIClient {
  state: State
  host: string
  constructor(state: State) {
    if (!env.ACCOUNT_PORTAL_URL) {
      throw new Error("Must set ACCOUNT_PORTAL_URL env var")
    }
    if (!env.ACCOUNT_PORTAL_API_KEY) {
      throw new Error("Must set ACCOUNT_PORTAL_API_KEY env var")
    }
    this.host = `${env.ACCOUNT_PORTAL_URL}`
    this.state = state
  }
  /**
   * Build a verb-specific request function. Returns the response paired
   * with its parsed body (JSON when the content type says so, else text).
   */
  apiCall =
    (method: APIMethod) =>
    async (url = "", options: ApiOptions = {}): Promise<[Response, any]> => {
      const requestOptions = {
        method,
        body: JSON.stringify(options.body),
        headers: {
          "Content-Type": "application/json",
          Accept: "application/json",
          cookie: this.state.cookie,
          ...options.headers,
        },
        // BUGFIX: `redirect` and `follow` are node-fetch request options,
        // not HTTP headers — previously they sat inside `headers` and were
        // sent to the server as literal "redirect"/"follow" headers.
        redirect: "follow",
        follow: 20,
        credentials: "include",
      }
      if (options.internal) {
        // Internal calls authenticate via API key and must not send a cookie
        requestOptions.headers = {
          ...requestOptions.headers,
          ...{ [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY },
          cookie: "",
        }
      }
      // @ts-ignore
      const response = await fetch(`${this.host}${url}`, requestOptions)
      // Parse according to the response content type
      let body: any
      const contentType = response.headers.get("content-type")
      if (contentType && contentType.includes("application/json")) {
        body = await response.json()
      } else {
        body = await response.text()
      }
      const data = {
        request: requestOptions.body,
        response: body,
      }
      const message = `${method} ${url} - ${response.status}`
      const isDebug = process.env.LOG_LEVEL === "debug"
      // Log severity tracks the response status
      if (response.status > 499) {
        console.error(message, data)
      } else if (response.status >= 400) {
        console.warn(message, data)
      } else if (isDebug) {
        console.debug(message, data)
      }
      return [response, body]
    }
  post = this.apiCall("POST")
  get = this.apiCall("GET")
  patch = this.apiCall("PATCH")
  del = this.apiCall("DELETE")
  put = this.apiCall("PUT")
}

View File

@ -1,123 +0,0 @@
import { Response } from "node-fetch"
import {
Account,
CreateAccountRequest,
SearchAccountsRequest,
SearchAccountsResponse,
} from "@budibase/types"
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import { APIRequestOpts } from "../../../types"
import { Header } from "@budibase/backend-core"
import BaseAPI from "./BaseAPI"
/**
 * Wrapper around the account portal's account endpoints. All methods go
 * through BaseAPI.doRequest, which asserts on the expected status code.
 */
export default class AccountAPI extends BaseAPI {
  client: AccountInternalAPIClient
  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }
  // Check whether an email address is free to register with
  async validateEmail(email: string, opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/validate/email`, {
        body: { email },
      })
    }, opts)
  }
  // Check whether a tenant ID is free to register with
  async validateTenantId(
    tenantId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/validate/tenantId`, {
        body: { tenantId },
      })
    }, opts)
  }
  /**
   * Create a new account. When opts.autoVerify is set, the "no-verify"
   * header asks the portal to skip email verification.
   */
  async create(
    body: CreateAccountRequest,
    opts: APIRequestOpts & { autoVerify: boolean } = {
      status: 201,
      autoVerify: false,
    }
  ): Promise<[Response, Account]> {
    return this.doRequest(() => {
      const headers = {
        "no-verify": opts.autoVerify ? "1" : "0",
      }
      return this.client.post(`/api/accounts`, {
        body,
        headers,
      })
    }, opts)
  }
  // Delete an account by ID (internal API-key authentication)
  async delete(accountID: string, opts: APIRequestOpts = { status: 204 }) {
    return this.doRequest(() => {
      return this.client.del(`/api/accounts/${accountID}`, {
        internal: true,
      })
    }, opts)
  }
  // Delete the account associated with the current session
  async deleteCurrentAccount(opts: APIRequestOpts = { status: 204 }) {
    return this.doRequest(() => {
      return this.client.del(`/api/accounts`)
    }, opts)
  }
  // Submit an email verification code
  async verifyAccount(
    verificationCode: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/verify`, {
        body: { verificationCode },
      })
    }, opts)
  }
  /**
   * Re-send the verification email; the request header asks the portal to
   * echo the code back so tests can use it. Returns [response, code].
   */
  async sendVerificationEmail(
    email: string,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [response] = await this.client.post(`/api/accounts/verify/send`, {
        body: { email },
        headers: {
          [Header.RETURN_VERIFICATION_CODE]: "1",
        },
      })
      const code = response.headers.get(Header.VERIFICATION_CODE)
      return [response, code]
    }, opts)
  }
  /**
   * Search accounts by email or tenant ID (internal authentication).
   * `searchType` is the value to search for; `search` picks the field.
   */
  async search(
    searchType: string,
    search: "email" | "tenantId",
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, SearchAccountsResponse]> {
    return this.doRequest(() => {
      let body: SearchAccountsRequest = {}
      if (search === "email") {
        body.email = searchType
      } else if (search === "tenantId") {
        body.tenantId = searchType
      }
      return this.client.post(`/api/accounts/search`, {
        body,
        internal: true,
      })
    }, opts)
  }
  // Fetch the account belonging to the current session
  async self(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/auth/self`)
    }, opts)
  }
}

View File

@ -1,68 +0,0 @@
import { Response } from "node-fetch"
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import { APIRequestOpts } from "../../../types"
import BaseAPI from "./BaseAPI"
import { Header } from "@budibase/backend-core"
/**
 * Wrapper around the account portal's authentication endpoints.
 */
export default class AuthAPI extends BaseAPI {
  client: AccountInternalAPIClient
  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }
  /**
   * Log in with an email/password pair.
   * @returns the response plus the session cookie from `set-cookie`.
   */
  async login(
    email: string,
    password: string,
    opts: APIRequestOpts = { doExpect: true, status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [response] = await this.client.post(`/api/auth/login`, {
        body: { email, password },
      })
      const sessionCookie = response.headers.get("set-cookie")
      return [response, sessionCookie]
    }, opts)
  }
  /** End the current session. */
  async logout(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(
      () => this.client.post(`/api/auth/logout`),
      opts
    )
  }
  /**
   * Request a password reset; the request header asks the portal to echo
   * the reset code back so tests can use it. Returns [response, code].
   */
  async resetPassword(
    email: string,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [response] = await this.client.post(`/api/auth/reset`, {
        body: { email },
        headers: {
          [Header.RETURN_RESET_PASSWORD_CODE]: "1",
        },
      })
      const resetCode = response.headers.get(Header.RESET_PASSWORD_CODE)
      return [response, resetCode]
    }, opts)
  }
  /** Complete a password reset using the emailed code. */
  async resetPasswordUpdate(
    resetCode: string,
    password: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    const body = { resetCode, password }
    return this.doRequest(
      () => this.client.post(`/api/auth/reset/update`, { body }),
      opts
    )
  }
}

View File

@ -1,20 +0,0 @@
import { Response } from "node-fetch"
import { APIRequestOpts } from "../../../types"
export default class BaseAPI {
async doRequest(
request: () => Promise<[Response, any]>,
opts: APIRequestOpts
): Promise<[Response, any]> {
const [response, body] = await request()
// do expect on by default
if (opts.doExpect === undefined) {
opts.doExpect = true
}
if (opts.doExpect && opts.status) {
expect(response).toHaveStatusCode(opts.status)
}
return [response, body]
}
}

View File

@ -1,140 +0,0 @@
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import {
Account,
CreateOfflineLicenseRequest,
GetLicenseKeyResponse,
GetOfflineLicenseResponse,
UpdateLicenseRequest,
} from "@budibase/types"
import { Response } from "node-fetch"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"
/**
 * Wrapper around the account portal's license endpoints. Some methods use
 * BaseAPI.doRequest; the internal-only ones assert status inline.
 */
export default class LicenseAPI extends BaseAPI {
  client: AccountInternalAPIClient
  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }
  // Overwrite license fields on an account (internal authentication)
  async updateLicense(
    accountId: string,
    body: UpdateLicenseRequest,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, Account]> {
    return this.doRequest(() => {
      return this.client.put(`/api/accounts/${accountId}/license`, {
        body,
        internal: true,
      })
    }, opts)
  }
  // TODO: Better approach for setting tenant id header
  async createOfflineLicense(
    accountId: string,
    tenantId: string,
    body: CreateOfflineLicenseRequest,
    opts: { status?: number } = {}
  ): Promise<Response> {
    // unused body binding removed; `??` keeps explicit status overrides
    const [response] = await this.client.post(
      `/api/internal/accounts/${accountId}/license/offline`,
      {
        body,
        internal: true,
        headers: {
          "x-budibase-tenant-id": tenantId,
        },
      }
    )
    expect(response.status).toBe(opts.status ?? 201)
    return response
  }
  // Fetch the offline license for an account/tenant pair
  async getOfflineLicense(
    accountId: string,
    tenantId: string,
    opts: { status?: number } = {}
  ): Promise<[Response, GetOfflineLicenseResponse]> {
    const [response, json] = await this.client.get(
      `/api/internal/accounts/${accountId}/license/offline`,
      {
        internal: true,
        headers: {
          "x-budibase-tenant-id": tenantId,
        },
      }
    )
    expect(response.status).toBe(opts.status ?? 200)
    return [response, json]
  }
  // Fetch the license key for the current session's account
  async getLicenseKey(
    opts: { status?: number } = {}
  ): Promise<[Response, GetLicenseKeyResponse]> {
    const [response, json] = await this.client.get(`/api/license/key`)
    expect(response.status).toBe(opts.status ?? 200)
    return [response, json]
  }
  // Activate a license key against an installation
  async activateLicense(
    apiKey: string,
    tenantId: string,
    licenseKey: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/license/activate`, {
        body: {
          apiKey: apiKey,
          tenantId: tenantId,
          licenseKey: licenseKey,
        },
      })
    }, opts)
  }
  // Rotate the current account's license key
  async regenerateLicenseKey(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/license/key/regenerate`, {})
    }, opts)
  }
  // List available plans
  async getPlans(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/plans`)
    }, opts)
  }
  // Move the current account onto the plan identified by priceId
  async updatePlan(priceId: string, opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.put(`/api/license/plan`, {
        body: { priceId },
      })
    }, opts)
  }
  // Force a license refresh for an account (internal authentication)
  async refreshAccountLicense(
    accountId: string,
    opts: { status?: number } = {}
  ): Promise<Response> {
    const [response] = await this.client.post(
      `/api/accounts/${accountId}/license/refresh`,
      {
        internal: true,
      }
    )
    expect(response.status).toBe(opts.status ?? 201)
    return response
  }
  // Fetch usage metrics for the current license
  async getLicenseUsage(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/license/usage`)
    }, opts)
  }
  // Flush triggered usage events
  async licenseUsageTriggered(
    opts: { status?: number } = {}
  ): Promise<Response> {
    const [response] = await this.client.post(`/api/license/usage/triggered`)
    expect(response.status).toBe(opts.status ?? 201)
    return response
  }
}

View File

@ -1,74 +0,0 @@
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"
/**
 * Wrapper around the account portal's Stripe billing endpoints.
 */
export default class StripeAPI extends BaseAPI {
  client: AccountInternalAPIClient
  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }
  /** Start a Stripe checkout session for a single price. */
  async createCheckoutSession(
    price: object,
    opts: APIRequestOpts = { status: 200 }
  ) {
    const body = { prices: [price] }
    return this.doRequest(
      () => this.client.post(`/api/stripe/checkout-session`, { body }),
      opts
    )
  }
  /** Confirm a completed checkout. */
  async checkoutSuccess(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(
      () => this.client.post(`/api/stripe/checkout-success`),
      opts
    )
  }
  /** Open a Stripe billing portal session for the given customer. */
  async createPortalSession(
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    const body = { stripeCustomerId }
    return this.doRequest(
      () => this.client.post(`/api/stripe/portal-session`, { body }),
      opts
    )
  }
  /** Associate a Stripe customer with an account (internal auth). */
  async linkStripeCustomer(
    accountId: string,
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    const body = { accountId, stripeCustomerId }
    return this.doRequest(
      () => this.client.post(`/api/stripe/link`, { body, internal: true }),
      opts
    )
  }
  /** List past invoices for the current account. */
  async getInvoices(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => this.client.get(`/api/stripe/invoices`), opts)
  }
  /** Fetch the upcoming invoice preview. */
  async getUpcomingInvoice(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(
      () => this.client.get(`/api/stripe/upcoming-invoice`),
      opts
    )
  }
  /** List Stripe customers. */
  async getStripeCustomers(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => this.client.get(`/api/stripe/customers`), opts)
  }
}

View File

@ -1,4 +0,0 @@
// Barrel exports for the account internal API wrappers
export { default as AuthAPI } from "./AuthAPI"
export { default as AccountAPI } from "./AccountAPI"
export { default as LicenseAPI } from "./LicenseAPI"
export { default as StripeAPI } from "./StripeAPI"

View File

@ -1 +0,0 @@
export { default as AccountInternalAPI } from "./AccountInternalAPI"

View File

@ -1,29 +0,0 @@
import { AccountInternalAPI } from "../api"
import { BudibaseTestConfiguration } from "../../shared"
/**
 * Test configuration for account-portal suites: extends the shared
 * Budibase configuration with an AccountInternalAPI client and a typed
 * per-suite context object.
 */
export default class TestConfiguration<T> extends BudibaseTestConfiguration {
  // apis
  api: AccountInternalAPI
  context: T
  constructor() {
    super()
    this.api = new AccountInternalAPI(this.state)
    this.context = <T>{}
  }
  // Base setup, then grab an API key for subsequent internal calls
  async beforeAll() {
    await super.beforeAll()
    await this.setApiKey()
  }
  async afterAll() {
    await super.afterAll()
  }
  // Store the current user's API key on the shared state
  async setApiKey() {
    const { apiKey } = await this.internalApi.self.getApiKey()
    this.state.apiKey = apiKey
  }
}

View File

@ -1,24 +0,0 @@
import { generator } from "../../shared"
import { Hosting, CreateAccountRequest } from "@budibase/types"
// TODO: Refactor me to central location
/**
 * Build a CreateAccountRequest with unique email/tenant values; any field
 * can be overridden via the supplied partial.
 */
export const generateAccount = (
  partial: Partial<CreateAccountRequest>
): CreateAccountRequest => {
  const uuid = generator.guid()
  const email = `${uuid}@budibase.com`
  const tenant = `tenant${uuid.replace(/-/g, "")}`
  const defaults: CreateAccountRequest = {
    email,
    hosting: Hosting.CLOUD,
    name: email,
    password: uuid,
    profession: "software_engineer",
    size: "10+",
    tenantId: tenant,
    tenantName: tenant,
  }
  // Later keys win, so the partial overrides the generated defaults
  return { ...defaults, ...partial }
}

View File

@ -1 +0,0 @@
export * as accounts from "./accounts"

View File

@ -1 +0,0 @@
export * from "./api"

View File

@ -1,32 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"
// E2E coverage for internal (API-key authenticated) account operations.
describe("Account Internal Operations", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  it("performs account deletion by ID", async () => {
    // Deleting by unknown id doesn't work
    const accountId = generator.guid()
    await config.api.accounts.delete(accountId, { status: 404 })
    // Create new account
    const [_, account] = await config.api.accounts.create({
      ...fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      }),
    })
    // New account can be deleted
    await config.api.accounts.delete(account.accountId)
  })
})

View File

@ -1,102 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"
// E2E signup/verification/deletion flow plus account search coverage.
describe("Accounts", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  it("performs signup and deletion flow", async () => {
    // doInNewState isolates the session/state created during the flow
    await config.doInNewState(async () => {
      // Create account
      const createAccountRequest = fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      })
      const email = createAccountRequest.email
      const tenantId = createAccountRequest.tenantId
      // Validation - email and tenant ID allowed
      await config.api.accounts.validateEmail(email)
      await config.api.accounts.validateTenantId(tenantId)
      // Create unverified account
      await config.api.accounts.create(createAccountRequest)
      // Validation - email and tenant ID no longer valid
      await config.api.accounts.validateEmail(email, { status: 400 })
      await config.api.accounts.validateTenantId(tenantId, { status: 400 })
      // Attempt to log in using unverified account
      await config.loginAsAccount(createAccountRequest, { status: 400 })
      // Re-send verification email to get access to code
      const [_, code] = await config.accountsApi.accounts.sendVerificationEmail(
        email
      )
      // Send the verification request
      await config.accountsApi.accounts.verifyAccount(code!)
      // Verify self response is unauthorized
      await config.api.accounts.self({ status: 403 })
      // Can now log in to the account
      await config.loginAsAccount(createAccountRequest)
      // Verify self response matches account
      // NOTE(review): selfRes is unused — candidate for cleanup
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.email).toBe(email)
      // Delete account
      await config.api.accounts.deleteCurrentAccount()
      // Can't log in
      await config.loginAsAccount(createAccountRequest, { status: 403 })
    })
  })
  describe("Searching accounts", () => {
    it("search by tenant ID", async () => {
      const tenantId = generator.string()
      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(
        tenantId,
        "tenantId"
      )
      expect(emptyBody.length).toBe(0)
      // Hit result
      // NOTE(review): hitRes is unused — candidate for cleanup
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.tenantId!,
        "tenantId"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].tenantId).toBe(config.state.tenantId)
    })
    it("searches by email", async () => {
      const email = generator.email({ domain: "example.com" })
      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(email, "email")
      expect(emptyBody.length).toBe(0)
      // Hit result
      // NOTE(review): hitRes is unused — candidate for cleanup
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.email!,
        "email"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].email).toBe(config.state.email)
    })
  })
})

View File

@ -1,46 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"
// E2E password reset flow: request code, change password, verify logins.
describe("Password Management", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  it("performs password reset flow", async () => {
    // Create account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    await config.api.accounts.create(createAccountRequest, { autoVerify: true })
    // Request password reset to get code
    const [_, code] = await config.api.auth.resetPassword(
      createAccountRequest.email
    )
    // Change password using code
    const password = generator.string()
    await config.api.auth.resetPasswordUpdate(code, password)
    // Login using the new password
    await config.api.auth.login(createAccountRequest.email, password)
    // Logout of account
    await config.api.auth.logout()
    // Cannot log in using old password
    await config.api.auth.login(
      createAccountRequest.email,
      createAccountRequest.password,
      { status: 403 }
    )
  })
})

View File

@ -1,68 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixures from "../../fixtures"
import { Feature, Hosting } from "@budibase/types"
// E2E license key lifecycle against a self-host install + account portal.
describe("license activation", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  it("creates, activates and deletes online license - self host", async () => {
    // Remove existing license key
    await config.internalApi.license.deleteLicenseKey()
    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })
    // Create self host account
    const createAccountRequest = fixures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    // NOTE(review): createAccountRes is unused — candidate for cleanup
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })
    // NOTE(review): initialised to a placeholder so the variable is
    // definitely assigned before the closure below writes it — confirm
    let licenseKey: string = " "
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      // Retrieve license key
      const [res, body] = await config.accountsApi.licenses.getLicenseKey()
      licenseKey = body.licenseKey
    })
    const accountId = account.accountId!
    // Update license to have paid feature
    const [res, acc] = await config.accountsApi.licenses.updateLicense(
      accountId,
      {
        overrides: {
          features: [Feature.APP_BACKUPS],
        },
      }
    )
    // Activate license key
    await config.internalApi.license.activateLicenseKey({ licenseKey })
    // Verify license updated with new feature
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, body] = await config.api.accounts.self()
      expect(body.license.features[0]).toBe("appBackups")
    })
    // Remove license key
    await config.internalApi.license.deleteLicenseKey()
    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })
  })
})

View File

@ -1,116 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, PlanType } from "@budibase/types"
const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY)
// E2E plan upgrade flow via Stripe test APIs: free -> premium -> business.
describe("license management", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  it("retrieves plans, creates checkout session, and updates license", async () => {
    // Create cloud account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })
    // Self response has free license
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.license.plan.type).toBe(PlanType.FREE)
    })
    // Retrieve plans
    const [plansRes, planBody] = await config.api.licenses.getPlans()
    // Select priceId from premium plan
    // BUGFIX: `premiumPrice` needs a widened type for later member access,
    // and `businessPriceId` previously declared a string-literal *type*
    // (`let businessPriceId: ""`) with no initializer instead of assigning
    // an empty string.
    let premiumPrice: any = null
    let businessPriceId = ""
    for (const plan of planBody) {
      if (plan.type === PlanType.PREMIUM_PLUS) {
        premiumPrice = plan.prices[0]
      }
      if (plan.type === PlanType.ENTERPRISE_BASIC) {
        businessPriceId = plan.prices[0].priceId
      }
    }
    // Create checkout session for price
    const checkoutSessionRes = await config.api.stripe.createCheckoutSession({
      id: premiumPrice.priceId,
      type: premiumPrice.type,
    })
    const checkoutSessionUrl = checkoutSessionRes[1].url
    expect(checkoutSessionUrl).toContain("checkout.stripe.com")
    // Create stripe customer
    const customer = await stripe.customers.create({
      email: createAccountRequest.email,
    })
    // Create payment method
    const paymentMethod = await stripe.paymentMethods.create({
      type: "card",
      card: {
        token: "tok_visa", // Test Visa Card
      },
    })
    // Attach payment method to customer
    await stripe.paymentMethods.attach(paymentMethod.id, {
      customer: customer.id,
    })
    // Update customer
    await stripe.customers.update(customer.id, {
      invoice_settings: {
        default_payment_method: paymentMethod.id,
      },
    })
    // Create subscription for premium plan
    const subscription = await stripe.subscriptions.create({
      customer: customer.id,
      items: [
        {
          price: premiumPrice.priceId,
          quantity: 1,
        },
      ],
      default_payment_method: paymentMethod.id,
      collection_method: "charge_automatically",
    })
    await config.doInNewState(async () => {
      // License updated from Free to Premium
      await config.loginAsAccount(createAccountRequest)
      await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
      const [_, selfBodyPremium] = await config.api.accounts.self()
      expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM_PLUS)
      // Create portal session - Check URL
      const [portalRes, portalSessionBody] =
        await config.api.stripe.createPortalSession(customer.id)
      expect(portalSessionBody.url).toContain("billing.stripe.com")
      // Update subscription from premium to business license
      await config.api.licenses.updatePlan(businessPriceId)
      // License updated to Business
      const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
      expect(selfBodyBusiness.license.plan.type).toBe(PlanType.ENTERPRISE_BASIC)
    })
  })
})

View File

@ -1,79 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixures from "../../fixtures"
import { Hosting, Feature } from "@budibase/types"
// E2E offline license lifecycle (requires self-host install + portal).
describe("offline", () => {
  const config = new TestConfiguration()
  beforeAll(async () => {
    await config.beforeAll()
  })
  afterAll(async () => {
    await config.afterAll()
  })
  // TODO: Currently requires a self host install + account portal
  // Ignored until we set this up
  it.skip("creates, activates and deletes offline license", async () => {
    // installation: Delete any token
    await config.internalApi.license.deleteOfflineLicenseToken()
    // installation: Assert token not found
    let [getTokenRes] = await config.internalApi.license.getOfflineLicenseToken(
      { status: 404 }
    )
    // installation: Retrieve Identifier
    const [getIdentifierRes, identifier] =
      await config.internalApi.license.getOfflineIdentifier()
    // account-portal: Create self-host account
    const createAccountRequest = fixures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest)
    const accountId = account.accountId!
    const tenantId = account.tenantId!
    // account-portal: Enable feature on license
    await config.accountsApi.licenses.updateLicense(accountId, {
      overrides: {
        features: [Feature.OFFLINE],
      },
    })
    // account-portal: Create offline token
    // token is valid until this time tomorrow
    const expireAt = new Date()
    expireAt.setDate(new Date().getDate() + 1)
    await config.accountsApi.licenses.createOfflineLicense(
      accountId,
      tenantId,
      {
        expireAt: expireAt.toISOString(),
        installationIdentifierBase64: identifier.identifierBase64,
      }
    )
    // account-portal: Retrieve offline token
    const [getLicenseRes, offlineLicense] =
      await config.accountsApi.licenses.getOfflineLicense(accountId, tenantId)
    // installation: Activate offline token
    await config.internalApi.license.activateOfflineLicenseToken({
      offlineLicenseToken: offlineLicense.offlineLicenseToken,
    })
    // installation: Assert token found
    await config.internalApi.license.getOfflineLicenseToken()
    // TODO: Assert on license for current user
    // installation: Remove the token
    await config.internalApi.license.deleteOfflineLicenseToken()
    // installation: Assert token not found
    await config.internalApi.license.getOfflineLicenseToken({ status: 404 })
  })
})

Some files were not shown because too many files have changed in this diff Show More