Merge branch 'master' of github.com:Budibase/budibase into labday/sqs
commit d11c316572
@@ -34,7 +34,6 @@
   },
   {
     "files": ["**/*.ts"],
-    "excludedFiles": ["qa-core/**"],
     "parser": "@typescript-eslint/parser",
     "plugins": ["@typescript-eslint"],
     "extends": ["eslint:recommended"],
@@ -49,7 +48,6 @@
   },
   {
     "files": ["**/*.spec.ts"],
-    "excludedFiles": ["qa-core/**"],
     "parser": "@typescript-eslint/parser",
     "plugins": ["jest", "@typescript-eslint"],
     "extends": ["eslint:recommended", "plugin:jest/recommended"],

@@ -91,6 +91,9 @@ jobs:
   test-libraries:
     runs-on: ubuntu-latest
+    env:
+      DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4

@@ -104,6 +107,14 @@ jobs:
         with:
           node-version: 20.x
           cache: yarn
+      - name: Pull testcontainers images
+        run: |
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb &
+          docker pull redis &
+
+          wait $(jobs -p)
+
       - run: yarn --frozen-lockfile
       - name: Test
         run: |
@@ -138,9 +149,10 @@ jobs:
           fi

   test-server:
-    runs-on: ubuntu-latest
+    runs-on: budi-tubby-tornado-quad-core-150gb
     env:
       DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4

@@ -157,13 +169,16 @@ jobs:
       - name: Pull testcontainers images
         run: |
-          docker pull mcr.microsoft.com/mssql/server:2022-latest
-          docker pull mysql:8.3
-          docker pull postgres:16.1-bullseye
-          docker pull mongo:7.0-jammy
-          docker pull mariadb:lts
-          docker pull testcontainers/ryuk:0.5.1
-          docker pull budibase/couchdb
+          docker pull mcr.microsoft.com/mssql/server:2022-latest &
+          docker pull mysql:8.3 &
+          docker pull postgres:16.1-bullseye &
+          docker pull mongo:7.0-jammy &
+          docker pull mariadb:lts &
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb &
+          docker pull redis &
+
+          wait $(jobs -p)

       - run: yarn --frozen-lockfile
@@ -175,35 +190,6 @@ jobs:
             yarn test --scope=@budibase/server
           fi

-  integration-test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v4
-        with:
-          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-
-      - name: Use Node.js 20.x
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: yarn
-      - run: yarn --frozen-lockfile
-      - name: Build packages
-        run: yarn build --scope @budibase/server --scope @budibase/worker
-      - name: Build backend-core for OSS contributor (required for pro)
-        if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
-        run: yarn build --scope @budibase/backend-core
-      - name: Run tests
-        run: |
-          cd qa-core
-          yarn setup
-          yarn serve:test:self:ci
-        env:
-          BB_ADMIN_USER_EMAIL: admin
-          BB_ADMIN_USER_PASSWORD: admin
-
   check-pro-submodule:
     runs-on: ubuntu-latest
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')

@@ -69,7 +69,6 @@ typings/

 # dotenv environment variables file
 .env
-!qa-core/.env
 !hosting/.env

 # parcel-bundler cache (https://parceljs.org/)

@@ -1,25 +1,47 @@
 import { GenericContainer, Wait } from "testcontainers"
+import path from "path"
+import lockfile from "proper-lockfile"

 export default async function setup() {
-  await new GenericContainer("budibase/couchdb")
-    .withExposedPorts(5984)
-    .withEnvironment({
-      COUCHDB_PASSWORD: "budibase",
-      COUCHDB_USER: "budibase",
-    })
-    .withCopyContentToContainer([
-      {
-        content: `
+  const lockPath = path.resolve(__dirname, "globalSetup.ts")
+  if (process.env.REUSE_CONTAINERS) {
+    // If you run multiple tests at the same time, it's possible for the CouchDB
+    // shared container to get started multiple times despite having an
+    // identical reuse hash. To avoid that, we do a filesystem-based lock so
+    // that only one globalSetup.ts is running at a time.
+    lockfile.lockSync(lockPath)
+  }
+
+  try {
+    let couchdb = new GenericContainer("budibase/couchdb")
+      .withExposedPorts(5984)
+      .withEnvironment({
+        COUCHDB_PASSWORD: "budibase",
+        COUCHDB_USER: "budibase",
+      })
+      .withCopyContentToContainer([
+        {
+          content: `
 [log]
 level = warn
 `,
-        target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
-      },
-    ])
-    .withWaitStrategy(
-      Wait.forSuccessfulCommand(
-        "curl http://budibase:budibase@localhost:5984/_up"
-      ).withStartupTimeout(20000)
-    )
-    .start()
+          target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
+        },
+      ])
+      .withWaitStrategy(
+        Wait.forSuccessfulCommand(
+          "curl http://budibase:budibase@localhost:5984/_up"
+        ).withStartupTimeout(20000)
+      )
+
+    if (process.env.REUSE_CONTAINERS) {
+      couchdb = couchdb.withReuse()
+    }
+
+    await couchdb.start()
+  } finally {
+    if (process.env.REUSE_CONTAINERS) {
+      lockfile.unlockSync(lockPath)
+    }
+  }
 }

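A note on the pattern above: testcontainers' withReuse() makes start() hand back an already-running container whose configuration hash matches, so the filesystem lock only has to serialize the first concurrent startup. A minimal standalone sketch of that lock pattern, assuming proper-lockfile's sync API (the helper name is ours, not part of the commit):

import lockfile from "proper-lockfile"

async function withFileLock<T>(file: string, fn: () => Promise<T>): Promise<T> {
  // lockSync creates `<file>.lock` and throws if another process already
  // holds it; the target file must exist. It returns a release function.
  const release = lockfile.lockSync(file)
  try {
    return await fn()
  } finally {
    release()
  }
}
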
@@ -1,5 +1,5 @@
 {
-  "version": "2.22.15",
+  "version": "2.22.16",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

package.json
@@ -7,6 +7,7 @@
     "@babel/preset-env": "^7.22.5",
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
     "@types/node": "20.10.0",
+    "@types/proper-lockfile": "^4.1.4",
     "@typescript-eslint/parser": "6.9.0",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.8.0",

@@ -23,6 +24,7 @@
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
+    "proper-lockfile": "^4.1.2",
     "svelte": "^4.2.10",
     "svelte-eslint-parser": "^0.33.1",
     "typescript": "5.2.2",

@@ -58,11 +60,11 @@
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
-    "lint:eslint": "eslint packages qa-core --max-warnings=0",
-    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:eslint": "eslint packages --max-warnings=0",
+    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint": "yarn run lint:eslint && yarn run lint:prettier",
-    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core",
-    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
+    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
     "build:specs": "lerna run --stream specs",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",

@@ -1 +1 @@
-Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
+Subproject commit 532c4db35cecd346b5c24f0b89ab7b397a122a36

@@ -1,6 +1,7 @@
 import { DatabaseImpl } from "../../../src/db"
 import { execSync } from "child_process"

+const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")
+
 interface ContainerInfo {
   Command: string
   CreatedAt: string

@@ -19,7 +20,10 @@ interface ContainerInfo {
 }

 function getTestcontainers(): ContainerInfo[] {
-  return execSync("docker ps --format json")
+  // We use --format json to make sure the output is nice and machine-readable,
+  // and we use --no-trunc so that the command returns full container IDs so we
+  // can filter on them correctly.
+  return execSync("docker ps --format json --no-trunc")
     .toString()
     .split("\n")
     .filter(x => x.length > 0)

@@ -27,32 +31,55 @@ function getTestcontainers(): ContainerInfo[] {
     .filter(x => x.Labels.includes("org.testcontainers=true"))
 }

-function getContainerByImage(image: string) {
-  return getTestcontainers().find(x => x.Image.startsWith(image))
+export function getContainerByImage(image: string) {
+  const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
+  if (containers.length > 1) {
+    let errorMessage = `Multiple containers found starting with image: "${image}"\n\n`
+    for (const container of containers) {
+      errorMessage += JSON.stringify(container, null, 2)
+    }
+    throw new Error(errorMessage)
+  }
+  return containers[0]
+}
+
+export function getContainerById(id: string) {
+  return getTestcontainers().find(x => x.ID === id)
 }

-function getExposedPort(container: ContainerInfo, port: number) {
-  const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`))
-  if (!match) {
-    return undefined
-  }
-  return parseInt(match[1])
+export interface Port {
+  host: number
+  container: number
+}
+
+export function getExposedV4Ports(container: ContainerInfo): Port[] {
+  let ports: Port[] = []
+  for (const match of container.Ports.matchAll(IPV4_PORT_REGEX)) {
+    ports.push({ host: parseInt(match[1]), container: parseInt(match[2]) })
+  }
+  return ports
+}
+
+export function getExposedV4Port(container: ContainerInfo, port: number) {
+  return getExposedV4Ports(container).find(x => x.container === port)?.host
 }

 export function setupEnv(...envs: any[]) {
+  // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
+  // should be relatively safe to look for it by its image name.
   const couch = getContainerByImage("budibase/couchdb")
   if (!couch) {
     throw new Error("CouchDB container not found")
   }

-  const couchPort = getExposedPort(couch, 5984)
+  const couchPort = getExposedV4Port(couch, 5984)
   if (!couchPort) {
     throw new Error("CouchDB port not found")
   }

   const configs = [
     { key: "COUCH_DB_PORT", value: `${couchPort}` },
-    { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` },
+    { key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
   ]

   for (const config of configs.filter(x => !!x.value)) {

@@ -60,7 +87,4 @@ export function setupEnv(...envs: any[]) {
     env._set(config.key, config.value)
   }
-
-  // @ts-expect-error
-  DatabaseImpl.nano = undefined
-}
+}

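To illustrate what the new helpers parse: docker ps reports port mappings in its Ports column as strings like 0.0.0.0:32768->5984/tcp, and IPV4_PORT_REGEX captures the host and container side of each IPv4 mapping. A small sketch with a made-up Ports value:

const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

// Hypothetical Ports string for a CouchDB testcontainer.
const ports = "0.0.0.0:32768->5984/tcp, :::32768->5984/tcp"

for (const match of ports.matchAll(IPV4_PORT_REGEX)) {
  // match[1] is the host port, match[2] is the container port.
  console.log({ host: parseInt(match[1]), container: parseInt(match[2]) })
}
// => { host: 32768, container: 5984 } (the IPv6 ":::" mapping is ignored)
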
@@ -72,7 +72,7 @@
     "fast-json-patch": "^3.1.1",
     "json-format-highlight": "^1.0.4",
     "lodash": "4.17.21",
-    "posthog-js": "^1.36.0",
+    "posthog-js": "^1.116.6",
     "remixicon": "2.5.0",
     "sanitize-html": "^2.7.0",
     "shortid": "2.2.15",

@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
 import { FIELDS } from "constants/backend"
 import { tables } from "stores/builder"
 import { get as svelteGet } from "svelte/store"

@@ -5,14 +6,12 @@ import { get as svelteGet } from "svelte/store"
 // currently supported level of relationship depth (server side)
 const MAX_DEPTH = 1

-//https://github.com/Budibase/budibase/issues/3030
-const internalType = "internal"
-
 const TYPES_TO_SKIP = [
-  FIELDS.FORMULA.type,
-  FIELDS.LONGFORM.type,
-  FIELDS.ATTACHMENT.type,
-  internalType,
+  FieldType.FORMULA,
+  FieldType.LONGFORM,
+  FieldType.ATTACHMENT,
+  //https://github.com/Budibase/budibase/issues/3030
+  FieldType.INTERNAL,
 ]

 export function getBindings({

@@ -26,7 +25,7 @@ export function getBindings({
     return bindings
   }
   for (let [column, schema] of Object.entries(table.schema)) {
-    const isRelationship = schema.type === FIELDS.LINK.type
+    const isRelationship = schema.type === FieldType.LINK
     // skip relationships after a certain depth and types which
     // can't bind to
     if (

@@ -1,4 +1,4 @@
-import { RelationshipType } from "constants/backend"
+import { RelationshipType } from "@budibase/types"

 const typeMismatch = "Column type of the foreign key must match the primary key"
 const columnBeingUsed = "Column name cannot be an existing column"

@@ -49,7 +49,10 @@
     label: "Long Form Text",
     value: FIELDS.LONGFORM.type,
   },
+  {
+    label: "Attachment",
+    value: FIELDS.ATTACHMENT.type,
+  },
   {
     label: "User",
     value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,

@@ -12,7 +12,7 @@ const getDefaultSchema = rows => {
       newSchema[column] = {
         name: column,
         type: "string",
-        constraints: FIELDS["STRING"].constraints,
+        constraints: FIELDS.STRING.constraints,
       }
     })
   })

@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
 export const convertOldFieldFormat = fields => {
   if (!fields) {
     return []

@@ -31,17 +33,17 @@ export const getComponentForField = (field, schema) => {
 }

 export const FieldTypeToComponentMap = {
-  string: "stringfield",
-  number: "numberfield",
-  bigint: "bigintfield",
-  options: "optionsfield",
-  array: "multifieldselect",
-  boolean: "booleanfield",
-  longform: "longformfield",
-  datetime: "datetimefield",
-  attachment: "attachmentfield",
-  link: "relationshipfield",
-  json: "jsonfield",
-  barcodeqr: "codescanner",
-  bb_reference: "bbreferencefield",
+  [FieldType.STRING]: "stringfield",
+  [FieldType.NUMBER]: "numberfield",
+  [FieldType.BIGINT]: "bigintfield",
+  [FieldType.OPTIONS]: "optionsfield",
+  [FieldType.ARRAY]: "multifieldselect",
+  [FieldType.BOOLEAN]: "booleanfield",
+  [FieldType.LONGFORM]: "longformfield",
+  [FieldType.DATETIME]: "datetimefield",
+  [FieldType.ATTACHMENT]: "attachmentfield",
+  [FieldType.LINK]: "relationshipfield",
+  [FieldType.JSON]: "jsonfield",
+  [FieldType.BARCODEQR]: "codescanner",
+  [FieldType.BB_REFERENCE]: "bbreferencefield",
 }

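The computed-key rewrites in this and the following frontend files are behavior-preserving: FieldType is a string enum (FieldType.STRING is the string "string" at runtime), so the maps keep exactly the same keys and only gain a compile-time link to the enum. A minimal sketch of the equivalence, with the enum values assumed from @budibase/types:

enum FieldType {
  STRING = "string",
  NUMBER = "number",
}

const byLiteral = { string: "stringfield", number: "numberfield" }
const byEnum = {
  [FieldType.STRING]: "stringfield",
  [FieldType.NUMBER]: "numberfield",
}

// Identical keys and values at runtime; only the source changes.
console.log(JSON.stringify(byLiteral) === JSON.stringify(byEnum)) // true
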
@@ -1,12 +1,14 @@
-import { FieldType, FieldSubtype } from "@budibase/types"
+import {
+  FieldType,
+  FieldSubtype,
+  INTERNAL_TABLE_SOURCE_ID,
+  AutoFieldSubType,
+  Hosting,
+} from "@budibase/types"

-export const AUTO_COLUMN_SUB_TYPES = {
-  AUTO_ID: "autoID",
-  CREATED_BY: "createdBy",
-  CREATED_AT: "createdAt",
-  UPDATED_BY: "updatedBy",
-  UPDATED_AT: "updatedAt",
-}
+export { RelationshipType } from "@budibase/types"
+
+export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType

 export const AUTO_COLUMN_DISPLAY_NAMES = {
   AUTO_ID: "Auto ID",

@@ -167,10 +169,7 @@ export const FILE_TYPES = {
   DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"],
 }

-export const HostingTypes = {
-  CLOUD: "cloud",
-  SELF: "self",
-}
+export const HostingTypes = Hosting

 export const Roles = {
   ADMIN: "ADMIN",

@@ -187,12 +186,6 @@ export function isAutoColumnUserRelationship(subtype) {
   )
 }

-export const RelationshipType = {
-  MANY_TO_MANY: "many-to-many",
-  ONE_TO_MANY: "one-to-many",
-  MANY_TO_ONE: "many-to-one",
-}
-
 export const PrettyRelationshipDefinitions = {
   MANY: "Many rows",
   ONE: "One row",

@@ -218,7 +211,7 @@ export const SWITCHABLE_TYPES = [
   ...ALLOWABLE_NUMBER_TYPES,
 ]

-export const BUDIBASE_INTERNAL_DB_ID = "bb_internal"
+export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
 export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"
 export const DB_TYPE_INTERNAL = "internal"

@@ -265,10 +258,10 @@ export const IntegrationNames = {
 }

 export const SchemaTypeOptions = [
-  { label: "Text", value: "string" },
-  { label: "Number", value: "number" },
-  { label: "Boolean", value: "boolean" },
-  { label: "Datetime", value: "datetime" },
+  { label: "Text", value: FieldType.STRING },
+  { label: "Number", value: FieldType.NUMBER },
+  { label: "Boolean", value: FieldType.BOOLEAN },
+  { label: "Datetime", value: FieldType.DATETIME },
 ]

 export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({

@@ -305,10 +298,10 @@ export const PaginationLocations = [
 ]

 export const BannedSearchTypes = [
-  "link",
-  "attachment",
-  "formula",
-  "json",
+  FieldType.LINK,
+  FieldType.ATTACHMENT,
+  FieldType.FORMULA,
+  FieldType.JSON,
   "jsonarray",
   "queryarray",
 ]

@@ -1,17 +1,17 @@
-import { FIELDS } from "constants/backend"
+import { FieldType } from "@budibase/types"

 function baseConversion(type) {
   if (type === "string") {
     return {
-      type: FIELDS.STRING.type,
+      type: FieldType.STRING,
     }
   } else if (type === "boolean") {
     return {
-      type: FIELDS.BOOLEAN.type,
+      type: FieldType.BOOLEAN,
     }
   } else if (type === "number") {
     return {
-      type: FIELDS.NUMBER.type,
+      type: FieldType.NUMBER,
     }
   }
 }

@@ -31,7 +31,7 @@ function recurse(schemaLevel = {}, objectLevel) {
       const schema = recurse(schemaLevel[key], value[0])
       if (schema) {
         schemaLevel[key] = {
-          type: FIELDS.ARRAY.type,
+          type: FieldType.ARRAY,
           schema,
         }
       }

@@ -45,7 +45,7 @@ function recurse(schemaLevel = {}, objectLevel) {
     }
   }
   if (!schemaLevel.type) {
-    return { type: FIELDS.JSON.type, schema: schemaLevel }
+    return { type: FieldType.JSON, schema: schemaLevel }
   } else {
     return schemaLevel
   }

@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
 import { ActionStepID } from "constants/backend/automations"
 import { TableNames } from "constants"
 import {

@@ -20,20 +21,20 @@ export function buildAutoColumn(tableName, name, subtype) {
   switch (subtype) {
     case AUTO_COLUMN_SUB_TYPES.UPDATED_BY:
     case AUTO_COLUMN_SUB_TYPES.CREATED_BY:
-      type = FIELDS.LINK.type
+      type = FieldType.LINK
       constraints = FIELDS.LINK.constraints
       break
     case AUTO_COLUMN_SUB_TYPES.AUTO_ID:
-      type = FIELDS.NUMBER.type
+      type = FieldType.NUMBER
       constraints = FIELDS.NUMBER.constraints
       break
     case AUTO_COLUMN_SUB_TYPES.UPDATED_AT:
     case AUTO_COLUMN_SUB_TYPES.CREATED_AT:
-      type = FIELDS.DATETIME.type
+      type = FieldType.DATETIME
       constraints = FIELDS.DATETIME.constraints
       break
     default:
-      type = FIELDS.STRING.type
+      type = FieldType.STRING
       constraints = FIELDS.STRING.constraints
       break
   }

@@ -1,7 +1,8 @@
+import { FieldType } from "@budibase/types"
 import { get, writable, derived } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
-import { SWITCHABLE_TYPES, FIELDS } from "constants/backend"
+import { SWITCHABLE_TYPES } from "constants/backend"

 export function createTablesStore() {
   const store = writable({

@@ -83,14 +84,14 @@ export function createTablesStore() {
     // make sure tables up to date (related)
     let newTableIds = []
     for (let column of Object.values(updatedTable?.schema || {})) {
-      if (column.type === FIELDS.LINK.type) {
+      if (column.type === FieldType.LINK) {
         newTableIds.push(column.tableId)
       }
     }

     let oldTableIds = []
     for (let column of Object.values(oldTable?.schema || {})) {
-      if (column.type === FIELDS.LINK.type) {
+      if (column.type === FieldType.LINK) {
         oldTableIds.push(column.tableId)
       }
     }

@@ -1,21 +1,7 @@
+import { FieldTypeToComponentMap } from "components/design/settings/controls/FieldConfiguration/utils"
 import { Component } from "./Component"
 import { getSchemaForDatasource } from "dataBinding"

-const fieldTypeToComponentMap = {
-  string: "stringfield",
-  number: "numberfield",
-  bigint: "bigintfield",
-  options: "optionsfield",
-  array: "multifieldselect",
-  boolean: "booleanfield",
-  longform: "longformfield",
-  datetime: "datetimefield",
-  attachment: "attachmentfield",
-  link: "relationshipfield",
-  json: "jsonfield",
-  barcodeqr: "codescanner",
-}
-
 export function makeDatasourceFormComponents(datasource) {
   const { schema } = getSchemaForDatasource(null, datasource, {
     formSchema: true,

@@ -30,7 +16,7 @@ export function makeDatasourceFormComponents(datasource) {
     }
     const fieldType =
       typeof fieldSchema === "object" ? fieldSchema.type : fieldSchema
-    const componentType = fieldTypeToComponentMap[fieldType]
+    const componentType = FieldTypeToComponentMap[fieldType]
     const fullComponentType = `@budibase/standard-components/${componentType}`
     if (componentType) {
       const component = new Component(fullComponentType)

@@ -7,19 +7,19 @@
   export let order

   const FieldTypeToComponentMap = {
-    string: "stringfield",
-    number: "numberfield",
-    bigint: "bigintfield",
-    options: "optionsfield",
-    array: "multifieldselect",
-    boolean: "booleanfield",
-    longform: "longformfield",
-    datetime: "datetimefield",
-    attachment: "attachmentfield",
-    link: "relationshipfield",
-    json: "jsonfield",
-    barcodeqr: "codescanner",
-    bb_reference: "bbreferencefield",
+    [FieldType.STRING]: "stringfield",
+    [FieldType.NUMBER]: "numberfield",
+    [FieldType.BIGINT]: "bigintfield",
+    [FieldType.OPTIONS]: "optionsfield",
+    [FieldType.ARRAY]: "multifieldselect",
+    [FieldType.BOOLEAN]: "booleanfield",
+    [FieldType.LONGFORM]: "longformfield",
+    [FieldType.DATETIME]: "datetimefield",
+    [FieldType.ATTACHMENT]: "attachmentfield",
+    [FieldType.LINK]: "relationshipfield",
+    [FieldType.JSON]: "jsonfield",
+    [FieldType.BARCODEQR]: "codescanner",
+    [FieldType.BB_REFERENCE]: "bbreferencefield",
   }

   const getFieldSchema = field => {

@@ -23,6 +23,6 @@
     label="Components"
     value={$componentStore.mountedComponentCount}
   />
-  <DevToolsStat label="User" value={$authStore.email} />
-  <DevToolsStat label="Role" value={$authStore.roleId} />
+  <DevToolsStat label="User" value={$authStore?.email} />
+  <DevToolsStat label="Role" value={$authStore?.roleId} />
 </Layout>

@@ -8,6 +8,7 @@
   "dependencies": {
     "@budibase/bbui": "0.0.0",
+    "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",
     "dayjs": "^1.10.8",
     "lodash": "4.17.21",
     "socket.io-client": "^4.6.1"

@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
 import OptionsCell from "../cells/OptionsCell.svelte"
 import DateCell from "../cells/DateCell.svelte"
 import MultiSelectCell from "../cells/MultiSelectCell.svelte"

@@ -12,19 +14,19 @@ import AttachmentCell from "../cells/AttachmentCell.svelte"
 import BBReferenceCell from "../cells/BBReferenceCell.svelte"

 const TypeComponentMap = {
-  text: TextCell,
-  options: OptionsCell,
-  datetime: DateCell,
-  barcodeqr: TextCell,
-  longform: LongFormCell,
-  array: MultiSelectCell,
-  number: NumberCell,
-  boolean: BooleanCell,
-  attachment: AttachmentCell,
-  link: RelationshipCell,
-  formula: FormulaCell,
-  json: JSONCell,
-  bb_reference: BBReferenceCell,
+  [FieldType.STRING]: TextCell,
+  [FieldType.OPTIONS]: OptionsCell,
+  [FieldType.DATETIME]: DateCell,
+  [FieldType.BARCODEQR]: TextCell,
+  [FieldType.LONGFORM]: LongFormCell,
+  [FieldType.ARRAY]: MultiSelectCell,
+  [FieldType.NUMBER]: NumberCell,
+  [FieldType.BOOLEAN]: BooleanCell,
+  [FieldType.ATTACHMENT]: AttachmentCell,
+  [FieldType.LINK]: RelationshipCell,
+  [FieldType.FORMULA]: FormulaCell,
+  [FieldType.JSON]: JSONCell,
+  [FieldType.BB_REFERENCE]: BBReferenceCell,
 }
 export const getCellRenderer = column => {
   return TypeComponentMap[column?.schema?.type] || TextCell

@@ -1,3 +1,5 @@
+import { FieldType, FieldTypeSubtypes } from "@budibase/types"
+
 export const getColor = (idx, opacity = 0.3) => {
   if (idx == null || idx === -1) {
     idx = 0

@@ -6,22 +8,22 @@ export const getColor = (idx, opacity = 0.3) => {
 }

 const TypeIconMap = {
-  text: "Text",
-  options: "Dropdown",
-  datetime: "Date",
-  barcodeqr: "Camera",
-  longform: "TextAlignLeft",
-  array: "Dropdown",
-  number: "123",
-  boolean: "Boolean",
-  attachment: "AppleFiles",
-  link: "DataCorrelated",
-  formula: "Calculator",
-  json: "Brackets",
-  bigint: "TagBold",
-  bb_reference: {
-    user: "User",
-    users: "UserGroup",
+  [FieldType.STRING]: "Text",
+  [FieldType.OPTIONS]: "Dropdown",
+  [FieldType.DATETIME]: "Date",
+  [FieldType.BARCODEQR]: "Camera",
+  [FieldType.LONGFORM]: "TextAlignLeft",
+  [FieldType.ARRAY]: "Dropdown",
+  [FieldType.NUMBER]: "123",
+  [FieldType.BOOLEAN]: "Boolean",
+  [FieldType.ATTACHMENT]: "AppleFiles",
+  [FieldType.LINK]: "DataCorrelated",
+  [FieldType.FORMULA]: "Calculator",
+  [FieldType.JSON]: "Brackets",
+  [FieldType.BIGINT]: "TagBold",
+  [FieldType.BB_REFERENCE]: {
+    [FieldTypeSubtypes.BB_REFERENCE.USER]: "User",
+    [FieldTypeSubtypes.BB_REFERENCE.USERS]: "UserGroup",
   },
 }

@@ -4,8 +4,8 @@ set -e
 if [[ -n $CI ]]
 then
   export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
-  echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
-  jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
+  echo "jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
+  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
 else
   # --maxWorkers performs better in development
   export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"

@@ -1,7 +1,5 @@
 const setup = require("../../tests/utilities")

-jest.setTimeout(30000)
-
 describe("/metrics", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()

@@ -1,7 +1,6 @@
 import * as setup from "./utilities"
 import path from "path"

-jest.setTimeout(15000)
 const PASSWORD = "testtest"

 describe("/applications/:appId/import", () => {

@@ -23,8 +23,6 @@ let {
   collectAutomation,
 } = setup.structures

-jest.setTimeout(30000)
-
 describe("/automations", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()

@@ -1,9 +1,10 @@
 import { Datasource, Query, SourceName } from "@budibase/types"
 import * as setup from "../utilities"
-import { databaseTestProviders } from "../../../../integrations/tests/utils"
-import pg from "pg"
-import mysql from "mysql2/promise"
-import mssql from "mssql"
+import {
+  DatabaseName,
+  getDatasource,
+  rawQuery,
+} from "../../../../integrations/tests/utils"

 jest.unmock("pg")

@@ -34,13 +35,16 @@ const createTableSQL: Record<string, string> = {
 const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
 const dropTableSQL = `DROP TABLE test_table;`

-describe.each([
-  ["postgres", databaseTestProviders.postgres],
-  ["mysql", databaseTestProviders.mysql],
-  ["mssql", databaseTestProviders.mssql],
-  ["mariadb", databaseTestProviders.mariadb],
-])("queries (%s)", (dbName, dsProvider) => {
+describe.each(
+  [
+    DatabaseName.POSTGRES,
+    DatabaseName.MYSQL,
+    DatabaseName.SQL_SERVER,
+    DatabaseName.MARIADB,
+  ].map(name => [name, getDatasource(name)])
+)("queries (%s)", (dbName, dsProvider) => {
   const config = setup.getConfig()
+  let rawDatasource: Datasource
   let datasource: Datasource

   async function createQuery(query: Partial<Query>): Promise<Query> {

@@ -57,62 +61,22 @@ describe.each([
     return await config.api.query.save({ ...defaultQuery, ...query })
   }

-  async function rawQuery(sql: string): Promise<any> {
-    // We re-fetch the datasource here because the one returned by
-    // config.api.datasource.create has the password field blanked out, and we
-    // need the password to connect to the database.
-    const ds = await dsProvider.datasource()
-    switch (ds.source) {
-      case SourceName.POSTGRES: {
-        const client = new pg.Client(ds.config!)
-        await client.connect()
-        try {
-          const { rows } = await client.query(sql)
-          return rows
-        } finally {
-          await client.end()
-        }
-      }
-      case SourceName.MYSQL: {
-        const con = await mysql.createConnection(ds.config!)
-        try {
-          const [rows] = await con.query(sql)
-          return rows
-        } finally {
-          con.end()
-        }
-      }
-      case SourceName.SQL_SERVER: {
-        const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
-        const client = await pool.connect()
-        try {
-          const { recordset } = await client.query(sql)
-          return recordset
-        } finally {
-          await pool.close()
-        }
-      }
-    }
-  }
-
   beforeAll(async () => {
     await config.init()
-    datasource = await config.api.datasource.create(
-      await dsProvider.datasource()
-    )
+    rawDatasource = await dsProvider
+    datasource = await config.api.datasource.create(rawDatasource)
   })

   beforeEach(async () => {
-    await rawQuery(createTableSQL[datasource.source])
-    await rawQuery(insertSQL)
+    await rawQuery(rawDatasource, createTableSQL[datasource.source])
+    await rawQuery(rawDatasource, insertSQL)
   })

   afterEach(async () => {
-    await rawQuery(dropTableSQL)
+    await rawQuery(rawDatasource, dropTableSQL)
   })

   afterAll(async () => {
-    await dsProvider.stop()
     setup.afterAll()
   })

@@ -143,7 +107,10 @@ describe.each([
       },
     ])

-    const rows = await rawQuery("SELECT * FROM test_table WHERE name = 'baz'")
+    const rows = await rawQuery(
+      rawDatasource,
+      "SELECT * FROM test_table WHERE name = 'baz'"
+    )
     expect(rows).toHaveLength(1)
   })

@@ -171,6 +138,7 @@ describe.each([
     expect(result.data).toEqual([{ created: true }])

     const rows = await rawQuery(
+      rawDatasource,
       `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
     )
     expect(rows).toHaveLength(1)

@@ -202,6 +170,7 @@ describe.each([
     expect(result.data).toEqual([{ created: true }])

     const rows = await rawQuery(
+      rawDatasource,
       `SELECT * FROM test_table WHERE name = '${notDateStr}'`
     )
     expect(rows).toHaveLength(1)

@@ -338,7 +307,10 @@ describe.each([
       },
     ])

-    const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
+    const rows = await rawQuery(
+      rawDatasource,
+      "SELECT * FROM test_table WHERE id = 1"
+    )
     expect(rows).toEqual([
       { id: 1, name: "foo", birthday: null, number: null },
     ])

@@ -406,7 +378,10 @@ describe.each([
       },
     ])

-    const rows = await rawQuery("SELECT * FROM test_table WHERE id = 1")
+    const rows = await rawQuery(
+      rawDatasource,
+      "SELECT * FROM test_table WHERE id = 1"
+    )
     expect(rows).toHaveLength(0)
   })
 })

@@ -443,7 +418,7 @@ describe.each([
     } catch (err: any) {
       error = err.message
     }
-    if (dbName === "mssql") {
+    if (dbName === DatabaseName.SQL_SERVER) {
       expect(error).toBeUndefined()
     } else {
       expect(error).toBeDefined()

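A subtlety in the new describe.each form above: getDatasource(name) runs at test-collection time, so each row is [name, Promise<Datasource>] and the suite awaits the promise in beforeAll. A reduced sketch of the pattern (names and the datasource shape are placeholders):

const rows = ["postgres", "mysql"].map(
  name => [name, Promise.resolve({ source: name })] as const
)

describe.each(rows)("queries (%s)", (_, dsPromise) => {
  let ds: { source: string }

  beforeAll(async () => {
    // Each suite awaits its own provider promise before any test runs.
    ds = await dsPromise
  })

  it("resolves the datasource", () => {
    expect(ds.source).toBeDefined()
  })
})
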
@@ -1,14 +1,17 @@
 import { Datasource, Query } from "@budibase/types"
 import * as setup from "../utilities"
-import { databaseTestProviders } from "../../../../integrations/tests/utils"
-import { MongoClient, type Collection, BSON } from "mongodb"
-
-const collection = "test_collection"
+import {
+  DatabaseName,
+  getDatasource,
+} from "../../../../integrations/tests/utils"
+import { MongoClient, type Collection, BSON, Db } from "mongodb"
+import { generator } from "@budibase/backend-core/tests"

 const expectValidId = expect.stringMatching(/^\w{24}$/)
 const expectValidBsonObjectId = expect.any(BSON.ObjectId)

 describe("/queries", () => {
+  let collection: string
   let config = setup.getConfig()
   let datasource: Datasource

@@ -37,8 +40,7 @@ describe("/queries", () => {
   async function withClient<T>(
     callback: (client: MongoClient) => Promise<T>
   ): Promise<T> {
-    const ds = await databaseTestProviders.mongodb.datasource()
-    const client = new MongoClient(ds.config!.connectionString)
+    const client = new MongoClient(datasource.config!.connectionString)
     await client.connect()
     try {
       return await callback(client)

@@ -47,30 +49,33 @@ describe("/queries", () => {
     }
   }

+  async function withDb<T>(callback: (db: Db) => Promise<T>): Promise<T> {
+    return await withClient(async client => {
+      return await callback(client.db(datasource.config!.db))
+    })
+  }
+
   async function withCollection<T>(
     callback: (collection: Collection) => Promise<T>
   ): Promise<T> {
-    return await withClient(async client => {
-      const db = client.db(
-        (await databaseTestProviders.mongodb.datasource()).config!.db
-      )
+    return await withDb(async db => {
       return await callback(db.collection(collection))
     })
   }

   afterAll(async () => {
-    await databaseTestProviders.mongodb.stop()
     setup.afterAll()
   })

   beforeAll(async () => {
     await config.init()
     datasource = await config.api.datasource.create(
-      await databaseTestProviders.mongodb.datasource()
+      await getDatasource(DatabaseName.MONGODB)
     )
   })

   beforeEach(async () => {
+    collection = generator.guid()
     await withCollection(async collection => {
       await collection.insertMany([
         { name: "one" },

@@ -1,4 +1,4 @@
-import { databaseTestProviders } from "../../../integrations/tests/utils"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"

 import tk from "timekeeper"
 import { outputProcessing } from "../../../utilities/rowProcessor"

@@ -34,10 +34,10 @@ jest.unmock("pg")

 describe.each([
   ["internal", undefined],
-  ["postgres", databaseTestProviders.postgres],
-  ["mysql", databaseTestProviders.mysql],
-  ["mssql", databaseTestProviders.mssql],
-  ["mariadb", databaseTestProviders.mariadb],
+  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
 ])("/rows (%s)", (__, dsProvider) => {
   const isInternal = dsProvider === undefined
   const config = setup.getConfig()

@@ -49,23 +49,23 @@ describe.each([
     await config.init()
     if (dsProvider) {
       datasource = await config.createDatasource({
-        datasource: await dsProvider.datasource(),
+        datasource: await dsProvider,
       })
     }
   })

   afterAll(async () => {
-    if (dsProvider) {
-      await dsProvider.stop()
-    }
     setup.afterAll()
   })

   function saveTableRequest(
-    ...overrides: Partial<SaveTableRequest>[]
+    // We omit the name field here because it's generated in the function with a
+    // high likelihood to be unique. Tests should not have any reason to control
+    // the table name they're writing to.
+    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
   ): SaveTableRequest {
     const req: SaveTableRequest = {
-      name: uuid.v4().substring(0, 16),
+      name: uuid.v4().substring(0, 10),
       type: "table",
       sourceType: datasource
         ? TableSourceType.EXTERNAL

@@ -87,7 +87,10 @@ describe.each([
   }

   function defaultTable(
-    ...overrides: Partial<SaveTableRequest>[]
+    // We omit the name field here because it's generated in the function with a
+    // high likelihood to be unique. Tests should not have any reason to control
+    // the table name they're writing to.
+    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
   ): SaveTableRequest {
     return saveTableRequest(
       {

@@ -194,7 +197,6 @@ describe.each([

     const newTable = await config.api.table.save(
       saveTableRequest({
-        name: "TestTableAuto",
         schema: {
           "Row ID": {
             name: "Row ID",

@@ -383,11 +385,9 @@ describe.each([

     isInternal &&
       it("doesn't allow creating in user table", async () => {
-        const userTableId = InternalTable.USER_METADATA
         const response = await config.api.row.save(
-          userTableId,
+          InternalTable.USER_METADATA,
           {
             tableId: userTableId,
             firstName: "Joe",
             lastName: "Joe",
             email: "joe@joe.com",

@@ -462,7 +462,6 @@ describe.each([
       table = await config.api.table.save(defaultTable())
       otherTable = await config.api.table.save(
         defaultTable({
-          name: "a",
           schema: {
             relationship: {
               name: "relationship",

@@ -898,8 +897,8 @@ describe.each([
     let o2mTable: Table
     let m2mTable: Table
     beforeAll(async () => {
-      o2mTable = await config.api.table.save(defaultTable({ name: "o2m" }))
-      m2mTable = await config.api.table.save(defaultTable({ name: "m2m" }))
+      o2mTable = await config.api.table.save(defaultTable())
+      m2mTable = await config.api.table.save(defaultTable())
     })

     describe.each([

@@ -1256,7 +1255,6 @@ describe.each([
       otherTable = await config.api.table.save(defaultTable())
       table = await config.api.table.save(
         saveTableRequest({
-          name: "b",
           schema: {
             links: {
               name: "links",

@@ -1354,7 +1352,6 @@ describe.each([

       const table = await config.api.table.save(
         saveTableRequest({
-          name: "table",
           schema: {
             text: {
               name: "text",

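The Partial<Omit<SaveTableRequest, "name">> rest parameter above is what lets the later hunks delete hard-coded table names: callers can no longer pass name, and the helper always generates one, so parallel suites cannot collide on table names. A reduced sketch with placeholder types:

interface SaveTableRequest {
  name: string
  schema: Record<string, unknown>
}

function saveTableRequest(
  ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest {
  // The name is always generated here, never supplied by the caller.
  const base: SaveTableRequest = {
    name: `table_${Math.random().toString(36).slice(2, 12)}`,
    schema: {},
  }
  return Object.assign(base, ...overrides)
}

// saveTableRequest({ name: "orders" }) // now a compile-time error
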
@@ -3,8 +3,6 @@ import { checkPermissionsEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 import { UserMetadata } from "@budibase/types"

-jest.setTimeout(30000)
-
 jest.mock("../../../utilities/workerRequests", () => ({
   getGlobalUsers: jest.fn(() => {
     return {}

@@ -19,8 +19,7 @@ import {
   ViewV2,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
-import * as uuid from "uuid"
-import { databaseTestProviders } from "../../../integrations/tests/utils"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
 import { roles } from "@budibase/backend-core"

@@ -30,10 +29,10 @@ jest.unmock("pg")

 describe.each([
   ["internal", undefined],
-  ["postgres", databaseTestProviders.postgres],
-  ["mysql", databaseTestProviders.mysql],
-  ["mssql", databaseTestProviders.mssql],
-  ["mariadb", databaseTestProviders.mariadb],
+  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
 ])("/v2/views (%s)", (_, dsProvider) => {
   const config = setup.getConfig()
   const isInternal = !dsProvider

@@ -42,10 +41,10 @@ describe.each([
   let datasource: Datasource

   function saveTableRequest(
-    ...overrides: Partial<SaveTableRequest>[]
+    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
   ): SaveTableRequest {
     const req: SaveTableRequest = {
-      name: uuid.v4().substring(0, 16),
+      name: generator.guid().replaceAll("-", "").substring(0, 16),
       type: "table",
       sourceType: datasource
         ? TableSourceType.EXTERNAL

@@ -90,16 +89,13 @@ describe.each([

     if (dsProvider) {
       datasource = await config.createDatasource({
-        datasource: await dsProvider.datasource(),
+        datasource: await dsProvider,
       })
     }
     table = await config.api.table.save(priceTable())
   })

   afterAll(async () => {
-    if (dsProvider) {
-      await dsProvider.stop()
-    }
     setup.afterAll()
   })

@@ -231,7 +227,7 @@ describe.each([

       view = await config.api.viewV2.create({
         tableId: table._id!,
-        name: "View A",
+        name: generator.guid(),
       })
     })

@@ -307,12 +303,13 @@ describe.each([

     it("can update an existing view name", async () => {
       const tableId = table._id!
-      await config.api.viewV2.update({ ...view, name: "View B" })
+      const newName = generator.guid()
+      await config.api.viewV2.update({ ...view, name: newName })

       expect(await config.api.table.get(tableId)).toEqual(
         expect.objectContaining({
           views: {
-            "View B": { ...view, name: "View B", schema: expect.anything() },
+            [newName]: { ...view, name: newName, schema: expect.anything() },
           },
         })
       )

@@ -507,7 +504,6 @@ describe.each([
     it("views have extra data trimmed", async () => {
       const table = await config.api.table.save(
         saveTableRequest({
-          name: "orders",
           schema: {
             Country: {
               type: FieldType.STRING,

@@ -523,7 +519,7 @@ describe.each([

       const view = await config.api.viewV2.create({
         tableId: table._id!,
-        name: uuid.v4(),
+        name: generator.guid(),
         schema: {
           Country: {
             visible: true,

@@ -853,7 +849,6 @@ describe.each([
     beforeAll(async () => {
       table = await config.api.table.save(
         saveTableRequest({
-          name: `users_${uuid.v4()}`,
           type: "table",
           schema: {
             name: {

@@ -3,7 +3,6 @@ import {
   generateMakeRequest,
   MakeRequestResponse,
 } from "../api/routes/public/tests/utils"
-import { v4 as uuidv4 } from "uuid"
 import * as setup from "../api/routes/tests/utilities"
 import {
   Datasource,

@@ -12,12 +11,23 @@ import {
   TableRequest,
   TableSourceType,
 } from "@budibase/types"
-import { databaseTestProviders } from "../integrations/tests/utils"
-import mysql from "mysql2/promise"
+import {
+  DatabaseName,
+  getDatasource,
+  rawQuery,
+} from "../integrations/tests/utils"
 import { builderSocket } from "../websockets"
+import { generator } from "@budibase/backend-core/tests"
 // @ts-ignore
 fetch.mockSearch()

+function uniqueTableName(length?: number): string {
+  return generator
+    .guid()
+    .replaceAll("-", "_")
+    .substring(0, length || 10)
+}
+
 const config = setup.getConfig()!

 jest.mock("../websockets", () => ({

@@ -37,7 +47,8 @@ jest.mock("../websockets", () => ({

 describe("mysql integrations", () => {
   let makeRequest: MakeRequestResponse,
-    mysqlDatasource: Datasource,
+    rawDatasource: Datasource,
+    datasource: Datasource,
     primaryMySqlTable: Table

   beforeAll(async () => {

@@ -46,18 +57,13 @@ describe("mysql integrations", () => {

     makeRequest = generateMakeRequest(apiKey, true)

-    mysqlDatasource = await config.api.datasource.create(
-      await databaseTestProviders.mysql.datasource()
-    )
-  })
-
-  afterAll(async () => {
-    await databaseTestProviders.mysql.stop()
+    rawDatasource = await getDatasource(DatabaseName.MYSQL)
+    datasource = await config.api.datasource.create(rawDatasource)
   })

   beforeEach(async () => {
     primaryMySqlTable = await config.createTable({
-      name: uuidv4(),
+      name: uniqueTableName(),
       type: "table",
       primary: ["id"],
       schema: {

@@ -79,7 +85,7 @@ describe("mysql integrations", () => {
           type: FieldType.NUMBER,
         },
       },
-      sourceId: mysqlDatasource._id,
+      sourceId: datasource._id,
       sourceType: TableSourceType.EXTERNAL,
     })
   })

@@ -87,18 +93,15 @@ describe("mysql integrations", () => {
   afterAll(config.end)

   it("validate table schema", async () => {
-    const res = await makeRequest(
-      "get",
-      `/api/datasources/${mysqlDatasource._id}`
-    )
+    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)

     expect(res.status).toBe(200)
     expect(res.body).toEqual({
       config: {
-        database: "mysql",
-        host: mysqlDatasource.config!.host,
+        database: expect.any(String),
+        host: datasource.config!.host,
         password: "--secret-value--",
-        port: mysqlDatasource.config!.port,
+        port: datasource.config!.port,
         user: "root",
       },
       plus: true,

@@ -117,7 +120,7 @@ describe("mysql integrations", () => {
     it("should be able to verify the connection", async () => {
       await config.api.datasource.verify(
         {
-          datasource: await databaseTestProviders.mysql.datasource(),
+          datasource: rawDatasource,
         },
         {
           body: {

@@ -128,13 +131,12 @@ describe("mysql integrations", () => {
     })

     it("should state an invalid datasource cannot connect", async () => {
-      const dbConfig = await databaseTestProviders.mysql.datasource()
       await config.api.datasource.verify(
         {
           datasource: {
-            ...dbConfig,
+            ...rawDatasource,
             config: {
-              ...dbConfig.config,
+              ...rawDatasource.config,
               password: "wrongpassword",
             },
           },

@@ -154,7 +156,7 @@ describe("mysql integrations", () => {
     it("should fetch information about mysql datasource", async () => {
       const primaryName = primaryMySqlTable.name
       const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: mysqlDatasource,
+        datasource: datasource,
       })
       expect(response.status).toBe(200)
       expect(response.body.tableNames).toBeDefined()

@@ -163,40 +165,38 @@ describe("mysql integrations", () => {
   })

   describe("Integration compatibility with mysql search_path", () => {
-    let client: mysql.Connection, pathDatasource: Datasource
-    const database = "test1"
-    const database2 = "test-2"
+    let datasource: Datasource, rawDatasource: Datasource
+    const database = generator.guid()
+    const database2 = generator.guid()

     beforeAll(async () => {
-      const dsConfig = await databaseTestProviders.mysql.datasource()
-      const dbConfig = dsConfig.config!
+      rawDatasource = await getDatasource(DatabaseName.MYSQL)

-      client = await mysql.createConnection(dbConfig)
-      await client.query(`CREATE DATABASE \`${database}\`;`)
-      await client.query(`CREATE DATABASE \`${database2}\`;`)
+      await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
+      await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)

       const pathConfig: any = {
-        ...dsConfig,
+        ...rawDatasource,
         config: {
-          ...dbConfig,
+          ...rawDatasource.config!,
           database,
         },
       }
-      pathDatasource = await config.api.datasource.create(pathConfig)
+      datasource = await config.api.datasource.create(pathConfig)
     })

     afterAll(async () => {
-      await client.query(`DROP DATABASE \`${database}\`;`)
-      await client.query(`DROP DATABASE \`${database2}\`;`)
-      await client.end()
+      await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
+      await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
     })

     it("discovers tables from any schema in search path", async () => {
-      await client.query(
+      await rawQuery(
+        rawDatasource,
         `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
       )
       const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: pathDatasource,
+        datasource: datasource,
       })
       expect(response.status).toBe(200)
       expect(response.body.tableNames).toBeDefined()

@@ -207,15 +207,17 @@ describe("mysql integrations", () => {

     it("does not mix columns from different tables", async () => {
       const repeated_table_name = "table_same_name"
-      await client.query(
+      await rawQuery(
+        rawDatasource,
         `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
       )
-      await client.query(
+      await rawQuery(
+        rawDatasource,
         `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
       )
       const response = await makeRequest(
         "post",
-        `/api/datasources/${pathDatasource._id}/schema`,
+        `/api/datasources/${datasource._id}/schema`,
         {
           tablesFilter: [repeated_table_name],
         }

@@ -231,30 +233,14 @@ describe("mysql integrations", () => {
   })

   describe("POST /api/tables/", () => {
-    let client: mysql.Connection
     const emitDatasourceUpdateMock = jest.fn()

-    beforeEach(async () => {
-      client = await mysql.createConnection(
-        (
-          await databaseTestProviders.mysql.datasource()
-        ).config!
-      )
-      mysqlDatasource = await config.api.datasource.create(
-        await databaseTestProviders.mysql.datasource()
-      )
-    })
-
-    afterEach(async () => {
-      await client.end()
-    })
-
     it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
       const addColumnToTable: TableRequest = {
         type: "table",
         sourceType: TableSourceType.EXTERNAL,
-        name: "table",
-        sourceId: mysqlDatasource._id!,
+        name: uniqueTableName(),
+        sourceId: datasource._id!,
         primary: ["id"],
         schema: {
           id: {

@@ -301,14 +287,16 @@ describe("mysql integrations", () => {
           },
         },
         created: true,
-        _id: `${mysqlDatasource._id}__table`,
+        _id: `${datasource._id}__${addColumnToTable.name}`,
       }
       delete expectedTable._add

       expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
       const emittedDatasource: Datasource =
         emitDatasourceUpdateMock.mock.calls[0][1]
-      expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
+      expect(emittedDatasource.entities![expectedTable.name]).toEqual(
+        expectedTable
+      )
     })

     it("will rename a column", async () => {

@@ -346,17 +334,18 @@ describe("mysql integrations", () => {
         "/api/tables/",
         renameColumnOnTable
       )
-      mysqlDatasource = (
-        await makeRequest(
-          "post",
-          `/api/datasources/${mysqlDatasource._id}/schema`
-        )
+
+      const ds = (
+        await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
       ).body.datasource

       expect(response.status).toEqual(200)
-      expect(
-        Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
-      ).toEqual(["id", "name", "description", "age"])
+      expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
+        "id",
+        "name",
+        "description",
+        "age",
+      ])
     })
   })
 })

@@ -16,8 +16,12 @@ import {
 import _ from "lodash"
 import { generator } from "@budibase/backend-core/tests"
 import { utils } from "@budibase/backend-core"
-import { databaseTestProviders } from "../integrations/tests/utils"
-import { Client } from "pg"
+import {
+  DatabaseName,
+  getDatasource,
+  rawQuery,
+} from "../integrations/tests/utils"

 // @ts-ignore
 fetch.mockSearch()

@@ -28,7 +32,8 @@ jest.mock("../websockets")

 describe("postgres integrations", () => {
   let makeRequest: MakeRequestResponse,
-    postgresDatasource: Datasource,
+    rawDatasource: Datasource,
+    datasource: Datasource,
     primaryPostgresTable: Table,
     oneToManyRelationshipInfo: ForeignTableInfo,
     manyToOneRelationshipInfo: ForeignTableInfo,

@@ -40,19 +45,17 @@ describe("postgres integrations", () => {

     makeRequest = generateMakeRequest(apiKey, true)

-    postgresDatasource = await config.api.datasource.create(
-      await databaseTestProviders.postgres.datasource()
-    )
-  })
-
-  afterAll(async () => {
-    await databaseTestProviders.postgres.stop()
+    rawDatasource = await getDatasource(DatabaseName.POSTGRES)
+    datasource = await config.api.datasource.create(rawDatasource)
   })

   beforeEach(async () => {
     async function createAuxTable(prefix: string) {
       return await config.createTable({
-        name: `${prefix}_${generator.word({ length: 6 })}`,
+        name: `${prefix}_${generator
+          .guid()
+          .replaceAll("-", "")
+          .substring(0, 6)}`,
         type: "table",
         primary: ["id"],
         primaryDisplay: "title",

@@ -67,7 +70,7 @@ describe("postgres integrations", () => {
           type: FieldType.STRING,
         },
       },
-      sourceId: postgresDatasource._id,
+      sourceId: datasource._id,
       sourceType: TableSourceType.EXTERNAL,
     })
   }

@@ -89,7 +92,7 @@ describe("postgres integrations", () => {
   }

   primaryPostgresTable = await config.createTable({
-    name: `p_${generator.word({ length: 6 })}`,
+    name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`,
     type: "table",
     primary: ["id"],
     schema: {

@@ -144,7 +147,7 @@ describe("postgres integrations", () => {
       main: true,
     },
   },
-    sourceId: postgresDatasource._id,
+    sourceId: datasource._id,
     sourceType: TableSourceType.EXTERNAL,
   })
 })

@@ -251,7 +254,7 @@ describe("postgres integrations", () => {

   async function createDefaultPgTable() {
     return await config.createTable({
-      name: generator.word({ length: 10 }),
+      name: generator.guid().replaceAll("-", "").substring(0, 10),
       type: "table",
       primary: ["id"],
       schema: {

@@ -261,7 +264,7 @@ describe("postgres integrations", () => {
         autocolumn: true,
       },
     },
-      sourceId: postgresDatasource._id,
+      sourceId: datasource._id,
       sourceType: TableSourceType.EXTERNAL,
     })
   }

@@ -299,19 +302,16 @@ describe("postgres integrations", () => {
   }

   it("validate table schema", async () => {
-    const res = await makeRequest(
-      "get",
-      `/api/datasources/${postgresDatasource._id}`
-    )
+    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)

     expect(res.status).toBe(200)
     expect(res.body).toEqual({
       config: {
         ca: false,
-        database: "postgres",
-        host: postgresDatasource.config!.host,
+        database: expect.any(String),
+        host: datasource.config!.host,
         password: "--secret-value--",
-        port: postgresDatasource.config!.port,
+        port: datasource.config!.port,
         rejectUnauthorized: false,
         schema: "public",
         ssl: false,

@@ -1043,7 +1043,7 @@ describe("postgres integrations", () => {
     it("should be able to verify the connection", async () => {
       await config.api.datasource.verify(
         {
-          datasource: await databaseTestProviders.postgres.datasource(),
+          datasource: await getDatasource(DatabaseName.POSTGRES),
         },
         {
           body: {

@@ -1054,7 +1054,7 @@ describe("postgres integrations", () => {
     })

     it("should state an invalid datasource cannot connect", async () => {
-      const dbConfig = await databaseTestProviders.postgres.datasource()
+      const dbConfig = await getDatasource(DatabaseName.POSTGRES)
       await config.api.datasource.verify(
         {
           datasource: {

@@ -1079,7 +1079,7 @@ describe("postgres integrations", () => {
     it("should fetch information about postgres datasource", async () => {
       const primaryName = primaryPostgresTable.name
       const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: postgresDatasource,
+        datasource: datasource,
       })
       expect(response.status).toBe(200)
      expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -1088,86 +1088,88 @@ describe("postgres integrations", () => {
|
|||
})
|
||||
|
||||
describe("POST /api/datasources/:datasourceId/schema", () => {
|
||||
let client: Client
|
||||
let tableName: string
|
||||
|
||||
beforeEach(async () => {
|
||||
client = new Client(
|
||||
(await databaseTestProviders.postgres.datasource()).config!
|
||||
)
|
||||
await client.connect()
|
||||
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.query(`DROP TABLE IF EXISTS "table"`)
|
||||
await client.end()
|
||||
await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
|
||||
})
|
||||
|
||||
it("recognises when a table has no primary key", async () => {
|
||||
await client.query(`CREATE TABLE "table" (id SERIAL)`)
|
||||
await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${postgresDatasource._id}/schema`
|
||||
`/api/datasources/${datasource._id}/schema`
|
||||
)
|
||||
|
||||
expect(response.body.errors).toEqual({
|
||||
table: "Table must have a primary key.",
|
||||
[tableName]: "Table must have a primary key.",
|
||||
})
|
||||
})
|
||||
|
||||
it("recognises when a table is using a reserved column name", async () => {
|
||||
await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
|
||||
)
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${postgresDatasource._id}/schema`
|
||||
`/api/datasources/${datasource._id}/schema`
|
||||
)
|
||||
|
||||
expect(response.body.errors).toEqual({
|
||||
table: "Table contains invalid columns.",
|
||||
[tableName]: "Table contains invalid columns.",
|
||||
})
|
||||
})
|
||||
})
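
  // Note: each test above provisions its own randomly named table via
  // generator.guid(), so these schema checks stay isolated even when the
  // underlying Postgres container is shared or reused between runs.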

  describe("Integration compatibility with postgres search_path", () => {
    let client: Client, pathDatasource: Datasource
    const schema1 = "test1",
      schema2 = "test-2"
    let rawDatasource: Datasource,
      datasource: Datasource,
      schema1: string,
      schema2: string

    beforeAll(async () => {
      const dsConfig = await databaseTestProviders.postgres.datasource()
      const dbConfig = dsConfig.config!
    beforeEach(async () => {
      schema1 = generator.guid().replaceAll("-", "")
      schema2 = generator.guid().replaceAll("-", "")

      client = new Client(dbConfig)
      await client.connect()
      await client.query(`CREATE SCHEMA "${schema1}";`)
      await client.query(`CREATE SCHEMA "${schema2}";`)
      rawDatasource = await getDatasource(DatabaseName.POSTGRES)
      const dbConfig = rawDatasource.config!

      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)

      const pathConfig: any = {
        ...dsConfig,
        ...rawDatasource,
        config: {
          ...dbConfig,
          schema: `${schema1}, ${schema2}`,
        },
      }
      pathDatasource = await config.api.datasource.create(pathConfig)
      datasource = await config.api.datasource.create(pathConfig)
    })

    afterAll(async () => {
      await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
      await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
      await client.end()
    afterEach(async () => {
      await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
      await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
    })

    it("discovers tables from any schema in search path", async () => {
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
      )
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
      )
      const response = await makeRequest("post", "/api/datasources/info", {
        datasource: pathDatasource,
        datasource: datasource,
      })
      expect(response.status).toBe(200)
      expect(response.body.tableNames).toBeDefined()

@ -1178,15 +1180,17 @@ describe("postgres integrations", () => {

    it("does not mix columns from different tables", async () => {
      const repeated_table_name = "table_same_name"
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
      )
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
      )
      const response = await makeRequest(
        "post",
        `/api/datasources/${pathDatasource._id}/schema`,
        `/api/datasources/${datasource._id}/schema`,
        {
          tablesFilter: [repeated_table_name],
        }

@ -1,25 +1,90 @@
jest.unmock("pg")

import { Datasource } from "@budibase/types"
import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import { StartedTestContainer } from "testcontainers"
import { GenericContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"

jest.setTimeout(30000)
export type DatasourceProvider = () => Promise<Datasource>

export interface DatabaseProvider {
  start(): Promise<StartedTestContainer>
  stop(): Promise<void>
  datasource(): Promise<Datasource>
export enum DatabaseName {
  POSTGRES = "postgres",
  MONGODB = "mongodb",
  MYSQL = "mysql",
  SQL_SERVER = "mssql",
  MARIADB = "mariadb",
}

export const databaseTestProviders = {
  postgres,
  mongodb,
  mysql,
  mssql,
  mariadb,
const providers: Record<DatabaseName, DatasourceProvider> = {
  [DatabaseName.POSTGRES]: postgres.getDatasource,
  [DatabaseName.MONGODB]: mongodb.getDatasource,
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
}

export function getDatasourceProviders(
  ...sourceNames: DatabaseName[]
): Promise<Datasource>[] {
  return sourceNames.map(sourceName => providers[sourceName]())
}

export function getDatasourceProvider(
  sourceName: DatabaseName
): DatasourceProvider {
  return providers[sourceName]
}

export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
  return providers[sourceName]()
}

export async function getDatasources(
  ...sourceNames: DatabaseName[]
): Promise<Datasource[]> {
  return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
}

export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
  switch (ds.source) {
    case SourceName.POSTGRES: {
      return postgres.rawQuery(ds, sql)
    }
    case SourceName.MYSQL: {
      return mysql.rawQuery(ds, sql)
    }
    case SourceName.SQL_SERVER: {
      return mssql.rawQuery(ds, sql)
    }
    default: {
      throw new Error(`Unsupported source: ${ds.source}`)
    }
  }
}
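
// A minimal usage sketch (illustrative only, not part of the suite itself):
//
//   const ds = await getDatasource(DatabaseName.POSTGRES)
//   const rows = await rawQuery(ds, "SELECT 1 AS one")
//
// MongoDB and MariaDB need no cases of their own here: the MariaDB provider
// builds datasources with SourceName.MYSQL, so they fall through to the MySQL
// helper, and no raw-query helper exists for MongoDB in these utilities.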

export async function startContainer(container: GenericContainer) {
  if (process.env.REUSE_CONTAINERS) {
    container = container.withReuse()
  }

  const startedContainer = await container.start()

  const info = testContainerUtils.getContainerById(startedContainer.getId())
  if (!info) {
    throw new Error("Container not found")
  }

  // Some Docker runtimes, when you expose a port, will bind it to both
  // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
  // addresses are not shared, and testcontainers will sometimes give you back
  // the ipv6 port. There's no way to know that this has happened, and if you
  // try to then connect to `localhost:port` you may attempt to bind to the v4
  // address which could be unbound or even an entirely different container. For
  // that reason, we don't use testcontainers' `getExposedPort` function,
  // preferring instead our own method that guarantees v4 ports.
  return testContainerUtils.getExposedV4Ports(info)
}
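
// Sketch of the provider pattern built on startContainer (assumed shape,
// mirroring the providers that follow):
//
//   let ports: Promise<testContainerUtils.Port[]>
//   ports = startContainer(
//     new GenericContainer("postgres:16.1-bullseye").withExposedPorts(5432)
//   )
//   const hostPort = (await ports).find(x => x.container === 5432)?.host
//
// Setting REUSE_CONTAINERS keeps the container alive between runs via
// testcontainers' withReuse().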

@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { rawQuery } from "./mysql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MariaDBWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {

@ -21,38 +24,38 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mariadb:lts")
    .withExposedPorts(3306)
    .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MariaDBWaitStrategy())
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mariadb:lts")
        .withExposedPorts(3306)
        .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MariaDBWaitStrategy())
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host
  if (!port) {
    throw new Error("MariaDB port not found")
  }

  const config = {
    host: "127.0.0.1",
    port,
    user: "root",
    password: "password",
    database: "mysql",
  }

  const datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
    config,
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config.database = database
  return datasource
}
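
// Isolation now comes from creating a fresh database per getDatasource() call
// rather than from stopping the container; teardown of the shared container is
// presumably left to testcontainers' own reuse/cleanup machinery.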

@ -1,43 +1,39 @@
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mongo:7.0-jammy")
    .withExposedPorts(27017)
    .withEnvironment({
      MONGO_INITDB_ROOT_USERNAME: "mongo",
      MONGO_INITDB_ROOT_PASSWORD: "password",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        `mongosh --eval "db.version()"`
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mongo:7.0-jammy")
        .withExposedPorts(27017)
        .withEnvironment({
          MONGO_INITDB_ROOT_USERNAME: "mongo",
          MONGO_INITDB_ROOT_PASSWORD: "password",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            `mongosh --eval "db.version()"`
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(27017)

  const port = (await ports).find(x => x.container === 27017)
  if (!port) {
    throw new Error("MongoDB port not found")
  }

  return {
    type: "datasource",
    source: SourceName.MONGODB,
    plus: false,
    config: {
      connectionString: `mongodb://mongo:password@${host}:${port}`,
      db: "mongo",
      connectionString: `mongodb://mongo:password@127.0.0.1:${port.host}`,
      db: generator.guid(),
    },
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
}

@ -1,43 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import mssql from "mssql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer(
    "mcr.microsoft.com/mssql/server:2022-latest"
  )
    .withExposedPorts(1433)
    .withEnvironment({
      ACCEPT_EULA: "Y",
      MSSQL_SA_PASSWORD: "Password_123",
      // This is important, as Microsoft allow us to use the "Developer" edition
      // of SQL Server for development and testing purposes. We can't use other
      // versions without a valid license, and we cannot use the Developer
      // version in production.
      MSSQL_PID: "Developer",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
      )
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
        .withExposedPorts(1433)
        .withEnvironment({
          ACCEPT_EULA: "Y",
          MSSQL_SA_PASSWORD: "Password_123",
          // This is important, as Microsoft allow us to use the "Developer" edition
          // of SQL Server for development and testing purposes. We can't use other
          // versions without a valid license, and we cannot use the Developer
          // version in production.
          MSSQL_PID: "Developer",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
          )
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(1433)

  return {
  const port = (await ports).find(x => x.container === 1433)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.SQL_SERVER,
    plus: true,
    config: {
      server: host,
      server: "127.0.0.1",
      port,
      user: "sa",
      password: "Password_123",

@ -46,11 +44,28 @@ export async function datasource(): Promise<Datasource> {
      },
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.SQL_SERVER) {
    throw new Error("Datasource source is not SQL Server")
  }

  const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
  const client = await pool.connect()
  try {
    const { recordset } = await client.query(sql)
    return recordset
  } finally {
    await pool.close()
  }
}
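
// Usage sketch (illustrative only): rawQuery runs arbitrary T-SQL against the
// per-call database created above, e.g.
//
//   const ds = await getDatasource()
//   const rows = await rawQuery(ds, "SELECT DB_NAME() AS db")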

@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import mysql from "mysql2/promise"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MySQLWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {

@ -24,38 +27,50 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mysql:8.3")
    .withExposedPorts(3306)
    .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config!.database = database
  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.MYSQL) {
    throw new Error("Datasource source is not MySQL")
  }

  const connection = await mysql.createConnection(ds.config)
  try {
    const [rows] = await connection.query(sql)
    return rows
  } finally {
    connection.end()
  }
}

@ -1,33 +1,33 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import pg from "pg"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("postgres:16.1-bullseye")
    .withExposedPorts(5432)
    .withEnvironment({ POSTGRES_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "pg_isready -h localhost -p 5432"
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("postgres:16.1-bullseye")
        .withExposedPorts(5432)
        .withEnvironment({ POSTGRES_PASSWORD: "password" })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "pg_isready -h localhost -p 5432"
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(5432)

  return {
  const port = (await ports).find(x => x.container === 5432)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.POSTGRES,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      database: "postgres",
      user: "postgres",

@ -38,11 +38,28 @@ export async function datasource(): Promise<Datasource> {
      ca: false,
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.POSTGRES) {
    throw new Error("Datasource source is not Postgres")
  }

  const client = new pg.Client(ds.config)
  await client.connect()
  try {
    const { rows } = await client.query(sql)
    return rows
  } finally {
    await client.end()
  }
}

@ -25,8 +25,6 @@ const clearMigrations = async () => {
  }
}

jest.setTimeout(10000)

describe("migrations", () => {
  const config = new TestConfig()

@ -17,8 +17,6 @@ import {
  generator,
} from "@budibase/backend-core/tests"

jest.setTimeout(30000)

describe("external search", () => {
  const config = new TestConfiguration()

@ -2,17 +2,11 @@ import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"

if (!process.env.DEBUG) {
  global.console.log = jest.fn() // console.log is ignored in tests
  global.console.warn = jest.fn() // console.warn is ignored in tests
}

if (!process.env.CI) {
  // set a longer timeout in dev for debugging
  // 100 seconds
  // set a longer timeout in dev for debugging 100 seconds
  jest.setTimeout(100 * 1000)
} else {
  jest.setTimeout(10 * 1000)
  jest.setTimeout(30 * 1000)
}

testContainerUtils.setupEnv(env, coreEnv)
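
// Both debug logging and container reuse are opt-in from the environment; an
// assumed invocation would look like:
//
//   DEBUG=testcontainers REUSE_CONTAINERS=1 yarn test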
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import TestConfiguration from "../TestConfiguration"
|
||||
import { SuperTest, Test, Response } from "supertest"
|
||||
import request, { SuperTest, Test, Response } from "supertest"
|
||||
import { ReadStream } from "fs"
|
||||
import { getServer } from "../../../app"
|
||||
|
||||
type Headers = Record<string, string | string[] | undefined>
|
||||
type Method = "get" | "post" | "put" | "patch" | "delete"
|
||||
|
@ -76,7 +77,8 @@ export abstract class TestAPI {
|
|||
protected _requestRaw = async (
|
||||
method: "get" | "post" | "put" | "patch" | "delete",
|
||||
url: string,
|
||||
opts?: RequestOpts
|
||||
opts?: RequestOpts,
|
||||
attempt = 0
|
||||
): Promise<Response> => {
|
||||
const {
|
||||
headers = {},
|
||||
|
@ -107,26 +109,29 @@ export abstract class TestAPI {
|
|||
const headersFn = publicUser
|
||||
? this.config.publicHeaders.bind(this.config)
|
||||
: this.config.defaultHeaders.bind(this.config)
|
||||
let request = this.request[method](url).set(
|
||||
|
||||
const app = getServer()
|
||||
let req = request(app)[method](url)
|
||||
req = req.set(
|
||||
headersFn({
|
||||
"x-budibase-include-stacktrace": "true",
|
||||
})
|
||||
)
|
||||
if (headers) {
|
||||
request = request.set(headers)
|
||||
req = req.set(headers)
|
||||
}
|
||||
if (body) {
|
||||
request = request.send(body)
|
||||
req = req.send(body)
|
||||
}
|
||||
for (const [key, value] of Object.entries(fields)) {
|
||||
request = request.field(key, value)
|
||||
req = req.field(key, value)
|
||||
}
|
||||
|
||||
for (const [key, value] of Object.entries(files)) {
|
||||
if (isAttachedFile(value)) {
|
||||
request = request.attach(key, value.file, value.name)
|
||||
req = req.attach(key, value.file, value.name)
|
||||
} else {
|
||||
request = request.attach(key, value as any)
|
||||
req = req.attach(key, value as any)
|
||||
}
|
||||
}
|
||||
if (expectations?.headers) {
|
||||
|
@ -136,11 +141,25 @@ export abstract class TestAPI {
|
|||
`Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
|
||||
)
|
||||
}
|
||||
request = request.expect(key, value as any)
|
||||
req = req.expect(key, value as any)
|
||||
}
|
||||
}
|
||||
|
||||
return await request
|
||||
try {
|
||||
return await req
|
||||
} catch (e: any) {
|
||||
// We've found that occasionally the connection between supertest and the
|
||||
// server supertest starts gets reset. Not sure why, but retrying it
|
||||
// appears to work. I don't particularly like this, but it's better than
|
||||
// flakiness.
|
||||
if (e.code === "ECONNRESET") {
|
||||
if (attempt > 2) {
|
||||
throw e
|
||||
}
|
||||
return await this._requestRaw(method, url, opts, attempt + 1)
|
||||
}
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
protected _checkResponse = (
|
||||
|
@ -170,7 +189,18 @@ export abstract class TestAPI {
|
|||
}
|
||||
}
|
||||
|
||||
throw new Error(message)
|
||||
if (response.error) {
|
||||
// Sometimes the error can be between supertest and the app, and when
|
||||
// that happens response.error is sometimes populated with `text` that
|
||||
// gives more detail about the error. The `message` is almost always
|
||||
// useless from what I've seen.
|
||||
if (response.error.text) {
|
||||
response.error.message = response.error.text
|
||||
}
|
||||
throw new Error(message, { cause: response.error })
|
||||
} else {
|
||||
throw new Error(message)
|
||||
}
|
||||
}
|
||||
|
||||
if (expectations?.headersNotPresent) {
|
||||
|
|
|
@ -54,7 +54,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
|
|||
type: columnType,
|
||||
subtype: columnSubtype,
|
||||
autocolumn: isAutoColumn,
|
||||
} = schema[columnName]
|
||||
} = schema[columnName] || {}
|
||||
|
||||
// If the column had an invalid value we don't want to override it
|
||||
if (results.schemaValidation[columnName] === false) {
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
node_modules/
|
||||
.env
|
||||
watchtower-hook.json
|
||||
dist/
|
||||
testResults.json
|
|
@ -1,28 +0,0 @@
|
|||
# QA Core API Tests
|
||||
|
||||
The QA Core API tests are a jest suite that run directly against the budibase backend APIs.
|
||||
|
||||
## Auto Setup
|
||||
|
||||
You can run the whole test suite with one command, that spins up the budibase server and runs the jest tests:
|
||||
|
||||
`yarn test:ci`
|
||||
|
||||
## Setup Server
|
||||
|
||||
You can run the local development stack by following the instructions on the main readme.
|
||||
|
||||
## Run Tests
|
||||
|
||||
If you configured the server using the previous command, you can run the whole test suite by using:
|
||||
|
||||
`yarn test`
|
||||
|
||||
for watch mode, where the tests will run on every change:
|
||||
|
||||
`yarn test:watch`
|
||||
|
||||
To run tests locally against a cloud service you can update the configuration inside the `.env` file and run:
|
||||
|
||||
`yarn test`
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import { Config } from "@jest/types"
|
||||
|
||||
const config: Config.InitialOptions = {
|
||||
preset: "ts-jest",
|
||||
setupFiles: ["./src/jest/jestSetup.ts"],
|
||||
setupFilesAfterEnv: ["./src/jest/jest.extends.ts"],
|
||||
testEnvironment: "node",
|
||||
transform: {
|
||||
"^.+\\.ts?$": "@swc/jest",
|
||||
},
|
||||
globalSetup: "./src/jest/globalSetup.ts",
|
||||
globalTeardown: "./src/jest/globalTeardown.ts",
|
||||
moduleNameMapper: {
|
||||
"@budibase/types": "<rootDir>/../packages/types/src",
|
||||
"@budibase/server": "<rootDir>/../packages/server/src",
|
||||
"@budibase/backend-core": "<rootDir>/../packages/backend-core/src",
|
||||
"@budibase/backend-core/(.*)": "<rootDir>/../packages/backend-core/$1",
|
||||
},
|
||||
}
|
||||
|
||||
export default config
|
|
@ -1,49 +0,0 @@
|
|||
{
|
||||
"name": "@budibase/qa-core",
|
||||
"email": "hi@budibase.com",
|
||||
"version": "0.0.1",
|
||||
"main": "index.js",
|
||||
"description": "Budibase Integration Test Suite",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/Budibase/budibase.git"
|
||||
},
|
||||
"scripts": {
|
||||
"setup": "yarn && node scripts/createEnv.js",
|
||||
"user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
|
||||
"test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
|
||||
"test:watch": "yarn run test --watch",
|
||||
"test:debug": "DEBUG=1 yarn run test",
|
||||
"test:notify": "node scripts/testResultsWebhook",
|
||||
"test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
|
||||
"test:cloud:qa": "yarn run test",
|
||||
"test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.licensing\\.",
|
||||
"serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
|
||||
"serve": "start-server-and-test dev:built http://localhost:4001/health",
|
||||
"dev:built": "cd ../ && DISABLE_RATE_LIMITING=1 yarn dev:built"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@budibase/types": "^2.3.17",
|
||||
"@swc/core": "1.3.71",
|
||||
"@swc/jest": "0.2.27",
|
||||
"@trendyol/jest-testcontainers": "2.1.1",
|
||||
"@types/jest": "29.5.3",
|
||||
"@types/node-fetch": "2.6.4",
|
||||
"chance": "1.1.8",
|
||||
"dotenv": "16.0.1",
|
||||
"jest": "29.7.0",
|
||||
"prettier": "2.7.1",
|
||||
"start-server-and-test": "1.14.0",
|
||||
"timekeeper": "2.2.0",
|
||||
"ts-jest": "29.1.1",
|
||||
"ts-node": "10.8.1",
|
||||
"tsconfig-paths": "4.0.0",
|
||||
"typescript": "5.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/backend-core": "^2.3.17",
|
||||
"form-data": "^4.0.0",
|
||||
"node-fetch": "2.6.7",
|
||||
"stripe": "^14.11.0"
|
||||
}
|
||||
}
|
|
@ -1,26 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
const path = require("path")
|
||||
const fs = require("fs")
|
||||
|
||||
function init() {
|
||||
const envFilePath = path.join(process.cwd(), ".env")
|
||||
if (!fs.existsSync(envFilePath)) {
|
||||
const envFileJson = {
|
||||
BUDIBASE_URL: "http://localhost:10000",
|
||||
ACCOUNT_PORTAL_URL: "http://localhost:10001",
|
||||
ACCOUNT_PORTAL_API_KEY: "budibase",
|
||||
BB_ADMIN_USER_EMAIL: "admin",
|
||||
BB_ADMIN_USER_PASSWORD: "admin",
|
||||
LOG_LEVEL: "info",
|
||||
JEST_TIMEOUT: "60000",
|
||||
DISABLE_PINO_LOGGER: "1",
|
||||
}
|
||||
let envFile = ""
|
||||
Object.keys(envFileJson).forEach(key => {
|
||||
envFile += `${key}=${envFileJson[key]}\n`
|
||||
})
|
||||
fs.writeFileSync(envFilePath, envFile)
|
||||
}
|
||||
}
|
||||
|
||||
init()
|
|
@ -1,49 +0,0 @@
|
|||
const dotenv = require("dotenv")
|
||||
const { join } = require("path")
|
||||
const fs = require("fs")
|
||||
const fetch = require("node-fetch")
|
||||
|
||||
function getVarFromDotEnv(path, varName) {
|
||||
const parsed = dotenv.parse(fs.readFileSync(path))
|
||||
return parsed[varName]
|
||||
}
|
||||
|
||||
async function createUser() {
|
||||
const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
|
||||
const qaCorePath = join(__dirname, "..", ".env")
|
||||
const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
|
||||
const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
|
||||
const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
|
||||
const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
|
||||
const resp = await fetch(`${url}/api/public/v1/users`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"x-budibase-api-key": apiKey,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
email: username,
|
||||
password,
|
||||
builder: {
|
||||
global: true,
|
||||
},
|
||||
admin: {
|
||||
global: true,
|
||||
},
|
||||
roles: {},
|
||||
}),
|
||||
})
|
||||
if (resp.status !== 200) {
|
||||
throw new Error(await resp.text())
|
||||
} else {
|
||||
return await resp.json()
|
||||
}
|
||||
}
|
||||
|
||||
createUser()
|
||||
.then(() => {
|
||||
console.log("User created - ready to use")
|
||||
})
|
||||
.catch(err => {
|
||||
console.error("Failed to create user - ", err)
|
||||
})
|
|
@ -1,130 +0,0 @@
|
|||
#!/usr/bin/env node
|
||||
|
||||
const fetch = require("node-fetch")
|
||||
const path = require("path")
|
||||
const fs = require("fs")
|
||||
|
||||
const WEBHOOK_URL = process.env.WEBHOOK_URL
|
||||
const GIT_SHA = process.env.GITHUB_SHA
|
||||
const GITHUB_ACTIONS_RUN_URL = process.env.GITHUB_ACTIONS_RUN_URL
|
||||
|
||||
async function generateReport() {
|
||||
// read the report file
|
||||
const REPORT_PATH = path.resolve(__dirname, "..", "testResults.json")
|
||||
const report = fs.readFileSync(REPORT_PATH, "utf-8")
|
||||
return JSON.parse(report)
|
||||
}
|
||||
|
||||
const env = process.argv.slice(2)[0]
|
||||
|
||||
if (!env) {
|
||||
throw new Error("environment argument is required")
|
||||
}
|
||||
|
||||
async function discordResultsNotification(report) {
|
||||
const {
|
||||
numTotalTestSuites,
|
||||
numTotalTests,
|
||||
numPassedTests,
|
||||
numPendingTests,
|
||||
numFailedTests,
|
||||
success,
|
||||
startTime,
|
||||
endTime,
|
||||
} = report
|
||||
|
||||
const OUTCOME = success ? "success" : "failure"
|
||||
|
||||
const options = {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
content: `**Tests Status**: ${OUTCOME}`,
|
||||
embeds: [
|
||||
{
|
||||
title: `Budi QA Bot - ${env}`,
|
||||
description: `API Integration Tests`,
|
||||
url: GITHUB_ACTIONS_RUN_URL,
|
||||
color: OUTCOME === "success" ? 3066993 : 15548997,
|
||||
timestamp: new Date(),
|
||||
footer: {
|
||||
icon_url: "http://bbui.budibase.com/budibase-logo.png",
|
||||
text: "Budibase QA Bot",
|
||||
},
|
||||
thumbnail: {
|
||||
url: "http://bbui.budibase.com/budibase-logo.png",
|
||||
},
|
||||
author: {
|
||||
name: "Budibase QA Bot",
|
||||
url: "https://discordapp.com",
|
||||
icon_url: "http://bbui.budibase.com/budibase-logo.png",
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: "Commit",
|
||||
value: `https://github.com/Budibase/budibase/commit/${GIT_SHA}`,
|
||||
},
|
||||
{
|
||||
name: "Github Actions Run URL",
|
||||
value: GITHUB_ACTIONS_RUN_URL || "None Supplied",
|
||||
},
|
||||
{
|
||||
name: "Test Suites",
|
||||
value: numTotalTestSuites,
|
||||
},
|
||||
{
|
||||
name: "Tests",
|
||||
value: numTotalTests,
|
||||
},
|
||||
{
|
||||
name: "Passed",
|
||||
value: numPassedTests,
|
||||
},
|
||||
{
|
||||
name: "Pending",
|
||||
value: numPendingTests,
|
||||
},
|
||||
{
|
||||
name: "Failures",
|
||||
value: numFailedTests,
|
||||
},
|
||||
{
|
||||
name: "Duration",
|
||||
value: endTime
|
||||
? `${(endTime - startTime) / 1000} Seconds`
|
||||
: "DNF",
|
||||
},
|
||||
{
|
||||
name: "Pass Percentage",
|
||||
value: Math.floor((numPassedTests / numTotalTests) * 100),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}),
|
||||
}
|
||||
|
||||
// Only post in discord when tests fail
|
||||
if (success) {
|
||||
return
|
||||
}
|
||||
|
||||
const response = await fetch(WEBHOOK_URL, options)
|
||||
|
||||
if (response.status >= 201) {
|
||||
const text = await response.text()
|
||||
console.error(
|
||||
`Error sending discord webhook. \nStatus: ${response.status}. \nResponse Body: ${text}. \nRequest Body: ${options.body}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
async function run() {
|
||||
const report = await generateReport()
|
||||
await discordResultsNotification(report)
|
||||
}
|
||||
|
||||
run()
|
|
@ -1,20 +0,0 @@
|
|||
import AccountInternalAPIClient from "./AccountInternalAPIClient"
|
||||
import { AccountAPI, LicenseAPI, AuthAPI, StripeAPI } from "./apis"
|
||||
import { State } from "../../types"
|
||||
|
||||
export default class AccountInternalAPI {
|
||||
client: AccountInternalAPIClient
|
||||
|
||||
auth: AuthAPI
|
||||
accounts: AccountAPI
|
||||
licenses: LicenseAPI
|
||||
stripe: StripeAPI
|
||||
|
||||
constructor(state: State) {
|
||||
this.client = new AccountInternalAPIClient(state)
|
||||
this.auth = new AuthAPI(this.client)
|
||||
this.accounts = new AccountAPI(this.client)
|
||||
this.licenses = new LicenseAPI(this.client)
|
||||
this.stripe = new StripeAPI(this.client)
|
||||
}
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
import fetch, { Response, HeadersInit } from "node-fetch"
|
||||
import env from "../../environment"
|
||||
import { State } from "../../types"
|
||||
import { Header } from "@budibase/backend-core"
|
||||
|
||||
type APIMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE"
|
||||
|
||||
interface ApiOptions {
|
||||
method?: APIMethod
|
||||
body?: object
|
||||
headers?: HeadersInit | undefined
|
||||
internal?: boolean
|
||||
}
|
||||
|
||||
export default class AccountInternalAPIClient {
|
||||
state: State
|
||||
host: string
|
||||
|
||||
constructor(state: State) {
|
||||
if (!env.ACCOUNT_PORTAL_URL) {
|
||||
throw new Error("Must set ACCOUNT_PORTAL_URL env var")
|
||||
}
|
||||
if (!env.ACCOUNT_PORTAL_API_KEY) {
|
||||
throw new Error("Must set ACCOUNT_PORTAL_API_KEY env var")
|
||||
}
|
||||
this.host = `${env.ACCOUNT_PORTAL_URL}`
|
||||
this.state = state
|
||||
}
|
||||
|
||||
apiCall =
|
||||
(method: APIMethod) =>
|
||||
async (url = "", options: ApiOptions = {}): Promise<[Response, any]> => {
|
||||
const requestOptions = {
|
||||
method,
|
||||
body: JSON.stringify(options.body),
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
cookie: this.state.cookie,
|
||||
redirect: "follow",
|
||||
follow: 20,
|
||||
...options.headers,
|
||||
},
|
||||
credentials: "include",
|
||||
}
|
||||
|
||||
if (options.internal) {
|
||||
requestOptions.headers = {
|
||||
...requestOptions.headers,
|
||||
...{ [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY },
|
||||
cookie: "",
|
||||
}
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
const response = await fetch(`${this.host}${url}`, requestOptions)
|
||||
|
||||
let body: any
|
||||
const contentType = response.headers.get("content-type")
|
||||
if (contentType && contentType.includes("application/json")) {
|
||||
body = await response.json()
|
||||
} else {
|
||||
body = await response.text()
|
||||
}
|
||||
|
||||
const data = {
|
||||
request: requestOptions.body,
|
||||
response: body,
|
||||
}
|
||||
const message = `${method} ${url} - ${response.status}`
|
||||
|
||||
const isDebug = process.env.LOG_LEVEL === "debug"
|
||||
if (response.status > 499) {
|
||||
console.error(message, data)
|
||||
} else if (response.status >= 400) {
|
||||
console.warn(message, data)
|
||||
} else if (isDebug) {
|
||||
console.debug(message, data)
|
||||
}
|
||||
|
||||
return [response, body]
|
||||
}
|
||||
|
||||
post = this.apiCall("POST")
|
||||
get = this.apiCall("GET")
|
||||
patch = this.apiCall("PATCH")
|
||||
del = this.apiCall("DELETE")
|
||||
put = this.apiCall("PUT")
|
||||
}
|
|
@ -1,123 +0,0 @@
|
|||
import { Response } from "node-fetch"
|
||||
import {
|
||||
Account,
|
||||
CreateAccountRequest,
|
||||
SearchAccountsRequest,
|
||||
SearchAccountsResponse,
|
||||
} from "@budibase/types"
|
||||
import AccountInternalAPIClient from "../AccountInternalAPIClient"
|
||||
import { APIRequestOpts } from "../../../types"
|
||||
import { Header } from "@budibase/backend-core"
|
||||
import BaseAPI from "./BaseAPI"
|
||||
|
||||
export default class AccountAPI extends BaseAPI {
|
||||
client: AccountInternalAPIClient
|
||||
|
||||
constructor(client: AccountInternalAPIClient) {
|
||||
super()
|
||||
this.client = client
|
||||
}
|
||||
|
||||
async validateEmail(email: string, opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/accounts/validate/email`, {
|
||||
body: { email },
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async validateTenantId(
|
||||
tenantId: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/accounts/validate/tenantId`, {
|
||||
body: { tenantId },
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async create(
|
||||
body: CreateAccountRequest,
|
||||
opts: APIRequestOpts & { autoVerify: boolean } = {
|
||||
status: 201,
|
||||
autoVerify: false,
|
||||
}
|
||||
): Promise<[Response, Account]> {
|
||||
return this.doRequest(() => {
|
||||
const headers = {
|
||||
"no-verify": opts.autoVerify ? "1" : "0",
|
||||
}
|
||||
return this.client.post(`/api/accounts`, {
|
||||
body,
|
||||
headers,
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async delete(accountID: string, opts: APIRequestOpts = { status: 204 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.del(`/api/accounts/${accountID}`, {
|
||||
internal: true,
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async deleteCurrentAccount(opts: APIRequestOpts = { status: 204 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.del(`/api/accounts`)
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async verifyAccount(
|
||||
verificationCode: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/accounts/verify`, {
|
||||
body: { verificationCode },
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async sendVerificationEmail(
|
||||
email: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
): Promise<[Response, string]> {
|
||||
return this.doRequest(async () => {
|
||||
const [response] = await this.client.post(`/api/accounts/verify/send`, {
|
||||
body: { email },
|
||||
headers: {
|
||||
[Header.RETURN_VERIFICATION_CODE]: "1",
|
||||
},
|
||||
})
|
||||
const code = response.headers.get(Header.VERIFICATION_CODE)
|
||||
return [response, code]
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async search(
|
||||
searchType: string,
|
||||
search: "email" | "tenantId",
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
): Promise<[Response, SearchAccountsResponse]> {
|
||||
return this.doRequest(() => {
|
||||
let body: SearchAccountsRequest = {}
|
||||
if (search === "email") {
|
||||
body.email = searchType
|
||||
} else if (search === "tenantId") {
|
||||
body.tenantId = searchType
|
||||
}
|
||||
return this.client.post(`/api/accounts/search`, {
|
||||
body,
|
||||
internal: true,
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async self(opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.get(`/api/auth/self`)
|
||||
}, opts)
|
||||
}
|
||||
}
|
|
@ -1,68 +0,0 @@
|
|||
import { Response } from "node-fetch"
|
||||
import AccountInternalAPIClient from "../AccountInternalAPIClient"
|
||||
import { APIRequestOpts } from "../../../types"
|
||||
import BaseAPI from "./BaseAPI"
|
||||
import { Header } from "@budibase/backend-core"
|
||||
|
||||
export default class AuthAPI extends BaseAPI {
|
||||
client: AccountInternalAPIClient
|
||||
|
||||
constructor(client: AccountInternalAPIClient) {
|
||||
super()
|
||||
this.client = client
|
||||
}
|
||||
|
||||
async login(
|
||||
email: string,
|
||||
password: string,
|
||||
opts: APIRequestOpts = { doExpect: true, status: 200 }
|
||||
): Promise<[Response, string]> {
|
||||
return this.doRequest(async () => {
|
||||
const [res] = await this.client.post(`/api/auth/login`, {
|
||||
body: {
|
||||
email: email,
|
||||
password: password,
|
||||
},
|
||||
})
|
||||
const cookie = res.headers.get("set-cookie")
|
||||
return [res, cookie]
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async logout(opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/auth/logout`)
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async resetPassword(
|
||||
email: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
): Promise<[Response, string]> {
|
||||
return this.doRequest(async () => {
|
||||
const [response] = await this.client.post(`/api/auth/reset`, {
|
||||
body: { email },
|
||||
headers: {
|
||||
[Header.RETURN_RESET_PASSWORD_CODE]: "1",
|
||||
},
|
||||
})
|
||||
const code = response.headers.get(Header.RESET_PASSWORD_CODE)
|
||||
return [response, code]
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async resetPasswordUpdate(
|
||||
resetCode: string,
|
||||
password: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/auth/reset/update`, {
|
||||
body: {
|
||||
resetCode: resetCode,
|
||||
password: password,
|
||||
},
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
}
|
|
@ -1,20 +0,0 @@
|
|||
import { Response } from "node-fetch"
|
||||
import { APIRequestOpts } from "../../../types"
|
||||
|
||||
export default class BaseAPI {
|
||||
async doRequest(
|
||||
request: () => Promise<[Response, any]>,
|
||||
opts: APIRequestOpts
|
||||
): Promise<[Response, any]> {
|
||||
const [response, body] = await request()
|
||||
|
||||
// do expect on by default
|
||||
if (opts.doExpect === undefined) {
|
||||
opts.doExpect = true
|
||||
}
|
||||
if (opts.doExpect && opts.status) {
|
||||
expect(response).toHaveStatusCode(opts.status)
|
||||
}
|
||||
return [response, body]
|
||||
}
|
||||
}
|
|
@ -1,140 +0,0 @@
|
|||
import AccountInternalAPIClient from "../AccountInternalAPIClient"
|
||||
import {
|
||||
Account,
|
||||
CreateOfflineLicenseRequest,
|
||||
GetLicenseKeyResponse,
|
||||
GetOfflineLicenseResponse,
|
||||
UpdateLicenseRequest,
|
||||
} from "@budibase/types"
|
||||
import { Response } from "node-fetch"
|
||||
import BaseAPI from "./BaseAPI"
|
||||
import { APIRequestOpts } from "../../../types"
|
||||
|
||||
export default class LicenseAPI extends BaseAPI {
|
||||
client: AccountInternalAPIClient
|
||||
constructor(client: AccountInternalAPIClient) {
|
||||
super()
|
||||
this.client = client
|
||||
}
|
||||
async updateLicense(
|
||||
accountId: string,
|
||||
body: UpdateLicenseRequest,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
): Promise<[Response, Account]> {
|
||||
return this.doRequest(() => {
|
||||
return this.client.put(`/api/accounts/${accountId}/license`, {
|
||||
body,
|
||||
internal: true,
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
// TODO: Better approach for setting tenant id header
|
||||
async createOfflineLicense(
|
||||
accountId: string,
|
||||
tenantId: string,
|
||||
body: CreateOfflineLicenseRequest,
|
||||
opts: { status?: number } = {}
|
||||
): Promise<Response> {
|
||||
const [response, json] = await this.client.post(
|
||||
`/api/internal/accounts/${accountId}/license/offline`,
|
||||
{
|
||||
body,
|
||||
internal: true,
|
||||
headers: {
|
||||
"x-budibase-tenant-id": tenantId,
|
||||
},
|
||||
}
|
||||
)
|
||||
expect(response.status).toBe(opts.status ? opts.status : 201)
|
||||
return response
|
||||
}
|
||||
async getOfflineLicense(
|
||||
accountId: string,
|
||||
tenantId: string,
|
||||
opts: { status?: number } = {}
|
||||
): Promise<[Response, GetOfflineLicenseResponse]> {
|
||||
const [response, json] = await this.client.get(
|
||||
`/api/internal/accounts/${accountId}/license/offline`,
|
||||
{
|
||||
internal: true,
|
||||
headers: {
|
||||
"x-budibase-tenant-id": tenantId,
|
||||
},
|
||||
}
|
||||
)
|
||||
expect(response.status).toBe(opts.status ? opts.status : 200)
|
||||
return [response, json]
|
||||
}
|
||||
async getLicenseKey(
|
||||
opts: { status?: number } = {}
|
||||
): Promise<[Response, GetLicenseKeyResponse]> {
|
||||
const [response, json] = await this.client.get(`/api/license/key`)
|
||||
expect(response.status).toBe(opts.status || 200)
|
||||
return [response, json]
|
||||
}
|
||||
async activateLicense(
|
||||
apiKey: string,
|
||||
tenantId: string,
|
||||
licenseKey: string,
|
||||
opts: APIRequestOpts = { status: 200 }
|
||||
) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/license/activate`, {
|
||||
body: {
|
||||
apiKey: apiKey,
|
||||
tenantId: tenantId,
|
||||
licenseKey: licenseKey,
|
||||
},
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
async regenerateLicenseKey(opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.post(`/api/license/key/regenerate`, {})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async getPlans(opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.get(`/api/plans`)
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async updatePlan(priceId: string, opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.put(`/api/license/plan`, {
|
||||
body: { priceId },
|
||||
})
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async refreshAccountLicense(
|
||||
accountId: string,
|
||||
opts: { status?: number } = {}
|
||||
): Promise<Response> {
|
||||
const [response, json] = await this.client.post(
|
||||
`/api/accounts/${accountId}/license/refresh`,
|
||||
{
|
||||
internal: true,
|
||||
}
|
||||
)
|
||||
expect(response.status).toBe(opts.status ? opts.status : 201)
|
||||
return response
|
||||
}
|
||||
|
||||
async getLicenseUsage(opts: APIRequestOpts = { status: 200 }) {
|
||||
return this.doRequest(() => {
|
||||
return this.client.get(`/api/license/usage`)
|
||||
}, opts)
|
||||
}
|
||||
|
||||
async licenseUsageTriggered(
|
||||
opts: { status?: number } = {}
|
||||
): Promise<Response> {
|
||||
const [response, json] = await this.client.post(
|
||||
`/api/license/usage/triggered`
|
||||
)
|
||||
expect(response.status).toBe(opts.status ? opts.status : 201)
|
||||
return response
|
||||
}
|
||||
}
|
|
@ -1,74 +0,0 @@
|
|||
import AccountInternalAPIClient from "../AccountInternalAPIClient"
|
||||
import BaseAPI from "./BaseAPI"
|
||||
import { APIRequestOpts } from "../../../types"
|
||||
|
||||
export default class StripeAPI extends BaseAPI {
|
||||
client: AccountInternalAPIClient
|
||||
|
||||
constructor(client: AccountInternalAPIClient) {
|
||||
super()
|
||||
this.client = client
|
||||
}

  async createCheckoutSession(
    price: object,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/checkout-session`, {
        body: { prices: [price] },
      })
    }, opts)
  }

  async checkoutSuccess(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/checkout-success`)
    }, opts)
  }

  async createPortalSession(
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/portal-session`, {
        body: { stripeCustomerId },
      })
    }, opts)
  }

  async linkStripeCustomer(
    accountId: string,
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/link`, {
        body: {
          accountId,
          stripeCustomerId,
        },
        internal: true,
      })
    }, opts)
  }

  async getInvoices(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/invoices`)
    }, opts)
  }

  async getUpcomingInvoice(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/upcoming-invoice`)
    }, opts)
  }

  async getStripeCustomers(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/customers`)
    }, opts)
  }
}

@ -1,4 +0,0 @@
export { default as AuthAPI } from "./AuthAPI"
export { default as AccountAPI } from "./AccountAPI"
export { default as LicenseAPI } from "./LicenseAPI"
export { default as StripeAPI } from "./StripeAPI"

@ -1 +0,0 @@
export { default as AccountInternalAPI } from "./AccountInternalAPI"

@ -1,29 +0,0 @@
import { AccountInternalAPI } from "../api"
import { BudibaseTestConfiguration } from "../../shared"

export default class TestConfiguration<T> extends BudibaseTestConfiguration {
  // apis
  api: AccountInternalAPI

  context: T

  constructor() {
    super()
    this.api = new AccountInternalAPI(this.state)
    this.context = <T>{}
  }

  async beforeAll() {
    await super.beforeAll()
    await this.setApiKey()
  }

  async afterAll() {
    await super.afterAll()
  }
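
  // Fetches an API key via the builder's self endpoint and stores it on the
  // shared state, presumably for authenticating later account-portal calls.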
  async setApiKey() {
    const apiKeyResponse = await this.internalApi.self.getApiKey()
    this.state.apiKey = apiKeyResponse.apiKey
  }
}

@ -1,24 +0,0 @@
import { generator } from "../../shared"
import { Hosting, CreateAccountRequest } from "@budibase/types"

// TODO: Refactor me to central location
export const generateAccount = (
  partial: Partial<CreateAccountRequest>
): CreateAccountRequest => {
  const uuid = generator.guid()

  const email = `${uuid}@budibase.com`
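  // Dashes are stripped from the GUID, presumably because tenant IDs must be alphanumeric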
  const tenant = `tenant${uuid.replace(/-/g, "")}`

  return {
    email,
    hosting: Hosting.CLOUD,
    name: email,
    password: uuid,
    profession: "software_engineer",
    size: "10+",
    tenantId: tenant,
    tenantName: tenant,
    ...partial,
  }
}

@ -1 +0,0 @@
export * as accounts from "./accounts"

@ -1 +0,0 @@
export * from "./api"

@ -1,32 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Account Internal Operations", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs account deletion by ID", async () => {
    // Deleting by unknown id doesn't work
    const accountId = generator.guid()
    await config.api.accounts.delete(accountId, { status: 404 })

    // Create new account
    const [_, account] = await config.api.accounts.create({
      ...fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      }),
    })

    // New account can be deleted
    await config.api.accounts.delete(account.accountId)
  })
})

@ -1,102 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Accounts", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs signup and deletion flow", async () => {
    await config.doInNewState(async () => {
      // Create account
      const createAccountRequest = fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      })
      const email = createAccountRequest.email
      const tenantId = createAccountRequest.tenantId

      // Validation - email and tenant ID allowed
      await config.api.accounts.validateEmail(email)
      await config.api.accounts.validateTenantId(tenantId)

      // Create unverified account
      await config.api.accounts.create(createAccountRequest)

      // Validation - email and tenant ID no longer valid
      await config.api.accounts.validateEmail(email, { status: 400 })
      await config.api.accounts.validateTenantId(tenantId, { status: 400 })

      // Attempt to log in using unverified account
      await config.loginAsAccount(createAccountRequest, { status: 400 })

      // Re-send verification email to get access to code
      const [_, code] = await config.accountsApi.accounts.sendVerificationEmail(
        email
      )

      // Send the verification request
      await config.accountsApi.accounts.verifyAccount(code!)

      // Verify self response is unauthorized
      await config.api.accounts.self({ status: 403 })

      // Can now log in to the account
      await config.loginAsAccount(createAccountRequest)

      // Verify self response matches account
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.email).toBe(email)

      // Delete account
      await config.api.accounts.deleteCurrentAccount()

      // Can't log in
      await config.loginAsAccount(createAccountRequest, { status: 403 })
    })
  })

  describe("Searching accounts", () => {
    it("searches by tenant ID", async () => {
      const tenantId = generator.string()

      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(
        tenantId,
        "tenantId"
      )
      expect(emptyBody.length).toBe(0)

      // Hit result
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.tenantId!,
        "tenantId"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].tenantId).toBe(config.state.tenantId)
    })

    it("searches by email", async () => {
      const email = generator.email({ domain: "example.com" })

      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(email, "email")
      expect(emptyBody.length).toBe(0)

      // Hit result
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.email!,
        "email"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].email).toBe(config.state.email)
    })
  })
})

@ -1,46 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Password Management", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs password reset flow", async () => {
    // Create account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    await config.api.accounts.create(createAccountRequest, { autoVerify: true })

    // Request password reset to get code
    const [_, code] = await config.api.auth.resetPassword(
      createAccountRequest.email
    )

    // Change password using code
    const password = generator.string()
    await config.api.auth.resetPasswordUpdate(code, password)

    // Login using the new password
    await config.api.auth.login(createAccountRequest.email, password)

    // Logout of account
    await config.api.auth.logout()

    // Cannot log in using old password
    await config.api.auth.login(
      createAccountRequest.email,
      createAccountRequest.password,
      { status: 403 }
    )
  })
})

@ -1,68 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Feature, Hosting } from "@budibase/types"

describe("license activation", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("creates, activates and deletes online license - self host", async () => {
    // Remove existing license key
    await config.internalApi.license.deleteLicenseKey()

    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })

    // Create self host account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })

    let licenseKey = ""
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      // Retrieve license key
      const [res, body] = await config.accountsApi.licenses.getLicenseKey()
      licenseKey = body.licenseKey
    })

    const accountId = account.accountId!

    // Update license to have paid feature
    const [res, acc] = await config.accountsApi.licenses.updateLicense(
      accountId,
      {
        overrides: {
          features: [Feature.APP_BACKUPS],
        },
      }
    )

    // Activate license key
    await config.internalApi.license.activateLicenseKey({ licenseKey })

    // Verify license updated with new feature
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, body] = await config.api.accounts.self()
      expect(body.license.features[0]).toBe("appBackups")
    })

    // Remove license key
    await config.internalApi.license.deleteLicenseKey()

    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })
  })
})

@ -1,116 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, PlanType } from "@budibase/types"

const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY)

describe("license management", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("retrieves plans, creates checkout session, and updates license", async () => {
    // Create cloud account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })

    // Self response has free license
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.license.plan.type).toBe(PlanType.FREE)
    })

    // Retrieve plans
    const [plansRes, planBody] = await config.api.licenses.getPlans()

    // Select priceId from premium plan
    let premiumPrice: any = null
    let businessPriceId = ""
    for (const plan of planBody) {
      if (plan.type === PlanType.PREMIUM_PLUS) {
        premiumPrice = plan.prices[0]
      }
      if (plan.type === PlanType.ENTERPRISE_BASIC) {
        businessPriceId = plan.prices[0].priceId
      }
    }

    // Create checkout session for price
    const checkoutSessionRes = await config.api.stripe.createCheckoutSession({
      id: premiumPrice.priceId,
      type: premiumPrice.type,
    })
    const checkoutSessionUrl = checkoutSessionRes[1].url
    expect(checkoutSessionUrl).toContain("checkout.stripe.com")
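
    // The hosted checkout UI can't be driven from a test, so the steps below
    // recreate what checkout would do through the Stripe API directly:
    // customer, test card, then a subscription to the premium price.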
    // Create stripe customer
    const customer = await stripe.customers.create({
      email: createAccountRequest.email,
    })

    // Create payment method
    const paymentMethod = await stripe.paymentMethods.create({
      type: "card",
      card: {
        token: "tok_visa", // Test Visa Card
      },
    })

    // Attach payment method to customer
    await stripe.paymentMethods.attach(paymentMethod.id, {
      customer: customer.id,
    })

    // Set the card as the customer's default payment method
    await stripe.customers.update(customer.id, {
      invoice_settings: {
        default_payment_method: paymentMethod.id,
      },
    })

    // Create subscription for premium plan
    const subscription = await stripe.subscriptions.create({
      customer: customer.id,
      items: [
        {
          price: premiumPrice.priceId,
          quantity: 1,
        },
      ],
      default_payment_method: paymentMethod.id,
      collection_method: "charge_automatically",
    })

    await config.doInNewState(async () => {
      // License updated from Free to Premium
      await config.loginAsAccount(createAccountRequest)
      await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
      const [_, selfBodyPremium] = await config.api.accounts.self()
      expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM_PLUS)

      // Create portal session - Check URL
      const [portalRes, portalSessionBody] =
        await config.api.stripe.createPortalSession(customer.id)
      expect(portalSessionBody.url).toContain("billing.stripe.com")

      // Update subscription from premium to business license
      await config.api.licenses.updatePlan(businessPriceId)

      // License updated to Business
      const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
      expect(selfBodyBusiness.license.plan.type).toBe(PlanType.ENTERPRISE_BASIC)
    })
  })
})

@ -1,79 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, Feature } from "@budibase/types"

describe("offline", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  // TODO: Currently requires a self host install + account portal
  // Ignored until we set this up
  it.skip("creates, activates and deletes offline license", async () => {
    // installation: Delete any token
    await config.internalApi.license.deleteOfflineLicenseToken()

    // installation: Assert token not found
    let [getTokenRes] = await config.internalApi.license.getOfflineLicenseToken(
      { status: 404 }
    )

    // installation: Retrieve identifier
    const [getIdentifierRes, identifier] =
      await config.internalApi.license.getOfflineIdentifier()

    // account-portal: Create self-host account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest)
    const accountId = account.accountId!
    const tenantId = account.tenantId!

    // account-portal: Enable feature on license
    await config.accountsApi.licenses.updateLicense(accountId, {
      overrides: {
        features: [Feature.OFFLINE],
      },
    })

    // account-portal: Create offline token, expiring tomorrow
    const expireAt = new Date()
    expireAt.setDate(new Date().getDate() + 1)
    await config.accountsApi.licenses.createOfflineLicense(
      accountId,
      tenantId,
      {
        expireAt: expireAt.toISOString(),
        installationIdentifierBase64: identifier.identifierBase64,
      }
    )

    // account-portal: Retrieve offline token
    const [getLicenseRes, offlineLicense] =
      await config.accountsApi.licenses.getOfflineLicense(accountId, tenantId)

    // installation: Activate offline token
    await config.internalApi.license.activateOfflineLicenseToken({
      offlineLicenseToken: offlineLicense.offlineLicenseToken,
    })

    // installation: Assert token found
    await config.internalApi.license.getOfflineLicenseToken()

    // TODO: Assert on license for current user

    // installation: Remove the token
    await config.internalApi.license.deleteOfflineLicenseToken()

    // installation: Assert token not found
    await config.internalApi.license.getOfflineLicenseToken({ status: 404 })
  })
})

@ -1,34 +0,0 @@
import { join } from "path"

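// Guard intended to ensure the .env file is only loaded once, no matter how
// many times this module is imported by the test suite.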
let LOADED = false
if (!LOADED) {
  require("dotenv").config({
    path: join(__dirname, "..", ".env"),
  })
  LOADED = true
}

const env = {
  BUDIBASE_URL: process.env.BUDIBASE_URL,
  ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
  ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
  BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
  BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
  POSTGRES_HOST: process.env.POSTGRES_HOST,
  POSTGRES_PORT: process.env.POSTGRES_PORT,
  POSTGRES_DB: process.env.POSTGRES_DB,
  POSTGRES_USER: process.env.POSTGRES_USER,
  POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD,
  MONGODB_CONNECTION_STRING: process.env.MONGODB_CONNECTION_STRING,
  MONGODB_DB: process.env.MONGODB_DB,
  REST_API_BASE_URL: process.env.REST_API_BASE_URL,
  REST_API_KEY: process.env.REST_API_KEY,
  MARIADB_HOST: process.env.MARIADB_HOST,
  MARIADB_PORT: process.env.MARIADB_PORT,
  MARIADB_DB: process.env.MARIADB_DB,
  MARIADB_USER: process.env.MARIADB_USER,
  MARIADB_PASSWORD: process.env.MARIADB_PASSWORD,
  STRIPE_SECRET_KEY: process.env.STRIPE_SECRET_KEY,
}

export = env

@ -1,112 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("getExternalSchema", () => {
  describe("mssql", () => {
    let config: any

    beforeAll(async () => {
      const password = "Str0Ng_p@ssW0rd!"
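      // The container only reports healthy once sqlcmd can run a query, so the
      // wait strategy below blocks until SQL Server actually accepts logins.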
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(1433)
      config = {
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      }
    })

    it("can export an empty database", async () => {
      const integration = new mssql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`""`)
    })

    it("can export a database with tables", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL
          );

          CREATE TABLE products (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            price DECIMAL(10, 2) NOT NULL
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })

    it("does not export any data", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
          INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })
  })
})

@ -1,106 +0,0 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"

describe("getExternalSchema", () => {
  describe("mysql", () => {
    let config: any

    beforeAll(async () => {
      const container = await new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnv("MYSQL_ROOT_PASSWORD", "admin")
        .withEnv("MYSQL_DATABASE", "db")
        .withEnv("MYSQL_USER", "user")
        .withEnv("MYSQL_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(3306)
      config = {
        host,
        port,
        user: "user",
        database: "db",
        password: "password",
        rejectUnauthorized: true,
      }
    })

    it("can export an empty database", async () => {
      const integration = new mysql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(
        `"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
      )
    })

    it("can export a database with tables", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL,
            PRIMARY KEY (id)
          );


          CREATE TABLE products (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            price DECIMAL,
            PRIMARY KEY (id)
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })

    it("does not export any data", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
      })

      await integration.internalQuery({
        sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })
  })
})

@ -1,376 +0,0 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"

jest.unmock("pg")

describe("getExternalSchema", () => {
  describe("postgres", () => {
    let config: any

    // Remove versioning from the outputs to prevent failures when running different pg_dump versions
    function stripResultsVersions(sql: string) {
      const result = sql
        .replace(/\n[^\n]+Dumped from database version[^\n]+\n/, "")
        .replace(/\n[^\n]+Dumped by pg_dump version[^\n]+\n/, "")
        .toString()
      return result
    }

    beforeAll(async () => {
      const container = await new GenericContainer("postgres:16.1-bullseye")
        .withExposedPorts(5432)
        .withEnv("POSTGRES_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(5432)

      config = {
        host,
        port,
        database: "postgres",
        user: "postgres",
        password: "password",
        schema: "public",
        ssl: false,
        rejectUnauthorized: false,
      }
    })

    it("can export an empty database", async () => {
      const integration = new postgres.integration(config)
      const result = await integration.getExternalSchema()

      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("can export a database with tables", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery(
        {
          sql: `
            CREATE TABLE "users" (
              "id" SERIAL,
              "name" VARCHAR(100) NOT NULL,
              "role" VARCHAR(15) NOT NULL,
              PRIMARY KEY ("id")
            );
            CREATE TABLE "products" (
              "id" SERIAL,
              "name" VARCHAR(100) NOT NULL,
              "price" DECIMAL NOT NULL,
              "owner" INTEGER NULL,
              PRIMARY KEY ("id")
            );
            ALTER TABLE "products" ADD CONSTRAINT "fk_owner" FOREIGN KEY ("owner") REFERENCES "users" ("id");`,
        },
        false
      )

      const result = await integration.getExternalSchema()
      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL,
            owner integer
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("does not export any data", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery(
        {
          sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
            INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
        },
        false
      )

      const result = await integration.getExternalSchema()
      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL,
            owner integer
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })
  })
})

@ -1,77 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import arangodb from "../../../../packages/server/src/integrations/arangodb"
import { generator } from "../../shared"

jest.unmock("arangojs")

describe("datasource validators", () => {
  describe("arangodb", () => {
    let connectionSettings: {
      user: string
      password: string
      url: string
    }

    beforeAll(async () => {
      const user = "root"
      const password = generator.hash()
      const container = await new GenericContainer("arangodb")
        .withExposedPorts(8529)
        .withEnv("ARANGO_ROOT_PASSWORD", password)
        .withWaitStrategy(
          Wait.forLogMessage("is ready for business. Have fun!")
        )
        .start()

      connectionSettings = {
        user,
        password,
        url: `http://${container.getContainerIpAddress()}:${container.getMappedPort(
          8529
        )}`,
      }
    })

    it("test valid connection string", async () => {
      const integration = new arangodb.integration({
        url: connectionSettings.url,
        username: connectionSettings.user,
        password: connectionSettings.password,
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong password", async () => {
      const integration = new arangodb.integration({
        url: connectionSettings.url,
        username: connectionSettings.user,
        password: "wrong",
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "not authorized to execute this request",
      })
    })

    it("test wrong url", async () => {
      const integration = new arangodb.integration({
        url: "http://not.here",
        username: connectionSettings.user,
        password: connectionSettings.password,
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "getaddrinfo ENOTFOUND not.here",
      })
    })
  })
})

@ -1,67 +0,0 @@
import { GenericContainer } from "testcontainers"

import couchdb from "../../../../packages/server/src/integrations/couchdb"
import { generator } from "../../shared"

describe("datasource validators", () => {
  describe("couchdb", () => {
    let url: string

    beforeAll(async () => {
      const user = generator.first()
      const password = generator.hash()

      const container = await new GenericContainer("budibase/couchdb")
        .withExposedPorts(5984)
        .withEnv("COUCHDB_USER", user)
        .withEnv("COUCHDB_PASSWORD", password)
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(5984)

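      // CouchDB doesn't create databases on startup, so pre-create "db" with a
      // curl call against its HTTP API inside the container.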
      await container.exec([
        `curl`,
        `-u`,
        `${user}:${password}`,
        `-X`,
        `PUT`,
        `localhost:5984/db`,
      ])
      url = `http://${user}:${password}@${host}:${port}`
    })

    it("test valid connection string", async () => {
      const integration = new couchdb.integration({
        url,
        database: "db",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid database", async () => {
      const integration = new couchdb.integration({
        url,
        database: "random_db",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
      })
    })

    it("test invalid url", async () => {
      const integration = new couchdb.integration({
        url: "http://invalid:123",
        database: "any",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error:
          "request to http://invalid:123/any failed, reason: getaddrinfo ENOTFOUND invalid",
      })
    })
  })
})

@ -1,63 +0,0 @@
import { GenericContainer } from "testcontainers"
import { env } from "@budibase/backend-core"

import dynamodb from "../../../../packages/server/src/integrations/dynamodb"
import { generator } from "../../shared"

jest.unmock("aws-sdk")

describe("datasource validators", () => {
  describe("dynamodb", () => {
    let connectionSettings: {
      user: string
      password: string
      url: string
    }

    beforeAll(async () => {
      const user = "root"
      const password = generator.hash()
      const container = await new GenericContainer("amazon/dynamodb-local")
        .withExposedPorts(8000)
        .start()

      connectionSettings = {
        user,
        password,
        url: `http://${container.getContainerIpAddress()}:${container.getMappedPort(
          8000
        )}`,
      }
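      // The AWS SDK requires credentials to be present even when pointing at
      // dynamodb-local, so any non-empty values are good enough here.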
env._set("AWS_ACCESS_KEY_ID", "mockedkey")
|
||||
env._set("AWS_SECRET_ACCESS_KEY", "mockedsecret")
|
||||
})
|
||||
|
||||
it("test valid connection string", async () => {
|
||||
const integration = new dynamodb.integration({
|
||||
endpoint: connectionSettings.url,
|
||||
region: "",
|
||||
accessKeyId: "",
|
||||
secretAccessKey: "",
|
||||
})
|
||||
|
||||
const result = await integration.testConnection()
|
||||
expect(result).toEqual({ connected: true })
|
||||
})
|
||||
|
||||
it("test wrong endpoint", async () => {
|
||||
const integration = new dynamodb.integration({
|
||||
endpoint: "http://wrong.url:2880",
|
||||
region: "",
|
||||
accessKeyId: "",
|
||||
secretAccessKey: "",
|
||||
})
|
||||
|
||||
const result = await integration.testConnection()
|
||||
expect(result).toEqual({
|
||||
connected: false,
|
||||
error:
|
||||
"Inaccessible host: `wrong.url' at port `undefined'. This service may not be available in the `eu-west-1' region.",
|
||||
})
|
||||
})
|
||||
})
|
||||
})

@ -1,34 +0,0 @@
import { ElasticsearchContainer } from "testcontainers"
import elastic from "../../../../packages/server/src/integrations/elasticsearch"

jest.unmock("@elastic/elasticsearch")

describe("datasource validators", () => {
  describe("elastic search", () => {
    let url: string

    beforeAll(async () => {
      const container = await new ElasticsearchContainer().start()
      url = container.getHttpUrl()
    })

    it("test valid connection string", async () => {
      const integration = new elastic.integration({
        url,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong connection string", async () => {
      const integration = new elastic.integration({
        url: `http://localhost:5656`,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "connect ECONNREFUSED 127.0.0.1:5656",
      })
    })
  })
})

@ -1,100 +0,0 @@
import { GenericContainer } from "testcontainers"
import mongo from "../../../../packages/server/src/integrations/mongodb"
import { generator } from "../../shared"

jest.unmock("mongodb")

describe("datasource validators", () => {
  describe("mongo", () => {
    let connectionSettings: {
      user: string
      password: string
      host: string
      port: number
    }

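    // Builds a mongodb:// URI from the container settings; tests pass partial
    // overrides to deliberately break one field at a time.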
    function getConnectionString(
      settings: Partial<typeof connectionSettings> = {}
    ) {
      const { user, password, host, port } = {
        ...connectionSettings,
        ...settings,
      }
      return `mongodb://${user}:${password}@${host}:${port}`
    }

    beforeAll(async () => {
      const user = generator.name()
      const password = generator.hash()
      const container = await new GenericContainer("mongo:7.0-jammy")
        .withExposedPorts(27017)
        .withEnv("MONGO_INITDB_ROOT_USERNAME", user)
        .withEnv("MONGO_INITDB_ROOT_PASSWORD", password)
        .start()

      connectionSettings = {
        user,
        password,
        host: container.getContainerIpAddress(),
        port: container.getMappedPort(27017),
      }
    })

    it("test valid connection string", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString(),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid password", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ password: "wrong" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Authentication failed.",
      })
    })

    it("test invalid username", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ user: "wrong" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Authentication failed.",
      })
    })

    it("test invalid connection", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ host: "http://nothinghere" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "getaddrinfo ENOTFOUND http",
      })
    })
  })
})

@ -1,65 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"

import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("datasource validators", () => {
  describe("mssql", () => {
    let host: string, port: number

    const password = "Str0Ng_p@ssW0rd!"

    beforeAll(async () => {
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server:2022-latest"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(1433)
    })

    it("test valid connection string", async () => {
      const integration = new mssql.integration({
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid password", async () => {
      const integration = new mssql.integration({
        user: "sa",
        password: "wrong_pwd",
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Login failed for user 'sa'.",
      })
    })
  })
})

@ -1,68 +0,0 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"

describe("datasource validators", () => {
  describe("mysql", () => {
    let host: string
    let port: number

    beforeAll(async () => {
      const container = await new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnv("MYSQL_ROOT_PASSWORD", "admin")
        .withEnv("MYSQL_DATABASE", "db")
        .withEnv("MYSQL_USER", "user")
        .withEnv("MYSQL_PASSWORD", "password")
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(3306)
    })

    it("test valid connection string", async () => {
      const integration = new mysql.integration({
        host,
        port,
        user: "user",
        database: "db",
        password: "password",
        rejectUnauthorized: true,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid database", async () => {
      const integration = new mysql.integration({
        host,
        port,
        user: "user",
        database: "test",
        password: "password",
        rejectUnauthorized: true,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Access denied for user 'user'@'%' to database 'test'",
      })
    })

    it("test invalid password", async () => {
      const integration = new mysql.integration({
        host,
        port,
        user: "root",
        database: "test",
        password: "wrong",
        rejectUnauthorized: true,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error:
          "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
      })
    })
  })
})

@ -1,54 +0,0 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"

jest.unmock("pg")

describe("datasource validators", () => {
  describe("postgres", () => {
    let host: string
    let port: number

    beforeAll(async () => {
      const container = await new GenericContainer("postgres:16.1-bullseye")
        .withExposedPorts(5432)
        .withEnv("POSTGRES_PASSWORD", "password")
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(5432)
    })

    it("test valid connection string", async () => {
      const integration = new postgres.integration({
        host,
        port,
        database: "postgres",
        user: "postgres",
        password: "password",
        schema: "public",
        ssl: false,
        rejectUnauthorized: false,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid connection string", async () => {
      const integration = new postgres.integration({
        host,
        port,
        database: "postgres",
        user: "wrong",
        password: "password",
        schema: "public",
        ssl: false,
        rejectUnauthorized: false,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: 'password authentication failed for user "wrong"',
      })
    })
  })
})

@ -1,72 +0,0 @@
import redis from "../../../../packages/server/src/integrations/redis"
import { GenericContainer } from "testcontainers"
import { generator } from "../../shared"

describe("datasource validators", () => {
  describe("redis", () => {
    describe("unsecured", () => {
      let host: string
      let port: number

      beforeAll(async () => {
        const container = await new GenericContainer("redis")
          .withExposedPorts(6379)
          .start()

        host = container.getContainerIpAddress()
        port = container.getMappedPort(6379)
      })

      it("test valid connection", async () => {
        const integration = new redis.integration({
          host,
          port,
          username: "",
        })
        const result = await integration.testConnection()
        expect(result).toEqual({ connected: true })
      })

      it("test invalid connection even with wrong user/password", async () => {
        const integration = new redis.integration({
          host,
          port,
          username: generator.name(),
          password: generator.hash(),
        })
        const result = await integration.testConnection()
        expect(result).toEqual({
          connected: false,
          error:
            "WRONGPASS invalid username-password pair or user is disabled.",
        })
      })
    })

    describe("secured", () => {
      let host: string
      let port: number

      beforeAll(async () => {
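        // Start Redis with auth enabled: --requirepass makes clients
        // authenticate before they can run commands.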
        const container = await new GenericContainer("redis")
          .withExposedPorts(6379)
          .withCmd(["redis-server", "--requirepass", "P@ssW0rd!"])
          .start()

        host = container.getContainerIpAddress()
        port = container.getMappedPort(6379)
      })

      it("test valid connection", async () => {
        const integration = new redis.integration({
          host,
          port,
          username: "",
          password: "P@ssW0rd!",
        })
        const result = await integration.testConnection()
        expect(result).toEqual({ connected: true })
      })
    })
  })
})

@ -1,52 +0,0 @@
import s3 from "../../../../packages/server/src/integrations/s3"
import { GenericContainer } from "testcontainers"

jest.unmock("aws-sdk")

describe("datasource validators", () => {
  describe("s3", () => {
    let host: string
    let port: number

    beforeAll(async () => {
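      // LocalStack emulates S3 locally; SERVICES restricts it to just the S3 API.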
      const container = await new GenericContainer("localstack/localstack")
        .withExposedPorts(4566)
        .withEnv("SERVICES", "s3")
        .withEnv("DEFAULT_REGION", "eu-west-1")
        .withEnv("AWS_ACCESS_KEY_ID", "testkey")
        .withEnv("AWS_SECRET_ACCESS_KEY", "testsecret")
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(4566)
    })

    it("test valid connection", async () => {
      const integration = new s3.integration({
        region: "eu-west-1",
        accessKeyId: "testkey",
        secretAccessKey: "testsecret",
        s3ForcePathStyle: false,
        endpoint: `http://${host}:${port}`,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong endpoint", async () => {
      const integration = new s3.integration({
        region: "eu-west-2",
        accessKeyId: "testkey",
        secretAccessKey: "testsecret",
        s3ForcePathStyle: false,
        endpoint: `http://wrong:123`,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error:
          "Inaccessible host: `wrong' at port `undefined'. This service may not be available in the `eu-west-2' region.",
      })
    })
  })
})
|
|
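The localstack bootstrap uses the same pre-v9 testcontainers-node API; on newer releases the chained withEnv calls are replaced by a single withEnvironment map (again an assumption to check against the installed version). A sketch of the equivalent beforeAll:

beforeAll(async () => {
  // Sketch only: the same localstack container on a newer testcontainers-node.
  const container = await new GenericContainer("localstack/localstack")
    .withExposedPorts(4566)
    .withEnvironment({
      SERVICES: "s3",
      DEFAULT_REGION: "eu-west-1",
      AWS_ACCESS_KEY_ID: "testkey",
      AWS_SECRET_ACCESS_KEY: "testsecret",
    })
    .start()
  host = container.getHost() // formerly getContainerIpAddress()
  port = container.getMappedPort(4566)
})
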
@@ -1,54 +0,0 @@
import AppAPI from "./apis/AppAPI"
import AuthAPI from "./apis/AuthAPI"
import EnvironmentAPI from "./apis/EnvironmentAPI"
import RoleAPI from "./apis/RoleAPI"
import RowAPI from "./apis/RowAPI"
import ScreenAPI from "./apis/ScreenAPI"
import SelfAPI from "./apis/SelfAPI"
import TableAPI from "./apis/TableAPI"
import UserAPI from "./apis/UserAPI"
import DatasourcesAPI from "./apis/DatasourcesAPI"
import IntegrationsAPI from "./apis/IntegrationsAPI"
import QueriesAPI from "./apis/QueriesAPI"
import PermissionsAPI from "./apis/PermissionsAPI"
import LicenseAPI from "./apis/LicenseAPI"
import BudibaseInternalAPIClient from "./BudibaseInternalAPIClient"
import { State } from "../../types"

export default class BudibaseInternalAPI {
  client: BudibaseInternalAPIClient

  apps: AppAPI
  auth: AuthAPI
  environment: EnvironmentAPI
  roles: RoleAPI
  rows: RowAPI
  screens: ScreenAPI
  self: SelfAPI
  tables: TableAPI
  users: UserAPI
  datasources: DatasourcesAPI
  integrations: IntegrationsAPI
  queries: QueriesAPI
  permissions: PermissionsAPI
  license: LicenseAPI

  constructor(state: State) {
    this.client = new BudibaseInternalAPIClient(state)

    this.apps = new AppAPI(this.client)
    this.auth = new AuthAPI(this.client, state)
    this.environment = new EnvironmentAPI(this.client)
    this.roles = new RoleAPI(this.client)
    this.rows = new RowAPI(this.client)
    this.screens = new ScreenAPI(this.client)
    this.self = new SelfAPI(this.client)
    this.tables = new TableAPI(this.client)
    this.users = new UserAPI(this.client)
    this.datasources = new DatasourcesAPI(this.client)
    this.integrations = new IntegrationsAPI(this.client)
    this.queries = new QueriesAPI(this.client)
    this.permissions = new PermissionsAPI(this.client)
    this.license = new LicenseAPI(this.client)
  }
}

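This class is a plain facade: one client instance, and therefore one cookie-carrying State, is shared by every sub-API. A hypothetical bootstrap, assuming an initially empty State and an illustrative import path:

import BudibaseInternalAPI from "./BudibaseInternalAPI"
import { State } from "../../types"

// Real suites fill in appId/cookie on this object after logging in.
const state = {} as State
const internalApi = new BudibaseInternalAPI(state)
// e.g. await internalApi.auth.login(...), then internalApi.apps.create(...)
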
@@ -1,80 +0,0 @@
import env from "../../environment"
import fetch, { HeadersInit } from "node-fetch"
import { State } from "../../types"

type APIMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE"

interface ApiOptions {
  method?: APIMethod
  body?: object
  headers?: HeadersInit | undefined
}

class BudibaseInternalAPIClient {
  host: string
  state: State

  constructor(state: State) {
    if (!env.BUDIBASE_URL) {
      throw new Error("Must set BUDIBASE_URL env var")
    }
    this.host = `${env.BUDIBASE_URL}/api`
    this.state = state
  }

  apiCall =
    (method: APIMethod) =>
    async (url = "", options: ApiOptions = {}) => {
      const requestOptions = {
        method,
        body: JSON.stringify(options.body),
        headers: {
          "x-budibase-app-id": this.state.appId,
          "Content-Type": "application/json",
          Accept: "application/json",
          cookie: this.state.cookie,
          ...options.headers,
        },
        // node-fetch options, not headers: follow up to 20 redirects
        redirect: "follow",
        follow: 20,
        credentials: "include",
      }

      // prettier-ignore
      // @ts-ignore
      const response = await fetch(`${this.host}${url}`, requestOptions)

      let body: any
      const contentType = response.headers.get("content-type")
      if (contentType && contentType.includes("application/json")) {
        body = await response.json()
      } else {
        body = await response.text()
      }

      const data = {
        request: requestOptions.body,
        response: body,
      }
      const message = `${method} ${url} - ${response.status}`

      const isDebug = process.env.LOG_LEVEL === "debug"
      if (response.status > 499) {
        console.error(message, data)
      } else if (response.status >= 400) {
        console.warn(message, data)
      } else if (isDebug) {
        console.debug(message, data)
      }

      return [response, body]
    }

  post = this.apiCall("POST")
  get = this.apiCall("GET")
  patch = this.apiCall("PATCH")
  del = this.apiCall("DELETE")
  put = this.apiCall("PUT")
}

export default BudibaseInternalAPIClient

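Because apiCall is curried, each verb property is just apiCall pre-bound to an HTTP method, and a call site supplies only the path and options. A short usage sketch (the paths are illustrative, not an exhaustive API):

import BudibaseInternalAPIClient from "./BudibaseInternalAPIClient"
import { State } from "../../types"

// Sketch: each verb helper resolves to a [Response, parsedBody] pair.
async function example(state: State) {
  const client = new BudibaseInternalAPIClient(state)
  const [, apps] = await client.get("/applications?status=all")
  const [, created] = await client.post("/applications", {
    body: { name: "My app" },
  })
  return { apps, created }
}
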
@@ -1,152 +0,0 @@
import { App, CreateAppRequest } from "@budibase/types"
import { Response } from "node-fetch"
import {
  RouteConfig,
  AppPackageResponse,
  DeployConfig,
  MessageResponse,
} from "../../../types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

interface RenameAppBody {
  name: string
}

export default class AppAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  // TODO: the fetch helpers should accept an optional expected app count and
  // assert against it; each possible scenario should have its own method.
  async fetchEmptyAppList(): Promise<[Response, App[]]> {
    const [response, json] = await this.get(`/applications?status=all`)
    expect(json.length).toBeGreaterThanOrEqual(0)
    return [response, json]
  }

  async fetchAllApplications(): Promise<[Response, App[]]> {
    const [response, json] = await this.get(`/applications?status=all`)
    expect(json.length).toBeGreaterThanOrEqual(1)
    return [response, json]
  }

  async canRender(): Promise<[Response, boolean]> {
    const [response, json] = await this.get("/routing/client")
    const publishedAppRenders = Object.keys(json.routes).length > 0
    expect(publishedAppRenders).toBe(true)
    return [response, publishedAppRenders]
  }

  async getAppPackage(appId: string): Promise<[Response, AppPackageResponse]> {
    const [response, json] = await this.get(`/applications/${appId}/appPackage`)
    expect(json.application.appId).toEqual(appId)
    return [response, json]
  }

  async publish(appId: string | undefined): Promise<[Response, DeployConfig]> {
    const [response, json] = await this.post(`/applications/${appId}/publish`)
    return [response, json]
  }

  async create(body: CreateAppRequest): Promise<App> {
    const [response, json] = await this.post(`/applications`, body)
    expect(json._id).toBeDefined()
    return json
  }

  async read(id: string): Promise<[Response, App]> {
    const [response, json] = await this.get(`/applications/${id}`)
    return [response, json.data]
  }

  async sync(appId: string): Promise<[Response, MessageResponse]> {
    const [response, json] = await this.post(`/applications/${appId}/sync`)
    return [response, json]
  }

  // TODO
  async updateClient(appId: string, body: any): Promise<[Response, App]> {
    const [response, json] = await this.put(
      `/applications/${appId}/client/update`,
      { body }
    )
    return [response, json]
  }

  async revertPublished(appId: string): Promise<[Response, MessageResponse]> {
    const [response, json] = await this.post(`/dev/${appId}/revert`)
    expect(json).toEqual({
      message: "Reverted changes successfully.",
    })
    return [response, json]
  }

  async revertUnpublished(appId: string): Promise<[Response, MessageResponse]> {
    const [response, json] = await this.post(
      `/dev/${appId}/revert`,
      undefined,
      400
    )
    expect(json).toEqual({
      message: "App has not yet been deployed",
      status: 400,
    })
    return [response, json]
  }

  async delete(appId: string): Promise<Response> {
    const [response, _] = await this.del(`/applications/${appId}`)
    return response
  }

  async rename(
    appId: string,
    oldName: string,
    body: RenameAppBody
  ): Promise<[Response, App]> {
    const [response, json] = await this.put(`/applications/${appId}`, body)
    expect(json.name).not.toEqual(oldName)
    return [response, json]
  }

  async getRoutes(screenExists?: boolean): Promise<[Response, RouteConfig]> {
    const [response, json] = await this.get(`/routing`)
    if (screenExists) {
      expect(json.routes["/test"]).toBeTruthy()
    } else {
      expect(json.routes["/test"]).toBeUndefined()
    }

    return [response, json]
  }

  async unpublish(appId: string): Promise<[Response]> {
    const [response, json] = await this.post(
      `/applications/${appId}/unpublish`,
      undefined,
      204
    )
    return [response]
  }

  async unlock(appId: string): Promise<[Response, MessageResponse]> {
    const [response, json] = await this.del(`/dev/${appId}/lock`)
    expect(json.message).toEqual("Lock released successfully.")
    return [response, json]
  }

  async updateIcon(appId: string): Promise<[Response, App]> {
    const body = {
      icon: {
        name: "ConversionFunnel",
        color: "var(--spectrum-global-color-red-400)",
      },
    }
    const [response, json] = await this.put(`/applications/${appId}`, body)
    expect(json.icon.name).toEqual(body.icon.name)
    expect(json.icon.color).toEqual(body.icon.color)
    return [response, json]
  }
}

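Taken together these helpers cover a full create → publish → teardown cycle. A sketch of that flow, treating the CreateAppRequest payload and the returned appId field as minimal assumptions:

import { CreateAppRequest } from "@budibase/types"
import AppAPI from "./AppAPI"

// Sketch: a full create → publish → teardown pass using the helpers above.
async function appLifecycle(apps: AppAPI) {
  const app = await apps.create({ name: "smoke-test-app" } as CreateAppRequest)
  await apps.publish(app.appId)
  await apps.unpublish(app.appId)
  await apps.delete(app.appId)
}
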
@@ -1,39 +0,0 @@
import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import { APIRequestOpts, State } from "../../../types"

export default class AuthAPI {
  state: State
  client: BudibaseInternalAPIClient

  constructor(client: BudibaseInternalAPIClient, state: State) {
    this.client = client
    this.state = state
  }

  async login(
    tenantId: string,
    email: string,
    password: string,
    opts: APIRequestOpts = { doExpect: true }
  ): Promise<[Response, string]> {
    const [response, json] = await this.client.post(
      `/global/auth/${tenantId}/login`,
      {
        body: {
          username: email,
          password: password,
        },
      }
    )
    if (opts.doExpect) {
      expect(response).toHaveStatusCode(200)
    }
    const cookie = response.headers.get("set-cookie")
    return [response, cookie!]
  }

  async logout(): Promise<any> {
    return this.client.post(`/global/auth/logout`)
  }
}

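The login helper returns the raw set-cookie header so callers can stash it on the shared State, which BudibaseInternalAPIClient then replays on every request. A typical flow (credentials and tenant id are placeholders):

import AuthAPI from "./AuthAPI"
import { State } from "../../../types"

// Sketch: log in once, persist the session cookie on the shared state.
async function authenticate(auth: AuthAPI, state: State) {
  const [, cookie] = await auth.login(
    "default", // tenant id (placeholder)
    "admin@example.com", // placeholder credentials
    "admin-password"
  )
  state.cookie = cookie // picked up by the client's header builder
}
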
@@ -1,56 +0,0 @@
import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"

export default class BaseAPI {
  client: BudibaseInternalAPIClient

  constructor(client: BudibaseInternalAPIClient) {
    this.client = client
  }

  async get(url: string, status?: number): Promise<[Response, any]> {
    const [response, json] = await this.client.get(url)
    expect(response).toHaveStatusCode(status ? status : 200)
    return [response, json]
  }

  async post(
    url: string,
    body?: any,
    statusCode?: number
  ): Promise<[Response, any]> {
    const [response, json] = await this.client.post(url, { body })
    expect(response).toHaveStatusCode(statusCode ? statusCode : 200)
    return [response, json]
  }

  async put(
    url: string,
    body?: any,
    statusCode?: number
  ): Promise<[Response, any]> {
    const [response, json] = await this.client.put(url, { body })
    expect(response).toHaveStatusCode(statusCode ? statusCode : 200)
    return [response, json]
  }

  async patch(
    url: string,
    body?: any,
    statusCode?: number
  ): Promise<[Response, any]> {
    const [response, json] = await this.client.patch(url, { body })
    expect(response).toHaveStatusCode(statusCode ? statusCode : 200)
    return [response, json]
  }

  async del(
    url: string,
    statusCode?: number,
    body?: any
  ): Promise<[Response, any]> {
    const [response, json] = await this.client.del(url, { body })
    expect(response).toHaveStatusCode(statusCode ? statusCode : 200)
    return [response, json]
  }
}

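Every concrete API in this package follows the same recipe: subclass BaseAPI, delegate to a verb helper, and rely on its built-in status assertion. A hypothetical WebhookAPI — the /webhooks route is invented purely to show the shape:

import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class WebhookAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  // Hypothetical route: GET /webhooks, asserted to return 200 by BaseAPI.get.
  async getAll(): Promise<[Response, any]> {
    const [response, json] = await this.get(`/webhooks`)
    return [response, json]
  }
}
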
@@ -1,62 +0,0 @@
import { Response } from "node-fetch"
import {
  Datasource,
  CreateDatasourceResponse,
  UpdateDatasourceResponse,
} from "@budibase/types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"
import { DatasourceRequest } from "../../../types"

export default class DatasourcesAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async getIntegrations(): Promise<[Response, any]> {
    const [response, json] = await this.get(`/integrations`)
    const integrationsCount = Object.keys(json).length
    expect(integrationsCount).toBe(16)
    return [response, json]
  }

  async getAll(): Promise<[Response, Datasource[]]> {
    const [response, json] = await this.get(`/datasources`)
    expect(json.length).toBeGreaterThan(0)
    return [response, json]
  }

  async getTable(dataSourceId: string): Promise<[Response, Datasource]> {
    const [response, json] = await this.get(`/datasources/${dataSourceId}`)
    expect(json._id).toEqual(dataSourceId)
    return [response, json]
  }

  async add(
    body: DatasourceRequest
  ): Promise<[Response, CreateDatasourceResponse]> {
    const [response, json] = await this.post(`/datasources`, body)
    expect(json.datasource._id).toBeDefined()
    expect(json.datasource._rev).toBeDefined()

    return [response, json]
  }

  async update(
    body: Datasource
  ): Promise<[Response, UpdateDatasourceResponse]> {
    const [response, json] = await this.put(`/datasources/${body._id}`, body)
    expect(json.datasource._id).toBeDefined()
    expect(json.datasource._rev).toBeDefined()

    return [response, json]
  }

  async delete(dataSourceId: string, revId: string): Promise<Response> {
    const [response, json] = await this.del(
      `/datasources/${dataSourceId}/${revId}`
    )

    return response
  }
}

@@ -1,21 +0,0 @@
import { GetEnvironmentResponse } from "@budibase/types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import { APIRequestOpts } from "../../../types"

export default class EnvironmentAPI {
  client: BudibaseInternalAPIClient

  constructor(client: BudibaseInternalAPIClient) {
    this.client = client
  }

  async getEnvironment(
    opts: APIRequestOpts = { doExpect: true }
  ): Promise<GetEnvironmentResponse> {
    const [response, json] = await this.client.get(`/system/environment`)
    if (opts.doExpect) {
      expect(response.status).toBe(200)
    }
    return json
  }
}

@@ -1,16 +0,0 @@
import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class IntegrationsAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async getAll(): Promise<[Response, any]> {
    const [response, json] = await this.get(`/integrations`)
    const integrationsCount = Object.keys(json).length
    expect(integrationsCount).toBeGreaterThan(0)
    return [response, json]
  }
}

@@ -1,63 +0,0 @@
import { Response } from "node-fetch"
import {
  ActivateLicenseKeyRequest,
  ActivateOfflineLicenseTokenRequest,
  GetLicenseKeyResponse,
  GetOfflineIdentifierResponse,
  GetOfflineLicenseTokenResponse,
} from "@budibase/types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"

export default class LicenseAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async getOfflineLicenseToken(
    opts: { status?: number } = {}
  ): Promise<[Response, GetOfflineLicenseTokenResponse]> {
    const [response, body] = await this.get(
      `/global/license/offline`,
      opts.status
    )
    return [response, body]
  }

  async deleteOfflineLicenseToken(): Promise<[Response]> {
    const [response] = await this.del(`/global/license/offline`, 204)
    return [response]
  }

  async activateOfflineLicenseToken(
    body: ActivateOfflineLicenseTokenRequest
  ): Promise<[Response]> {
    const [response] = await this.post(`/global/license/offline`, body)
    return [response]
  }

  async getOfflineIdentifier(): Promise<
    [Response, GetOfflineIdentifierResponse]
  > {
    const [response, body] = await this.get(
      `/global/license/offline/identifier`
    )
    return [response, body]
  }

  async getLicenseKey(
    opts: { status?: number } = {}
  ): Promise<[Response, GetLicenseKeyResponse]> {
    const [response, body] = await this.get(`/global/license/key`, opts.status)
    return [response, body]
  }

  async activateLicenseKey(
    body: ActivateLicenseKeyRequest
  ): Promise<[Response]> {
    const [response] = await this.post(`/global/license/key`, body)
    return [response]
  }

  async deleteLicenseKey(): Promise<[Response]> {
    const [response] = await this.del(`/global/license/key`, 204)
    return [response]
  }
}

@@ -1,14 +0,0 @@
import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class PermissionsAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async getAll(id: string): Promise<[Response, any]> {
    const [response, json] = await this.get(`/permissions/${id}`)
    return [response, json]
  }
}

@@ -1,25 +0,0 @@
import { Response } from "node-fetch"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import { PreviewQueryRequest, Query } from "@budibase/types"
import BaseAPI from "./BaseAPI"

export default class QueriesAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async preview(body: PreviewQueryRequest): Promise<[Response, any]> {
    const [response, json] = await this.post(`/queries/preview`, body)
    return [response, json]
  }

  async save(body: Query): Promise<[Response, any]> {
    const [response, json] = await this.post(`/queries`, body)
    return [response, json]
  }

  async getQuery(queryId: string): Promise<[Response, any]> {
    const [response, json] = await this.get(`/queries/${queryId}`)
    return [response, json]
  }
}

@@ -1,20 +0,0 @@
import { Response } from "node-fetch"
import { Role, UserRoles } from "@budibase/types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class RoleAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async getRoles(): Promise<[Response, Role[]]> {
    const [response, json] = await this.get(`/roles`)
    return [response, json]
  }

  async createRole(body: Partial<UserRoles>): Promise<[Response, UserRoles]> {
    const [response, json] = await this.post(`/roles`, body)
    return [response, json]
  }
}

@@ -1,57 +0,0 @@
import { Response } from "node-fetch"
import { Row } from "@budibase/types"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class RowAPI extends BaseAPI {
  rowAdded: boolean

  constructor(client: BudibaseInternalAPIClient) {
    super(client)
    this.rowAdded = false
  }

  async getAll(tableId: string): Promise<[Response, Row[]]> {
    const [response, json] = await this.get(`/${tableId}/rows`)
    if (this.rowAdded) {
      expect(json.length).toBeGreaterThanOrEqual(1)
    }
    return [response, json]
  }

  async add(tableId: string, body: Row): Promise<[Response, Row]> {
    const [response, json] = await this.post(`/${tableId}/rows`, body)
    expect(json._id).toBeDefined()
    expect(json._rev).toBeDefined()
    expect(json.tableId).toEqual(tableId)
    this.rowAdded = true
    return [response, json]
  }

  async delete(tableId: string, body: Row): Promise<[Response, Row[]]> {
    const [response, json] = await this.del(
      `/${tableId}/rows/`,
      undefined,
      body
    )
    return [response, json]
  }

  async searchNoPagination(
    tableId: string,
    body: string
  ): Promise<[Response, Row[]]> {
    const [response, json] = await this.post(`/${tableId}/search`, body)
    expect(json.hasNextPage).toEqual(false)
    return [response, json.rows]
  }

  async searchWithPagination(
    tableId: string,
    body: string
  ): Promise<[Response, Row[]]> {
    const [response, json] = await this.post(`/${tableId}/search`, body)
    expect(json.hasNextPage).toEqual(true)
    expect(json.rows.length).toEqual(10)
    return [response, json.rows]
  }
}

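searchWithPagination asserts a page size of exactly 10, so the table must be seeded past one page before calling it. A small seeding sketch (the row shape beyond tableId is an assumption):

import { Row } from "@budibase/types"
import RowAPI from "./RowAPI"

// Sketch: seed more than one page of rows so pagination assertions can pass.
async function seedRows(rows: RowAPI, tableId: string, count = 15) {
  for (let i = 0; i < count; i++) {
    await rows.add(tableId, { tableId, name: `row ${i}` } as Row)
  }
}
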
@@ -1,23 +0,0 @@
import { Response } from "node-fetch"
import { Screen } from "@budibase/types"
import { ScreenRequest } from "../../../types/screens"
import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
import BaseAPI from "./BaseAPI"

export default class ScreenAPI extends BaseAPI {
  constructor(client: BudibaseInternalAPIClient) {
    super(client)
  }

  async create(body: ScreenRequest): Promise<[Response, Screen]> {
    const [response, json] = await this.post(`/screens`, body)
    expect(json._id).toBeDefined()
    expect(json.routing.roleId).toBe(body.routing.roleId)
    return [response, json]
  }

  async delete(screenId: string, rev: string): Promise<[Response, Screen]> {
    const [response, json] = await this.del(`/screens/${screenId}/${rev}`)
    return [response, json]
  }
}