Merge remote-tracking branch 'origin/master' into feature/signature-field-and-component
commit 203d6484ea
@@ -34,7 +34,6 @@
   },
   {
     "files": ["**/*.ts"],
-    "excludedFiles": ["qa-core/**"],
    "parser": "@typescript-eslint/parser",
    "plugins": ["@typescript-eslint"],
    "extends": ["eslint:recommended"],
@@ -49,7 +48,6 @@
   },
   {
     "files": ["**/*.spec.ts"],
-    "excludedFiles": ["qa-core/**"],
    "parser": "@typescript-eslint/parser",
    "plugins": ["jest", "@typescript-eslint"],
    "extends": ["eslint:recommended", "plugin:jest/recommended"],
@@ -91,6 +91,9 @@ jobs:

   test-libraries:
     runs-on: ubuntu-latest
+    env:
+      DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -104,6 +107,14 @@ jobs:
         with:
           node-version: 20.x
           cache: yarn
+      - name: Pull testcontainers images
+        run: |
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb &
+          docker pull redis &
+
+          wait $(jobs -p)
+
       - run: yarn --frozen-lockfile
       - name: Test
         run: |
@@ -138,9 +149,10 @@ jobs:
         fi

   test-server:
-    runs-on: ubuntu-latest
+    runs-on: budi-tubby-tornado-quad-core-150gb
     env:
       DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
+      REUSE_CONTAINERS: true
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -157,13 +169,16 @@ jobs:

       - name: Pull testcontainers images
         run: |
-          docker pull mcr.microsoft.com/mssql/server:2022-latest
-          docker pull mysql:8.3
-          docker pull postgres:16.1-bullseye
-          docker pull mongo:7.0-jammy
-          docker pull mariadb:lts
-          docker pull testcontainers/ryuk:0.5.1
-          docker pull budibase/couchdb
+          docker pull mcr.microsoft.com/mssql/server:2022-latest &
+          docker pull mysql:8.3 &
+          docker pull postgres:16.1-bullseye &
+          docker pull mongo:7.0-jammy &
+          docker pull mariadb:lts &
+          docker pull testcontainers/ryuk:0.5.1 &
+          docker pull budibase/couchdb &
+          docker pull redis &
+
+          wait $(jobs -p)
+
       - run: yarn --frozen-lockfile
@@ -175,35 +190,6 @@ jobs:
         yarn test --scope=@budibase/server
       fi

-  integration-test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repo
-        uses: actions/checkout@v4
-        with:
-          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-
-      - name: Use Node.js 20.x
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20.x
-          cache: yarn
-      - run: yarn --frozen-lockfile
-      - name: Build packages
-        run: yarn build --scope @budibase/server --scope @budibase/worker
-      - name: Build backend-core for OSS contributor (required for pro)
-        if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
-        run: yarn build --scope @budibase/backend-core
-      - name: Run tests
-        run: |
-          cd qa-core
-          yarn setup
-          yarn serve:test:self:ci
-        env:
-          BB_ADMIN_USER_EMAIL: admin
-          BB_ADMIN_USER_PASSWORD: admin
-
   check-pro-submodule:
     runs-on: ubuntu-latest
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@@ -69,7 +69,6 @@ typings/

 # dotenv environment variables file
 .env
-!qa-core/.env
 !hosting/.env

 # parcel-bundler cache (https://parceljs.org/)
@@ -1,25 +1,47 @@
 import { GenericContainer, Wait } from "testcontainers"
+import path from "path"
+import lockfile from "proper-lockfile"

 export default async function setup() {
-  await new GenericContainer("budibase/couchdb")
-    .withExposedPorts(5984)
-    .withEnvironment({
-      COUCHDB_PASSWORD: "budibase",
-      COUCHDB_USER: "budibase",
-    })
-    .withCopyContentToContainer([
-      {
-        content: `
+  const lockPath = path.resolve(__dirname, "globalSetup.ts")
+  if (process.env.REUSE_CONTAINERS) {
+    // If you run multiple tests at the same time, it's possible for the CouchDB
+    // shared container to get started multiple times despite having an
+    // identical reuse hash. To avoid that, we do a filesystem-based lock so
+    // that only one globalSetup.ts is running at a time.
+    lockfile.lockSync(lockPath)
+  }
+
+  try {
+    let couchdb = new GenericContainer("budibase/couchdb")
+      .withExposedPorts(5984)
+      .withEnvironment({
+        COUCHDB_PASSWORD: "budibase",
+        COUCHDB_USER: "budibase",
+      })
+      .withCopyContentToContainer([
+        {
+          content: `
 [log]
 level = warn
 `,
-        target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
-      },
-    ])
-    .withWaitStrategy(
-      Wait.forSuccessfulCommand(
-        "curl http://budibase:budibase@localhost:5984/_up"
-      ).withStartupTimeout(20000)
-    )
-    .start()
+          target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
+        },
+      ])
+      .withWaitStrategy(
+        Wait.forSuccessfulCommand(
+          "curl http://budibase:budibase@localhost:5984/_up"
+        ).withStartupTimeout(20000)
+      )
+
+    if (process.env.REUSE_CONTAINERS) {
+      couchdb = couchdb.withReuse()
+    }
+
+    await couchdb.start()
+  } finally {
+    if (process.env.REUSE_CONTAINERS) {
+      lockfile.unlockSync(lockPath)
+    }
+  }
 }
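For reference, the filesystem lock above comes from the `proper-lockfile` package this commit adds. A minimal sketch of the same serialization pattern, lifted out of the Testcontainers context (the `startOnce` helper and its lock path are illustrative, not part of the commit):

```ts
import lockfile from "proper-lockfile"

// Illustrative helper: run `fn` in at most one process at a time, keyed on an
// existing file. proper-lockfile creates `<lockPath>.lock` on disk, so the
// lock is visible across concurrent Jest worker processes.
async function startOnce(lockPath: string, fn: () => Promise<void>) {
  // lockSync returns a release function; it throws ELOCKED if another
  // process currently holds the lock on the same path.
  const release = lockfile.lockSync(lockPath)
  try {
    await fn()
  } finally {
    release()
  }
}
```

In globalSetup.ts above the lock is only taken when REUSE_CONTAINERS is set, since that is the only mode in which two workers could race to start the same reused container.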

@@ -1,5 +1,5 @@
 {
-  "version": "2.22.14",
+  "version": "2.22.16",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
package.json
@@ -7,6 +7,7 @@
     "@babel/preset-env": "^7.22.5",
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
     "@types/node": "20.10.0",
+    "@types/proper-lockfile": "^4.1.4",
     "@typescript-eslint/parser": "6.9.0",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.8.0",
@@ -23,6 +24,7 @@
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
+    "proper-lockfile": "^4.1.2",
     "svelte": "^4.2.10",
     "svelte-eslint-parser": "^0.33.1",
     "typescript": "5.2.2",
@@ -58,11 +60,11 @@
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
-    "lint:eslint": "eslint packages qa-core --max-warnings=0",
-    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:eslint": "eslint packages --max-warnings=0",
+    "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint": "yarn run lint:eslint && yarn run lint:prettier",
-    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages qa-core",
-    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
+    "lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
+    "lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
     "lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
     "build:specs": "lerna run --stream specs",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",

@@ -1 +1 @@
-Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
+Subproject commit 532c4db35cecd346b5c24f0b89ab7b397a122a36

@@ -1,6 +1,7 @@
 import { DatabaseImpl } from "../../../src/db"
 import { execSync } from "child_process"

+const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

 interface ContainerInfo {
   Command: string
   CreatedAt: string
@@ -19,7 +20,10 @@ interface ContainerInfo {
 }

 function getTestcontainers(): ContainerInfo[] {
-  return execSync("docker ps --format json")
+  // We use --format json to make sure the output is nice and machine-readable,
+  // and we use --no-trunc so that the command returns full container IDs so we
+  // can filter on them correctly.
+  return execSync("docker ps --format json --no-trunc")
     .toString()
     .split("\n")
     .filter(x => x.length > 0)
@@ -27,32 +31,55 @@ function getTestcontainers(): ContainerInfo[] {
     .filter(x => x.Labels.includes("org.testcontainers=true"))
 }

-function getContainerByImage(image: string) {
-  return getTestcontainers().find(x => x.Image.startsWith(image))
+export function getContainerByImage(image: string) {
+  const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
+  if (containers.length > 1) {
+    let errorMessage = `Multiple containers found starting with image: "${image}"\n\n`
+    for (const container of containers) {
+      errorMessage += JSON.stringify(container, null, 2)
+    }
+    throw new Error(errorMessage)
+  }
+  return containers[0]
 }

-function getExposedPort(container: ContainerInfo, port: number) {
-  const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`))
-  if (!match) {
-    return undefined
+export function getContainerById(id: string) {
+  return getTestcontainers().find(x => x.ID === id)
+}
+
+export interface Port {
+  host: number
+  container: number
+}
+
+export function getExposedV4Ports(container: ContainerInfo): Port[] {
+  let ports: Port[] = []
+  for (const match of container.Ports.matchAll(IPV4_PORT_REGEX)) {
+    ports.push({ host: parseInt(match[1]), container: parseInt(match[2]) })
   }
-  return parseInt(match[1])
+  return ports
+}
+
+export function getExposedV4Port(container: ContainerInfo, port: number) {
+  return getExposedV4Ports(container).find(x => x.container === port)?.host
 }

 export function setupEnv(...envs: any[]) {
+  // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
+  // should be relatively safe to look for it by its image name.
   const couch = getContainerByImage("budibase/couchdb")
   if (!couch) {
     throw new Error("CouchDB container not found")
   }

-  const couchPort = getExposedPort(couch, 5984)
+  const couchPort = getExposedV4Port(couch, 5984)
   if (!couchPort) {
     throw new Error("CouchDB port not found")
   }

   const configs = [
     { key: "COUCH_DB_PORT", value: `${couchPort}` },
-    { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` },
+    { key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
   ]

   for (const config of configs.filter(x => !!x.value)) {
@@ -60,7 +87,4 @@ export function setupEnv(...envs: any[]) {
     env._set(config.key, config.value)
   }
 }
-
-// @ts-expect-error
-DatabaseImpl.nano = undefined
-}
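For context, `docker ps` renders published ports as a single comma-separated `Ports` string, which is what `IPV4_PORT_REGEX` picks apart. A small illustration of how `getExposedV4Ports` parses such a string (the sample value is made up):

```ts
const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

// A made-up Ports value in the shape docker emits:
const ports =
  "0.0.0.0:32771->4369/tcp, 0.0.0.0:32770->5984/tcp, :::32770->5984/tcp"

const parsed = [...ports.matchAll(IPV4_PORT_REGEX)].map(m => ({
  host: parseInt(m[1]), // port published on the host
  container: parseInt(m[2]), // port inside the container
}))
// parsed: [{ host: 32771, container: 4369 }, { host: 32770, container: 5984 }]
// The IPv6 duplicate (":::32770->5984/tcp") is intentionally not matched,
// which is why the functions are named getExposedV4Port(s).
```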

@@ -72,7 +72,7 @@
     "fast-json-patch": "^3.1.1",
     "json-format-highlight": "^1.0.4",
     "lodash": "4.17.21",
-    "posthog-js": "^1.36.0",
+    "posthog-js": "^1.116.6",
     "remixicon": "2.5.0",
     "sanitize-html": "^2.7.0",
     "shortid": "2.2.15",

@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
 import { FIELDS } from "constants/backend"
 import { tables } from "stores/builder"
 import { get as svelteGet } from "svelte/store"
@@ -5,15 +6,13 @@ import { get as svelteGet } from "svelte/store"
 // currently supported level of relationship depth (server side)
 const MAX_DEPTH = 1

-//https://github.com/Budibase/budibase/issues/3030
-const internalType = "internal"
-
 const TYPES_TO_SKIP = [
-  FIELDS.FORMULA.type,
-  FIELDS.LONGFORM.type,
-  FIELDS.ATTACHMENT.type,
-  FIELDS.SIGNATURE.type,
-  internalType,
+  FieldType.FORMULA,
+  FieldType.LONGFORM,
+  FieldType.ATTACHMENT,
+  FieldType.SIGNATURE,
+  //https://github.com/Budibase/budibase/issues/3030
+  FieldType.INTERNAL,
 ]

 export function getBindings({
@@ -27,7 +26,7 @@ export function getBindings({
     return bindings
   }
   for (let [column, schema] of Object.entries(table.schema)) {
-    const isRelationship = schema.type === FIELDS.LINK.type
+    const isRelationship = schema.type === FieldType.LINK
     // skip relationships after a certain depth and types which
     // can't bind to
     if (

@@ -1,4 +1,4 @@
-import { RelationshipType } from "constants/backend"
+import { RelationshipType } from "@budibase/types"

 const typeMismatch = "Column type of the foreign key must match the primary key"
 const columnBeingUsed = "Column name cannot be an existing column"

@@ -49,7 +49,10 @@
       label: "Long Form Text",
       value: FIELDS.LONGFORM.type,
     },

     {
+      label: "Attachment",
+      value: FIELDS.ATTACHMENT.type,
+    },
+    {
       label: "User",
       value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,

@@ -12,7 +12,7 @@ const getDefaultSchema = rows => {
       newSchema[column] = {
         name: column,
         type: "string",
-        constraints: FIELDS["STRING"].constraints,
+        constraints: FIELDS.STRING.constraints,
       }
     })
   })

@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
 export const convertOldFieldFormat = fields => {
   if (!fields) {
     return []
@@ -31,18 +33,18 @@ export const getComponentForField = (field, schema) => {
 }

 export const FieldTypeToComponentMap = {
-  string: "stringfield",
-  number: "numberfield",
-  bigint: "bigintfield",
-  options: "optionsfield",
-  array: "multifieldselect",
-  boolean: "booleanfield",
-  longform: "longformfield",
-  datetime: "datetimefield",
-  attachment: "attachmentfield",
-  link: "relationshipfield",
-  json: "jsonfield",
-  barcodeqr: "codescanner",
-  signature: "signaturefield",
-  bb_reference: "bbreferencefield",
+  [FieldType.STRING]: "stringfield",
+  [FieldType.NUMBER]: "numberfield",
+  [FieldType.BIGINT]: "bigintfield",
+  [FieldType.OPTIONS]: "optionsfield",
+  [FieldType.ARRAY]: "multifieldselect",
+  [FieldType.BOOLEAN]: "booleanfield",
+  [FieldType.LONGFORM]: "longformfield",
+  [FieldType.DATETIME]: "datetimefield",
+  [FieldType.ATTACHMENT]: "attachmentfield",
+  [FieldType.SIGNATURE]: "signaturefield",
+  [FieldType.LINK]: "relationshipfield",
+  [FieldType.JSON]: "jsonfield",
+  [FieldType.BARCODEQR]: "codescanner",
+  [FieldType.BB_REFERENCE]: "bbreferencefield",
 }
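The map above keeps its old runtime shape because computed object keys evaluate to the enum's string values. A minimal sketch of why the two spellings are equivalent (the two-member enum here stands in for the real `FieldType` from `@budibase/types`):

```ts
// Stand-in for the real FieldType enum; the assumption is that each member's
// value equals the string literal previously used as a key.
enum FieldType {
  STRING = "string",
  LONGFORM = "longform",
}

const map = {
  [FieldType.STRING]: "stringfield", // identical at runtime to `string: ...`
  [FieldType.LONGFORM]: "longformfield",
}

// Schema types read from data are plain strings, and still resolve, but a
// renamed or removed enum member is now caught by the compiler in one place.
const fromSchema: string = "longform"
console.log(map[fromSchema as FieldType]) // "longformfield"
```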

@@ -1,12 +1,14 @@
-import { FieldType, FieldSubtype } from "@budibase/types"
+import {
+  FieldType,
+  FieldSubtype,
+  INTERNAL_TABLE_SOURCE_ID,
+  AutoFieldSubType,
+  Hosting,
+} from "@budibase/types"
+export { RelationshipType } from "@budibase/types"

-export const AUTO_COLUMN_SUB_TYPES = {
-  AUTO_ID: "autoID",
-  CREATED_BY: "createdBy",
-  CREATED_AT: "createdAt",
-  UPDATED_BY: "updatedBy",
-  UPDATED_AT: "updatedAt",
-}
+export const AUTO_COLUMN_SUB_TYPES = AutoFieldSubType

 export const AUTO_COLUMN_DISPLAY_NAMES = {
   AUTO_ID: "Auto ID",
@@ -176,10 +178,7 @@ export const FILE_TYPES = {
   DOCUMENT: ["odf", "docx", "doc", "pdf", "csv"],
 }

-export const HostingTypes = {
-  CLOUD: "cloud",
-  SELF: "self",
-}
+export const HostingTypes = Hosting

 export const Roles = {
   ADMIN: "ADMIN",
@@ -196,12 +195,6 @@ export function isAutoColumnUserRelationship(subtype) {
   )
 }

-export const RelationshipType = {
-  MANY_TO_MANY: "many-to-many",
-  ONE_TO_MANY: "one-to-many",
-  MANY_TO_ONE: "many-to-one",
-}
-
 export const PrettyRelationshipDefinitions = {
   MANY: "Many rows",
   ONE: "One row",
@@ -227,7 +220,7 @@ export const SWITCHABLE_TYPES = [
   ...ALLOWABLE_NUMBER_TYPES,
 ]

-export const BUDIBASE_INTERNAL_DB_ID = "bb_internal"
+export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
 export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"
 export const DB_TYPE_INTERNAL = "internal"
@@ -274,10 +267,10 @@ export const IntegrationNames = {
 }

 export const SchemaTypeOptions = [
-  { label: "Text", value: "string" },
-  { label: "Number", value: "number" },
-  { label: "Boolean", value: "boolean" },
-  { label: "Datetime", value: "datetime" },
+  { label: "Text", value: FieldType.STRING },
+  { label: "Number", value: FieldType.NUMBER },
+  { label: "Boolean", value: FieldType.BOOLEAN },
+  { label: "Datetime", value: FieldType.DATETIME },
 ]

 export const SchemaTypeOptionsExpanded = SchemaTypeOptions.map(el => ({
@@ -314,10 +307,10 @@ export const PaginationLocations = [
 ]

 export const BannedSearchTypes = [
-  "link",
-  "attachment",
-  "formula",
-  "json",
+  FieldType.LINK,
+  FieldType.ATTACHMENT,
+  FieldType.FORMULA,
+  FieldType.JSON,
   "jsonarray",
   "queryarray",
 ]

@@ -1,17 +1,17 @@
-import { FIELDS } from "constants/backend"
+import { FieldType } from "@budibase/types"

 function baseConversion(type) {
   if (type === "string") {
     return {
-      type: FIELDS.STRING.type,
+      type: FieldType.STRING,
     }
   } else if (type === "boolean") {
     return {
-      type: FIELDS.BOOLEAN.type,
+      type: FieldType.BOOLEAN,
     }
   } else if (type === "number") {
     return {
-      type: FIELDS.NUMBER.type,
+      type: FieldType.NUMBER,
     }
   }
 }
@@ -31,7 +31,7 @@ function recurse(schemaLevel = {}, objectLevel) {
       const schema = recurse(schemaLevel[key], value[0])
       if (schema) {
         schemaLevel[key] = {
-          type: FIELDS.ARRAY.type,
+          type: FieldType.ARRAY,
           schema,
         }
       }
@@ -45,7 +45,7 @@ function recurse(schemaLevel = {}, objectLevel) {
     }
   }
   if (!schemaLevel.type) {
-    return { type: FIELDS.JSON.type, schema: schemaLevel }
+    return { type: FieldType.JSON, schema: schemaLevel }
   } else {
     return schemaLevel
   }

@@ -1,3 +1,4 @@
+import { FieldType } from "@budibase/types"
 import { ActionStepID } from "constants/backend/automations"
 import { TableNames } from "constants"
 import {
@@ -20,20 +21,20 @@ export function buildAutoColumn(tableName, name, subtype) {
   switch (subtype) {
     case AUTO_COLUMN_SUB_TYPES.UPDATED_BY:
     case AUTO_COLUMN_SUB_TYPES.CREATED_BY:
-      type = FIELDS.LINK.type
+      type = FieldType.LINK
       constraints = FIELDS.LINK.constraints
       break
     case AUTO_COLUMN_SUB_TYPES.AUTO_ID:
-      type = FIELDS.NUMBER.type
+      type = FieldType.NUMBER
       constraints = FIELDS.NUMBER.constraints
       break
     case AUTO_COLUMN_SUB_TYPES.UPDATED_AT:
     case AUTO_COLUMN_SUB_TYPES.CREATED_AT:
-      type = FIELDS.DATETIME.type
+      type = FieldType.DATETIME
       constraints = FIELDS.DATETIME.constraints
       break
     default:
-      type = FIELDS.STRING.type
+      type = FieldType.STRING
       constraints = FIELDS.STRING.constraints
       break
   }

@@ -1,7 +1,8 @@
+import { FieldType } from "@budibase/types"
 import { get, writable, derived } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
-import { SWITCHABLE_TYPES, FIELDS } from "constants/backend"
+import { SWITCHABLE_TYPES } from "constants/backend"

 export function createTablesStore() {
   const store = writable({
@@ -83,14 +84,14 @@ export function createTablesStore() {
     // make sure tables up to date (related)
     let newTableIds = []
     for (let column of Object.values(updatedTable?.schema || {})) {
-      if (column.type === FIELDS.LINK.type) {
+      if (column.type === FieldType.LINK) {
         newTableIds.push(column.tableId)
       }
     }

     let oldTableIds = []
     for (let column of Object.values(oldTable?.schema || {})) {
-      if (column.type === FIELDS.LINK.type) {
+      if (column.type === FieldType.LINK) {
         oldTableIds.push(column.tableId)
       }
     }

@@ -1,22 +1,7 @@
+import { FieldTypeToComponentMap } from "components/design/settings/controls/FieldConfiguration/utils"
 import { Component } from "./Component"
 import { getSchemaForDatasource } from "dataBinding"

-const fieldTypeToComponentMap = {
-  string: "stringfield",
-  number: "numberfield",
-  bigint: "bigintfield",
-  options: "optionsfield",
-  array: "multifieldselect",
-  boolean: "booleanfield",
-  longform: "longformfield",
-  datetime: "datetimefield",
-  attachment: "attachmentfield",
-  link: "relationshipfield",
-  json: "jsonfield",
-  signature: "signaturefield",
-  barcodeqr: "codescanner",
-}
-
 export function makeDatasourceFormComponents(datasource) {
   const { schema } = getSchemaForDatasource(null, datasource, {
     formSchema: true,
@@ -31,7 +16,7 @@ export function makeDatasourceFormComponents(datasource) {
   }
   const fieldType =
     typeof fieldSchema === "object" ? fieldSchema.type : fieldSchema
-  const componentType = fieldTypeToComponentMap[fieldType]
+  const componentType = FieldTypeToComponentMap[fieldType]
   const fullComponentType = `@budibase/standard-components/${componentType}`
   if (componentType) {
     const component = new Component(fullComponentType)

@@ -7,20 +7,20 @@
   export let order

   const FieldTypeToComponentMap = {
-    string: "stringfield",
-    number: "numberfield",
-    bigint: "bigintfield",
-    options: "optionsfield",
-    array: "multifieldselect",
-    boolean: "booleanfield",
-    longform: "longformfield",
-    datetime: "datetimefield",
-    attachment: "attachmentfield",
-    link: "relationshipfield",
-    json: "jsonfield",
-    barcodeqr: "codescanner",
-    signature: "signaturefield",
-    bb_reference: "bbreferencefield",
+    [FieldType.STRING]: "stringfield",
+    [FieldType.NUMBER]: "numberfield",
+    [FieldType.BIGINT]: "bigintfield",
+    [FieldType.OPTIONS]: "optionsfield",
+    [FieldType.ARRAY]: "multifieldselect",
+    [FieldType.BOOLEAN]: "booleanfield",
+    [FieldType.LONGFORM]: "longformfield",
+    [FieldType.DATETIME]: "datetimefield",
+    [FieldType.ATTACHMENT]: "attachmentfield",
+    [FieldType.SIGNATURE]: "signaturefield",
+    [FieldType.LINK]: "relationshipfield",
+    [FieldType.JSON]: "jsonfield",
+    [FieldType.BARCODEQR]: "codescanner",
+    [FieldType.BB_REFERENCE]: "bbreferencefield",
   }

   const getFieldSchema = field => {

@@ -23,6 +23,6 @@
     label="Components"
     value={$componentStore.mountedComponentCount}
   />
-  <DevToolsStat label="User" value={$authStore.email} />
-  <DevToolsStat label="Role" value={$authStore.roleId} />
+  <DevToolsStat label="User" value={$authStore?.email} />
+  <DevToolsStat label="Role" value={$authStore?.roleId} />
 </Layout>

@@ -8,6 +8,7 @@
   "dependencies": {
     "@budibase/bbui": "0.0.0",
     "@budibase/shared-core": "0.0.0",
+    "@budibase/types": "0.0.0",
     "dayjs": "^1.10.8",
     "lodash": "4.17.21",
     "socket.io-client": "^4.6.1"

@@ -1,3 +1,5 @@
+import { FieldType } from "@budibase/types"
+
 import OptionsCell from "../cells/OptionsCell.svelte"
 import DateCell from "../cells/DateCell.svelte"
 import MultiSelectCell from "../cells/MultiSelectCell.svelte"
@@ -13,20 +15,20 @@ import BBReferenceCell from "../cells/BBReferenceCell.svelte"
 import SignatureCell from "../cells/SignatureCell.svelte"

 const TypeComponentMap = {
-  text: TextCell,
-  options: OptionsCell,
-  datetime: DateCell,
-  barcodeqr: TextCell,
-  signature: SignatureCell,
-  longform: LongFormCell,
-  array: MultiSelectCell,
-  number: NumberCell,
-  boolean: BooleanCell,
-  attachment: AttachmentCell,
-  link: RelationshipCell,
-  formula: FormulaCell,
-  json: JSONCell,
-  bb_reference: BBReferenceCell,
+  [FieldType.STRING]: TextCell,
+  [FieldType.OPTIONS]: OptionsCell,
+  [FieldType.DATETIME]: DateCell,
+  [FieldType.BARCODEQR]: TextCell,
+  [FieldType.SIGNATURE]: SignatureCell,
+  [FieldType.LONGFORM]: LongFormCell,
+  [FieldType.ARRAY]: MultiSelectCell,
+  [FieldType.NUMBER]: NumberCell,
+  [FieldType.BOOLEAN]: BooleanCell,
+  [FieldType.ATTACHMENT]: AttachmentCell,
+  [FieldType.LINK]: RelationshipCell,
+  [FieldType.FORMULA]: FormulaCell,
+  [FieldType.JSON]: JSONCell,
+  [FieldType.BB_REFERENCE]: BBReferenceCell,
 }
 export const getCellRenderer = column => {
   return TypeComponentMap[column?.schema?.type] || TextCell

@@ -1,3 +1,5 @@
+import { FieldType, FieldTypeSubtypes } from "@budibase/types"
+
 export const getColor = (idx, opacity = 0.3) => {
   if (idx == null || idx === -1) {
     idx = 0
@@ -6,23 +8,23 @@ export const getColor = (idx, opacity = 0.3) => {
 }

 const TypeIconMap = {
-  text: "Text",
-  options: "Dropdown",
-  datetime: "Date",
-  barcodeqr: "Camera",
-  signature: "AnnotatePen",
-  longform: "TextAlignLeft",
-  array: "Dropdown",
-  number: "123",
-  boolean: "Boolean",
-  attachment: "AppleFiles",
-  link: "DataCorrelated",
-  formula: "Calculator",
-  json: "Brackets",
-  bigint: "TagBold",
-  bb_reference: {
-    user: "User",
-    users: "UserGroup",
+  [FieldType.STRING]: "Text",
+  [FieldType.OPTIONS]: "Dropdown",
+  [FieldType.DATETIME]: "Date",
+  [FieldType.BARCODEQR]: "Camera",
+  [FieldType.SIGNATURE]: "AnnotatePen",
+  [FieldType.LONGFORM]: "TextAlignLeft",
+  [FieldType.ARRAY]: "Dropdown",
+  [FieldType.NUMBER]: "123",
+  [FieldType.BOOLEAN]: "Boolean",
+  [FieldType.ATTACHMENT]: "AppleFiles",
+  [FieldType.LINK]: "DataCorrelated",
+  [FieldType.FORMULA]: "Calculator",
+  [FieldType.JSON]: "Brackets",
+  [FieldType.BIGINT]: "TagBold",
+  [FieldType.BB_REFERENCE]: {
+    [FieldTypeSubtypes.BB_REFERENCE.USER]: "User",
+    [FieldTypeSubtypes.BB_REFERENCE.USERS]: "UserGroup",
   },
 }

@@ -1 +1 @@
-Subproject commit 6b62505be0c0b50a57b4f4980d86541ebdc86428
+Subproject commit f8e8f87bd52081e1303a5ae92c432ea5b38f3bb4

@@ -1,25 +0,0 @@
-const query = jest.fn(() => ({
-  rows: [
-    {
-      a: "string",
-      b: 1,
-    },
-  ],
-}))
-
-class Client {
-  query = query
-  end = jest.fn(cb => {
-    if (cb) cb()
-  })
-  connect = jest.fn()
-  release = jest.fn()
-}
-
-const on = jest.fn()
-
-module.exports = {
-  Client,
-  queryMock: query,
-  on,
-}

@@ -42,12 +42,6 @@ if (fs.existsSync("../pro/src")) {

 const config: Config.InitialOptions = {
   projects: [
-    {
-      ...baseConfig,
-      displayName: "sequential test",
-      testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
-      runner: "jest-serial-runner",
-    },
     {
       ...baseConfig,
       testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
@@ -60,6 +54,9 @@ const config: Config.InitialOptions = {
     "!src/db/views/staticViews.*",
     "!src/**/*.spec.{js,ts}",
     "!src/tests/**/*.{js,ts}",
+    // The use of coverage in the JS runner breaks tests by inserting
+    // coverage functions into code that will run inside of the isolate.
+    "!src/jsRunner/**/*.{js,ts}",
   ],
   coverageReporters: ["lcov", "json", "clover"],
 }

@@ -143,7 +143,7 @@
     "jest": "29.7.0",
     "jest-openapi": "0.14.2",
     "jest-runner": "29.7.0",
-    "jest-serial-runner": "1.2.1",
+    "nock": "13.5.4",
     "nodemon": "2.0.15",
     "openapi-typescript": "5.2.0",
     "path-to-regexp": "6.2.0",

@@ -4,11 +4,9 @@ set -e
 if [[ -n $CI ]]
 then
   export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
-  echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
-  jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
+  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
 else
   # --maxWorkers performs better in development
   export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
-  echo "jest --coverage --maxWorkers=2 --forceExit $@"
   jest --coverage --maxWorkers=2 --forceExit $@
 fi

@@ -1,6 +1,6 @@
 import { getQueryParams, getTableParams } from "../../db/utils"
 import { getIntegration } from "../../integrations"
-import { invalidateDynamicVariables } from "../../threads/utils"
+import { invalidateCachedVariable } from "../../threads/utils"
 import { context, db as dbCore, events } from "@budibase/backend-core"
 import {
   BuildSchemaFromSourceRequest,
@@ -121,7 +121,7 @@ async function invalidateVariables(
       }
     })
   }
-  await invalidateDynamicVariables(toInvalidate)
+  await invalidateCachedVariable(toInvalidate)
 }

 export async function update(

@@ -2,7 +2,7 @@ import { generateQueryID } from "../../../db/utils"
 import { Thread, ThreadType } from "../../../threads"
 import { save as saveDatasource } from "../datasource"
 import { RestImporter } from "./import"
-import { invalidateDynamicVariables } from "../../../threads/utils"
+import { invalidateCachedVariable } from "../../../threads/utils"
 import env from "../../../environment"
 import { events, context, utils, constants } from "@budibase/backend-core"
 import sdk from "../../../sdk"
@@ -281,49 +281,52 @@ export async function preview(
     return { previewSchema, nestedSchemaFields }
   }

+  const inputs: QueryEvent = {
+    appId: ctx.appId,
+    queryVerb: query.queryVerb,
+    fields: query.fields,
+    parameters: enrichParameters(query),
+    transformer: query.transformer,
+    schema: query.schema,
+    nullDefaultSupport: query.nullDefaultSupport,
+    queryId,
+    datasource,
+    // have to pass down to the thread runner - can't put into context now
+    environmentVariables: envVars,
+    ctx: {
+      user: ctx.user,
+      auth: { ...authConfigCtx },
+    },
+  }
+
+  let queryResponse: QueryResponse
   try {
-    const inputs: QueryEvent = {
-      appId: ctx.appId,
-      queryVerb: query.queryVerb,
-      fields: query.fields,
-      parameters: enrichParameters(query),
-      transformer: query.transformer,
-      schema: query.schema,
-      nullDefaultSupport: query.nullDefaultSupport,
-      queryId,
-      datasource,
-      // have to pass down to the thread runner - can't put into context now
-      environmentVariables: envVars,
-      ctx: {
-        user: ctx.user,
-        auth: { ...authConfigCtx },
-      },
-    }
-
-    const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
-    const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
-
-    // if existing schema, update to include any previous schema keys
-    if (existingSchema) {
-      for (let key of Object.keys(previewSchema)) {
-        if (existingSchema[key]) {
-          previewSchema[key] = existingSchema[key]
-        }
-      }
-    }
-    // remove configuration before sending event
-    delete datasource.config
-    await events.query.previewed(datasource, ctx.request.body)
-    ctx.body = {
-      rows,
-      nestedSchemaFields,
-      schema: previewSchema,
-      info,
-      extra,
-    }
+    queryResponse = await Runner.run<QueryResponse>(inputs)
   } catch (err: any) {
     ctx.throw(400, err)
   }
+
+  const { rows, keys, info, extra } = queryResponse
+  const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
+
+  // if existing schema, update to include any previous schema keys
+  if (existingSchema) {
+    for (let key of Object.keys(previewSchema)) {
+      if (existingSchema[key]) {
+        previewSchema[key] = existingSchema[key]
+      }
+    }
+  }
+  // remove configuration before sending event
+  delete datasource.config
+  await events.query.previewed(datasource, ctx.request.body)
+  ctx.body = {
+    rows,
+    nestedSchemaFields,
+    schema: previewSchema,
+    info,
+    extra,
+  }
 }

 async function execute(
@@ -416,7 +419,7 @@ const removeDynamicVariables = async (queryId: string) => {
     const variablesToDelete = dynamicVariables!.filter(
       (dv: any) => dv.queryId === queryId
     )
-    await invalidateDynamicVariables(variablesToDelete)
+    await invalidateCachedVariable(variablesToDelete)
   }
 }
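The restructuring above moves input construction and response shaping out of the `try`, so only the `Runner.run` call can be translated into an HTTP 400. A stripped-down sketch of the resulting control flow (the `Ctx` and `run` types are stand-ins, not the real Budibase types):

```ts
// Stand-ins for illustration only.
interface Ctx {
  throw(status: number, err: unknown): never
  body?: unknown
}

async function preview(ctx: Ctx, run: () => Promise<unknown>) {
  let response: unknown = undefined
  try {
    // Only the external call is guarded: a failure here reflects the
    // caller's bad query, so a 400 is appropriate.
    response = await run()
  } catch (err) {
    ctx.throw(400, err)
  }
  // Shaping the response happens outside the try; an unexpected bug here
  // surfaces as a 500 rather than masquerading as a bad request.
  ctx.body = { rows: response }
}
```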

@@ -1,7 +1,5 @@
 const setup = require("../../tests/utilities")

-jest.setTimeout(30000)
-
 describe("/metrics", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()

@@ -1,7 +1,6 @@
 import * as setup from "./utilities"
 import path from "path"

-jest.setTimeout(15000)
 const PASSWORD = "testtest"

 describe("/applications/:appId/import", () => {

@@ -23,8 +23,6 @@ let {
   collectAutomation,
 } = setup.structures

-jest.setTimeout(30000)
-
 describe("/automations", () => {
   let request = setup.getRequest()
   let config = setup.getConfig()

@@ -1,18 +1,16 @@
-jest.mock("pg")
 import * as setup from "./utilities"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
-import { checkCacheForDynamicVariable } from "../../../threads/utils"
+import { getCachedVariable } from "../../../threads/utils"
 import { context, events } from "@budibase/backend-core"
 import sdk from "../../../sdk"

 import tk from "timekeeper"
 import { mocks } from "@budibase/backend-core/tests"
-import { QueryPreview } from "@budibase/types"
+import { QueryPreview, SourceName } from "@budibase/types"

 tk.freeze(mocks.date.MOCK_DATE)

 let { basicDatasource } = setup.structures
-const pg = require("pg")

 describe("/datasources", () => {
   let request = setup.getRequest()
@@ -42,6 +40,23 @@ describe("/datasources", () => {
       expect(res.body.errors).toEqual({})
       expect(events.datasource.created).toHaveBeenCalledTimes(1)
     })

+    it("should fail if the datasource is invalid", async () => {
+      await config.api.datasource.create(
+        {
+          name: "Test",
+          type: "test",
+          source: "invalid" as SourceName,
+          config: {},
+        },
+        {
+          status: 500,
+          body: {
+            message: "No datasource implementation found.",
+          },
+        }
+      )
+    })
   })

   describe("update", () => {
@@ -74,7 +89,7 @@ describe("/datasources", () => {
         schema: {},
         readable: true,
       }
-      return config.api.query.previewQuery(queryPreview)
+      return config.api.query.preview(queryPreview)
     }

     it("should invalidate changed or removed variables", async () => {
@@ -85,10 +100,7 @@ describe("/datasources", () => {
         queryString: "test={{ variable3 }}",
       })
       // check variables in cache
-      let contents = await checkCacheForDynamicVariable(
-        query._id!,
-        "variable3"
-      )
+      let contents = await getCachedVariable(query._id!, "variable3")
       expect(contents.rows.length).toEqual(1)

       // update the datasource to remove the variables
@@ -102,7 +114,7 @@ describe("/datasources", () => {
       expect(res.body.errors).toBeUndefined()

       // check variables no longer in cache
-      contents = await checkCacheForDynamicVariable(query._id!, "variable3")
+      contents = await getCachedVariable(query._id!, "variable3")
       expect(contents).toBe(null)
     })
   })
@@ -149,35 +161,6 @@ describe("/datasources", () => {
     })
   })

-  describe("query", () => {
-    it("should be able to query a pg datasource", async () => {
-      const res = await request
-        .post(`/api/datasources/query`)
-        .send({
-          endpoint: {
-            datasourceId: datasource._id,
-            operation: "READ",
-            // table name below
-            entityId: "users",
-          },
-          resource: {
-            fields: ["users.name", "users.age"],
-          },
-          filters: {
-            string: {
-              name: "John",
-            },
-          },
-        })
-        .set(config.defaultHeaders())
-        .expect(200)
-      // this is mock data, can't test it
-      expect(res.body).toBeDefined()
-      const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
-      expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
-    })
-  })
-
   describe("destroy", () => {
     beforeAll(setupTest)
(File diff suppressed because it is too large)
@ -1,14 +1,17 @@
|
|||
import { Datasource, Query } from "@budibase/types"
|
||||
import * as setup from "../utilities"
|
||||
import { databaseTestProviders } from "../../../../integrations/tests/utils"
|
||||
import { MongoClient, type Collection, BSON } from "mongodb"
|
||||
|
||||
const collection = "test_collection"
|
||||
import {
|
||||
DatabaseName,
|
||||
getDatasource,
|
||||
} from "../../../../integrations/tests/utils"
|
||||
import { MongoClient, type Collection, BSON, Db } from "mongodb"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
|
||||
const expectValidId = expect.stringMatching(/^\w{24}$/)
|
||||
const expectValidBsonObjectId = expect.any(BSON.ObjectId)
|
||||
|
||||
describe("/queries", () => {
|
||||
let collection: string
|
||||
let config = setup.getConfig()
|
||||
let datasource: Datasource
|
||||
|
||||
|
@ -37,8 +40,7 @@ describe("/queries", () => {
|
|||
async function withClient<T>(
|
||||
callback: (client: MongoClient) => Promise<T>
|
||||
): Promise<T> {
|
||||
const ds = await databaseTestProviders.mongodb.datasource()
|
||||
const client = new MongoClient(ds.config!.connectionString)
|
||||
const client = new MongoClient(datasource.config!.connectionString)
|
||||
await client.connect()
|
||||
try {
|
||||
return await callback(client)
|
||||
|
@ -47,30 +49,33 @@ describe("/queries", () => {
|
|||
}
|
||||
}
|
||||
|
||||
async function withDb<T>(callback: (db: Db) => Promise<T>): Promise<T> {
|
||||
return await withClient(async client => {
|
||||
return await callback(client.db(datasource.config!.db))
|
||||
})
|
||||
}
|
||||
|
||||
async function withCollection<T>(
|
||||
callback: (collection: Collection) => Promise<T>
|
||||
): Promise<T> {
|
||||
return await withClient(async client => {
|
||||
const db = client.db(
|
||||
(await databaseTestProviders.mongodb.datasource()).config!.db
|
||||
)
|
||||
return await withDb(async db => {
|
||||
return await callback(db.collection(collection))
|
||||
})
|
||||
}
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.mongodb.stop()
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
datasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mongodb.datasource()
|
||||
await getDatasource(DatabaseName.MONGODB)
|
||||
)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
collection = generator.guid()
|
||||
await withCollection(async collection => {
|
||||
await collection.insertMany([
|
||||
{ name: "one" },
|
||||
|
@ -83,345 +88,491 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await withCollection(async collection => {
|
||||
await collection.drop()
|
||||
})
|
||||
await withCollection(collection => collection.drop())
|
||||
})
|
||||
|
||||
it("should execute a count query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
describe("preview", () => {
|
||||
it("should generate a nested schema with an empty array", async () => {
|
||||
const name = generator.guid()
|
||||
await withCollection(
|
||||
async collection => await collection.insertOne({ name, nested: [] })
|
||||
)
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {},
|
||||
rows: [{ _id: expect.any(String), name, nested: [] }],
|
||||
schema: {
|
||||
_id: {
|
||||
type: "string",
|
||||
name: "_id",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
nested: {
|
||||
type: "array",
|
||||
name: "nested",
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
it("should generate a nested schema based on all of the nested items", async () => {
|
||||
const name = generator.guid()
|
||||
const item = {
|
||||
name,
|
||||
contacts: [
|
||||
{
|
||||
address: "123 Lane",
|
||||
},
|
||||
{
|
||||
address: "456 Drive",
|
||||
},
|
||||
{
|
||||
postcode: "BT1 12N",
|
||||
lat: 54.59,
|
||||
long: -5.92,
|
||||
},
|
||||
{
|
||||
city: "Belfast",
|
||||
},
|
||||
{
|
||||
address: "789 Avenue",
|
||||
phoneNumber: "0800-999-5555",
|
||||
},
|
||||
{
|
||||
name: "Name",
|
||||
isActive: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
expect(result.data).toEqual([{ value: 5 }])
|
||||
})
|
||||
await withCollection(collection => collection.insertOne(item))
|
||||
|
||||
it("should execute a count query with a transformer", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
transformer: "return data + 1",
|
||||
})
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 6 }])
|
||||
})
|
||||
|
||||
it("should execute a find query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "find",
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {
|
||||
contacts: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "address",
|
||||
},
|
||||
postcode: {
|
||||
type: "string",
|
||||
name: "postcode",
|
||||
},
|
||||
lat: {
|
||||
type: "number",
|
||||
name: "lat",
|
||||
},
|
||||
long: {
|
||||
type: "number",
|
||||
name: "long",
|
||||
},
|
||||
city: {
|
||||
type: "string",
|
||||
name: "city",
|
||||
},
|
||||
phoneNumber: {
|
||||
type: "string",
|
||||
name: "phoneNumber",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
isActive: {
|
||||
type: "boolean",
|
||||
name: "isActive",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{ _id: expectValidId, name: "one" },
|
||||
{ _id: expectValidId, name: "two" },
|
||||
{ _id: expectValidId, name: "three" },
|
||||
{ _id: expectValidId, name: "four" },
|
||||
{ _id: expectValidId, name: "five" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should execute a findOne query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "findOne",
|
||||
rows: [{ ...item, _id: expect.any(String) }],
|
||||
schema: {
|
||||
_id: { type: "string", name: "_id" },
|
||||
name: { type: "string", name: "name" },
|
||||
contacts: { type: "json", name: "contacts", subtype: "array" },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
|
||||
})
|
||||
|
||||
it("should execute a findOneAndUpdate query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "one" } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "findOneAndUpdate",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
lastErrorObject: { n: 1, updatedExisting: true },
|
||||
ok: 1,
|
||||
value: { _id: expectValidId, name: "one" },
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
expect(await collection.countDocuments()).toBe(5)
|
||||
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a distinct query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: "name",
|
||||
extra: {
|
||||
actionType: "distinct",
|
||||
describe("execute", () => {
|
||||
it("a count query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
const values = result.data.map(o => o.value).sort()
|
||||
expect(values).toEqual(["five", "four", "one", "three", "two"])
|
||||
})
|
||||
|
||||
it("should execute a create query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { foo: "{{ foo }}" },
|
||||
extra: {
|
||||
actionType: "insertOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "create",
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "default",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { foo: "bar" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
insertedId: expectValidId,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ foo: { $eq: "bar" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
foo: "bar",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a delete query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { name: { $eq: "{{ name }}" } },
|
||||
extra: {
|
||||
actionType: "deleteOne",
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 5 }])
|
||||
})
|
||||
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c =>
|
||||
c.insertOne({ name: "one" })
|
||||
)
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
parameters: [
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
],
|
||||
})
|
||||
])
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 1,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(doc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute an update query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "{{ name }}" } },
|
||||
update: { $set: { name: "{{ newName }}" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one", newName: "newOne" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newOne" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newOne",
|
||||
})
|
||||
|
||||
const oldDoc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(oldDoc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c => c.insertOne({ name: "one" }))
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete all records", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "deleteMany",
|
||||
it("a count query with a transformer", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
},
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
transformer: "return data + 1",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 6 }])
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 5,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update all documents", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: {},
|
||||
update: { $set: { name: "newName" } },
|
||||
it("a find query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "find",
|
||||
},
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{ _id: expectValidId, name: "one" },
|
||||
{ _id: expectValidId, name: "two" },
|
||||
{ _id: expectValidId, name: "three" },
|
||||
{ _id: expectValidId, name: "four" },
|
||||
{ _id: expectValidId, name: "five" },
|
||||
])
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
it("a findOne query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 5,
|
||||
modifiedCount: 5,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(5)
|
||||
for (const doc of docs) {
|
||||
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
|
||||
})
|
||||
|
||||
it("a findOneAndUpdate query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "one" } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "findOneAndUpdate",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
lastErrorObject: { n: 1, updatedExisting: true },
|
||||
ok: 1,
|
||||
value: { _id: expectValidId, name: "one" },
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
expect(await collection.countDocuments()).toBe(5)
|
||||
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it("a distinct query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: "name",
|
||||
extra: {
|
||||
actionType: "distinct",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
const values = result.data.map(o => o.value).sort()
|
||||
expect(values).toEqual(["five", "four", "one", "three", "two"])
|
||||
})
|
||||
|
||||
it("a create query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { foo: "{{ foo }}" },
|
||||
extra: {
|
||||
actionType: "insertOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "create",
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "default",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { foo: "bar" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
insertedId: expectValidId,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ foo: { $eq: "bar" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
foo: "bar",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("a delete query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { name: { $eq: "{{ name }}" } },
|
||||
extra: {
|
||||
actionType: "deleteOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 1,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(doc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("an update query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "{{ name }}" } },
|
||||
update: { $set: { name: "{{ newName }}" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
name: "newName",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one", newName: "newOne" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newOne" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newOne",
|
||||
})
|
||||
|
||||
const oldDoc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(oldDoc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete all records", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "deleteMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 5,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update all documents", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: {},
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 5,
|
||||
modifiedCount: 5,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(5)
|
||||
for (const doc of docs) {
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@@ -0,0 +1,47 @@
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Datasource, Query, SourceName } from "@budibase/types"

describe("query permissions", () => {
  let config: TestConfiguration
  let datasource: Datasource
  let query: Query

  beforeAll(async () => {
    config = setup.getConfig()
    await config.init()
    datasource = await config.api.datasource.create({
      name: "test datasource",
      type: "test",
      source: SourceName.REST,
      config: {},
    })
    query = await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields: {},
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  })

  it("delete should require builder", async () => {
    await checkBuilderEndpoint({
      config,
      method: "DELETE",
      url: `/api/queries/${query._id}/${query._rev}`,
    })
  })

  it("preview should require builder", async () => {
    await checkBuilderEndpoint({
      config,
      method: "POST",
      url: `/api/queries/preview`,
    })
  })
})

@@ -1,774 +0,0 @@
import tk from "timekeeper"

const pg = require("pg")

// Mock out postgres for this
jest.mock("pg")
jest.mock("node-fetch")

// Mock isProdAppID so we can later mock the implementation and pretend we are
// using prod app IDs
jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    db: {
      ...core.db,
      isProdAppID: jest.fn(),
    },
  }
})
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../../threads/utils"

const { basicQuery, basicDatasource } = setup.structures
import { events, db as dbCore } from "@budibase/backend-core"
import {
  Datasource,
  Query,
  SourceName,
  QueryPreview,
  QueryParameter,
} from "@budibase/types"

tk.freeze(Date.now())

const mockIsProdAppID = dbCore.isProdAppID as jest.MockedFunction<
  typeof dbCore.isProdAppID
>

describe("/queries", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
  let datasource: Datasource & Required<Pick<Datasource, "_id">>, query: Query

  afterAll(setup.afterAll)

  const setupTest = async () => {
    await config.init()
    datasource = await config.createDatasource()
    query = await config.createQuery()
  }

  beforeAll(async () => {
    await setupTest()
  })

  const createQuery = async (query: Query) => {
    return request
      .post(`/api/queries`)
      .send(query)
      .set(config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)
  }

  describe("create", () => {
    it("should create a new query", async () => {
      const { _id } = await config.createDatasource()
      const query = basicQuery(_id)
      jest.clearAllMocks()
      const res = await createQuery(query)

      expect((res as any).res.statusMessage).toEqual(
        `Query ${query.name} saved successfully.`
      )
      expect(res.body).toEqual({
        _rev: res.body._rev,
        _id: res.body._id,
        ...query,
        nullDefaultSupport: true,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      })
      expect(events.query.created).toHaveBeenCalledTimes(1)
      expect(events.query.updated).not.toHaveBeenCalled()
    })
  })

  describe("update", () => {
    it("should update query", async () => {
      const { _id } = await config.createDatasource()
      const query = basicQuery(_id)
      const res = await createQuery(query)
      jest.clearAllMocks()
      query._id = res.body._id
      query._rev = res.body._rev
      await createQuery(query)

      expect((res as any).res.statusMessage).toEqual(
        `Query ${query.name} saved successfully.`
      )
      expect(res.body).toEqual({
        _rev: res.body._rev,
        _id: res.body._id,
        ...query,
        nullDefaultSupport: true,
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      })
      expect(events.query.created).not.toHaveBeenCalled()
      expect(events.query.updated).toHaveBeenCalledTimes(1)
    })
  })

  describe("fetch", () => {
    beforeEach(async () => {
      await setupTest()
    })

    it("returns all the queries from the server", async () => {
      const res = await request
        .get(`/api/queries`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)

      const queries = res.body
      expect(queries).toEqual([
        {
          _rev: query._rev,
          _id: query._id,
          createdAt: new Date().toISOString(),
          ...basicQuery(datasource._id),
          nullDefaultSupport: true,
          updatedAt: new Date().toISOString(),
          readable: true,
        },
      ])
    })

    it("should apply authorization to endpoint", async () => {
      await checkBuilderEndpoint({
        config,
        method: "GET",
        url: `/api/datasources`,
      })
    })
  })

  describe("find", () => {
    it("should find a query in builder", async () => {
      const query = await config.createQuery()
      const res = await request
        .get(`/api/queries/${query._id}`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(res.body._id).toEqual(query._id)
    })

    it("should find a query in cloud", async () => {
      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
        const query = await config.createQuery()
        const res = await request
          .get(`/api/queries/${query._id}`)
          .set(await config.defaultHeaders())
          .expect(200)
          .expect("Content-Type", /json/)
        expect(res.body.fields).toBeDefined()
        expect(res.body.parameters).toBeDefined()
        expect(res.body.schema).toBeDefined()
      })
    })

    it("should remove sensitive info for prod apps", async () => {
      // Mock isProdAppID to pretend we are using a prod app
      mockIsProdAppID.mockClear()
      mockIsProdAppID.mockImplementation(() => true)

      const query = await config.createQuery()
      const res = await request
        .get(`/api/queries/${query._id}`)
        .set(await config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(res.body._id).toEqual(query._id)
      expect(res.body.fields).toBeUndefined()
      expect(res.body.parameters).toBeUndefined()
      expect(res.body.schema).toBeDefined()

      // Reset isProdAppID mock
      expect(dbCore.isProdAppID).toHaveBeenCalledTimes(1)
      mockIsProdAppID.mockImplementation(() => false)
    })
  })

  describe("destroy", () => {
    beforeEach(async () => {
      await setupTest()
    })

    it("deletes a query and returns a success message", async () => {
      await request
        .delete(`/api/queries/${query._id}/${query._rev}`)
        .set(config.defaultHeaders())
        .expect(200)

      const res = await request
        .get(`/api/queries`)
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)

      expect(res.body).toEqual([])
      expect(events.query.deleted).toHaveBeenCalledTimes(1)
      expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
    })

    it("should apply authorization to endpoint", async () => {
      const query = await config.createQuery()
      await checkBuilderEndpoint({
        config,
        method: "DELETE",
        url: `/api/queries/${query._id}/${query._rev}`,
      })
    })
  })

  describe("preview", () => {
    it("should be able to preview the query", async () => {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id,
        queryVerb: "read",
        fields: {},
        parameters: [],
        transformer: "return data",
        name: datasource.name!,
        schema: {},
        readable: true,
      }
      const responseBody = await config.api.query.previewQuery(queryPreview)
      // these responses come from the mock
      expect(responseBody.schema).toEqual({
        a: { type: "string", name: "a" },
        b: { type: "number", name: "b" },
      })
      expect(responseBody.rows.length).toEqual(1)
      expect(events.query.previewed).toHaveBeenCalledTimes(1)
      delete datasource.config
      expect(events.query.previewed).toHaveBeenCalledWith(datasource, {
        ...queryPreview,
        nullDefaultSupport: true,
      })
    })

    it("should apply authorization to endpoint", async () => {
      await checkBuilderEndpoint({
        config,
        method: "POST",
        url: `/api/queries/preview`,
      })
    })

    it("should not error when trying to generate a nested schema for an empty array", async () => {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id,
        parameters: [],
        fields: {},
        queryVerb: "read",
        name: datasource.name!,
        transformer: "return data",
        schema: {},
        readable: true,
      }
      const rows = [
        {
          contacts: [],
        },
      ]
      pg.queryMock.mockImplementation(() => ({
        rows,
      }))

      const responseBody = await config.api.query.previewQuery(queryPreview)
      expect(responseBody).toEqual({
        nestedSchemaFields: {},
        rows,
        schema: {
          contacts: { type: "array", name: "contacts" },
        },
      })
      expect(responseBody.rows.length).toEqual(1)
      delete datasource.config
    })

    it("should generate a nested schema based on all the nested items", async () => {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id,
        parameters: [],
        fields: {},
        queryVerb: "read",
        name: datasource.name!,
        transformer: "return data",
        schema: {},
        readable: true,
      }
      const rows = [
        {
          contacts: [
            {
              address: "123 Lane",
            },
            {
              address: "456 Drive",
            },
            {
              postcode: "BT1 12N",
              lat: 54.59,
              long: -5.92,
            },
            {
              city: "Belfast",
            },
            {
              address: "789 Avenue",
              phoneNumber: "0800-999-5555",
            },
            {
              name: "Name",
              isActive: false,
            },
          ],
        },
      ]

      pg.queryMock.mockImplementation(() => ({
        rows,
      }))

      const responseBody = await config.api.query.previewQuery(queryPreview)
      expect(responseBody).toEqual({
        nestedSchemaFields: {
          contacts: {
            address: {
              type: "string",
              name: "address",
            },
            postcode: {
              type: "string",
              name: "postcode",
            },
            lat: {
              type: "number",
              name: "lat",
            },
            long: {
              type: "number",
              name: "long",
            },
            city: {
              type: "string",
              name: "city",
            },
            phoneNumber: {
              type: "string",
              name: "phoneNumber",
            },
            name: {
              type: "string",
              name: "name",
            },
            isActive: {
              type: "boolean",
              name: "isActive",
            },
          },
        },
        rows,
        schema: {
          contacts: { type: "json", name: "contacts", subtype: "array" },
        },
      })
      expect(responseBody.rows.length).toEqual(1)
      delete datasource.config
    })
  })

  describe("execute", () => {
    beforeEach(async () => {
      await setupTest()
    })

    it("should be able to execute the query", async () => {
      const res = await request
        .post(`/api/queries/${query._id}`)
        .send({
          parameters: {},
        })
        .set(config.defaultHeaders())
        .expect("Content-Type", /json/)
        .expect(200)
      expect(res.body.length).toEqual(1)
    })

    it("should fail with invalid integration type", async () => {
      const datasource: Datasource = {
        ...basicDatasource().datasource,
        source: "INVALID_INTEGRATION" as SourceName,
      }
      await config.api.datasource.create(datasource, {
        status: 500,
        body: {
          message: "No datasource implementation found.",
        },
      })
    })

    it("shouldn't allow handlebars to be passed as parameters", async () => {
      const res = await request
        .post(`/api/queries/${query._id}`)
        .send({
          parameters: {
            a: "{{ 'test' }}",
          },
        })
        .set(config.defaultHeaders())
        .expect(400)
      expect(res.body.message).toEqual(
        "Parameter 'a' input contains a handlebars binding - this is not allowed."
      )
    })
  })

  describe("variables", () => {
    async function preview(datasource: Datasource, fields: any) {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id!,
        parameters: [],
        fields,
        queryVerb: "read",
        name: datasource.name!,
        transformer: "return data",
        schema: {},
        readable: true,
      }
      return await config.api.query.previewQuery(queryPreview)
    }

    it("should work with static variables", async () => {
      const datasource = await config.restDatasource({
        staticVariables: {
          variable: "google",
          variable2: "1",
        },
      })
      const responseBody = await preview(datasource, {
        path: "www.{{ variable }}.com",
        queryString: "test={{ variable2 }}",
      })
      // these responses come from the mock
      expect(responseBody.schema).toEqual({
        opts: { type: "json", name: "opts" },
        url: { type: "string", name: "url" },
        value: { type: "string", name: "value" },
      })
      expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
    })

    it("should work with dynamic variables", async () => {
      const { datasource } = await config.dynamicVariableDatasource()
      const responseBody = await preview(datasource, {
        path: "www.google.com",
        queryString: "test={{ variable3 }}",
      })
      expect(responseBody.schema).toEqual({
        opts: { type: "json", name: "opts" },
        url: { type: "string", name: "url" },
        value: { type: "string", name: "value" },
      })
      expect(responseBody.rows[0].url).toContain("doctype%20html")
    })

    it("check that it automatically retries on fail with cached dynamics", async () => {
      const { datasource, query: base } =
        await config.dynamicVariableDatasource()
      // preview once to cache
      await preview(datasource, {
        path: "www.google.com",
        queryString: "test={{ variable3 }}",
      })
      // check it's in the cache
      const contents = await checkCacheForDynamicVariable(
        base._id!,
        "variable3"
      )
      expect(contents.rows.length).toEqual(1)
      const responseBody = await preview(datasource, {
        path: "www.failonce.com",
        queryString: "test={{ variable3 }}",
      })
      expect(responseBody.schema).toEqual({
        fails: { type: "number", name: "fails" },
        opts: { type: "json", name: "opts" },
        url: { type: "string", name: "url" },
      })
      expect(responseBody.rows[0].fails).toEqual(1)
    })

    it("deletes variables when linked query is deleted", async () => {
      const { datasource, query: base } =
        await config.dynamicVariableDatasource()
      // preview once to cache
      await preview(datasource, {
        path: "www.google.com",
        queryString: "test={{ variable3 }}",
      })
      // check it's in the cache
      let contents = await checkCacheForDynamicVariable(base._id!, "variable3")
      expect(contents.rows.length).toEqual(1)

      // delete the query
      await request
        .delete(`/api/queries/${base._id}/${base._rev}`)
        .set(config.defaultHeaders())
        .expect(200)

      // check variables are no longer in the cache
      contents = await checkCacheForDynamicVariable(base._id!, "variable3")
      expect(contents).toBe(null)
    })
  })

  describe("Current User Request Mapping", () => {
    async function previewGet(
      datasource: Datasource,
      fields: any,
      params: QueryParameter[]
    ) {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id!,
        parameters: params,
        fields,
        queryVerb: "read",
        name: datasource.name!,
        transformer: "return data",
        schema: {},
        readable: true,
      }
      return await config.api.query.previewQuery(queryPreview)
    }

    async function previewPost(
      datasource: Datasource,
      fields: any,
      params: QueryParameter[]
    ) {
      const queryPreview: QueryPreview = {
        datasourceId: datasource._id!,
        parameters: params,
        fields,
        queryVerb: "create",
        name: datasource.name!,
        transformer: null,
        schema: {},
        readable: false,
      }
      return await config.api.query.previewQuery(queryPreview)
    }

    it("should parse global and query level header mappings", async () => {
      const userDetails = config.getUserDetails()

      const datasource = await config.restDatasource({
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      })
      const responseBody = await previewGet(
        datasource,
        {
          path: "www.google.com",
          queryString: "email={{[user].[email]}}",
          headers: {
            queryHdr: "{{[user].[firstName]}}",
            secondHdr: "1234",
          },
        },
        []
      )

      const parsedRequest = JSON.parse(responseBody.extra.raw)
      expect(parsedRequest.opts.headers).toEqual({
        test: "headerVal",
        emailHdr: userDetails.email,
        queryHdr: userDetails.firstName,
        secondHdr: "1234",
      })
      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?email=" + userDetails.email.replace("@", "%40")
      )
    })

    it("should bind the current user to query parameters", async () => {
      const userDetails = config.getUserDetails()

      const datasource = await config.restDatasource()

      const responseBody = await previewGet(
        datasource,
        {
          path: "www.google.com",
          queryString:
            "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
        },
        [
          { name: "myEmail", default: "{{[user].[email]}}" },
          { name: "myName", default: "{{[user].[firstName]}}" },
          { name: "testParam", default: "1234" },
        ]
      )

      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?test=" +
          userDetails.email.replace("@", "%40") +
          "&testName=" +
          userDetails.firstName +
          "&testParam=1234"
      )
    })

    it("should bind the current user to the request body - plain text", async () => {
      const userDetails = config.getUserDetails()
      const datasource = await config.restDatasource()

      const responseBody = await previewPost(
        datasource,
        {
          path: "www.google.com",
          queryString: "testParam={{testParam}}",
          requestBody:
            "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
          bodyType: "text",
        },
        [{ name: "testParam", default: "1234" }]
      )

      const parsedRequest = JSON.parse(responseBody.extra.raw)
      expect(parsedRequest.opts.body).toEqual(
        `This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
      )
      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?testParam=1234"
      )
    })

    it("should bind the current user to the request body - json", async () => {
      const userDetails = config.getUserDetails()
      const datasource = await config.restDatasource()

      const responseBody = await previewPost(
        datasource,
        {
          path: "www.google.com",
          queryString: "testParam={{testParam}}",
          requestBody:
            '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
          bodyType: "json",
        },
        [
          { name: "testParam", default: "1234" },
          { name: "userRef", default: "{{[user].[firstName]}}" },
        ]
      )

      const parsedRequest = JSON.parse(responseBody.extra.raw)
      const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
      expect(parsedRequest.opts.body).toEqual(test)
      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?testParam=1234"
      )
    })

    it("should bind the current user to the request body - xml", async () => {
      const userDetails = config.getUserDetails()
      const datasource = await config.restDatasource()

      const responseBody = await previewPost(
        datasource,
        {
          path: "www.google.com",
          queryString: "testParam={{testParam}}",
          requestBody:
            "<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
            "<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
          bodyType: "xml",
        },
        [
          { name: "testParam", default: "1234" },
          { name: "userId", default: "{{[user].[firstName]}}" },
        ]
      )

      const parsedRequest = JSON.parse(responseBody.extra.raw)
      const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>${userDetails.firstName}</ref> <somestring>testing</somestring> </note>`

      expect(parsedRequest.opts.body).toEqual(test)
      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?testParam=1234"
      )
    })

    it("should bind the current user to the request body - form-data", async () => {
      const userDetails = config.getUserDetails()
      const datasource = await config.restDatasource()

      const responseBody = await previewPost(
        datasource,
        {
          path: "www.google.com",
          queryString: "testParam={{testParam}}",
          requestBody:
            '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
          bodyType: "form",
        },
        [
          { name: "testParam", default: "1234" },
          { name: "userRef", default: "{{[user].[firstName]}}" },
        ]
      )

      const parsedRequest = JSON.parse(responseBody.extra.raw)

      const emailData = parsedRequest.opts.body._streams[1]
      expect(emailData).toEqual(userDetails.email)

      const queryCodeData = parsedRequest.opts.body._streams[4]
      expect(queryCodeData).toEqual("1234")

      const userRef = parsedRequest.opts.body._streams[7]
      expect(userRef).toEqual(userDetails.firstName)

      expect(responseBody.rows[0].url).toEqual(
        "http://www.google.com?testParam=1234"
      )
    })

    it("should bind the current user to the request body - encoded", async () => {
      const userDetails = config.getUserDetails()
      const datasource = await config.restDatasource()

      const responseBody = await previewPost(
        datasource,
        {
          path: "www.google.com",
          queryString: "testParam={{testParam}}",
          requestBody:
            '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
          bodyType: "encoded",
        },
        [
          { name: "testParam", default: "1234" },
          { name: "userRef", default: "{{[user].[firstName]}}" },
        ]
      )
      const parsedRequest = JSON.parse(responseBody.extra.raw)

      expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
      expect(parsedRequest.opts.body.queryCode).toEqual("1234")
      expect(parsedRequest.opts.body.userRef).toEqual(userDetails.firstName)
    })
  })
})

@@ -0,0 +1,406 @@
import * as setup from "../utilities"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Datasource, SourceName } from "@budibase/types"
import { getCachedVariable } from "../../../../threads/utils"
import nock from "nock"
import { generator } from "@budibase/backend-core/tests"

jest.unmock("node-fetch")

describe("rest", () => {
  let config: TestConfiguration
  let datasource: Datasource

  async function createQuery(fields: any) {
    return await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields,
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  }

  beforeAll(async () => {
    config = setup.getConfig()
    await config.init()
    datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {},
    })
  })

  afterEach(() => {
    nock.cleanAll()
  })

  it("should automatically retry on fail with cached dynamics", async () => {
    const basedOnQuery = await createQuery({
      path: "one.example.com",
    })

    let cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached).toBeNull()

    await config.api.datasource.update({
      ...datasource,
      config: {
        ...datasource.config,
        dynamicVariables: [
          {
            queryId: basedOnQuery._id!,
            name: "foo",
            value: "{{ data[0].name }}",
          },
        ],
      },
    })

    cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached).toBeNull()

    nock("http://one.example.com")
      .get("/")
      .reply(200, [{ name: "one" }])
    nock("http://two.example.com").get("/?test=one").reply(500)
    nock("http://two.example.com")
      .get("/?test=one")
      .reply(200, [{ name: "two" }])

    const res = await config.api.query.preview({
      datasourceId: datasource._id!,
      name: "test query",
      parameters: [],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "two.example.com",
        queryString: "test={{ foo }}",
      },
    })
    expect(res.schema).toEqual({
      name: { type: "string", name: "name" },
    })

    cached = await getCachedVariable(basedOnQuery._id!, "foo")
    expect(cached.rows.length).toEqual(1)
    expect(cached.rows[0].name).toEqual("one")
  })

  it("should parse global and query level header mappings", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com", {
      reqheaders: {
        test: "headerVal",
        emailhdr: user.email,
        queryhdr: user.firstName!,
        secondhdr: "1234",
      },
    })
      .get("/?email=" + user.email.replace("@", "%40"))
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        queryString: "email={{[user].[email]}}",
        headers: {
          queryHdr: "{{[user].[firstName]}}",
          secondHdr: "1234",
        },
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to query params", async () => {
    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .get(
        "/?test=" +
          user.email.replace("@", "%40") +
          "&testName=" +
          user.firstName +
          "&testParam=1234"
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "myEmail", default: "{{[user].[email]}}" },
        { name: "myName", default: "{{[user].[firstName]}}" },
        { name: "testParam", default: "1234" },
      ],
      queryVerb: "read",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        queryString:
          "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - plain text", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post(
        "/?testParam=1234",
        "This is plain text and this is my email: " +
          user.email +
          ". This is a test param: 1234"
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [{ name: "testParam", default: "1234" }],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "text",
        queryString: "&testParam={{testParam}}",
        requestBody:
          "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - json", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", {
        email: user.email,
        queryCode: 1234,
        userRef: user.firstName,
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "json",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - xml", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post(
        "/?testParam=1234",
        `<note> <email>${user.email}</email> <code>1234</code> <ref>${user.firstName}</ref> <somestring>testing</somestring> </note>`
      )
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userId", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "xml",
        queryString: "&testParam={{testParam}}",
        requestBody:
          "<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
          "<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - form-data", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", body => {
        return (
          body.includes('name="email"\r\n\r\n' + user.email + "\r\n") &&
          body.includes('name="queryCode"\r\n\r\n1234\r\n') &&
          body.includes('name="userRef"\r\n\r\n' + user.firstName + "\r\n")
        )
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "form",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })

  it("should bind the current user to the request body - encoded", async () => {
    const datasource = await config.api.datasource.create({
      name: generator.guid(),
      type: "test",
      source: SourceName.REST,
      config: {
        method: "POST",
        defaultHeaders: {
          test: "headerVal",
          emailHdr: "{{[user].[email]}}",
        },
      },
    })

    const user = config.getUserDetails()
    const mock = nock("http://www.example.com")
      .post("/?testParam=1234", {
        email: user.email,
        queryCode: 1234,
        userRef: user.firstName,
      })
      .reply(200, {})

    await config.api.query.preview({
      datasourceId: datasource._id!,
      name: generator.guid(),
      parameters: [
        { name: "testParam", default: "1234" },
        { name: "userRef", default: "{{[user].[firstName]}}" },
      ],
      queryVerb: "create",
      transformer: "",
      schema: {},
      readable: true,
      fields: {
        path: "www.example.com",
        bodyType: "encoded",
        queryString: "&testParam={{testParam}}",
        requestBody:
          '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
      },
    })

    expect(mock.isDone()).toEqual(true)
  })
})

@@ -1,4 +1,4 @@
import { databaseTestProviders } from "../../../integrations/tests/utils"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"

import tk from "timekeeper"
import { outputProcessing } from "../../../utilities/rowProcessor"

@@ -30,14 +30,13 @@ const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)

jest.unmock("mssql")
jest.unmock("pg")

describe.each([
  ["internal", undefined],
  ["postgres", databaseTestProviders.postgres],
  ["mysql", databaseTestProviders.mysql],
  ["mssql", databaseTestProviders.mssql],
  ["mariadb", databaseTestProviders.mariadb],
  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/rows (%s)", (__, dsProvider) => {
  const isInternal = dsProvider === undefined
  const config = setup.getConfig()

@@ -49,23 +48,23 @@ describe.each([
    await config.init()
    if (dsProvider) {
      datasource = await config.createDatasource({
        datasource: await dsProvider.datasource(),
        datasource: await dsProvider,
      })
    }
  })

  afterAll(async () => {
    if (dsProvider) {
      await dsProvider.stop()
    }
    setup.afterAll()
  })

  function saveTableRequest(
    ...overrides: Partial<SaveTableRequest>[]
    // We omit the name field here because it's generated in the function with a
    // high likelihood to be unique. Tests should not have any reason to control
    // the table name they're writing to.
    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
  ): SaveTableRequest {
    const req: SaveTableRequest = {
      name: uuid.v4().substring(0, 16),
      name: uuid.v4().substring(0, 10),
      type: "table",
      sourceType: datasource
        ? TableSourceType.EXTERNAL

@@ -87,7 +86,10 @@ describe.each([
  }

  function defaultTable(
    ...overrides: Partial<SaveTableRequest>[]
    // We omit the name field here because it's generated in the function with a
    // high likelihood to be unique. Tests should not have any reason to control
    // the table name they're writing to.
    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
  ): SaveTableRequest {
    return saveTableRequest(
      {

@@ -194,7 +196,6 @@ describe.each([

    const newTable = await config.api.table.save(
      saveTableRequest({
        name: "TestTableAuto",
        schema: {
          "Row ID": {
            name: "Row ID",

@@ -383,11 +384,9 @@ describe.each([

    isInternal &&
      it("doesn't allow creating in user table", async () => {
        const userTableId = InternalTable.USER_METADATA
        const response = await config.api.row.save(
          userTableId,
          InternalTable.USER_METADATA,
          {
            tableId: userTableId,
            firstName: "Joe",
            lastName: "Joe",
            email: "joe@joe.com",

@@ -462,7 +461,6 @@ describe.each([
      table = await config.api.table.save(defaultTable())
      otherTable = await config.api.table.save(
        defaultTable({
          name: "a",
          schema: {
            relationship: {
              name: "relationship",

@@ -898,8 +896,8 @@ describe.each([
    let o2mTable: Table
    let m2mTable: Table
    beforeAll(async () => {
      o2mTable = await config.api.table.save(defaultTable({ name: "o2m" }))
      m2mTable = await config.api.table.save(defaultTable({ name: "m2m" }))
      o2mTable = await config.api.table.save(defaultTable())
      m2mTable = await config.api.table.save(defaultTable())
    })

    describe.each([

@@ -1256,7 +1254,6 @@ describe.each([
      otherTable = await config.api.table.save(defaultTable())
      table = await config.api.table.save(
        saveTableRequest({
          name: "b",
          schema: {
            links: {
              name: "links",

@@ -1298,7 +1295,7 @@ describe.each([

  describe("Formula JS protection", () => {
    it("should time out JS execution if a single cell takes too long", async () => {
      await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => {
      await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
        const js = Buffer.from(
          `
          let i = 0;

@@ -1338,8 +1335,8 @@ describe.each([
    it("should time out JS execution if multiple cells take too long", async () => {
      await config.withEnv(
        {
          JS_PER_INVOCATION_TIMEOUT_MS: 20,
          JS_PER_REQUEST_TIMEOUT_MS: 40,
          JS_PER_INVOCATION_TIMEOUT_MS: 40,
          JS_PER_REQUEST_TIMEOUT_MS: 80,
        },
        async () => {
          const js = Buffer.from(

@@ -1354,7 +1351,6 @@ describe.each([

      const table = await config.api.table.save(
        saveTableRequest({
          name: "table",
          schema: {
            text: {
              name: "text",

@@ -3,8 +3,6 @@ import { checkPermissionsEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { UserMetadata } from "@budibase/types"

jest.setTimeout(30000)

jest.mock("../../../utilities/workerRequests", () => ({
  getGlobalUsers: jest.fn(() => {
    return {}

@@ -19,21 +19,19 @@ import {
  ViewV2,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import * as uuid from "uuid"
import { databaseTestProviders } from "../../../integrations/tests/utils"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core"

jest.unmock("mssql")
jest.unmock("pg")

describe.each([
  ["internal", undefined],
  ["postgres", databaseTestProviders.postgres],
  ["mysql", databaseTestProviders.mysql],
  ["mssql", databaseTestProviders.mssql],
  ["mariadb", databaseTestProviders.mariadb],
  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => {
  const config = setup.getConfig()
  const isInternal = !dsProvider

@@ -42,10 +40,10 @@ describe.each([
  let datasource: Datasource

  function saveTableRequest(
    ...overrides: Partial<SaveTableRequest>[]
    ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
  ): SaveTableRequest {
    const req: SaveTableRequest = {
      name: uuid.v4().substring(0, 16),
      name: generator.guid().replaceAll("-", "").substring(0, 16),
      type: "table",
      sourceType: datasource
        ? TableSourceType.EXTERNAL

@@ -90,16 +88,13 @@ describe.each([

    if (dsProvider) {
      datasource = await config.createDatasource({
        datasource: await dsProvider.datasource(),
        datasource: await dsProvider,
      })
    }
    table = await config.api.table.save(priceTable())
  })

  afterAll(async () => {
    if (dsProvider) {
      await dsProvider.stop()
    }
    setup.afterAll()
  })

@@ -231,7 +226,7 @@ describe.each([

      view = await config.api.viewV2.create({
        tableId: table._id!,
        name: "View A",
        name: generator.guid(),
      })
    })

@@ -307,12 +302,13 @@ describe.each([

    it("can update an existing view name", async () => {
      const tableId = table._id!
      await config.api.viewV2.update({ ...view, name: "View B" })
      const newName = generator.guid()
      await config.api.viewV2.update({ ...view, name: newName })

      expect(await config.api.table.get(tableId)).toEqual(
        expect.objectContaining({
          views: {
            "View B": { ...view, name: "View B", schema: expect.anything() },
            [newName]: { ...view, name: newName, schema: expect.anything() },
          },
        })
      )

@@ -507,7 +503,6 @@ describe.each([
    it("views have extra data trimmed", async () => {
      const table = await config.api.table.save(
        saveTableRequest({
          name: "orders",
          schema: {
            Country: {
              type: FieldType.STRING,

@@ -523,7 +518,7 @@ describe.each([

      const view = await config.api.viewV2.create({
        tableId: table._id!,
        name: uuid.v4(),
        name: generator.guid(),
        schema: {
          Country: {
            visible: true,

@@ -853,7 +848,6 @@ describe.each([
    beforeAll(async () => {
      table = await config.api.table.save(
        saveTableRequest({
          name: `users_${uuid.v4()}`,
          type: "table",
          schema: {
            name: {

@@ -1,39 +0,0 @@
const setup = require("./utilities")

describe("test the execute query action", () => {
  let query
  let config = setup.getConfig()

  beforeAll(async () => {
    await config.init()

    await config.createDatasource()
    query = await config.createQuery()
  })

  afterAll(setup.afterAll)

  it("should be able to execute a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: query._id },
    })
    expect(res.response).toEqual([{ a: "string", b: 1 }])
    expect(res.success).toEqual(true)
  })

  it("should handle a null query value", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: null,
    })
    expect(res.response.message).toEqual("Invalid inputs")
    expect(res.success).toEqual(false)
  })

  it("should handle an error executing a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: "wrong_id" },
    })
    expect(res.response).toEqual("Error: missing")
    expect(res.success).toEqual(false)
  })
})

@@ -0,0 +1,94 @@
import { Datasource, Query, SourceName } from "@budibase/types"
import * as setup from "./utilities"
import { DatabaseName, getDatasource } from "../../integrations/tests/utils"
import knex, { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"

function getKnexClientName(source: SourceName) {
  switch (source) {
    case SourceName.MYSQL:
      return "mysql2"
    case SourceName.SQL_SERVER:
      return "mssql"
    case SourceName.POSTGRES:
      return "pg"
  }
  throw new Error(`Unsupported source: ${source}`)
}

describe.each(
  [
    DatabaseName.POSTGRES,
    DatabaseName.MYSQL,
    DatabaseName.SQL_SERVER,
    DatabaseName.MARIADB,
  ].map(name => [name, getDatasource(name)])
)("execute query action (%s)", (_, dsProvider) => {
  let tableName: string
  let client: Knex
  let datasource: Datasource
  let query: Query
  let config = setup.getConfig()

  beforeAll(async () => {
    await config.init()

    const ds = await dsProvider
    datasource = await config.api.datasource.create(ds)
    client = knex({
      client: getKnexClientName(ds.source),
      connection: ds.config,
    })
  })

  beforeEach(async () => {
    tableName = generator.guid()
    await client.schema.createTable(tableName, table => {
      table.string("a")
      table.integer("b")
    })
    await client(tableName).insert({ a: "string", b: 1 })
    query = await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields: {
        sql: client(tableName).select("*").toSQL().toNative().sql,
      },
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  })

  afterEach(async () => {
    await client.schema.dropTable(tableName)
  })

  afterAll(setup.afterAll)

  it("should be able to execute a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: query._id },
    })
    expect(res.response).toEqual([{ a: "string", b: 1 }])
    expect(res.success).toEqual(true)
  })

  it("should handle a null query value", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: null,
    })
    expect(res.response.message).toEqual("Invalid inputs")
    expect(res.success).toEqual(false)
  })

  it("should handle an error executing a query", async () => {
    let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
      query: { queryId: "wrong_id" },
    })
    expect(res.response).toEqual("Error: missing")
    expect(res.success).toEqual(false)
  })
})

@@ -3,7 +3,6 @@ import {
  generateMakeRequest,
  MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import { v4 as uuidv4 } from "uuid"
import * as setup from "../api/routes/tests/utilities"
import {
  Datasource,

@@ -12,12 +11,23 @@ import {
  TableRequest,
  TableSourceType,
} from "@budibase/types"
import { databaseTestProviders } from "../integrations/tests/utils"
import mysql from "mysql2/promise"
import {
  DatabaseName,
  getDatasource,
  rawQuery,
} from "../integrations/tests/utils"
import { builderSocket } from "../websockets"
import { generator } from "@budibase/backend-core/tests"
// @ts-ignore
fetch.mockSearch()

function uniqueTableName(length?: number): string {
  return generator
    .guid()
    .replaceAll("-", "_")
    .substring(0, length || 10)
}

const config = setup.getConfig()!

jest.mock("../websockets", () => ({

@@ -37,7 +47,8 @@ jest.mock("../websockets", () => ({

describe("mysql integrations", () => {
  let makeRequest: MakeRequestResponse,
    mysqlDatasource: Datasource,
    rawDatasource: Datasource,
    datasource: Datasource,
    primaryMySqlTable: Table

  beforeAll(async () => {

@@ -46,18 +57,13 @@ describe("mysql integrations", () => {

    makeRequest = generateMakeRequest(apiKey, true)

    mysqlDatasource = await config.api.datasource.create(
      await databaseTestProviders.mysql.datasource()
    )
  })

  afterAll(async () => {
    await databaseTestProviders.mysql.stop()
    rawDatasource = await getDatasource(DatabaseName.MYSQL)
    datasource = await config.api.datasource.create(rawDatasource)
  })

  beforeEach(async () => {
    primaryMySqlTable = await config.createTable({
      name: uuidv4(),
      name: uniqueTableName(),
      type: "table",
      primary: ["id"],
      schema: {

@@ -79,7 +85,7 @@ describe("mysql integrations", () => {
          type: FieldType.NUMBER,
        },
      },
      sourceId: mysqlDatasource._id,
      sourceId: datasource._id,
      sourceType: TableSourceType.EXTERNAL,
    })
  })

@@ -87,18 +93,15 @@ describe("mysql integrations", () => {
  afterAll(config.end)

  it("validate table schema", async () => {
    const res = await makeRequest(
      "get",
      `/api/datasources/${mysqlDatasource._id}`
    )
    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)

    expect(res.status).toBe(200)
    expect(res.body).toEqual({
      config: {
        database: "mysql",
        host: mysqlDatasource.config!.host,
        database: expect.any(String),
        host: datasource.config!.host,
        password: "--secret-value--",
        port: mysqlDatasource.config!.port,
        port: datasource.config!.port,
        user: "root",
      },
      plus: true,

@@ -117,7 +120,7 @@ describe("mysql integrations", () => {
  it("should be able to verify the connection", async () => {
    await config.api.datasource.verify(
      {
        datasource: await databaseTestProviders.mysql.datasource(),
        datasource: rawDatasource,
      },
      {
        body: {

@@ -128,13 +131,12 @@ describe("mysql integrations", () => {
  })

  it("should state an invalid datasource cannot connect", async () => {
    const dbConfig = await databaseTestProviders.mysql.datasource()
    await config.api.datasource.verify(
      {
        datasource: {
          ...dbConfig,
          ...rawDatasource,
          config: {
            ...dbConfig.config,
            ...rawDatasource.config,
            password: "wrongpassword",
          },
        },

@@ -154,7 +156,7 @@ describe("mysql integrations", () => {
  it("should fetch information about mysql datasource", async () => {
    const primaryName = primaryMySqlTable.name
    const response = await makeRequest("post", "/api/datasources/info", {
      datasource: mysqlDatasource,
      datasource: datasource,
    })
    expect(response.status).toBe(200)
    expect(response.body.tableNames).toBeDefined()

@@ -163,40 +165,38 @@ describe("mysql integrations", () => {
  })

  describe("Integration compatibility with mysql search_path", () => {
    let client: mysql.Connection, pathDatasource: Datasource
    const database = "test1"
    const database2 = "test-2"
    let datasource: Datasource, rawDatasource: Datasource
    const database = generator.guid()
    const database2 = generator.guid()

    beforeAll(async () => {
      const dsConfig = await databaseTestProviders.mysql.datasource()
      const dbConfig = dsConfig.config!
      rawDatasource = await getDatasource(DatabaseName.MYSQL)
|
||||
|
||||
client = await mysql.createConnection(dbConfig)
|
||||
await client.query(`CREATE DATABASE \`${database}\`;`)
|
||||
await client.query(`CREATE DATABASE \`${database2}\`;`)
|
||||
await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
|
||||
await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
|
||||
|
||||
const pathConfig: any = {
|
||||
...dsConfig,
|
||||
...rawDatasource,
|
||||
config: {
|
||||
...dbConfig,
|
||||
...rawDatasource.config!,
|
||||
database,
|
||||
},
|
||||
}
|
||||
pathDatasource = await config.api.datasource.create(pathConfig)
|
||||
datasource = await config.api.datasource.create(pathConfig)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await client.query(`DROP DATABASE \`${database}\`;`)
|
||||
await client.query(`DROP DATABASE \`${database2}\`;`)
|
||||
await client.end()
|
||||
await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
|
||||
await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
|
||||
})
|
||||
|
||||
it("discovers tables from any schema in search path", async () => {
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: pathDatasource,
|
||||
datasource: datasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -207,15 +207,17 @@ describe("mysql integrations", () => {
|
|||
|
||||
it("does not mix columns from different tables", async () => {
|
||||
const repeated_table_name = "table_same_name"
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
|
||||
)
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
|
||||
)
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${pathDatasource._id}/schema`,
|
||||
`/api/datasources/${datasource._id}/schema`,
|
||||
{
|
||||
tablesFilter: [repeated_table_name],
|
||||
}
|
||||
|
@ -231,30 +233,14 @@ describe("mysql integrations", () => {
|
|||
})
|
||||
|
||||
describe("POST /api/tables/", () => {
|
||||
let client: mysql.Connection
|
||||
const emitDatasourceUpdateMock = jest.fn()
|
||||
|
||||
beforeEach(async () => {
|
||||
client = await mysql.createConnection(
|
||||
(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
).config!
|
||||
)
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
|
||||
const addColumnToTable: TableRequest = {
|
||||
type: "table",
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
name: "table",
|
||||
sourceId: mysqlDatasource._id!,
|
||||
name: uniqueTableName(),
|
||||
sourceId: datasource._id!,
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
|
@ -301,14 +287,16 @@ describe("mysql integrations", () => {
|
|||
},
|
||||
},
|
||||
created: true,
|
||||
_id: `${mysqlDatasource._id}__table`,
|
||||
_id: `${datasource._id}__${addColumnToTable.name}`,
|
||||
}
|
||||
delete expectedTable._add
|
||||
|
||||
expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
|
||||
const emittedDatasource: Datasource =
|
||||
emitDatasourceUpdateMock.mock.calls[0][1]
|
||||
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
|
||||
expect(emittedDatasource.entities![expectedTable.name]).toEqual(
|
||||
expectedTable
|
||||
)
|
||||
})
|
||||
|
||||
it("will rename a column", async () => {
|
||||
|
@ -346,17 +334,18 @@ describe("mysql integrations", () => {
|
|||
"/api/tables/",
|
||||
renameColumnOnTable
|
||||
)
|
||||
mysqlDatasource = (
|
||||
await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${mysqlDatasource._id}/schema`
|
||||
)
|
||||
|
||||
const ds = (
|
||||
await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
|
||||
).body.datasource
|
||||
|
||||
expect(response.status).toEqual(200)
|
||||
expect(
|
||||
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
|
||||
).toEqual(["id", "name", "description", "age"])
|
||||
expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
|
||||
"id",
|
||||
"name",
|
||||
"description",
|
||||
"age",
|
||||
])
|
||||
})
|
||||
})
|
||||
})

@@ -16,19 +16,23 @@ import {
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils"
import { Client } from "pg"
import {
  DatabaseName,
  getDatasource,
  rawQuery,
} from "../integrations/tests/utils"

// @ts-ignore
fetch.mockSearch()

const config = setup.getConfig()!

jest.unmock("pg")
jest.mock("../websockets")

describe("postgres integrations", () => {
  let makeRequest: MakeRequestResponse,
    postgresDatasource: Datasource,
    rawDatasource: Datasource,
    datasource: Datasource,
    primaryPostgresTable: Table,
    oneToManyRelationshipInfo: ForeignTableInfo,
    manyToOneRelationshipInfo: ForeignTableInfo,
@@ -40,19 +44,17 @@ describe("postgres integrations", () => {

    makeRequest = generateMakeRequest(apiKey, true)

    postgresDatasource = await config.api.datasource.create(
      await databaseTestProviders.postgres.datasource()
    )
  })

  afterAll(async () => {
    await databaseTestProviders.postgres.stop()
    rawDatasource = await getDatasource(DatabaseName.POSTGRES)
    datasource = await config.api.datasource.create(rawDatasource)
  })

  beforeEach(async () => {
    async function createAuxTable(prefix: string) {
      return await config.createTable({
        name: `${prefix}_${generator.word({ length: 6 })}`,
        name: `${prefix}_${generator
          .guid()
          .replaceAll("-", "")
          .substring(0, 6)}`,
        type: "table",
        primary: ["id"],
        primaryDisplay: "title",
@@ -67,7 +69,7 @@ describe("postgres integrations", () => {
            type: FieldType.STRING,
          },
        },
        sourceId: postgresDatasource._id,
        sourceId: datasource._id,
        sourceType: TableSourceType.EXTERNAL,
      })
    }
@@ -89,7 +91,7 @@ describe("postgres integrations", () => {
    }

    primaryPostgresTable = await config.createTable({
      name: `p_${generator.word({ length: 6 })}`,
      name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`,
      type: "table",
      primary: ["id"],
      schema: {
@@ -144,7 +146,7 @@ describe("postgres integrations", () => {
          main: true,
        },
      },
      sourceId: postgresDatasource._id,
      sourceId: datasource._id,
      sourceType: TableSourceType.EXTERNAL,
    })
  })
@@ -251,7 +253,7 @@ describe("postgres integrations", () => {

  async function createDefaultPgTable() {
    return await config.createTable({
      name: generator.word({ length: 10 }),
      name: generator.guid().replaceAll("-", "").substring(0, 10),
      type: "table",
      primary: ["id"],
      schema: {
@@ -261,7 +263,7 @@ describe("postgres integrations", () => {
          autocolumn: true,
        },
      },
      sourceId: postgresDatasource._id,
      sourceId: datasource._id,
      sourceType: TableSourceType.EXTERNAL,
    })
  }
@@ -299,19 +301,16 @@ describe("postgres integrations", () => {
  }

  it("validate table schema", async () => {
    const res = await makeRequest(
      "get",
      `/api/datasources/${postgresDatasource._id}`
    )
    const res = await makeRequest("get", `/api/datasources/${datasource._id}`)

    expect(res.status).toBe(200)
    expect(res.body).toEqual({
      config: {
        ca: false,
        database: "postgres",
        host: postgresDatasource.config!.host,
        database: expect.any(String),
        host: datasource.config!.host,
        password: "--secret-value--",
        port: postgresDatasource.config!.port,
        port: datasource.config!.port,
        rejectUnauthorized: false,
        schema: "public",
        ssl: false,
@@ -1043,7 +1042,7 @@ describe("postgres integrations", () => {
  it("should be able to verify the connection", async () => {
    await config.api.datasource.verify(
      {
        datasource: await databaseTestProviders.postgres.datasource(),
        datasource: await getDatasource(DatabaseName.POSTGRES),
      },
      {
        body: {
@@ -1054,7 +1053,7 @@ describe("postgres integrations", () => {
  })

  it("should state an invalid datasource cannot connect", async () => {
    const dbConfig = await databaseTestProviders.postgres.datasource()
    const dbConfig = await getDatasource(DatabaseName.POSTGRES)
    await config.api.datasource.verify(
      {
        datasource: {
@@ -1079,7 +1078,7 @@ describe("postgres integrations", () => {
  it("should fetch information about postgres datasource", async () => {
    const primaryName = primaryPostgresTable.name
    const response = await makeRequest("post", "/api/datasources/info", {
      datasource: postgresDatasource,
      datasource: datasource,
    })
    expect(response.status).toBe(200)
    expect(response.body.tableNames).toBeDefined()
@@ -1088,86 +1087,88 @@ describe("postgres integrations", () => {
  })

  describe("POST /api/datasources/:datasourceId/schema", () => {
    let client: Client
    let tableName: string

    beforeEach(async () => {
      client = new Client(
        (await databaseTestProviders.postgres.datasource()).config!
      )
      await client.connect()
      tableName = generator.guid().replaceAll("-", "").substring(0, 10)
    })

    afterEach(async () => {
      await client.query(`DROP TABLE IF EXISTS "table"`)
      await client.end()
      await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
    })

    it("recognises when a table has no primary key", async () => {
      await client.query(`CREATE TABLE "table" (id SERIAL)`)
      await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)

      const response = await makeRequest(
        "post",
        `/api/datasources/${postgresDatasource._id}/schema`
        `/api/datasources/${datasource._id}/schema`
      )

      expect(response.body.errors).toEqual({
        table: "Table must have a primary key.",
        [tableName]: "Table must have a primary key.",
      })
    })

    it("recognises when a table is using a reserved column name", async () => {
      await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
      )

      const response = await makeRequest(
        "post",
        `/api/datasources/${postgresDatasource._id}/schema`
        `/api/datasources/${datasource._id}/schema`
      )

      expect(response.body.errors).toEqual({
        table: "Table contains invalid columns.",
        [tableName]: "Table contains invalid columns.",
      })
    })
  })

  describe("Integration compatibility with postgres search_path", () => {
    let client: Client, pathDatasource: Datasource
    const schema1 = "test1",
      schema2 = "test-2"
    let rawDatasource: Datasource,
      datasource: Datasource,
      schema1: string,
      schema2: string

    beforeAll(async () => {
      const dsConfig = await databaseTestProviders.postgres.datasource()
      const dbConfig = dsConfig.config!
    beforeEach(async () => {
      schema1 = generator.guid().replaceAll("-", "")
      schema2 = generator.guid().replaceAll("-", "")

      client = new Client(dbConfig)
      await client.connect()
      await client.query(`CREATE SCHEMA "${schema1}";`)
      await client.query(`CREATE SCHEMA "${schema2}";`)
      rawDatasource = await getDatasource(DatabaseName.POSTGRES)
      const dbConfig = rawDatasource.config!

      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)

      const pathConfig: any = {
        ...dsConfig,
        ...rawDatasource,
        config: {
          ...dbConfig,
          schema: `${schema1}, ${schema2}`,
        },
      }
      pathDatasource = await config.api.datasource.create(pathConfig)
      datasource = await config.api.datasource.create(pathConfig)
    })

    afterAll(async () => {
      await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
      await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
      await client.end()
    afterEach(async () => {
      await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
      await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
    })

    it("discovers tables from any schema in search path", async () => {
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
      )
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
      )
      const response = await makeRequest("post", "/api/datasources/info", {
        datasource: pathDatasource,
        datasource: datasource,
      })
      expect(response.status).toBe(200)
      expect(response.body.tableNames).toBeDefined()
@@ -1178,15 +1179,17 @@ describe("postgres integrations", () => {

    it("does not mix columns from different tables", async () => {
      const repeated_table_name = "table_same_name"
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
      )
      await client.query(
      await rawQuery(
        rawDatasource,
        `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
      )
      const response = await makeRequest(
        "post",
        `/api/datasources/${pathDatasource._id}/schema`,
        `/api/datasources/${datasource._id}/schema`,
        {
          tablesFilter: [repeated_table_name],
        }

@@ -1,25 +1,88 @@
jest.unmock("pg")

import { Datasource } from "@budibase/types"
import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import { StartedTestContainer } from "testcontainers"
import { GenericContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"

jest.setTimeout(30000)
export type DatasourceProvider = () => Promise<Datasource>

export interface DatabaseProvider {
  start(): Promise<StartedTestContainer>
  stop(): Promise<void>
  datasource(): Promise<Datasource>
export enum DatabaseName {
  POSTGRES = "postgres",
  MONGODB = "mongodb",
  MYSQL = "mysql",
  SQL_SERVER = "mssql",
  MARIADB = "mariadb",
}

export const databaseTestProviders = {
  postgres,
  mongodb,
  mysql,
  mssql,
  mariadb,
const providers: Record<DatabaseName, DatasourceProvider> = {
  [DatabaseName.POSTGRES]: postgres.getDatasource,
  [DatabaseName.MONGODB]: mongodb.getDatasource,
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
}

export function getDatasourceProviders(
  ...sourceNames: DatabaseName[]
): Promise<Datasource>[] {
  return sourceNames.map(sourceName => providers[sourceName]())
}

export function getDatasourceProvider(
  sourceName: DatabaseName
): DatasourceProvider {
  return providers[sourceName]
}

export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
  return providers[sourceName]()
}

export async function getDatasources(
  ...sourceNames: DatabaseName[]
): Promise<Datasource[]> {
  return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
}

export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
  switch (ds.source) {
    case SourceName.POSTGRES: {
      return postgres.rawQuery(ds, sql)
    }
    case SourceName.MYSQL: {
      return mysql.rawQuery(ds, sql)
    }
    case SourceName.SQL_SERVER: {
      return mssql.rawQuery(ds, sql)
    }
    default: {
      throw new Error(`Unsupported source: ${ds.source}`)
    }
  }
}

export async function startContainer(container: GenericContainer) {
  if (process.env.REUSE_CONTAINERS) {
    container = container.withReuse()
  }

  const startedContainer = await container.start()

  const info = testContainerUtils.getContainerById(startedContainer.getId())
  if (!info) {
    throw new Error("Container not found")
  }

  // Some Docker runtimes, when you expose a port, will bind it to both
  // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
  // addresses are not shared, and testcontainers will sometimes give you back
  // the ipv6 port. There's no way to know that this has happened, and if you
  // try to then connect to `localhost:port` you may attempt to bind to the v4
  // address which could be unbound or even an entirely different container. For
  // that reason, we don't use testcontainers' `getExposedPort` function,
  // preferring instead our own method that guarantees v4 ports.
  return testContainerUtils.getExposedV4Ports(info)
}
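A short usage sketch of the new surface, assuming a jest test in the server package; `rawQuery` dispatches on `ds.source` to the engine-specific helper, so callers never touch a driver directly:

import {
  DatabaseName,
  getDatasource,
  rawQuery,
} from "../integrations/tests/utils"

describe("utils sketch", () => {
  it("runs SQL against an isolated database", async () => {
    const ds = await getDatasource(DatabaseName.POSTGRES)

    // The same call shape works for MySQL, MariaDB and SQL Server
    // datasources; the switch in rawQuery picks the matching driver.
    // MongoDB is intentionally not covered, so it would hit the
    // "Unsupported source" branch.
    await rawQuery(ds, `CREATE TABLE t (id SERIAL PRIMARY KEY)`)
    const rows = await rawQuery(ds, `SELECT * FROM t`)
    expect(rows).toEqual([])
  })
})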

@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { rawQuery } from "./mysql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MariaDBWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@@ -21,38 +24,38 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mariadb:lts")
    .withExposedPorts(3306)
    .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MariaDBWaitStrategy())
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mariadb:lts")
        .withExposedPorts(3306)
        .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MariaDBWaitStrategy())
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host
  if (!port) {
    throw new Error("MariaDB port not found")
  }

  const config = {
    host: "127.0.0.1",
    port,
    user: "root",
    password: "password",
    database: "mysql",
  }

  const datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
    config,
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config.database = database
  return datasource
}
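Note that the MariaDB provider imports `rawQuery` from `./mysql` rather than defining its own: MariaDB speaks the MySQL wire protocol and the datasource is tagged `SourceName.MYSQL`, so the dispatcher in `index.ts` needs no MariaDB case. A sketch of that reuse:

import { getDatasource } from "./mariadb"
import { rawQuery } from "./mysql"

async function mariadbSmokeTest() {
  // getDatasource() boots (or reuses) the mariadb:lts container and creates
  // a fresh GUID-named database for this caller.
  const ds = await getDatasource()
  // Because ds.source is SourceName.MYSQL, the mysql2-based helper accepts it.
  return await rawQuery(ds, "SELECT 1 AS ok")
}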

@@ -1,43 +1,39 @@
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mongo:7.0-jammy")
    .withExposedPorts(27017)
    .withEnvironment({
      MONGO_INITDB_ROOT_USERNAME: "mongo",
      MONGO_INITDB_ROOT_PASSWORD: "password",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        `mongosh --eval "db.version()"`
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mongo:7.0-jammy")
        .withExposedPorts(27017)
        .withEnvironment({
          MONGO_INITDB_ROOT_USERNAME: "mongo",
          MONGO_INITDB_ROOT_PASSWORD: "password",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            `mongosh --eval "db.version()"`
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(27017)

  const port = (await ports).find(x => x.container === 27017)
  if (!port) {
    throw new Error("MongoDB port not found")
  }

  return {
    type: "datasource",
    source: SourceName.MONGODB,
    plus: false,
    config: {
      connectionString: `mongodb://mongo:password@${host}:${port}`,
      db: "mongo",
      connectionString: `mongodb://mongo:password@127.0.0.1:${port.host}`,
      db: generator.guid(),
    },
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
}

@@ -1,43 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import mssql from "mssql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer(
    "mcr.microsoft.com/mssql/server:2022-latest"
  )
    .withExposedPorts(1433)
    .withEnvironment({
      ACCEPT_EULA: "Y",
      MSSQL_SA_PASSWORD: "Password_123",
      // This is important, as Microsoft allow us to use the "Developer" edition
      // of SQL Server for development and testing purposes. We can't use other
      // versions without a valid license, and we cannot use the Developer
      // version in production.
      MSSQL_PID: "Developer",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
      )
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
        .withExposedPorts(1433)
        .withEnvironment({
          ACCEPT_EULA: "Y",
          MSSQL_SA_PASSWORD: "Password_123",
          // This is important, as Microsoft allow us to use the "Developer" edition
          // of SQL Server for development and testing purposes. We can't use other
          // versions without a valid license, and we cannot use the Developer
          // version in production.
          MSSQL_PID: "Developer",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
          )
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(1433)

  return {
  const port = (await ports).find(x => x.container === 1433)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.SQL_SERVER,
    plus: true,
    config: {
      server: host,
      server: "127.0.0.1",
      port,
      user: "sa",
      password: "Password_123",
@@ -46,11 +44,28 @@ export async function datasource(): Promise<Datasource> {
    },
  },
}

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.SQL_SERVER) {
    throw new Error("Datasource source is not SQL Server")
  }

  const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
  const client = await pool.connect()
  try {
    const { recordset } = await client.query(sql)
    return recordset
  } finally {
    await pool.close()
  }
}

@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import mysql from "mysql2/promise"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MySQLWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
@@ -24,38 +27,50 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mysql:8.3")
    .withExposedPorts(3306)
    .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config!.database = database
  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.MYSQL) {
    throw new Error("Datasource source is not MySQL")
  }

  const connection = await mysql.createConnection(ds.config)
  try {
    const [rows] = await connection.query(sql)
    return rows
  } finally {
    connection.end()
  }
}
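The module-level `ports` promise above is what makes `REUSE_CONTAINERS` (set in this commit's CI workflow) pay off: within a run the container starts at most once per jest worker, and with reuse enabled testcontainers also keeps it alive between runs, leaving only the cheap per-call `CREATE DATABASE`. A sketch of the isolation each call still gets, assuming this module:

import { getDatasource, rawQuery } from "./mysql"

async function isolationDemo() {
  // One container, two databases: repeated calls share the memoized `ports`
  // promise but each provisions its own GUID-named database.
  const a = await getDatasource()
  const b = await getDatasource()
  console.log(a.config!.database === b.config!.database) // false

  await rawQuery(a, "CREATE TABLE t (id INT PRIMARY KEY)")
  // `b` sees none of `a`'s tables, because it points at a different database.
  console.log(await rawQuery(b, "SHOW TABLES")) // []
}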

@@ -1,33 +1,33 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import pg from "pg"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("postgres:16.1-bullseye")
    .withExposedPorts(5432)
    .withEnvironment({ POSTGRES_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "pg_isready -h localhost -p 5432"
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("postgres:16.1-bullseye")
        .withExposedPorts(5432)
        .withEnvironment({ POSTGRES_PASSWORD: "password" })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "pg_isready -h localhost -p 5432"
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(5432)

  return {
  const port = (await ports).find(x => x.container === 5432)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.POSTGRES,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      database: "postgres",
      user: "postgres",
@@ -38,11 +38,28 @@ export async function datasource(): Promise<Datasource> {
      ca: false,
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.POSTGRES) {
    throw new Error("Datasource source is not Postgres")
  }

  const client = new pg.Client(ds.config)
  await client.connect()
  try {
    const { rows } = await client.query(sql)
    return rows
  } finally {
    await client.end()
  }
}

@@ -25,8 +25,6 @@ const clearMigrations = async () => {
  }
}

jest.setTimeout(10000)

describe("migrations", () => {
  const config = new TestConfig()

@@ -17,8 +17,6 @@ import {
  generator,
} from "@budibase/backend-core/tests"

jest.setTimeout(30000)

describe("external search", () => {
  const config = new TestConfiguration()

@@ -1,4 +1,12 @@
import newid from "../../db/newid"
import TestConfig from "../../tests/utilities/TestConfiguration"
import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"
import {
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  TableSourceType,
} from "@budibase/types"
import { FIND_LIMIT } from "../app/rows/attachments"

const attachment = {
  size: 73479,
@@ -8,69 +16,48 @@ const attachment = {
  key: "app_bbb/attachments/a.png",
}

const row = {
  _id: "ro_ta_aaa",
  photo: [attachment],
  otherCol: "string",
}

const table = {
  _id: "ta_aaa",
  name: "photos",
  schema: {
    photo: {
      type: "attachment",
      name: "photo",
    },
    otherCol: {
      type: "string",
      name: "otherCol",
    },
  },
}

jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    db: {
      ...core.db,
      directCouchFind: jest.fn(),
    },
  }
})

import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"

describe("should be able to re-write attachment URLs", () => {
  const config = new TestConfig()

  beforeAll(async () => {
    await config.init()
  })

  it("should update URLs on a number of rows over the limit", async () => {
    const db = dbCore.getDB("app_aaa")
    await db.put(table)
    const limit = 30
    let rows = []
    for (let i = 0; i < limit; i++) {
      const rowToWrite = {
        ...row,
        _id: `${row._id}_${newid()}`,
      }
      const { rev } = await db.put(rowToWrite)
      rows.push({
        ...rowToWrite,
        _rev: rev,
    const table = await config.api.table.save({
      name: "photos",
      type: "table",
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      schema: {
        photo: {
          type: FieldType.ATTACHMENT,
          name: "photo",
        },
        otherCol: {
          type: FieldType.STRING,
          name: "otherCol",
        },
      },
    })

    for (let i = 0; i < FIND_LIMIT * 4; i++) {
      await config.api.row.save(table._id!, {
        photo: [attachment],
        otherCol: "string",
      })
    }

    dbCore.directCouchFind
      // @ts-ignore
      .mockReturnValueOnce({ rows: rows.slice(0, 25), bookmark: "aaa" })
      .mockReturnValueOnce({ rows: rows.slice(25, limit), bookmark: "bbb" })
    const db = dbCore.getDB(config.getAppId())
    await sdk.backups.updateAttachmentColumns(db.name, db)
    const finalRows = await sdk.rows.getAllInternalRows(db.name)
    for (let rowToCheck of finalRows) {
      expect(rowToCheck.otherCol).toBe(row.otherCol)
      expect(rowToCheck.photo[0].url).toBe("")
      expect(rowToCheck.photo[0].key).toBe(`${db.name}/attachments/a.png`)

    const rows = (await sdk.rows.getAllInternalRows(db.name)).filter(
      row => row.tableId === table._id
    )
    for (const row of rows) {
      expect(row.otherCol).toBe("string")
      expect(row.photo[0].url).toBe("")
      expect(row.photo[0].key).toBe(`${db.name}/attachments/a.png`)
    }
  })
})

@@ -35,11 +35,20 @@ describe("syncGlobalUsers", () => {
      builder: { global: true },
    })
    await config.doInContext(config.appId, async () => {
      expect(await rawUserMetadata()).toHaveLength(1)
      let metadata = await rawUserMetadata()
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user1._id!),
        })
      )
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user2._id!),
        })
      )
      await syncGlobalUsers()

      const metadata = await rawUserMetadata()
      expect(metadata).toHaveLength(3)
      metadata = await rawUserMetadata()
      expect(metadata).toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user1._id!),
@@ -62,7 +71,6 @@ describe("syncGlobalUsers", () => {
      await syncGlobalUsers()

      const metadata = await rawUserMetadata()
      expect(metadata).toHaveLength(1)
      expect(metadata).not.toContainEqual(
        expect.objectContaining({
          _id: db.generateUserMetadataID(user._id!),

@@ -2,17 +2,11 @@ import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"

if (!process.env.DEBUG) {
  global.console.log = jest.fn() // console.log are ignored in tests
  global.console.warn = jest.fn() // console.warn are ignored in tests
}

if (!process.env.CI) {
  // set a longer timeout in dev for debugging
  // 100 seconds
  // set a longer timeout in dev for debugging 100 seconds
  jest.setTimeout(100 * 1000)
} else {
  jest.setTimeout(10 * 1000)
  jest.setTimeout(30 * 1000)
}

testContainerUtils.setupEnv(env, coreEnv)

@@ -1,6 +1,7 @@
import TestConfiguration from "../TestConfiguration"
import { SuperTest, Test, Response } from "supertest"
import request, { SuperTest, Test, Response } from "supertest"
import { ReadStream } from "fs"
import { getServer } from "../../../app"

type Headers = Record<string, string | string[] | undefined>
type Method = "get" | "post" | "put" | "patch" | "delete"
@@ -76,7 +77,8 @@ export abstract class TestAPI {
  protected _requestRaw = async (
    method: "get" | "post" | "put" | "patch" | "delete",
    url: string,
    opts?: RequestOpts
    opts?: RequestOpts,
    attempt = 0
  ): Promise<Response> => {
    const {
      headers = {},
@@ -107,26 +109,29 @@ export abstract class TestAPI {
    const headersFn = publicUser
      ? this.config.publicHeaders.bind(this.config)
      : this.config.defaultHeaders.bind(this.config)
    let request = this.request[method](url).set(

    const app = getServer()
    let req = request(app)[method](url)
    req = req.set(
      headersFn({
        "x-budibase-include-stacktrace": "true",
      })
    )
    if (headers) {
      request = request.set(headers)
      req = req.set(headers)
    }
    if (body) {
      request = request.send(body)
      req = req.send(body)
    }
    for (const [key, value] of Object.entries(fields)) {
      request = request.field(key, value)
      req = req.field(key, value)
    }

    for (const [key, value] of Object.entries(files)) {
      if (isAttachedFile(value)) {
        request = request.attach(key, value.file, value.name)
        req = req.attach(key, value.file, value.name)
      } else {
        request = request.attach(key, value as any)
        req = req.attach(key, value as any)
      }
    }
    if (expectations?.headers) {
@@ -136,11 +141,25 @@ export abstract class TestAPI {
          `Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
        )
      }
      request = request.expect(key, value as any)
      req = req.expect(key, value as any)
    }
  }

    return await request
    try {
      return await req
    } catch (e: any) {
      // We've found that occasionally the connection between supertest and
      // the server that supertest starts gets reset. Not sure why, but
      // retrying it appears to work. I don't particularly like this, but it's
      // better than flakiness.
      if (e.code === "ECONNRESET") {
        if (attempt > 2) {
          throw e
        }
        return await this._requestRaw(method, url, opts, attempt + 1)
      }
      throw e
    }
  }

  protected _checkResponse = (
@@ -170,7 +189,18 @@ export abstract class TestAPI {
      }
    }

    throw new Error(message)
    if (response.error) {
      // Sometimes the error can be between supertest and the app, and when
      // that happens response.error is sometimes populated with `text` that
      // gives more detail about the error. The `message` is almost always
      // useless from what I've seen.
      if (response.error.text) {
        response.error.message = response.error.text
      }
      throw new Error(message, { cause: response.error })
    } else {
      throw new Error(message)
    }
  }

    if (expectations?.headersNotPresent) {
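The retry added above is deliberately narrow: only `ECONNRESET`, at most three attempts, threaded through the `attempt` parameter. Extracted as a standalone helper it would look roughly like this (a sketch, not part of the diff):

async function retryOnConnReset<T>(
  fn: () => Promise<T>,
  maxRetries = 2
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn()
    } catch (e: any) {
      // Retry only the one failure mode we understand; anything else should
      // surface immediately instead of being masked by retries.
      if (e.code !== "ECONNRESET" || attempt >= maxRetries) {
        throw e
      }
    }
  }
}

// Usage: await retryOnConnReset(() => request(app).get("/api/health"))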

@@ -4,6 +4,7 @@ import {
  CreateDatasourceResponse,
  UpdateDatasourceResponse,
  UpdateDatasourceRequest,
  QueryJson,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -45,4 +46,24 @@ export class DatasourceAPI extends TestAPI {
      expectations,
    })
  }

  delete = async (datasource: Datasource, expectations?: Expectations) => {
    return await this._delete(
      `/api/datasources/${datasource._id!}/${datasource._rev!}`,
      { expectations }
    )
  }

  get = async (id: string, expectations?: Expectations) => {
    return await this._get<Datasource>(`/api/datasources/${id}`, {
      expectations,
    })
  }

  query = async (query: QueryJson, expectations?: Expectations) => {
    return await this._post<any>(`/api/datasources/query`, {
      body: query,
      expectations,
    })
  }
}

@@ -6,10 +6,11 @@ import {
  PreviewQueryResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import { constants } from "@budibase/backend-core"

export class QueryAPI extends TestAPI {
  save = async (body: Query): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body })
  save = async (body: Query, expectations?: Expectations): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body, expectations })
  }

  execute = async (
@@ -26,9 +27,36 @@ export class QueryAPI extends TestAPI {
    )
  }

  previewQuery = async (queryPreview: PreviewQueryRequest) => {
  preview = async (
    queryPreview: PreviewQueryRequest,
    expectations?: Expectations
  ) => {
    return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
      body: queryPreview,
      expectations,
    })
  }

  delete = async (query: Query, expectations?: Expectations) => {
    return await this._delete(`/api/queries/${query._id!}/${query._rev!}`, {
      expectations,
    })
  }

  get = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, { expectations })
  }

  getProd = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, {
      expectations,
      headers: {
        [constants.Header.APP_ID]: this.config.getProdAppId(),
      },
    })
  }

  fetch = async (expectations?: Expectations) => {
    return await this._get<Query[]>(`/api/queries`, { expectations })
  }
}
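With these methods `QueryAPI` covers the whole query lifecycle, so tests can drop hand-rolled supertest calls. A usage sketch, assuming a suite's `config` TestConfiguration and placeholder `body`/`previewReq` values; the `{ status: 200 }` expectations shape is assumed to match the `Expectations` type the other test APIs take:

import { Query, PreviewQueryRequest } from "@budibase/types"

async function exerciseQueryApi(
  config: any, // the suite's TestConfiguration
  body: Query,
  previewReq: PreviewQueryRequest
) {
  const query: Query = await config.api.query.save(body)
  const all: Query[] = await config.api.query.fetch()

  await config.api.query.preview(previewReq, { status: 200 })

  // getProd adds the prod app-id header, exercising the published-app path.
  const prodCopy: Query = await config.api.query.getProd(query._id!)

  await config.api.query.delete(query, { status: 200 })
  return { all, prodCopy }
}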

@@ -167,7 +167,7 @@ class QueryRunner {
      this.hasRerun = true
    }

    await threadUtils.invalidateDynamicVariables(this.cachedVariables)
    await threadUtils.invalidateCachedVariable(this.cachedVariables)
    return this.execute()
  }

@@ -254,7 +254,7 @@ class QueryRunner {
    let { parameters } = this
    const queryId = variable.queryId,
      name = variable.name
    let value = await threadUtils.checkCacheForDynamicVariable(queryId, name)
    let value = await threadUtils.getCachedVariable(queryId, name)
    if (!value) {
      value = this.queryResponse[queryId]
        ? this.queryResponse[queryId]

@@ -5,7 +5,7 @@ import { redis, db as dbCore } from "@budibase/backend-core"
import * as jsRunner from "../jsRunner"

const VARIABLE_TTL_SECONDS = 3600
let client: any
let client: redis.Client | null = null

async function getClient() {
  if (!client) {
@@ -36,23 +36,15 @@ export function threadSetup() {
  db.init()
}

export async function checkCacheForDynamicVariable(
  queryId: string,
  variable: string
) {
  const cache = await getClient()
  return cache.get(makeVariableKey(queryId, variable))
export async function getCachedVariable(queryId: string, variable: string) {
  return (await getClient()).get(makeVariableKey(queryId, variable))
}

export async function invalidateDynamicVariables(cachedVars: QueryVariable[]) {
export async function invalidateCachedVariable(vars: QueryVariable[]) {
  const cache = await getClient()
  let promises = []
  for (let variable of cachedVars) {
    promises.push(
      cache.delete(makeVariableKey(variable.queryId, variable.name))
    )
  }
  await Promise.all(promises)
  await Promise.all(
    vars.map(v => cache.delete(makeVariableKey(v.queryId, v.name)))
  )
}

export async function storeDynamicVariable(
@@ -93,7 +85,7 @@ export default {
  hasExtraData,
  formatResponse,
  storeDynamicVariable,
  invalidateDynamicVariables,
  checkCacheForDynamicVariable,
  invalidateCachedVariable,
  getCachedVariable,
  threadSetup,
}
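A sketch of the renamed cache round trip. `storeDynamicVariable`'s full signature is truncated in this hunk, so the three-argument form below is an assumption; the get/invalidate calls match the code above:

// `utils` is this module's default export (threadUtils at the call sites).
async function cacheRoundTrip(utils: any) {
  const variable = { queryId: "query_123", name: "authToken" }

  // Assumed signature: (queryId, name, value).
  await utils.storeDynamicVariable(variable.queryId, variable.name, "abc")

  // Keys are queryId + name; entries live for VARIABLE_TTL_SECONDS (an hour).
  const value = await utils.getCachedVariable(variable.queryId, variable.name)

  // Invalidation takes the QueryVariable list, matching how QueryRunner
  // passes this.cachedVariables after a rerun.
  await utils.invalidateCachedVariable([variable])
  return value
}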

@@ -54,7 +54,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
      type: columnType,
      subtype: columnSubtype,
      autocolumn: isAutoColumn,
    } = schema[columnName]
    } = schema[columnName] || {}

    // If the column had an invalid value we don't want to override it
    if (results.schemaValidation[columnName] === false) {

@@ -1,5 +0,0 @@
node_modules/
.env
watchtower-hook.json
dist/
testResults.json

@@ -1,28 +0,0 @@
# QA Core API Tests

The QA Core API tests are a jest suite that run directly against the budibase backend APIs.

## Auto Setup

You can run the whole test suite with one command, that spins up the budibase server and runs the jest tests:

`yarn test:ci`

## Setup Server

You can run the local development stack by following the instructions on the main readme.

## Run Tests

If you configured the server using the previous command, you can run the whole test suite by using:

`yarn test`

for watch mode, where the tests will run on every change:

`yarn test:watch`

To run tests locally against a cloud service you can update the configuration inside the `.env` file and run:

`yarn test`

@@ -1,21 +0,0 @@
import { Config } from "@jest/types"

const config: Config.InitialOptions = {
  preset: "ts-jest",
  setupFiles: ["./src/jest/jestSetup.ts"],
  setupFilesAfterEnv: ["./src/jest/jest.extends.ts"],
  testEnvironment: "node",
  transform: {
    "^.+\\.ts?$": "@swc/jest",
  },
  globalSetup: "./src/jest/globalSetup.ts",
  globalTeardown: "./src/jest/globalTeardown.ts",
  moduleNameMapper: {
    "@budibase/types": "<rootDir>/../packages/types/src",
    "@budibase/server": "<rootDir>/../packages/server/src",
    "@budibase/backend-core": "<rootDir>/../packages/backend-core/src",
    "@budibase/backend-core/(.*)": "<rootDir>/../packages/backend-core/$1",
  },
}

export default config

@@ -1,49 +0,0 @@
{
  "name": "@budibase/qa-core",
  "email": "hi@budibase.com",
  "version": "0.0.1",
  "main": "index.js",
  "description": "Budibase Integration Test Suite",
  "repository": {
    "type": "git",
    "url": "https://github.com/Budibase/budibase.git"
  },
  "scripts": {
    "setup": "yarn && node scripts/createEnv.js",
    "user": "yarn && node scripts/createEnv.js && node scripts/createUser.js",
    "test": "jest --runInBand --json --outputFile=testResults.json --forceExit",
    "test:watch": "yarn run test --watch",
    "test:debug": "DEBUG=1 yarn run test",
    "test:notify": "node scripts/testResultsWebhook",
    "test:cloud:prod": "yarn run test --testPathIgnorePatterns=\\.integration\\.",
    "test:cloud:qa": "yarn run test",
    "test:self:ci": "yarn run test --testPathIgnorePatterns=\\.integration\\. \\.cloud\\. \\.licensing\\.",
    "serve:test:self:ci": "start-server-and-test dev:built http://localhost:4001/health test:self:ci",
    "serve": "start-server-and-test dev:built http://localhost:4001/health",
    "dev:built": "cd ../ && DISABLE_RATE_LIMITING=1 yarn dev:built"
  },
  "devDependencies": {
    "@budibase/types": "^2.3.17",
    "@swc/core": "1.3.71",
    "@swc/jest": "0.2.27",
    "@trendyol/jest-testcontainers": "2.1.1",
    "@types/jest": "29.5.3",
    "@types/node-fetch": "2.6.4",
    "chance": "1.1.8",
    "dotenv": "16.0.1",
    "jest": "29.7.0",
    "prettier": "2.7.1",
    "start-server-and-test": "1.14.0",
    "timekeeper": "2.2.0",
    "ts-jest": "29.1.1",
    "ts-node": "10.8.1",
    "tsconfig-paths": "4.0.0",
    "typescript": "5.2.2"
  },
  "dependencies": {
    "@budibase/backend-core": "^2.3.17",
    "form-data": "^4.0.0",
    "node-fetch": "2.6.7",
    "stripe": "^14.11.0"
  }
}

@@ -1,26 +0,0 @@
#!/usr/bin/env node
const path = require("path")
const fs = require("fs")

function init() {
  const envFilePath = path.join(process.cwd(), ".env")
  if (!fs.existsSync(envFilePath)) {
    const envFileJson = {
      BUDIBASE_URL: "http://localhost:10000",
      ACCOUNT_PORTAL_URL: "http://localhost:10001",
      ACCOUNT_PORTAL_API_KEY: "budibase",
      BB_ADMIN_USER_EMAIL: "admin",
      BB_ADMIN_USER_PASSWORD: "admin",
      LOG_LEVEL: "info",
      JEST_TIMEOUT: "60000",
      DISABLE_PINO_LOGGER: "1",
    }
    let envFile = ""
    Object.keys(envFileJson).forEach(key => {
      envFile += `${key}=${envFileJson[key]}\n`
    })
    fs.writeFileSync(envFilePath, envFile)
  }
}

init()

@@ -1,49 +0,0 @@
const dotenv = require("dotenv")
const { join } = require("path")
const fs = require("fs")
const fetch = require("node-fetch")

function getVarFromDotEnv(path, varName) {
  const parsed = dotenv.parse(fs.readFileSync(path))
  return parsed[varName]
}

async function createUser() {
  const serverPath = join(__dirname, "..", "..", "packages", "server", ".env")
  const qaCorePath = join(__dirname, "..", ".env")
  const apiKey = getVarFromDotEnv(serverPath, "INTERNAL_API_KEY")
  const username = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_EMAIL")
  const password = getVarFromDotEnv(qaCorePath, "BB_ADMIN_USER_PASSWORD")
  const url = getVarFromDotEnv(qaCorePath, "BUDIBASE_URL")
  const resp = await fetch(`${url}/api/public/v1/users`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-budibase-api-key": apiKey,
    },
    body: JSON.stringify({
      email: username,
      password,
      builder: {
        global: true,
      },
      admin: {
        global: true,
      },
      roles: {},
    }),
  })
  if (resp.status !== 200) {
    throw new Error(await resp.text())
  } else {
    return await resp.json()
  }
}

createUser()
  .then(() => {
    console.log("User created - ready to use")
  })
  .catch(err => {
    console.error("Failed to create user - ", err)
  })
|
@ -1,130 +0,0 @@
#!/usr/bin/env node

const fetch = require("node-fetch")
const path = require("path")
const fs = require("fs")

const WEBHOOK_URL = process.env.WEBHOOK_URL
const GIT_SHA = process.env.GITHUB_SHA
const GITHUB_ACTIONS_RUN_URL = process.env.GITHUB_ACTIONS_RUN_URL

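// Parses the Jest summary that `yarn test` writes via
// `--json --outputFile=testResults.json` (see the package.json scripts above).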
async function generateReport() {
  // read the report file
  const REPORT_PATH = path.resolve(__dirname, "..", "testResults.json")
  const report = fs.readFileSync(REPORT_PATH, "utf-8")
  return JSON.parse(report)
}

const env = process.argv.slice(2)[0]

if (!env) {
  throw new Error("environment argument is required")
}

async function discordResultsNotification(report) {
  const {
    numTotalTestSuites,
    numTotalTests,
    numPassedTests,
    numPendingTests,
    numFailedTests,
    success,
    startTime,
    endTime,
  } = report

  const OUTCOME = success ? "success" : "failure"

  const options = {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Accept: "application/json",
    },
    body: JSON.stringify({
      content: `**Tests Status**: ${OUTCOME}`,
      embeds: [
        {
          title: `Budi QA Bot - ${env}`,
          description: `API Integration Tests`,
          url: GITHUB_ACTIONS_RUN_URL,
          color: OUTCOME === "success" ? 3066993 : 15548997,
          timestamp: new Date(),
          footer: {
            icon_url: "http://bbui.budibase.com/budibase-logo.png",
            text: "Budibase QA Bot",
          },
          thumbnail: {
            url: "http://bbui.budibase.com/budibase-logo.png",
          },
          author: {
            name: "Budibase QA Bot",
            url: "https://discordapp.com",
            icon_url: "http://bbui.budibase.com/budibase-logo.png",
          },
          fields: [
            {
              name: "Commit",
              value: `https://github.com/Budibase/budibase/commit/${GIT_SHA}`,
            },
            {
              name: "Github Actions Run URL",
              value: GITHUB_ACTIONS_RUN_URL || "None Supplied",
            },
            {
              name: "Test Suites",
              value: numTotalTestSuites,
            },
            {
              name: "Tests",
              value: numTotalTests,
            },
            {
              name: "Passed",
              value: numPassedTests,
            },
            {
              name: "Pending",
              value: numPendingTests,
            },
            {
              name: "Failures",
              value: numFailedTests,
            },
            {
              name: "Duration",
              value: endTime ? `${(endTime - startTime) / 1000} Seconds` : "DNF",
            },
            {
              name: "Pass Percentage",
              value: Math.floor((numPassedTests / numTotalTests) * 100),
            },
          ],
        },
      ],
    }),
  }

  // Only post in discord when tests fail
  if (success) {
    return
  }

  const response = await fetch(WEBHOOK_URL, options)

  // Discord webhooks reply 204 No Content on success, so only treat real
  // error statuses as failures rather than anything above 200.
  if (response.status >= 400) {
    const text = await response.text()
    console.error(
      `Error sending discord webhook. \nStatus: ${response.status}. \nResponse Body: ${text}. \nRequest Body: ${options.body}`
    )
  }
}

async function run() {
  const report = await generateReport()
  await discordResultsNotification(report)
}

run()
@ -1,20 +0,0 @@
import AccountInternalAPIClient from "./AccountInternalAPIClient"
import { AccountAPI, LicenseAPI, AuthAPI, StripeAPI } from "./apis"
import { State } from "../../types"

export default class AccountInternalAPI {
  client: AccountInternalAPIClient

  auth: AuthAPI
  accounts: AccountAPI
  licenses: LicenseAPI
  stripe: StripeAPI

  constructor(state: State) {
    this.client = new AccountInternalAPIClient(state)
    this.auth = new AuthAPI(this.client)
    this.accounts = new AccountAPI(this.client)
    this.licenses = new LicenseAPI(this.client)
    this.stripe = new StripeAPI(this.client)
  }
}
@ -1,89 +0,0 @@
import fetch, { Response, HeadersInit } from "node-fetch"
import env from "../../environment"
import { State } from "../../types"
import { Header } from "@budibase/backend-core"

type APIMethod = "GET" | "POST" | "PUT" | "PATCH" | "DELETE"

interface ApiOptions {
  method?: APIMethod
  body?: object
  headers?: HeadersInit | undefined
  internal?: boolean
}

export default class AccountInternalAPIClient {
  state: State
  host: string

  constructor(state: State) {
    if (!env.ACCOUNT_PORTAL_URL) {
      throw new Error("Must set ACCOUNT_PORTAL_URL env var")
    }
    if (!env.ACCOUNT_PORTAL_API_KEY) {
      throw new Error("Must set ACCOUNT_PORTAL_API_KEY env var")
    }
    this.host = `${env.ACCOUNT_PORTAL_URL}`
    this.state = state
  }

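  // Curried request helper: apiCall("POST") returns an async function that
  // performs the fetch, logs according to response status, and resolves to
  // [response, parsedBody].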
  apiCall =
    (method: APIMethod) =>
    async (url = "", options: ApiOptions = {}): Promise<[Response, any]> => {
      const requestOptions = {
        method,
        body: JSON.stringify(options.body),
        headers: {
          "Content-Type": "application/json",
          Accept: "application/json",
          cookie: this.state.cookie,
          ...options.headers,
        },
        // redirect/follow are node-fetch request options, not headers
        redirect: "follow",
        follow: 20,
        credentials: "include",
      }

      if (options.internal) {
        requestOptions.headers = {
          ...requestOptions.headers,
          ...{ [Header.API_KEY]: env.ACCOUNT_PORTAL_API_KEY },
          cookie: "",
        }
      }

      // @ts-ignore
      const response = await fetch(`${this.host}${url}`, requestOptions)

      let body: any
      const contentType = response.headers.get("content-type")
      if (contentType && contentType.includes("application/json")) {
        body = await response.json()
      } else {
        body = await response.text()
      }

      const data = {
        request: requestOptions.body,
        response: body,
      }
      const message = `${method} ${url} - ${response.status}`

      const isDebug = process.env.LOG_LEVEL === "debug"
      if (response.status > 499) {
        console.error(message, data)
      } else if (response.status >= 400) {
        console.warn(message, data)
      } else if (isDebug) {
        console.debug(message, data)
      }

      return [response, body]
    }

  post = this.apiCall("POST")
  get = this.apiCall("GET")
  patch = this.apiCall("PATCH")
  del = this.apiCall("DELETE")
  put = this.apiCall("PUT")
}
@ -1,123 +0,0 @@
import { Response } from "node-fetch"
import {
  Account,
  CreateAccountRequest,
  SearchAccountsRequest,
  SearchAccountsResponse,
} from "@budibase/types"
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import { APIRequestOpts } from "../../../types"
import { Header } from "@budibase/backend-core"
import BaseAPI from "./BaseAPI"

export default class AccountAPI extends BaseAPI {
  client: AccountInternalAPIClient

  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }

  async validateEmail(email: string, opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/validate/email`, {
        body: { email },
      })
    }, opts)
  }

  async validateTenantId(
    tenantId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/validate/tenantId`, {
        body: { tenantId },
      })
    }, opts)
  }

  async create(
    body: CreateAccountRequest,
    opts: APIRequestOpts & { autoVerify: boolean } = {
      status: 201,
      autoVerify: false,
    }
  ): Promise<[Response, Account]> {
    return this.doRequest(() => {
      const headers = {
        "no-verify": opts.autoVerify ? "1" : "0",
      }
      return this.client.post(`/api/accounts`, {
        body,
        headers,
      })
    }, opts)
  }

  async delete(accountID: string, opts: APIRequestOpts = { status: 204 }) {
    return this.doRequest(() => {
      return this.client.del(`/api/accounts/${accountID}`, {
        internal: true,
      })
    }, opts)
  }

  async deleteCurrentAccount(opts: APIRequestOpts = { status: 204 }) {
    return this.doRequest(() => {
      return this.client.del(`/api/accounts`)
    }, opts)
  }

  async verifyAccount(
    verificationCode: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/accounts/verify`, {
        body: { verificationCode },
      })
    }, opts)
  }

  async sendVerificationEmail(
    email: string,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [response] = await this.client.post(`/api/accounts/verify/send`, {
        body: { email },
        headers: {
          [Header.RETURN_VERIFICATION_CODE]: "1",
        },
      })
      const code = response.headers.get(Header.VERIFICATION_CODE)
      return [response, code]
    }, opts)
  }

  async search(
    searchType: string,
    search: "email" | "tenantId",
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, SearchAccountsResponse]> {
    return this.doRequest(() => {
      let body: SearchAccountsRequest = {}
      if (search === "email") {
        body.email = searchType
      } else if (search === "tenantId") {
        body.tenantId = searchType
      }
      return this.client.post(`/api/accounts/search`, {
        body,
        internal: true,
      })
    }, opts)
  }

  async self(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/auth/self`)
    }, opts)
  }
}
@ -1,68 +0,0 @@
import { Response } from "node-fetch"
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import { APIRequestOpts } from "../../../types"
import BaseAPI from "./BaseAPI"
import { Header } from "@budibase/backend-core"

export default class AuthAPI extends BaseAPI {
  client: AccountInternalAPIClient

  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }

  async login(
    email: string,
    password: string,
    opts: APIRequestOpts = { doExpect: true, status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [res] = await this.client.post(`/api/auth/login`, {
        body: {
          email: email,
          password: password,
        },
      })
      const cookie = res.headers.get("set-cookie")
      return [res, cookie]
    }, opts)
  }

  async logout(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/auth/logout`)
    }, opts)
  }

  async resetPassword(
    email: string,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, string]> {
    return this.doRequest(async () => {
      const [response] = await this.client.post(`/api/auth/reset`, {
        body: { email },
        headers: {
          [Header.RETURN_RESET_PASSWORD_CODE]: "1",
        },
      })
      const code = response.headers.get(Header.RESET_PASSWORD_CODE)
      return [response, code]
    }, opts)
  }

  async resetPasswordUpdate(
    resetCode: string,
    password: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/auth/reset/update`, {
        body: {
          resetCode: resetCode,
          password: password,
        },
      })
    }, opts)
  }
}
@ -1,20 +0,0 @@
import { Response } from "node-fetch"
import { APIRequestOpts } from "../../../types"

export default class BaseAPI {
  async doRequest(
    request: () => Promise<[Response, any]>,
    opts: APIRequestOpts
  ): Promise<[Response, any]> {
    const [response, body] = await request()

    // do expect on by default
    if (opts.doExpect === undefined) {
      opts.doExpect = true
    }
    if (opts.doExpect && opts.status) {
      expect(response).toHaveStatusCode(opts.status)
    }
    return [response, body]
  }
}
@ -1,140 +0,0 @@
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import {
  Account,
  CreateOfflineLicenseRequest,
  GetLicenseKeyResponse,
  GetOfflineLicenseResponse,
  UpdateLicenseRequest,
} from "@budibase/types"
import { Response } from "node-fetch"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"

export default class LicenseAPI extends BaseAPI {
  client: AccountInternalAPIClient
  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }
  async updateLicense(
    accountId: string,
    body: UpdateLicenseRequest,
    opts: APIRequestOpts = { status: 200 }
  ): Promise<[Response, Account]> {
    return this.doRequest(() => {
      return this.client.put(`/api/accounts/${accountId}/license`, {
        body,
        internal: true,
      })
    }, opts)
  }
  // TODO: Better approach for setting tenant id header
  async createOfflineLicense(
    accountId: string,
    tenantId: string,
    body: CreateOfflineLicenseRequest,
    opts: { status?: number } = {}
  ): Promise<Response> {
    const [response, json] = await this.client.post(
      `/api/internal/accounts/${accountId}/license/offline`,
      {
        body,
        internal: true,
        headers: {
          "x-budibase-tenant-id": tenantId,
        },
      }
    )
    expect(response.status).toBe(opts.status ? opts.status : 201)
    return response
  }
  async getOfflineLicense(
    accountId: string,
    tenantId: string,
    opts: { status?: number } = {}
  ): Promise<[Response, GetOfflineLicenseResponse]> {
    const [response, json] = await this.client.get(
      `/api/internal/accounts/${accountId}/license/offline`,
      {
        internal: true,
        headers: {
          "x-budibase-tenant-id": tenantId,
        },
      }
    )
    expect(response.status).toBe(opts.status ? opts.status : 200)
    return [response, json]
  }
  async getLicenseKey(
    opts: { status?: number } = {}
  ): Promise<[Response, GetLicenseKeyResponse]> {
    const [response, json] = await this.client.get(`/api/license/key`)
    expect(response.status).toBe(opts.status || 200)
    return [response, json]
  }
  async activateLicense(
    apiKey: string,
    tenantId: string,
    licenseKey: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/license/activate`, {
        body: {
          apiKey: apiKey,
          tenantId: tenantId,
          licenseKey: licenseKey,
        },
      })
    }, opts)
  }
  async regenerateLicenseKey(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/license/key/regenerate`, {})
    }, opts)
  }

  async getPlans(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/plans`)
    }, opts)
  }

  async updatePlan(priceId: string, opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.put(`/api/license/plan`, {
        body: { priceId },
      })
    }, opts)
  }

  async refreshAccountLicense(
    accountId: string,
    opts: { status?: number } = {}
  ): Promise<Response> {
    const [response, json] = await this.client.post(
      `/api/accounts/${accountId}/license/refresh`,
      {
        internal: true,
      }
    )
    expect(response.status).toBe(opts.status ? opts.status : 201)
    return response
  }

  async getLicenseUsage(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/license/usage`)
    }, opts)
  }

  async licenseUsageTriggered(
    opts: { status?: number } = {}
  ): Promise<Response> {
    const [response, json] = await this.client.post(
      `/api/license/usage/triggered`
    )
    expect(response.status).toBe(opts.status ? opts.status : 201)
    return response
  }
}
@ -1,74 +0,0 @@
import AccountInternalAPIClient from "../AccountInternalAPIClient"
import BaseAPI from "./BaseAPI"
import { APIRequestOpts } from "../../../types"

export default class StripeAPI extends BaseAPI {
  client: AccountInternalAPIClient

  constructor(client: AccountInternalAPIClient) {
    super()
    this.client = client
  }

  async createCheckoutSession(
    price: object,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/checkout-session`, {
        body: { prices: [price] },
      })
    }, opts)
  }

  async checkoutSuccess(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/checkout-success`)
    }, opts)
  }

  async createPortalSession(
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/portal-session`, {
        body: { stripeCustomerId },
      })
    }, opts)
  }

  async linkStripeCustomer(
    accountId: string,
    stripeCustomerId: string,
    opts: APIRequestOpts = { status: 200 }
  ) {
    return this.doRequest(() => {
      return this.client.post(`/api/stripe/link`, {
        body: {
          accountId,
          stripeCustomerId,
        },
        internal: true,
      })
    }, opts)
  }

  async getInvoices(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/invoices`)
    }, opts)
  }

  async getUpcomingInvoice(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/upcoming-invoice`)
    }, opts)
  }

  async getStripeCustomers(opts: APIRequestOpts = { status: 200 }) {
    return this.doRequest(() => {
      return this.client.get(`/api/stripe/customers`)
    }, opts)
  }
}
@ -1,4 +0,0 @@
export { default as AuthAPI } from "./AuthAPI"
export { default as AccountAPI } from "./AccountAPI"
export { default as LicenseAPI } from "./LicenseAPI"
export { default as StripeAPI } from "./StripeAPI"

@ -1 +0,0 @@
export { default as AccountInternalAPI } from "./AccountInternalAPI"
@ -1,29 +0,0 @@
import { AccountInternalAPI } from "../api"
import { BudibaseTestConfiguration } from "../../shared"

export default class TestConfiguration<T> extends BudibaseTestConfiguration {
  // apis
  api: AccountInternalAPI

  context: T

  constructor() {
    super()
    this.api = new AccountInternalAPI(this.state)
    this.context = <T>{}
  }

  async beforeAll() {
    await super.beforeAll()
    await this.setApiKey()
  }

  async afterAll() {
    await super.afterAll()
  }

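  // Fetches the current user's API key and stores it on shared state so
  // subsequent requests can authenticate with it.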
  async setApiKey() {
    const apiKeyResponse = await this.internalApi.self.getApiKey()
    this.state.apiKey = apiKeyResponse.apiKey
  }
}
@ -1,24 +0,0 @@
import { generator } from "../../shared"
import { Hosting, CreateAccountRequest } from "@budibase/types"

// TODO: Refactor me to central location
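// Builds a random but valid account payload; callers can override any field
// through `partial`.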
export const generateAccount = (
  partial: Partial<CreateAccountRequest>
): CreateAccountRequest => {
  const uuid = generator.guid()

  const email = `${uuid}@budibase.com`
  const tenant = `tenant${uuid.replace(/-/g, "")}`

  return {
    email,
    hosting: Hosting.CLOUD,
    name: email,
    password: uuid,
    profession: "software_engineer",
    size: "10+",
    tenantId: tenant,
    tenantName: tenant,
    ...partial,
  }
}

@ -1 +0,0 @@
export * as accounts from "./accounts"

@ -1 +0,0 @@
export * from "./api"
@ -1,32 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Account Internal Operations", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs account deletion by ID", async () => {
    // Deleting by unknown id doesn't work
    const accountId = generator.guid()
    await config.api.accounts.delete(accountId, { status: 404 })

    // Create new account
    const [_, account] = await config.api.accounts.create({
      ...fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      }),
    })

    // New account can be deleted
    await config.api.accounts.delete(account.accountId)
  })
})
@ -1,102 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Accounts", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs signup and deletion flow", async () => {
    await config.doInNewState(async () => {
      // Create account
      const createAccountRequest = fixtures.accounts.generateAccount({
        hosting: Hosting.CLOUD,
      })
      const email = createAccountRequest.email
      const tenantId = createAccountRequest.tenantId

      // Validation - email and tenant ID allowed
      await config.api.accounts.validateEmail(email)
      await config.api.accounts.validateTenantId(tenantId)

      // Create unverified account
      await config.api.accounts.create(createAccountRequest)

      // Validation - email and tenant ID no longer valid
      await config.api.accounts.validateEmail(email, { status: 400 })
      await config.api.accounts.validateTenantId(tenantId, { status: 400 })

      // Attempt to log in using unverified account
      await config.loginAsAccount(createAccountRequest, { status: 400 })

      // Re-send verification email to get access to code
      const [_, code] = await config.accountsApi.accounts.sendVerificationEmail(
        email
      )

      // Send the verification request
      await config.accountsApi.accounts.verifyAccount(code!)

      // Verify self response is unauthorized
      await config.api.accounts.self({ status: 403 })

      // Can now log in to the account
      await config.loginAsAccount(createAccountRequest)

      // Verify self response matches account
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.email).toBe(email)

      // Delete account
      await config.api.accounts.deleteCurrentAccount()

      // Can't log in
      await config.loginAsAccount(createAccountRequest, { status: 403 })
    })
  })

  describe("Searching accounts", () => {
    it("search by tenant ID", async () => {
      const tenantId = generator.string()

      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(
        tenantId,
        "tenantId"
      )
      expect(emptyBody.length).toBe(0)

      // Hit result
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.tenantId!,
        "tenantId"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].tenantId).toBe(config.state.tenantId)
    })

    it("searches by email", async () => {
      const email = generator.email({ domain: "example.com" })

      // Empty result
      const [_, emptyBody] = await config.api.accounts.search(email, "email")
      expect(emptyBody.length).toBe(0)

      // Hit result
      const [hitRes, hitBody] = await config.api.accounts.search(
        config.state.email!,
        "email"
      )
      expect(hitBody.length).toBe(1)
      expect(hitBody[0].email).toBe(config.state.email)
    })
  })
})
@ -1,46 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { generator } from "../../../shared"
import { Hosting } from "@budibase/types"

describe("Password Management", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("performs password reset flow", async () => {
    // Create account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    await config.api.accounts.create(createAccountRequest, { autoVerify: true })

    // Request password reset to get code
    const [_, code] = await config.api.auth.resetPassword(
      createAccountRequest.email
    )

    // Change password using code
    const password = generator.string()
    await config.api.auth.resetPasswordUpdate(code, password)

    // Login using the new password
    await config.api.auth.login(createAccountRequest.email, password)

    // Logout of account
    await config.api.auth.logout()

    // Cannot log in using old password
    await config.api.auth.login(
      createAccountRequest.email,
      createAccountRequest.password,
      { status: 403 }
    )
  })
})
@ -1,68 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Feature, Hosting } from "@budibase/types"

describe("license activation", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("creates, activates and deletes online license - self host", async () => {
    // Remove existing license key
    await config.internalApi.license.deleteLicenseKey()

    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })

    // Create self host account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })

    let licenseKey = ""
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      // Retrieve license key
      const [res, body] = await config.accountsApi.licenses.getLicenseKey()
      licenseKey = body.licenseKey
    })

    const accountId = account.accountId!

    // Update license to have paid feature
    const [res, acc] = await config.accountsApi.licenses.updateLicense(
      accountId,
      {
        overrides: {
          features: [Feature.APP_BACKUPS],
        },
      }
    )

    // Activate license key
    await config.internalApi.license.activateLicenseKey({ licenseKey })

    // Verify license updated with new feature
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, body] = await config.api.accounts.self()
      expect(body.license.features[0]).toBe("appBackups")
    })

    // Remove license key
    await config.internalApi.license.deleteLicenseKey()

    // Verify license key not found
    await config.internalApi.license.getLicenseKey({ status: 404 })
  })
})
@ -1,116 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, PlanType } from "@budibase/types"

const stripe = require("stripe")(process.env.STRIPE_SECRET_KEY)

describe("license management", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  it("retrieves plans, creates checkout session, and updates license", async () => {
    // Create cloud account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.CLOUD,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest, {
        autoVerify: true,
      })

    // Self response has free license
    await config.doInNewState(async () => {
      await config.loginAsAccount(createAccountRequest)
      const [selfRes, selfBody] = await config.api.accounts.self()
      expect(selfBody.license.plan.type).toBe(PlanType.FREE)
    })

    // Retrieve plans
    const [plansRes, planBody] = await config.api.licenses.getPlans()

    // Select priceId from premium plan
    let premiumPrice: any = null
    let businessPriceId = ""
    for (const plan of planBody) {
      if (plan.type === PlanType.PREMIUM_PLUS) {
        premiumPrice = plan.prices[0]
      }
      if (plan.type === PlanType.ENTERPRISE_BASIC) {
        businessPriceId = plan.prices[0].priceId
      }
    }

    // Create checkout session for price
    const checkoutSessionRes = await config.api.stripe.createCheckoutSession({
      id: premiumPrice.priceId,
      type: premiumPrice.type,
    })
    const checkoutSessionUrl = checkoutSessionRes[1].url
    expect(checkoutSessionUrl).toContain("checkout.stripe.com")

    // Create stripe customer
    const customer = await stripe.customers.create({
      email: createAccountRequest.email,
    })

    // Create payment method
    const paymentMethod = await stripe.paymentMethods.create({
      type: "card",
      card: {
        token: "tok_visa", // Test Visa Card
      },
    })

    // Attach payment method to customer
    await stripe.paymentMethods.attach(paymentMethod.id, {
      customer: customer.id,
    })

    // Update customer
    await stripe.customers.update(customer.id, {
      invoice_settings: {
        default_payment_method: paymentMethod.id,
      },
    })

    // Create subscription for premium plan
    const subscription = await stripe.subscriptions.create({
      customer: customer.id,
      items: [
        {
          price: premiumPrice.priceId,
          quantity: 1,
        },
      ],
      default_payment_method: paymentMethod.id,
      collection_method: "charge_automatically",
    })

    await config.doInNewState(async () => {
      // License updated from Free to Premium
      await config.loginAsAccount(createAccountRequest)
      await config.api.stripe.linkStripeCustomer(account.accountId, customer.id)
      const [_, selfBodyPremium] = await config.api.accounts.self()
      expect(selfBodyPremium.license.plan.type).toBe(PlanType.PREMIUM_PLUS)

      // Create portal session - Check URL
      const [portalRes, portalSessionBody] =
        await config.api.stripe.createPortalSession(customer.id)
      expect(portalSessionBody.url).toContain("billing.stripe.com")

      // Update subscription from premium to business license
      await config.api.licenses.updatePlan(businessPriceId)

      // License updated to Business
      const [selfRes, selfBodyBusiness] = await config.api.accounts.self()
      expect(selfBodyBusiness.license.plan.type).toBe(PlanType.ENTERPRISE_BASIC)
    })
  })
})
@ -1,79 +0,0 @@
import TestConfiguration from "../../config/TestConfiguration"
import * as fixtures from "../../fixtures"
import { Hosting, Feature } from "@budibase/types"

describe("offline", () => {
  const config = new TestConfiguration()

  beforeAll(async () => {
    await config.beforeAll()
  })

  afterAll(async () => {
    await config.afterAll()
  })

  // TODO: Currently requires a self host install + account portal
  // Ignored until we set this up
  it.skip("creates, activates and deletes offline license", async () => {
    // installation: Delete any token
    await config.internalApi.license.deleteOfflineLicenseToken()

    // installation: Assert token not found
    let [getTokenRes] = await config.internalApi.license.getOfflineLicenseToken(
      { status: 404 }
    )

    // installation: Retrieve Identifier
    const [getIdentifierRes, identifier] =
      await config.internalApi.license.getOfflineIdentifier()

    // account-portal: Create self-host account
    const createAccountRequest = fixtures.accounts.generateAccount({
      hosting: Hosting.SELF,
    })
    const [createAccountRes, account] =
      await config.accountsApi.accounts.create(createAccountRequest)
    const accountId = account.accountId!
    const tenantId = account.tenantId!

    // account-portal: Enable feature on license
    await config.accountsApi.licenses.updateLicense(accountId, {
      overrides: {
        features: [Feature.OFFLINE],
      },
    })

    // account-portal: Create offline token
    const expireAt = new Date()
    expireAt.setDate(new Date().getDate() + 1)
    await config.accountsApi.licenses.createOfflineLicense(
      accountId,
      tenantId,
      {
        expireAt: expireAt.toISOString(),
        installationIdentifierBase64: identifier.identifierBase64,
      }
    )

    // account-portal: Retrieve offline token
    const [getLicenseRes, offlineLicense] =
      await config.accountsApi.licenses.getOfflineLicense(accountId, tenantId)

    // installation: Activate offline token
    await config.internalApi.license.activateOfflineLicenseToken({
      offlineLicenseToken: offlineLicense.offlineLicenseToken,
    })

    // installation: Assert token found
    await config.internalApi.license.getOfflineLicenseToken()

    // TODO: Assert on license for current user

    // installation: Remove the token
    await config.internalApi.license.deleteOfflineLicenseToken()

    // installation: Assert token not found
    await config.internalApi.license.getOfflineLicenseToken({ status: 404 })
  })
})
@ -1,34 +0,0 @@
import { join } from "path"

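// Load qa-core/.env exactly once per process before reading any variables.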
let LOADED = false
if (!LOADED) {
  require("dotenv").config({
    path: join(__dirname, "..", ".env"),
  })
  LOADED = true
}

const env = {
  BUDIBASE_URL: process.env.BUDIBASE_URL,
  ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
  ACCOUNT_PORTAL_API_KEY: process.env.ACCOUNT_PORTAL_API_KEY,
  BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL,
  BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD,
  POSTGRES_HOST: process.env.POSTGRES_HOST,
  POSTGRES_PORT: process.env.POSTGRES_PORT,
  POSTGRES_DB: process.env.POSTGRES_DB,
  POSTGRES_USER: process.env.POSTGRES_USER,
  POSTGRES_PASSWORD: process.env.POSTGRES_PASSWORD,
  MONGODB_CONNECTION_STRING: process.env.MONGODB_CONNECTION_STRING,
  MONGODB_DB: process.env.MONGODB_DB,
  REST_API_BASE_URL: process.env.REST_API_BASE_URL,
  REST_API_KEY: process.env.REST_API_KEY,
  MARIADB_HOST: process.env.MARIADB_HOST,
  MARIADB_PORT: process.env.MARIADB_PORT,
  MARIADB_DB: process.env.MARIADB_DB,
  MARIADB_USER: process.env.MARIADB_USER,
  MARIADB_PASSWORD: process.env.MARIADB_PASSWORD,
  STRIPE_SECRET_KEY: process.env.STRIPE_SECRET_KEY,
}

export = env
@ -1,112 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("getExternalSchema", () => {
  describe("mssql", () => {
    let config: any

    beforeAll(async () => {
      const password = "Str0Ng_p@ssW0rd!"
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
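        // The container counts as healthy once sqlcmd can run a trivial query as `sa`.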
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(1433)
      config = {
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      }
    })

    it("can export an empty database", async () => {
      const integration = new mssql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`""`)
    })

    it("can export a database with tables", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL
          );

          CREATE TABLE products (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            price DECIMAL(10, 2) NOT NULL
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })

    it("does not export any data", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
          INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          price decimal(9) NOT NULL,
          CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
          id int(4) NOT NULL,
          name varchar(100) NOT NULL,
          role varchar(15) NOT NULL,
          CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })
  })
})
@ -1,106 +0,0 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"

describe("datasource validators", () => {
  describe("mysql", () => {
    let config: any

    beforeAll(async () => {
      const container = await new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnv("MYSQL_ROOT_PASSWORD", "admin")
        .withEnv("MYSQL_DATABASE", "db")
        .withEnv("MYSQL_USER", "user")
        .withEnv("MYSQL_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(3306)
      config = {
        host,
        port,
        user: "user",
        database: "db",
        password: "password",
        rejectUnauthorized: true,
      }
    })

    it("can export an empty database", async () => {
      const integration = new mysql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(
        `"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
      )
    })

    it("can export a database with tables", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL,
            PRIMARY KEY (id)
          );

          CREATE TABLE products (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            price DECIMAL,
            PRIMARY KEY (id)
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })

    it("does not export any data", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
      })

      await integration.internalQuery({
        sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })
  })
})
@ -1,376 +0,0 @@
|
|||
import { GenericContainer } from "testcontainers"
|
||||
import postgres from "../../../../packages/server/src/integrations/postgres"
|
||||
|
||||
jest.unmock("pg")
|
||||
|
||||
describe("getExternalSchema", () => {
|
||||
describe("postgres", () => {
|
||||
let config: any
|
||||
|
||||
// Remove versioning from the outputs to prevent failures when running different pg_dump versions
|
||||
function stripResultsVersions(sql: string) {
|
||||
const result = sql
|
||||
.replace(/\n[^\n]+Dumped from database version[^\n]+\n/, "")
|
||||
.replace(/\n[^\n]+Dumped by pg_dump version[^\n]+\n/, "")
|
||||
.toString()
|
||||
return result
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
const container = await new GenericContainer("postgres:16.1-bullseye")
|
||||
.withExposedPorts(5432)
|
||||
.withEnv("POSTGRES_PASSWORD", "password")
|
||||
.start()
|
||||
|
||||
const host = container.getContainerIpAddress()
|
||||
const port = container.getMappedPort(5432)
|
||||
|
||||
config = {
|
||||
host,
|
||||
port,
|
||||
database: "postgres",
|
||||
user: "postgres",
|
||||
password: "password",
|
||||
schema: "public",
|
||||
ssl: false,
|
||||
rejectUnauthorized: false,
|
||||
}
|
||||
})
|
||||
|
||||
it("can export an empty database", async () => {
|
||||
const integration = new postgres.integration(config)
|
||||
const result = await integration.getExternalSchema()
|
||||
|
||||
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
|
||||
"--
|
||||
-- PostgreSQL database dump
|
||||
--
|
||||
SET statement_timeout = 0;
|
||||
SET lock_timeout = 0;
|
||||
SET idle_in_transaction_session_timeout = 0;
|
||||
SET client_encoding = 'UTF8';
|
||||
SET standard_conforming_strings = on;
|
||||
SELECT pg_catalog.set_config('search_path', '', false);
|
||||
SET check_function_bodies = false;
|
||||
SET xmloption = content;
|
||||
SET client_min_messages = warning;
|
||||
SET row_security = off;
|
||||
|
||||
--
|
||||
-- PostgreSQL database dump complete
|
||||
--
|
||||
|
||||
"
|
||||
`)
|
||||
})
|
||||
|
||||
it("can export a database with tables", async () => {
|
||||
const integration = new postgres.integration(config)
|
||||
|
||||
await integration.internalQuery(
|
||||
{
|
||||
sql: `
|
||||
CREATE TABLE "users" (
|
||||
"id" SERIAL,
|
||||
"name" VARCHAR(100) NOT NULL,
|
||||
"role" VARCHAR(15) NOT NULL,
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
CREATE TABLE "products" (
|
||||
"id" SERIAL,
|
||||
"name" VARCHAR(100) NOT NULL,
|
||||
"price" DECIMAL NOT NULL,
|
||||
"owner" INTEGER NULL,
|
||||
PRIMARY KEY ("id")
|
||||
);
|
||||
ALTER TABLE "products" ADD CONSTRAINT "fk_owner" FOREIGN KEY ("owner") REFERENCES "users" ("id");`,
|
||||
},
|
||||
false
|
||||
)
|
||||
|
||||
const result = await integration.getExternalSchema()
|
||||
expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
|
||||
"--
|
||||
-- PostgreSQL database dump
|
||||
--
|
||||
SET statement_timeout = 0;
|
||||
SET lock_timeout = 0;
|
||||
SET idle_in_transaction_session_timeout = 0;
|
||||
SET client_encoding = 'UTF8';
|
||||
SET standard_conforming_strings = on;
|
||||
SELECT pg_catalog.set_config('search_path', '', false);
|
||||
SET check_function_bodies = false;
|
||||
SET xmloption = content;
|
||||
SET client_min_messages = warning;
|
||||
SET row_security = off;
|
||||
|
||||
SET default_tablespace = '';
|
||||
|
||||
SET default_table_access_method = heap;
|
||||
|
||||
--
|
||||
-- Name: products; Type: TABLE; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE TABLE public.products (
|
||||
id integer NOT NULL,
|
||||
name character varying(100) NOT NULL,
|
||||
price numeric NOT NULL,
|
||||
owner integer
|
||||
);
|
||||
|
||||
|
||||
ALTER TABLE public.products OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE SEQUENCE public.products_id_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
ALTER TABLE public.products_id_seq OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;
|
||||
|
||||
|
||||
--
|
||||
-- Name: users; Type: TABLE; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE TABLE public.users (
|
||||
id integer NOT NULL,
|
||||
name character varying(100) NOT NULL,
|
||||
role character varying(15) NOT NULL
|
||||
);
|
||||
|
||||
|
||||
ALTER TABLE public.users OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
CREATE SEQUENCE public.users_id_seq
|
||||
AS integer
|
||||
START WITH 1
|
||||
INCREMENT BY 1
|
||||
NO MINVALUE
|
||||
NO MAXVALUE
|
||||
CACHE 1;
|
||||
|
||||
|
||||
ALTER TABLE public.users_id_seq OWNER TO postgres;
|
||||
|
||||
--
|
||||
-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;
|
||||
|
||||
|
||||
--
|
||||
-- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);
|
||||
|
||||
|
||||
--
|
||||
-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);
|
||||
|
||||
|
||||
--
|
||||
-- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.products
|
||||
ADD CONSTRAINT products_pkey PRIMARY KEY (id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.users
|
||||
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
|
||||
|
||||
|
||||
--
|
||||
-- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
|
||||
--
|
||||
|
||||
ALTER TABLE ONLY public.products
|
||||
ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);
|
||||
|
||||
|
||||
--
|
||||
-- PostgreSQL database dump complete
|
||||
--
|
||||
|
||||
"
|
||||
`)
|
||||
})
|
||||
|
||||
it("does not export a data", async () => {
|
||||
const integration = new postgres.integration(config)
|
||||
|
||||
await integration.internalQuery(
|
||||
{
|
||||
sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
|
          INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
        },
        false
      )

      const result = await integration.getExternalSchema()
      expect(stripResultsVersions(result)).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --
        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL,
            owner integer
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- Name: products fk_owner; Type: FK CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT fk_owner FOREIGN KEY (owner) REFERENCES public.users(id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })
  })
})
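The snapshot above only stays stable across Postgres upgrades because the result is passed through stripResultsVersions first. That helper's implementation isn't part of this diff; a minimal sketch of what it plausibly does, assuming getExternalSchema() returns the raw pg_dump text as a string and that the version banner is the only unstable part:

// Sketch (assumption): drop pg_dump's version banner lines, e.g.
//   -- Dumped from database version 16.1
//   -- Dumped by pg_dump version 16.1
// so the inline snapshot doesn't churn on every database upgrade.
function stripResultsVersions(schema: string): string {
  return schema
    .split("\n")
    .filter(
      line => !/^-- Dumped (from database|by pg_dump) version/.test(line)
    )
    .join("\n")
}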
@@ -1,77 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import arangodb from "../../../../packages/server/src/integrations/arangodb"
import { generator } from "../../shared"

jest.unmock("arangojs")

describe("datasource validators", () => {
  describe("arangodb", () => {
    let connectionSettings: {
      user: string
      password: string
      url: string
    }

    beforeAll(async () => {
      const user = "root"
      const password = generator.hash()
      const container = await new GenericContainer("arangodb")
        .withExposedPorts(8529)
        .withEnv("ARANGO_ROOT_PASSWORD", password)
        .withWaitStrategy(
          Wait.forLogMessage("is ready for business. Have fun!")
        )
        .start()

      connectionSettings = {
        user,
        password,
        url: `http://${container.getContainerIpAddress()}:${container.getMappedPort(
          8529
        )}`,
      }
    })

    it("test valid connection string", async () => {
      const integration = new arangodb.integration({
        url: connectionSettings.url,
        username: connectionSettings.user,
        password: connectionSettings.password,
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong password", async () => {
      const integration = new arangodb.integration({
        url: connectionSettings.url,
        username: connectionSettings.user,
        password: "wrong",
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "not authorized to execute this request",
      })
    })

    it("test wrong url", async () => {
      const integration = new arangodb.integration({
        url: "http://not.here",
        username: connectionSettings.user,
        password: connectionSettings.password,
        databaseName: "",
        collection: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "getaddrinfo ENOTFOUND not.here",
      })
    })
  })
})
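All of these deleted validator specs assert the same contract from testConnection(): a boolean flag, plus the raw driver error message when the probe fails. A type sketch of that contract, inferred from the expect() calls; the interface name here is an assumption, not taken from this diff:

// Inferred shape: a successful probe resolves to { connected: true },
// a failed one carries the underlying driver error through unchanged.
interface TestConnectionResult {
  connected: boolean
  error?: string
}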
@@ -1,67 +0,0 @@
import { GenericContainer } from "testcontainers"

import couchdb from "../../../../packages/server/src/integrations/couchdb"
import { generator } from "../../shared"

describe("datasource validators", () => {
  describe("couchdb", () => {
    let url: string

    beforeAll(async () => {
      const user = generator.first()
      const password = generator.hash()

      const container = await new GenericContainer("budibase/couchdb")
        .withExposedPorts(5984)
        .withEnv("COUCHDB_USER", user)
        .withEnv("COUCHDB_PASSWORD", password)
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(5984)

      await container.exec([
        `curl`,
        `-u`,
        `${user}:${password}`,
        `-X`,
        `PUT`,
        `localhost:5984/db`,
      ])
      url = `http://${user}:${password}@${host}:${port}`
    })

    it("test valid connection string", async () => {
      const integration = new couchdb.integration({
        url,
        database: "db",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid database", async () => {
      const integration = new couchdb.integration({
        url,
        database: "random_db",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
      })
    })

    it("test invalid url", async () => {
      const integration = new couchdb.integration({
        url: "http://invalid:123",
        database: "any",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error:
          "request to http://invalid:123/any failed, reason: getaddrinfo ENOTFOUND invalid",
      })
    })
  })
})
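The beforeAll above creates the db database by exec'ing curl inside the container; CouchDB creates a database on PUT /<name>, so the same setup step could run from the test process against the mapped port, as in this sketch (assumes Node 18+'s global fetch; the 412 handling is an assumption about reruns, not something this spec needed):

// Sketch: host-side equivalent of container.exec(["curl", ...]) above.
// CouchDB answers 412 Precondition Failed if the database already exists.
async function createDb(url: string, name: string): Promise<void> {
  const res = await fetch(`${url}/${name}`, { method: "PUT" })
  if (!res.ok && res.status !== 412) {
    throw new Error(`failed to create ${name}: ${res.status}`)
  }
}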
@@ -1,63 +0,0 @@
import { GenericContainer } from "testcontainers"
import { env } from "@budibase/backend-core"

import dynamodb from "../../../../packages/server/src/integrations/dynamodb"
import { generator } from "../../shared"

jest.unmock("aws-sdk")

describe("datasource validators", () => {
  describe("dynamodb", () => {
    let connectionSettings: {
      user: string
      password: string
      url: string
    }

    beforeAll(async () => {
      const user = "root"
      const password = generator.hash()
      const container = await new GenericContainer("amazon/dynamodb-local")
        .withExposedPorts(8000)
        .start()

      connectionSettings = {
        user,
        password,
        url: `http://${container.getContainerIpAddress()}:${container.getMappedPort(
          8000
        )}`,
      }
      env._set("AWS_ACCESS_KEY_ID", "mockedkey")
      env._set("AWS_SECRET_ACCESS_KEY", "mockedsecret")
    })

    it("test valid connection string", async () => {
      const integration = new dynamodb.integration({
        endpoint: connectionSettings.url,
        region: "",
        accessKeyId: "",
        secretAccessKey: "",
      })

      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong endpoint", async () => {
      const integration = new dynamodb.integration({
        endpoint: "http://wrong.url:2880",
        region: "",
        accessKeyId: "",
        secretAccessKey: "",
      })

      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error:
          "Inaccessible host: `wrong.url' at port `undefined'. This service may not be available in the `eu-west-1' region.",
      })
    })
  })
})
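dynamodb-local doesn't verify IAM credentials, which is why the spec gets away with mockedkey/mockedsecret in the environment. A sketch of pointing the AWS SDK v2 client (the library the spec unmocks) at the container; the endpoint below is a placeholder standing in for the mapped port, not a value from this diff:

import AWS from "aws-sdk"

// Sketch: dynamodb-local accepts any key pair, so placeholder
// credentials satisfy the request signing while the endpoint is
// redirected away from AWS. The eu-west-1 default is also what shows
// up in the error message asserted by the wrong-endpoint test.
const dynamo = new AWS.DynamoDB({
  endpoint: "http://localhost:8000", // stand-in for the mapped port
  region: "eu-west-1",
  accessKeyId: "mockedkey",
  secretAccessKey: "mockedsecret",
})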
@@ -1,34 +0,0 @@
import { ElasticsearchContainer } from "testcontainers"
import elastic from "../../../../packages/server/src/integrations/elasticsearch"

jest.unmock("@elastic/elasticsearch")

describe("datasource validators", () => {
  describe("elastic search", () => {
    let url: string

    beforeAll(async () => {
      const container = await new ElasticsearchContainer().start()
      url = container.getHttpUrl()
    })

    it("test valid connection string", async () => {
      const integration = new elastic.integration({
        url,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test wrong connection string", async () => {
      const integration = new elastic.integration({
        url: `http://localhost:5656`,
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "connect ECONNREFUSED 127.0.0.1:5656",
      })
    })
  })
})
@@ -1,100 +0,0 @@
import { GenericContainer } from "testcontainers"
import mongo from "../../../../packages/server/src/integrations/mongodb"
import { generator } from "../../shared"

jest.unmock("mongodb")

describe("datasource validators", () => {
  describe("mongo", () => {
    let connectionSettings: {
      user: string
      password: string
      host: string
      port: number
    }

    function getConnectionString(
      settings: Partial<typeof connectionSettings> = {}
    ) {
      const { user, password, host, port } = {
        ...connectionSettings,
        ...settings,
      }
      return `mongodb://${user}:${password}@${host}:${port}`
    }

    beforeAll(async () => {
      const user = generator.name()
      const password = generator.hash()
      const container = await new GenericContainer("mongo:7.0-jammy")
        .withExposedPorts(27017)
        .withEnv("MONGO_INITDB_ROOT_USERNAME", user)
        .withEnv("MONGO_INITDB_ROOT_PASSWORD", password)
        .start()

      connectionSettings = {
        user,
        password,
        host: container.getContainerIpAddress(),
        port: container.getMappedPort(27017),
      }
    })

    it("test valid connection string", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString(),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid password", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ password: "wrong" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Authentication failed.",
      })
    })

    it("test invalid username", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ user: "wrong" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Authentication failed.",
      })
    })

    it("test invalid connection", async () => {
      const integration = new mongo.integration({
        connectionString: getConnectionString({ host: "http://nothinghere" }),
        db: "",
        tlsCertificateFile: "",
        tlsCertificateKeyFile: "",
        tlsCAFile: "",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "getaddrinfo ENOTFOUND http",
      })
    })
  })
})
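One subtlety in the final case: getConnectionString() splices the host in verbatim, so the driver parses everything before the first ":" in the host portion as the hostname and tries to resolve the literal string "http", which is exactly the ENOTFOUND the spec asserts.

// Illustration (hypothetical values): the malformed URI the probe dials.
const bad = `mongodb://user:password@${"http://nothinghere"}:27017`
// -> "mongodb://user:password@http://nothinghere:27017"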
@@ -1,65 +0,0 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"

import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("datasource validators", () => {
  describe("mssql", () => {
    let host: string, port: number

    const password = "Str0Ng_p@ssW0rd!"

    beforeAll(async () => {
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server:2022-latest"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(1433)
    })

    it("test valid connection string", async () => {
      const integration = new mssql.integration({
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({ connected: true })
    })

    it("test invalid password", async () => {
      const integration = new mssql.integration({
        user: "sa",
        password: "wrong_pwd",
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      })
      const result = await integration.testConnection()
      expect(result).toEqual({
        connected: false,
        error: "Login failed for user 'sa'.",
      })
    })
  })
})
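These deleted specs target the pre-v9 testcontainers API: withEnv(key, value), getContainerIpAddress(), and Duration from node-duration. On testcontainers-node v9 and later, the equivalent bootstrap would look roughly like this sketch (not part of this diff):

import { GenericContainer, Wait } from "testcontainers"

// Sketch: the mssql bootstrap above on the v9+ API. withEnv became
// withEnvironment({ KEY: value }), getContainerIpAddress() became
// getHost(), and health-check durations are plain milliseconds.
async function startMssql(password: string) {
  const container = await new GenericContainer(
    "mcr.microsoft.com/mssql/server:2022-latest"
  )
    .withExposedPorts(1433)
    .withEnvironment({
      ACCEPT_EULA: "Y",
      MSSQL_SA_PASSWORD: password,
      MSSQL_PID: "Developer",
    })
    .withHealthCheck({
      test: [
        "CMD-SHELL",
        `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
      ],
      interval: 1000,
      timeout: 3000,
      retries: 20,
      startPeriod: 100,
    })
    .withWaitStrategy(Wait.forHealthCheck())
    .start()

  return { host: container.getHost(), port: container.getMappedPort(1433) }
}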