Merge branch 'master' into fix/screen-load-actions
commit e1109ff5aa
@@ -91,6 +91,9 @@ jobs:

  test-libraries:
    runs-on: ubuntu-latest
    env:
      DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
      REUSE_CONTAINERS: true
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
@@ -104,6 +107,14 @@ jobs:
        with:
          node-version: 20.x
          cache: yarn
      - name: Pull testcontainers images
        run: |
          docker pull testcontainers/ryuk:0.5.1 &
          docker pull budibase/couchdb &
          docker pull redis &

          wait $(jobs -p)

      - run: yarn --frozen-lockfile
      - name: Test
        run: |
@@ -138,9 +149,10 @@ jobs:
          fi

  test-server:
    runs-on: ubuntu-latest
    runs-on: budi-tubby-tornado-quad-core-150gb
    env:
      DEBUG: testcontainers,testcontainers:exec,testcontainers:build,testcontainers:pull
      REUSE_CONTAINERS: true
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
@@ -157,13 +169,16 @@ jobs:

      - name: Pull testcontainers images
        run: |
          docker pull mcr.microsoft.com/mssql/server:2022-latest
          docker pull mysql:8.3
          docker pull postgres:16.1-bullseye
          docker pull mongo:7.0-jammy
          docker pull mariadb:lts
          docker pull testcontainers/ryuk:0.5.1
          docker pull budibase/couchdb
          docker pull mcr.microsoft.com/mssql/server:2022-latest &
          docker pull mysql:8.3 &
          docker pull postgres:16.1-bullseye &
          docker pull mongo:7.0-jammy &
          docker pull mariadb:lts &
          docker pull testcontainers/ryuk:0.5.1 &
          docker pull budibase/couchdb &
          docker pull redis &

          wait $(jobs -p)

      - run: yarn --frozen-lockfile
@@ -1,25 +1,47 @@
import { GenericContainer, Wait } from "testcontainers"
import path from "path"
import lockfile from "proper-lockfile"

export default async function setup() {
  await new GenericContainer("budibase/couchdb")
    .withExposedPorts(5984)
    .withEnvironment({
      COUCHDB_PASSWORD: "budibase",
      COUCHDB_USER: "budibase",
    })
    .withCopyContentToContainer([
      {
        content: `
  const lockPath = path.resolve(__dirname, "globalSetup.ts")
  if (process.env.REUSE_CONTAINERS) {
    // If you run multiple tests at the same time, it's possible for the CouchDB
    // shared container to get started multiple times despite having an
    // identical reuse hash. To avoid that, we do a filesystem-based lock so
    // that only one globalSetup.ts is running at a time.
    lockfile.lockSync(lockPath)
  }

  try {
    let couchdb = new GenericContainer("budibase/couchdb")
      .withExposedPorts(5984)
      .withEnvironment({
        COUCHDB_PASSWORD: "budibase",
        COUCHDB_USER: "budibase",
      })
      .withCopyContentToContainer([
        {
          content: `
            [log]
            level = warn
          `,
          target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
        },
      ])
      .withWaitStrategy(
        Wait.forSuccessfulCommand(
          "curl http://budibase:budibase@localhost:5984/_up"
        ).withStartupTimeout(20000)
      )
      .start()
        target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
      },
    ])
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "curl http://budibase:budibase@localhost:5984/_up"
      ).withStartupTimeout(20000)
    )

    if (process.env.REUSE_CONTAINERS) {
      couchdb = couchdb.withReuse()
    }

    await couchdb.start()
  } finally {
    if (process.env.REUSE_CONTAINERS) {
      lockfile.unlockSync(lockPath)
    }
  }
}
@@ -1,5 +1,5 @@
{
  "version": "2.22.15",
  "version": "2.22.16",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@@ -7,6 +7,7 @@
    "@babel/preset-env": "^7.22.5",
    "@esbuild-plugins/tsconfig-paths": "^0.1.2",
    "@types/node": "20.10.0",
    "@types/proper-lockfile": "^4.1.4",
    "@typescript-eslint/parser": "6.9.0",
    "esbuild": "^0.18.17",
    "esbuild-node-externals": "^1.8.0",
@@ -23,6 +24,7 @@
    "nx-cloud": "16.0.5",
    "prettier": "2.8.8",
    "prettier-plugin-svelte": "^2.3.0",
    "proper-lockfile": "^4.1.2",
    "svelte": "^4.2.10",
    "svelte-eslint-parser": "^0.33.1",
    "typescript": "5.2.2",
@@ -1 +1 @@
Subproject commit 360ad2dc29c3f1fd5a1182ae258c45666b7f5eb1
Subproject commit 532c4db35cecd346b5c24f0b89ab7b397a122a36
@@ -1,6 +1,7 @@
import { DatabaseImpl } from "../../../src/db"
import { execSync } from "child_process"

const IPV4_PORT_REGEX = new RegExp(`0\\.0\\.0\\.0:(\\d+)->(\\d+)/tcp`, "g")

interface ContainerInfo {
  Command: string
  CreatedAt: string
@@ -19,7 +20,10 @@ interface ContainerInfo {
}

function getTestcontainers(): ContainerInfo[] {
  return execSync("docker ps --format json")
  // We use --format json to make sure the output is nice and machine-readable,
  // and we use --no-trunc so that the command returns full container IDs so we
  // can filter on them correctly.
  return execSync("docker ps --format json --no-trunc")
    .toString()
    .split("\n")
    .filter(x => x.length > 0)
@@ -27,32 +31,55 @@ function getTestcontainers(): ContainerInfo[] {
    .filter(x => x.Labels.includes("org.testcontainers=true"))
}

function getContainerByImage(image: string) {
  return getTestcontainers().find(x => x.Image.startsWith(image))
export function getContainerByImage(image: string) {
  const containers = getTestcontainers().filter(x => x.Image.startsWith(image))
  if (containers.length > 1) {
    let errorMessage = `Multiple containers found starting with image: "${image}"\n\n`
    for (const container of containers) {
      errorMessage += JSON.stringify(container, null, 2)
    }
    throw new Error(errorMessage)
  }
  return containers[0]
}

function getExposedPort(container: ContainerInfo, port: number) {
  const match = container.Ports.match(new RegExp(`0.0.0.0:(\\d+)->${port}/tcp`))
  if (!match) {
    return undefined
export function getContainerById(id: string) {
  return getTestcontainers().find(x => x.ID === id)
}

export interface Port {
  host: number
  container: number
}

export function getExposedV4Ports(container: ContainerInfo): Port[] {
  let ports: Port[] = []
  for (const match of container.Ports.matchAll(IPV4_PORT_REGEX)) {
    ports.push({ host: parseInt(match[1]), container: parseInt(match[2]) })
  }
  return parseInt(match[1])
  return ports
}

export function getExposedV4Port(container: ContainerInfo, port: number) {
  return getExposedV4Ports(container).find(x => x.container === port)?.host
}

export function setupEnv(...envs: any[]) {
  // We start couchdb in globalSetup.ts, in the root of the monorepo, so it
  // should be relatively safe to look for it by its image name.
  const couch = getContainerByImage("budibase/couchdb")
  if (!couch) {
    throw new Error("CouchDB container not found")
  }

  const couchPort = getExposedPort(couch, 5984)
  const couchPort = getExposedV4Port(couch, 5984)
  if (!couchPort) {
    throw new Error("CouchDB port not found")
  }

  const configs = [
    { key: "COUCH_DB_PORT", value: `${couchPort}` },
    { key: "COUCH_DB_URL", value: `http://localhost:${couchPort}` },
    { key: "COUCH_DB_URL", value: `http://127.0.0.1:${couchPort}` },
  ]

  for (const config of configs.filter(x => !!x.value)) {
@@ -60,7 +87,4 @@ export function setupEnv(...envs: any[]) {
    env._set(config.key, config.value)
  }
}

// @ts-expect-error
DatabaseImpl.nano = undefined
}
@@ -49,7 +49,10 @@
    label: "Long Form Text",
    value: FIELDS.LONGFORM.type,
  },

  {
    label: "Attachment",
    value: FIELDS.ATTACHMENT.type,
  },
  {
    label: "User",
    value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
@@ -23,6 +23,6 @@
    label="Components"
    value={$componentStore.mountedComponentCount}
  />
  <DevToolsStat label="User" value={$authStore.email} />
  <DevToolsStat label="Role" value={$authStore.roleId} />
  <DevToolsStat label="User" value={$authStore?.email} />
  <DevToolsStat label="Role" value={$authStore?.roleId} />
</Layout>
@@ -1 +1 @@
Subproject commit 6b62505be0c0b50a57b4f4980d86541ebdc86428
Subproject commit f8e8f87bd52081e1303a5ae92c432ea5b38f3bb4
@@ -1,25 +0,0 @@
const query = jest.fn(() => ({
  rows: [
    {
      a: "string",
      b: 1,
    },
  ],
}))

class Client {
  query = query
  end = jest.fn(cb => {
    if (cb) cb()
  })
  connect = jest.fn()
  release = jest.fn()
}

const on = jest.fn()

module.exports = {
  Client,
  queryMock: query,
  on,
}
@@ -42,12 +42,6 @@ if (fs.existsSync("../pro/src")) {

const config: Config.InitialOptions = {
  projects: [
    {
      ...baseConfig,
      displayName: "sequential test",
      testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
      runner: "jest-serial-runner",
    },
    {
      ...baseConfig,
      testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
@@ -60,6 +54,9 @@ const config: Config.InitialOptions = {
    "!src/db/views/staticViews.*",
    "!src/**/*.spec.{js,ts}",
    "!src/tests/**/*.{js,ts}",
    // The use of coverage in the JS runner breaks tests by inserting
    // coverage functions into code that will run inside of the isolate.
    "!src/jsRunner/**/*.{js,ts}",
  ],
  coverageReporters: ["lcov", "json", "clover"],
}
@@ -143,7 +143,7 @@
    "jest": "29.7.0",
    "jest-openapi": "0.14.2",
    "jest-runner": "29.7.0",
    "jest-serial-runner": "1.2.1",
    "nock": "13.5.4",
    "nodemon": "2.0.15",
    "openapi-typescript": "5.2.0",
    "path-to-regexp": "6.2.0",
@@ -4,11 +4,9 @@ set -e
if [[ -n $CI ]]
then
  export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
  echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
  jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
  jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
  # --maxWorkers performs better in development
  export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
  echo "jest --coverage --maxWorkers=2 --forceExit $@"
  jest --coverage --maxWorkers=2 --forceExit $@
fi
@@ -1,6 +1,6 @@
import { getQueryParams, getTableParams } from "../../db/utils"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { invalidateCachedVariable } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
import {
  BuildSchemaFromSourceRequest,
@@ -121,7 +121,7 @@ async function invalidateVariables(
      }
    })
  }
  await invalidateDynamicVariables(toInvalidate)
  await invalidateCachedVariable(toInvalidate)
}

export async function update(
@@ -2,7 +2,7 @@ import { generateQueryID } from "../../../db/utils"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
import { invalidateDynamicVariables } from "../../../threads/utils"
import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import sdk from "../../../sdk"
@@ -281,49 +281,52 @@ export async function preview(
    return { previewSchema, nestedSchemaFields }
  }

  const inputs: QueryEvent = {
    appId: ctx.appId,
    queryVerb: query.queryVerb,
    fields: query.fields,
    parameters: enrichParameters(query),
    transformer: query.transformer,
    schema: query.schema,
    nullDefaultSupport: query.nullDefaultSupport,
    queryId,
    datasource,
    // have to pass down to the thread runner - can't put into context now
    environmentVariables: envVars,
    ctx: {
      user: ctx.user,
      auth: { ...authConfigCtx },
    },
  }

  let queryResponse: QueryResponse
  try {
    const inputs: QueryEvent = {
      appId: ctx.appId,
      queryVerb: query.queryVerb,
      fields: query.fields,
      parameters: enrichParameters(query),
      transformer: query.transformer,
      schema: query.schema,
      nullDefaultSupport: query.nullDefaultSupport,
      queryId,
      datasource,
      // have to pass down to the thread runner - can't put into context now
      environmentVariables: envVars,
      ctx: {
        user: ctx.user,
        auth: { ...authConfigCtx },
      },
    }

    const { rows, keys, info, extra } = await Runner.run<QueryResponse>(inputs)
    const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)

    // if existing schema, update to include any previous schema keys
    if (existingSchema) {
      for (let key of Object.keys(previewSchema)) {
        if (existingSchema[key]) {
          previewSchema[key] = existingSchema[key]
        }
      }
    }
    // remove configuration before sending event
    delete datasource.config
    await events.query.previewed(datasource, ctx.request.body)
    ctx.body = {
      rows,
      nestedSchemaFields,
      schema: previewSchema,
      info,
      extra,
    }
    queryResponse = await Runner.run<QueryResponse>(inputs)
  } catch (err: any) {
    ctx.throw(400, err)
  }

  const { rows, keys, info, extra } = queryResponse
  const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)

  // if existing schema, update to include any previous schema keys
  if (existingSchema) {
    for (let key of Object.keys(previewSchema)) {
      if (existingSchema[key]) {
        previewSchema[key] = existingSchema[key]
      }
    }
  }
  // remove configuration before sending event
  delete datasource.config
  await events.query.previewed(datasource, ctx.request.body)
  ctx.body = {
    rows,
    nestedSchemaFields,
    schema: previewSchema,
    info,
    extra,
  }
}

async function execute(
@@ -416,7 +419,7 @@ const removeDynamicVariables = async (queryId: string) => {
    const variablesToDelete = dynamicVariables!.filter(
      (dv: any) => dv.queryId === queryId
    )
    await invalidateDynamicVariables(variablesToDelete)
    await invalidateCachedVariable(variablesToDelete)
  }
}
@@ -1,7 +1,5 @@
const setup = require("../../tests/utilities")

jest.setTimeout(30000)

describe("/metrics", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
@@ -1,7 +1,6 @@
import * as setup from "./utilities"
import path from "path"

jest.setTimeout(15000)
const PASSWORD = "testtest"

describe("/applications/:appId/import", () => {
@@ -23,8 +23,6 @@ let {
  collectAutomation,
} = setup.structures

jest.setTimeout(30000)

describe("/automations", () => {
  let request = setup.getRequest()
  let config = setup.getConfig()
@@ -1,18 +1,16 @@
jest.mock("pg")
import * as setup from "./utilities"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../threads/utils"
import { getCachedVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"

import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview } from "@budibase/types"
import { QueryPreview, SourceName } from "@budibase/types"

tk.freeze(mocks.date.MOCK_DATE)

let { basicDatasource } = setup.structures
const pg = require("pg")

describe("/datasources", () => {
  let request = setup.getRequest()
@@ -42,6 +40,23 @@ describe("/datasources", () => {
      expect(res.body.errors).toEqual({})
      expect(events.datasource.created).toHaveBeenCalledTimes(1)
    })

    it("should fail if the datasource is invalid", async () => {
      await config.api.datasource.create(
        {
          name: "Test",
          type: "test",
          source: "invalid" as SourceName,
          config: {},
        },
        {
          status: 500,
          body: {
            message: "No datasource implementation found.",
          },
        }
      )
    })
  })

  describe("update", () => {
@@ -74,7 +89,7 @@ describe("/datasources", () => {
        schema: {},
        readable: true,
      }
      return config.api.query.previewQuery(queryPreview)
      return config.api.query.preview(queryPreview)
    }

    it("should invalidate changed or removed variables", async () => {
@@ -85,10 +100,7 @@ describe("/datasources", () => {
        queryString: "test={{ variable3 }}",
      })
      // check variables in cache
      let contents = await checkCacheForDynamicVariable(
        query._id!,
        "variable3"
      )
      let contents = await getCachedVariable(query._id!, "variable3")
      expect(contents.rows.length).toEqual(1)

      // update the datasource to remove the variables
@@ -102,7 +114,7 @@ describe("/datasources", () => {
      expect(res.body.errors).toBeUndefined()

      // check variables no longer in cache
      contents = await checkCacheForDynamicVariable(query._id!, "variable3")
      contents = await getCachedVariable(query._id!, "variable3")
      expect(contents).toBe(null)
    })
  })
@@ -149,35 +161,6 @@ describe("/datasources", () => {
    })
  })

  describe("query", () => {
    it("should be able to query a pg datasource", async () => {
      const res = await request
        .post(`/api/datasources/query`)
        .send({
          endpoint: {
            datasourceId: datasource._id,
            operation: "READ",
            // table name below
            entityId: "users",
          },
          resource: {
            fields: ["users.name", "users.age"],
          },
          filters: {
            string: {
              name: "John",
            },
          },
        })
        .set(config.defaultHeaders())
        .expect(200)
      // this is mock data, can't test it
      expect(res.body).toBeDefined()
      const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
      expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
    })
  })

  describe("destroy", () => {
    beforeAll(setupTest)

File diff suppressed because it is too large
@@ -1,14 +1,17 @@
import { Datasource, Query } from "@budibase/types"
import * as setup from "../utilities"
import { databaseTestProviders } from "../../../../integrations/tests/utils"
import { MongoClient, type Collection, BSON } from "mongodb"

const collection = "test_collection"
import {
  DatabaseName,
  getDatasource,
} from "../../../../integrations/tests/utils"
import { MongoClient, type Collection, BSON, Db } from "mongodb"
import { generator } from "@budibase/backend-core/tests"

const expectValidId = expect.stringMatching(/^\w{24}$/)
const expectValidBsonObjectId = expect.any(BSON.ObjectId)

describe("/queries", () => {
  let collection: string
  let config = setup.getConfig()
  let datasource: Datasource

@@ -37,8 +40,7 @@ describe("/queries", () => {
  async function withClient<T>(
    callback: (client: MongoClient) => Promise<T>
  ): Promise<T> {
    const ds = await databaseTestProviders.mongodb.datasource()
    const client = new MongoClient(ds.config!.connectionString)
    const client = new MongoClient(datasource.config!.connectionString)
    await client.connect()
    try {
      return await callback(client)
@@ -47,30 +49,33 @@ describe("/queries", () => {
    }
  }

  async function withDb<T>(callback: (db: Db) => Promise<T>): Promise<T> {
    return await withClient(async client => {
      return await callback(client.db(datasource.config!.db))
    })
  }

  async function withCollection<T>(
    callback: (collection: Collection) => Promise<T>
  ): Promise<T> {
    return await withClient(async client => {
      const db = client.db(
        (await databaseTestProviders.mongodb.datasource()).config!.db
      )
    return await withDb(async db => {
      return await callback(db.collection(collection))
    })
  }

  afterAll(async () => {
    await databaseTestProviders.mongodb.stop()
    setup.afterAll()
  })

  beforeAll(async () => {
    await config.init()
    datasource = await config.api.datasource.create(
      await databaseTestProviders.mongodb.datasource()
      await getDatasource(DatabaseName.MONGODB)
    )
  })

  beforeEach(async () => {
    collection = generator.guid()
    await withCollection(async collection => {
      await collection.insertMany([
        { name: "one" },
@ -83,345 +88,491 @@ describe("/queries", () => {
|
|||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await withCollection(async collection => {
|
||||
await collection.drop()
|
||||
})
|
||||
await withCollection(collection => collection.drop())
|
||||
})
|
||||
|
||||
it("should execute a count query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
describe("preview", () => {
|
||||
it("should generate a nested schema with an empty array", async () => {
|
||||
const name = generator.guid()
|
||||
await withCollection(
|
||||
async collection => await collection.insertOne({ name, nested: [] })
|
||||
)
|
||||
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {},
|
||||
rows: [{ _id: expect.any(String), name, nested: [] }],
|
||||
schema: {
|
||||
_id: {
|
||||
type: "string",
|
||||
name: "_id",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
nested: {
|
||||
type: "array",
|
||||
name: "nested",
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
it("should generate a nested schema based on all of the nested items", async () => {
|
||||
const name = generator.guid()
|
||||
const item = {
|
||||
name,
|
||||
contacts: [
|
||||
{
|
||||
address: "123 Lane",
|
||||
},
|
||||
{
|
||||
address: "456 Drive",
|
||||
},
|
||||
{
|
||||
postcode: "BT1 12N",
|
||||
lat: 54.59,
|
||||
long: -5.92,
|
||||
},
|
||||
{
|
||||
city: "Belfast",
|
||||
},
|
||||
{
|
||||
address: "789 Avenue",
|
||||
phoneNumber: "0800-999-5555",
|
||||
},
|
||||
{
|
||||
name: "Name",
|
||||
isActive: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
expect(result.data).toEqual([{ value: 5 }])
|
||||
})
|
||||
await withCollection(collection => collection.insertOne(item))
|
||||
|
||||
it("should execute a count query with a transformer", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
const preview = await config.api.query.preview({
|
||||
name: "New Query",
|
||||
datasourceId: datasource._id!,
|
||||
fields: {
|
||||
json: {
|
||||
name: { $eq: name },
|
||||
},
|
||||
extra: {
|
||||
collection,
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
transformer: "return data + 1",
|
||||
})
|
||||
schema: {},
|
||||
queryVerb: "read",
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
readable: true,
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 6 }])
|
||||
})
|
||||
|
||||
it("should execute a find query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "find",
|
||||
expect(preview).toEqual({
|
||||
nestedSchemaFields: {
|
||||
contacts: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "address",
|
||||
},
|
||||
postcode: {
|
||||
type: "string",
|
||||
name: "postcode",
|
||||
},
|
||||
lat: {
|
||||
type: "number",
|
||||
name: "lat",
|
||||
},
|
||||
long: {
|
||||
type: "number",
|
||||
name: "long",
|
||||
},
|
||||
city: {
|
||||
type: "string",
|
||||
name: "city",
|
||||
},
|
||||
phoneNumber: {
|
||||
type: "string",
|
||||
name: "phoneNumber",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
isActive: {
|
||||
type: "boolean",
|
||||
name: "isActive",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{ _id: expectValidId, name: "one" },
|
||||
{ _id: expectValidId, name: "two" },
|
||||
{ _id: expectValidId, name: "three" },
|
||||
{ _id: expectValidId, name: "four" },
|
||||
{ _id: expectValidId, name: "five" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should execute a findOne query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "findOne",
|
||||
rows: [{ ...item, _id: expect.any(String) }],
|
||||
schema: {
|
||||
_id: { type: "string", name: "_id" },
|
||||
name: { type: "string", name: "name" },
|
||||
contacts: { type: "json", name: "contacts", subtype: "array" },
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
|
||||
})
|
||||
|
||||
it("should execute a findOneAndUpdate query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "one" } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "findOneAndUpdate",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
lastErrorObject: { n: 1, updatedExisting: true },
|
||||
ok: 1,
|
||||
value: { _id: expectValidId, name: "one" },
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
expect(await collection.countDocuments()).toBe(5)
|
||||
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a distinct query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: "name",
|
||||
extra: {
|
||||
actionType: "distinct",
|
||||
describe("execute", () => {
|
||||
it("a count query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
const values = result.data.map(o => o.value).sort()
|
||||
expect(values).toEqual(["five", "four", "one", "three", "two"])
|
||||
})
|
||||
|
||||
it("should execute a create query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { foo: "{{ foo }}" },
|
||||
extra: {
|
||||
actionType: "insertOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "create",
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "default",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { foo: "bar" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
insertedId: expectValidId,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ foo: { $eq: "bar" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
foo: "bar",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute a delete query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { name: { $eq: "{{ name }}" } },
|
||||
extra: {
|
||||
actionType: "deleteOne",
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 5 }])
|
||||
})
|
||||
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c =>
|
||||
c.insertOne({ name: "one" })
|
||||
)
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
parameters: [
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
],
|
||||
})
|
||||
])
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 1,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(doc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should execute an update query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "{{ name }}" } },
|
||||
update: { $set: { name: "{{ newName }}" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one", newName: "newOne" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newOne" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newOne",
|
||||
})
|
||||
|
||||
const oldDoc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(oldDoc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to updateOne by ObjectId", async () => {
|
||||
const insertResult = await withCollection(c => c.insertOne({ name: "one" }))
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: insertResult.insertedId,
|
||||
name: "newName",
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete all records", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "deleteMany",
|
||||
it("a count query with a transformer", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "count",
|
||||
},
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
transformer: "return data + 1",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([{ value: 6 }])
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 5,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update all documents", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: {},
|
||||
update: { $set: { name: "newName" } },
|
||||
it("a find query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "find",
|
||||
},
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{ _id: expectValidId, name: "one" },
|
||||
{ _id: expectValidId, name: "two" },
|
||||
{ _id: expectValidId, name: "three" },
|
||||
{ _id: expectValidId, name: "four" },
|
||||
{ _id: expectValidId, name: "five" },
|
||||
])
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
it("a findOne query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "findOne",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 5,
|
||||
modifiedCount: 5,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(5)
|
||||
for (const doc of docs) {
|
||||
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
|
||||
})
|
||||
|
||||
it("a findOneAndUpdate query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "one" } },
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "findOneAndUpdate",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
lastErrorObject: { n: 1, updatedExisting: true },
|
||||
ok: 1,
|
||||
value: { _id: expectValidId, name: "one" },
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
expect(await collection.countDocuments()).toBe(5)
|
||||
|
||||
const doc = await collection.findOne({ name: { $eq: "newName" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
it("a distinct query", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: "name",
|
||||
extra: {
|
||||
actionType: "distinct",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
const values = result.data.map(o => o.value).sort()
|
||||
expect(values).toEqual(["five", "four", "one", "three", "two"])
|
||||
})
|
||||
|
||||
it("a create query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { foo: "{{ foo }}" },
|
||||
extra: {
|
||||
actionType: "insertOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "create",
|
||||
parameters: [
|
||||
{
|
||||
name: "foo",
|
||||
default: "default",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { foo: "bar" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
insertedId: expectValidId,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ foo: { $eq: "bar" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
foo: "bar",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it("a delete query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: { name: { $eq: "{{ name }}" } },
|
||||
extra: {
|
||||
actionType: "deleteOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 1,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(doc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("an update query with parameters", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: { name: { $eq: "{{ name }}" } },
|
||||
update: { $set: { name: "{{ newName }}" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateOne",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
parameters: [
|
||||
{
|
||||
name: "name",
|
||||
default: "",
|
||||
},
|
||||
{
|
||||
name: "newName",
|
||||
default: "",
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!, {
|
||||
parameters: { name: "one", newName: "newOne" },
|
||||
})
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 1,
|
||||
modifiedCount: 1,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const doc = await collection.findOne({ name: { $eq: "newOne" } })
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newOne",
|
||||
})
|
||||
|
||||
const oldDoc = await collection.findOne({ name: { $eq: "one" } })
|
||||
expect(oldDoc).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to delete all records", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {},
|
||||
extra: {
|
||||
actionType: "deleteMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "delete",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
deletedCount: 5,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
it("should be able to update all documents", async () => {
|
||||
const query = await createQuery({
|
||||
fields: {
|
||||
json: {
|
||||
filter: {},
|
||||
update: { $set: { name: "newName" } },
|
||||
},
|
||||
extra: {
|
||||
actionType: "updateMany",
|
||||
},
|
||||
},
|
||||
queryVerb: "update",
|
||||
})
|
||||
|
||||
const result = await config.api.query.execute(query._id!)
|
||||
|
||||
expect(result.data).toEqual([
|
||||
{
|
||||
acknowledged: true,
|
||||
matchedCount: 5,
|
||||
modifiedCount: 5,
|
||||
upsertedCount: 0,
|
||||
upsertedId: null,
|
||||
},
|
||||
])
|
||||
|
||||
await withCollection(async collection => {
|
||||
const docs = await collection.find().toArray()
|
||||
expect(docs).toHaveLength(5)
|
||||
for (const doc of docs) {
|
||||
expect(doc).toEqual({
|
||||
_id: expectValidBsonObjectId,
|
||||
name: "newName",
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
|
|
@@ -0,0 +1,47 @@
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Datasource, Query, SourceName } from "@budibase/types"

describe("query permissions", () => {
  let config: TestConfiguration
  let datasource: Datasource
  let query: Query

  beforeAll(async () => {
    config = setup.getConfig()
    await config.init()
    datasource = await config.api.datasource.create({
      name: "test datasource",
      type: "test",
      source: SourceName.REST,
      config: {},
    })
    query = await config.api.query.save({
      name: "test query",
      datasourceId: datasource._id!,
      parameters: [],
      fields: {},
      transformer: "",
      schema: {},
      readable: true,
      queryVerb: "read",
    })
  })

  it("delete should require builder", async () => {
    await checkBuilderEndpoint({
      config,
      method: "DELETE",
      url: `/api/queries/${query._id}/${query._rev}`,
    })
  })

  it("preview should require builder", async () => {
    await checkBuilderEndpoint({
      config,
      method: "POST",
      url: `/api/queries/preview`,
    })
  })
})
@@ -1,774 +0,0 @@
import tk from "timekeeper"

const pg = require("pg")

// Mock out postgres for this
jest.mock("pg")
jest.mock("node-fetch")

// Mock isProdAppID to we can later mock the implementation and pretend we are
// using prod app IDs
jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    db: {
      ...core.db,
      isProdAppID: jest.fn(),
    },
  }
})
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../../threads/utils"

const { basicQuery, basicDatasource } = setup.structures
import { events, db as dbCore } from "@budibase/backend-core"
|
||||
import {
|
||||
Datasource,
|
||||
Query,
|
||||
SourceName,
|
||||
QueryPreview,
|
||||
QueryParameter,
|
||||
} from "@budibase/types"
|
||||
|
||||
tk.freeze(Date.now())
|
||||
|
||||
const mockIsProdAppID = dbCore.isProdAppID as jest.MockedFunction<
|
||||
typeof dbCore.isProdAppID
|
||||
>
|
||||
|
||||
describe("/queries", () => {
|
||||
let request = setup.getRequest()
|
||||
let config = setup.getConfig()
|
||||
let datasource: Datasource & Required<Pick<Datasource, "_id">>, query: Query
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
const setupTest = async () => {
|
||||
await config.init()
|
||||
datasource = await config.createDatasource()
|
||||
query = await config.createQuery()
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
const createQuery = async (query: Query) => {
|
||||
return request
|
||||
.post(`/api/queries`)
|
||||
.send(query)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
}
|
||||
|
||||
describe("create", () => {
|
||||
it("should create a new query", async () => {
|
||||
const { _id } = await config.createDatasource()
|
||||
const query = basicQuery(_id)
|
||||
jest.clearAllMocks()
|
||||
const res = await createQuery(query)
|
||||
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
_rev: res.body._rev,
|
||||
_id: res.body._id,
|
||||
...query,
|
||||
nullDefaultSupport: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
})
|
||||
expect(events.query.created).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.updated).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("update", () => {
|
||||
it("should update query", async () => {
|
||||
const { _id } = await config.createDatasource()
|
||||
const query = basicQuery(_id)
|
||||
const res = await createQuery(query)
|
||||
jest.clearAllMocks()
|
||||
query._id = res.body._id
|
||||
query._rev = res.body._rev
|
||||
await createQuery(query)
|
||||
|
||||
expect((res as any).res.statusMessage).toEqual(
|
||||
`Query ${query.name} saved successfully.`
|
||||
)
|
||||
expect(res.body).toEqual({
|
||||
_rev: res.body._rev,
|
||||
_id: res.body._id,
|
||||
...query,
|
||||
nullDefaultSupport: true,
|
||||
createdAt: new Date().toISOString(),
|
||||
updatedAt: new Date().toISOString(),
|
||||
})
|
||||
expect(events.query.created).not.toHaveBeenCalled()
|
||||
expect(events.query.updated).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("fetch", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("returns all the queries from the server", async () => {
|
||||
const res = await request
|
||||
.get(`/api/queries`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
|
||||
const queries = res.body
|
||||
expect(queries).toEqual([
|
||||
{
|
||||
_rev: query._rev,
|
||||
_id: query._id,
|
||||
createdAt: new Date().toISOString(),
|
||||
...basicQuery(datasource._id),
|
||||
nullDefaultSupport: true,
|
||||
updatedAt: new Date().toISOString(),
|
||||
readable: true,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "GET",
|
||||
url: `/api/datasources`,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("find", () => {
|
||||
it("should find a query in builder", async () => {
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body._id).toEqual(query._id)
|
||||
})
|
||||
|
||||
it("should find a query in cloud", async () => {
|
||||
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(await config.defaultHeaders())
|
||||
.expect(200)
|
||||
.expect("Content-Type", /json/)
|
||||
expect(res.body.fields).toBeDefined()
|
||||
expect(res.body.parameters).toBeDefined()
|
||||
expect(res.body.schema).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
it("should remove sensitive info for prod apps", async () => {
|
||||
// Mock isProdAppID to pretend we are using a prod app
|
||||
mockIsProdAppID.mockClear()
|
||||
mockIsProdAppID.mockImplementation(() => true)
|
||||
|
||||
const query = await config.createQuery()
|
||||
const res = await request
|
||||
.get(`/api/queries/${query._id}`)
|
||||
.set(await config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body._id).toEqual(query._id)
|
||||
expect(res.body.fields).toBeUndefined()
|
||||
expect(res.body.parameters).toBeUndefined()
|
||||
expect(res.body.schema).toBeDefined()
|
||||
|
||||
// Reset isProdAppID mock
|
||||
expect(dbCore.isProdAppID).toHaveBeenCalledTimes(1)
|
||||
mockIsProdAppID.mockImplementation(() => false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("destroy", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("deletes a query and returns a success message", async () => {
|
||||
await request
|
||||
.delete(`/api/queries/${query._id}/${query._rev}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
const res = await request
|
||||
.get(`/api/queries`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
|
||||
expect(res.body).toEqual([])
|
||||
expect(events.query.deleted).toHaveBeenCalledTimes(1)
|
||||
expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
const query = await config.createQuery()
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "DELETE",
|
||||
url: `/api/queries/${query._id}/${query._rev}`,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("preview", () => {
|
||||
it("should be able to preview the query", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
queryVerb: "read",
|
||||
fields: {},
|
||||
parameters: [],
|
||||
transformer: "return data",
|
||||
name: datasource.name!,
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
// these responses come from the mock
|
||||
expect(responseBody.schema).toEqual({
|
||||
a: { type: "string", name: "a" },
|
||||
b: { type: "number", name: "b" },
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
expect(events.query.previewed).toHaveBeenCalledTimes(1)
|
||||
delete datasource.config
|
||||
expect(events.query.previewed).toHaveBeenCalledWith(datasource, {
|
||||
...queryPreview,
|
||||
nullDefaultSupport: true,
|
||||
})
|
||||
})
|
||||
|
||||
it("should apply authorization to endpoint", async () => {
|
||||
await checkBuilderEndpoint({
|
||||
config,
|
||||
method: "POST",
|
||||
url: `/api/queries/preview`,
|
||||
})
|
||||
})
|
||||
|
||||
it("should not error when trying to generate a nested schema for an empty array", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
parameters: [],
|
||||
fields: {},
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const rows = [
|
||||
{
|
||||
contacts: [],
|
||||
},
|
||||
]
|
||||
pg.queryMock.mockImplementation(() => ({
|
||||
rows,
|
||||
}))
|
||||
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
expect(responseBody).toEqual({
|
||||
nestedSchemaFields: {},
|
||||
rows,
|
||||
schema: {
|
||||
contacts: { type: "array", name: "contacts" },
|
||||
},
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
delete datasource.config
|
||||
})
|
||||
|
||||
it("should generate a nested schema based on all the nested items", async () => {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id,
|
||||
parameters: [],
|
||||
fields: {},
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
const rows = [
|
||||
{
|
||||
contacts: [
|
||||
{
|
||||
address: "123 Lane",
|
||||
},
|
||||
{
|
||||
address: "456 Drive",
|
||||
},
|
||||
{
|
||||
postcode: "BT1 12N",
|
||||
lat: 54.59,
|
||||
long: -5.92,
|
||||
},
|
||||
{
|
||||
city: "Belfast",
|
||||
},
|
||||
{
|
||||
address: "789 Avenue",
|
||||
phoneNumber: "0800-999-5555",
|
||||
},
|
||||
{
|
||||
name: "Name",
|
||||
isActive: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
pg.queryMock.mockImplementation(() => ({
|
||||
rows,
|
||||
}))
|
||||
|
||||
const responseBody = await config.api.query.previewQuery(queryPreview)
|
||||
expect(responseBody).toEqual({
|
||||
nestedSchemaFields: {
|
||||
contacts: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "address",
|
||||
},
|
||||
postcode: {
|
||||
type: "string",
|
||||
name: "postcode",
|
||||
},
|
||||
lat: {
|
||||
type: "number",
|
||||
name: "lat",
|
||||
},
|
||||
long: {
|
||||
type: "number",
|
||||
name: "long",
|
||||
},
|
||||
city: {
|
||||
type: "string",
|
||||
name: "city",
|
||||
},
|
||||
phoneNumber: {
|
||||
type: "string",
|
||||
name: "phoneNumber",
|
||||
},
|
||||
name: {
|
||||
type: "string",
|
||||
name: "name",
|
||||
},
|
||||
isActive: {
|
||||
type: "boolean",
|
||||
name: "isActive",
|
||||
},
|
||||
},
|
||||
},
|
||||
rows,
|
||||
schema: {
|
||||
contacts: { type: "json", name: "contacts", subtype: "array" },
|
||||
},
|
||||
})
|
||||
expect(responseBody.rows.length).toEqual(1)
|
||||
delete datasource.config
|
||||
})
|
||||
})
|
||||
|
||||
describe("execute", () => {
|
||||
beforeEach(async () => {
|
||||
await setupTest()
|
||||
})
|
||||
|
||||
it("should be able to execute the query", async () => {
|
||||
const res = await request
|
||||
.post(`/api/queries/${query._id}`)
|
||||
.send({
|
||||
parameters: {},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect("Content-Type", /json/)
|
||||
.expect(200)
|
||||
expect(res.body.length).toEqual(1)
|
||||
})
|
||||
|
||||
it("should fail with invalid integration type", async () => {
|
||||
const datasource: Datasource = {
|
||||
...basicDatasource().datasource,
|
||||
source: "INVALID_INTEGRATION" as SourceName,
|
||||
}
|
||||
await config.api.datasource.create(datasource, {
|
||||
status: 500,
|
||||
body: {
|
||||
message: "No datasource implementation found.",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("shouldn't allow handlebars to be passed as parameters", async () => {
|
||||
const res = await request
|
||||
.post(`/api/queries/${query._id}`)
|
||||
.send({
|
||||
parameters: {
|
||||
a: "{{ 'test' }}",
|
||||
},
|
||||
})
|
||||
.set(config.defaultHeaders())
|
||||
.expect(400)
|
||||
expect(res.body.message).toEqual(
|
||||
"Parameter 'a' input contains a handlebars binding - this is not allowed."
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("variables", () => {
|
||||
async function preview(datasource: Datasource, fields: any) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: [],
|
||||
fields,
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
it("should work with static variables", async () => {
|
||||
const datasource = await config.restDatasource({
|
||||
staticVariables: {
|
||||
variable: "google",
|
||||
variable2: "1",
|
||||
},
|
||||
})
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.{{ variable }}.com",
|
||||
queryString: "test={{ variable2 }}",
|
||||
})
|
||||
// these responses come from the mock
|
||||
expect(responseBody.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
|
||||
})
|
||||
|
||||
it("should work with dynamic variables", async () => {
|
||||
const { datasource } = await config.dynamicVariableDatasource()
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(responseBody.schema).toEqual({
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
value: { type: "string", name: "value" },
|
||||
})
|
||||
expect(responseBody.rows[0].url).toContain("doctype%20html")
|
||||
})
|
||||
|
||||
it("check that it automatically retries on fail with cached dynamics", async () => {
|
||||
const { datasource, query: base } =
|
||||
await config.dynamicVariableDatasource()
|
||||
// preview once to cache
|
||||
await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
// check it's in cache
|
||||
const contents = await checkCacheForDynamicVariable(
|
||||
base._id!,
|
||||
"variable3"
|
||||
)
|
||||
expect(contents.rows.length).toEqual(1)
|
||||
const responseBody = await preview(datasource, {
|
||||
path: "www.failonce.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
expect(responseBody.schema).toEqual({
|
||||
fails: { type: "number", name: "fails" },
|
||||
opts: { type: "json", name: "opts" },
|
||||
url: { type: "string", name: "url" },
|
||||
})
|
||||
expect(responseBody.rows[0].fails).toEqual(1)
|
||||
})
|
||||
|
||||
it("deletes variables when linked query is deleted", async () => {
|
||||
const { datasource, query: base } =
|
||||
await config.dynamicVariableDatasource()
|
||||
// preview once to cache
|
||||
await preview(datasource, {
|
||||
path: "www.google.com",
|
||||
queryString: "test={{ variable3 }}",
|
||||
})
|
||||
// check it's in cache
|
||||
let contents = await checkCacheForDynamicVariable(base._id!, "variable3")
|
||||
expect(contents.rows.length).toEqual(1)
|
||||
|
||||
// delete the query
|
||||
await request
|
||||
.delete(`/api/queries/${base._id}/${base._rev}`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect(200)
|
||||
|
||||
// check variables no longer in cache
|
||||
contents = await checkCacheForDynamicVariable(base._id!, "variable3")
|
||||
expect(contents).toBe(null)
|
||||
})
|
||||
})
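The variables suite relies on dynamic variables: a value produced by running a linked query, cached against that query's id, and evicted when the query is deleted. On a REST datasource they are declared in the config, roughly as below (a sketch matching the shape used later in this change; baseQuery stands in for the linked query):

const datasourceWithDynamicVariable: Partial<Datasource> = {
  config: {
    dynamicVariables: [
      {
        queryId: baseQuery._id!, // query whose response feeds the variable
        name: "variable3",
        value: "{{ data[0].name }}", // binding evaluated against that response
      },
    ],
  },
}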
|
||||
|
||||
describe("Current User Request Mapping", () => {
|
||||
async function previewGet(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: QueryParameter[]
|
||||
) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: params,
|
||||
fields,
|
||||
queryVerb: "read",
|
||||
name: datasource.name!,
|
||||
transformer: "return data",
|
||||
schema: {},
|
||||
readable: true,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
async function previewPost(
|
||||
datasource: Datasource,
|
||||
fields: any,
|
||||
params: QueryParameter[]
|
||||
) {
|
||||
const queryPreview: QueryPreview = {
|
||||
datasourceId: datasource._id!,
|
||||
parameters: params,
|
||||
fields,
|
||||
queryVerb: "create",
|
||||
name: datasource.name!,
|
||||
transformer: null,
|
||||
schema: {},
|
||||
readable: false,
|
||||
}
|
||||
return await config.api.query.previewQuery(queryPreview)
|
||||
}
|
||||
|
||||
it("should parse global and query level header mappings", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
|
||||
const datasource = await config.restDatasource({
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
})
|
||||
const responseBody = await previewGet(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "email={{[user].[email]}}",
|
||||
headers: {
|
||||
queryHdr: "{{[user].[firstName]}}",
|
||||
secondHdr: "1234",
|
||||
},
|
||||
},
|
||||
[]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
expect(parsedRequest.opts.headers).toEqual({
|
||||
test: "headerVal",
|
||||
emailHdr: userDetails.email,
|
||||
queryHdr: userDetails.firstName,
|
||||
secondHdr: "1234",
|
||||
})
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?email=" + userDetails.email.replace("@", "%40")
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user to query parameters", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewGet(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString:
|
||||
"test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
|
||||
},
|
||||
[
|
||||
{ name: "myEmail", default: "{{[user].[email]}}" },
|
||||
{ name: "myName", default: "{{[user].[firstName]}}" },
|
||||
{ name: "testParam", default: "1234" },
|
||||
]
|
||||
)
|
||||
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?test=" +
|
||||
userDetails.email.replace("@", "%40") +
|
||||
"&testName=" +
|
||||
userDetails.firstName +
|
||||
"&testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - plain text", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
"This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
|
||||
bodyType: "text",
|
||||
},
|
||||
[{ name: "testParam", default: "1234" }]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
expect(parsedRequest.opts.body).toEqual(
|
||||
`This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
|
||||
)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - json", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "json",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
|
||||
expect(parsedRequest.opts.body).toEqual(test)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - xml", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
"<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
|
||||
"<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
|
||||
bodyType: "xml",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userId", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>${userDetails.firstName}</ref> <somestring>testing</somestring> </note>`
|
||||
|
||||
expect(parsedRequest.opts.body).toEqual(test)
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - form-data", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "form",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
|
||||
const emailData = parsedRequest.opts.body._streams[1]
|
||||
expect(emailData).toEqual(userDetails.email)
|
||||
|
||||
const queryCodeData = parsedRequest.opts.body._streams[4]
|
||||
expect(queryCodeData).toEqual("1234")
|
||||
|
||||
const userRef = parsedRequest.opts.body._streams[7]
|
||||
expect(userRef).toEqual(userDetails.firstName)
|
||||
|
||||
expect(responseBody.rows[0].url).toEqual(
|
||||
"http://www.google.com?testParam=1234"
|
||||
)
|
||||
})
|
||||
|
||||
it("should bind the current user the request body - encoded", async () => {
|
||||
const userDetails = config.getUserDetails()
|
||||
const datasource = await config.restDatasource()
|
||||
|
||||
const responseBody = await previewPost(
|
||||
datasource,
|
||||
{
|
||||
path: "www.google.com",
|
||||
queryString: "testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
bodyType: "encoded",
|
||||
},
|
||||
[
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
]
|
||||
)
|
||||
const parsedRequest = JSON.parse(responseBody.extra.raw)
|
||||
|
||||
expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
|
||||
expect(parsedRequest.opts.body.queryCode).toEqual("1234")
|
||||
expect(parsedRequest.opts.body.userRef).toEqual(userDetails.firstName)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -0,0 +1,406 @@
|
|||
import * as setup from "../utilities"
|
||||
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
|
||||
import { Datasource, SourceName } from "@budibase/types"
|
||||
import { getCachedVariable } from "../../../../threads/utils"
|
||||
import nock from "nock"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
|
||||
jest.unmock("node-fetch")
|
||||
|
||||
describe("rest", () => {
|
||||
let config: TestConfiguration
|
||||
let datasource: Datasource
|
||||
|
||||
async function createQuery(fields: any) {
|
||||
return await config.api.query.save({
|
||||
name: "test query",
|
||||
datasourceId: datasource._id!,
|
||||
parameters: [],
|
||||
fields,
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
queryVerb: "read",
|
||||
})
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
config = setup.getConfig()
|
||||
await config.init()
|
||||
datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {},
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
nock.cleanAll()
|
||||
})
|
||||
|
||||
it("should automatically retry on fail with cached dynamics", async () => {
|
||||
const basedOnQuery = await createQuery({
|
||||
path: "one.example.com",
|
||||
})
|
||||
|
||||
let cached = await getCachedVariable(basedOnQuery._id!, "foo")
|
||||
expect(cached).toBeNull()
|
||||
|
||||
await config.api.datasource.update({
|
||||
...datasource,
|
||||
config: {
|
||||
...datasource.config,
|
||||
dynamicVariables: [
|
||||
{
|
||||
queryId: basedOnQuery._id!,
|
||||
name: "foo",
|
||||
value: "{{ data[0].name }}",
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
|
||||
cached = await getCachedVariable(basedOnQuery._id!, "foo")
|
||||
expect(cached).toBeNull()
|
||||
|
||||
nock("http://one.example.com")
|
||||
.get("/")
|
||||
.reply(200, [{ name: "one" }])
|
||||
nock("http://two.example.com").get("/?test=one").reply(500)
|
||||
nock("http://two.example.com")
|
||||
.get("/?test=one")
|
||||
.reply(200, [{ name: "two" }])
|
||||
|
||||
const res = await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: "test query",
|
||||
parameters: [],
|
||||
queryVerb: "read",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "two.example.com",
|
||||
queryString: "test={{ foo }}",
|
||||
},
|
||||
})
|
||||
expect(res.schema).toEqual({
|
||||
name: { type: "string", name: "name" },
|
||||
})
|
||||
|
||||
cached = await getCachedVariable(basedOnQuery._id!, "foo")
|
||||
expect(cached.rows.length).toEqual(1)
|
||||
expect(cached.rows[0].name).toEqual("one")
|
||||
})
|
||||
|
||||
it("should parse global and query level header mappings", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com", {
|
||||
reqheaders: {
|
||||
test: "headerVal",
|
||||
emailhdr: user.email,
|
||||
queryhdr: user.firstName!,
|
||||
secondhdr: "1234",
|
||||
},
|
||||
})
|
||||
.get("/?email=" + user.email.replace("@", "%40"))
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [],
|
||||
queryVerb: "read",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
queryString: "email={{[user].[email]}}",
|
||||
headers: {
|
||||
queryHdr: "{{[user].[firstName]}}",
|
||||
secondHdr: "1234",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to query params", async () => {
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.get(
|
||||
"/?test=" +
|
||||
user.email.replace("@", "%40") +
|
||||
"&testName=" +
|
||||
user.firstName +
|
||||
"&testParam=1234"
|
||||
)
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [
|
||||
{ name: "myEmail", default: "{{[user].[email]}}" },
|
||||
{ name: "myName", default: "{{[user].[firstName]}}" },
|
||||
{ name: "testParam", default: "1234" },
|
||||
],
|
||||
queryVerb: "read",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
queryString:
|
||||
"test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to the request body - plain text", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
method: "POST",
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.post(
|
||||
"/?testParam=1234",
|
||||
"This is plain text and this is my email: " +
|
||||
user.email +
|
||||
". This is a test param: 1234"
|
||||
)
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [{ name: "testParam", default: "1234" }],
|
||||
queryVerb: "create",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
bodyType: "text",
|
||||
queryString: "&testParam={{testParam}}",
|
||||
requestBody:
|
||||
"This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to the request body - json", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
method: "POST",
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.post("/?testParam=1234", {
|
||||
email: user.email,
|
||||
queryCode: 1234,
|
||||
userRef: user.firstName,
|
||||
})
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
],
|
||||
queryVerb: "create",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
bodyType: "json",
|
||||
queryString: "&testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to the request body - xml", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
method: "POST",
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.post(
|
||||
"/?testParam=1234",
|
||||
`<note> <email>${user.email}</email> <code>1234</code> <ref>${user.firstName}</ref> <somestring>testing</somestring> </note>`
|
||||
)
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userId", default: "{{[user].[firstName]}}" },
|
||||
],
|
||||
queryVerb: "create",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
bodyType: "xml",
|
||||
queryString: "&testParam={{testParam}}",
|
||||
requestBody:
|
||||
"<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
|
||||
"<ref>{{userId}}</ref> <somestring>testing</somestring> </note>",
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to the request body - form-data", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
method: "POST",
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.post("/?testParam=1234", body => {
|
||||
return (
|
||||
body.includes('name="email"\r\n\r\n' + user.email + "\r\n") &&
|
||||
body.includes('name="queryCode"\r\n\r\n1234\r\n') &&
|
||||
body.includes('name="userRef"\r\n\r\n' + user.firstName + "\r\n")
|
||||
)
|
||||
})
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
],
|
||||
queryVerb: "create",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
bodyType: "form",
|
||||
queryString: "&testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
|
||||
it("should bind the current user to the request body - encoded", async () => {
|
||||
const datasource = await config.api.datasource.create({
|
||||
name: generator.guid(),
|
||||
type: "test",
|
||||
source: SourceName.REST,
|
||||
config: {
|
||||
method: "POST",
|
||||
defaultHeaders: {
|
||||
test: "headerVal",
|
||||
emailHdr: "{{[user].[email]}}",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const user = config.getUserDetails()
|
||||
const mock = nock("http://www.example.com")
|
||||
.post("/?testParam=1234", {
|
||||
email: user.email,
|
||||
queryCode: 1234,
|
||||
userRef: user.firstName,
|
||||
})
|
||||
.reply(200, {})
|
||||
|
||||
await config.api.query.preview({
|
||||
datasourceId: datasource._id!,
|
||||
name: generator.guid(),
|
||||
parameters: [
|
||||
{ name: "testParam", default: "1234" },
|
||||
{ name: "userRef", default: "{{[user].[firstName]}}" },
|
||||
],
|
||||
queryVerb: "create",
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
fields: {
|
||||
path: "www.example.com",
|
||||
bodyType: "encoded",
|
||||
queryString: "&testParam={{testParam}}",
|
||||
requestBody:
|
||||
'{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
|
||||
},
|
||||
})
|
||||
|
||||
expect(mock.isDone()).toEqual(true)
|
||||
})
|
||||
})
|
|
@ -1,4 +1,4 @@
|
|||
import { databaseTestProviders } from "../../../integrations/tests/utils"
|
||||
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
|
||||
|
||||
import tk from "timekeeper"
|
||||
import { outputProcessing } from "../../../utilities/rowProcessor"
|
||||
|
@ -30,14 +30,13 @@ const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
|
|||
tk.freeze(timestamp)
|
||||
|
||||
jest.unmock("mssql")
|
||||
jest.unmock("pg")
|
||||
|
||||
describe.each([
|
||||
["internal", undefined],
|
||||
["postgres", databaseTestProviders.postgres],
|
||||
["mysql", databaseTestProviders.mysql],
|
||||
["mssql", databaseTestProviders.mssql],
|
||||
["mariadb", databaseTestProviders.mariadb],
|
||||
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
|
||||
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
|
||||
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
|
||||
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
|
||||
])("/rows (%s)", (__, dsProvider) => {
|
||||
const isInternal = dsProvider === undefined
|
||||
const config = setup.getConfig()
|
||||
|
@ -49,23 +48,23 @@ describe.each([
|
|||
await config.init()
|
||||
if (dsProvider) {
|
||||
datasource = await config.createDatasource({
|
||||
datasource: await dsProvider.datasource(),
|
||||
datasource: await dsProvider,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (dsProvider) {
|
||||
await dsProvider.stop()
|
||||
}
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
function saveTableRequest(
|
||||
...overrides: Partial<SaveTableRequest>[]
|
||||
// We omit the name field here because it's generated in the function with a
|
||||
// high likelihood to be unique. Tests should not have any reason to control
|
||||
// the table name they're writing to.
|
||||
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
|
||||
): SaveTableRequest {
|
||||
const req: SaveTableRequest = {
|
||||
name: uuid.v4().substring(0, 16),
|
||||
name: uuid.v4().substring(0, 10),
|
||||
type: "table",
|
||||
sourceType: datasource
|
||||
? TableSourceType.EXTERNAL
|
||||
|
@ -87,7 +86,10 @@ describe.each([
|
|||
}
|
||||
|
||||
function defaultTable(
|
||||
...overrides: Partial<SaveTableRequest>[]
|
||||
// We omit the name field here because it's generated in the function with a
|
||||
// high likelihood to be unique. Tests should not have any reason to control
|
||||
// the table name they're writing to.
|
||||
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
|
||||
): SaveTableRequest {
|
||||
return saveTableRequest(
|
||||
{
|
||||
|
@ -194,7 +196,6 @@ describe.each([
|
|||
|
||||
const newTable = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: "TestTableAuto",
|
||||
schema: {
|
||||
"Row ID": {
|
||||
name: "Row ID",
|
||||
|
@ -383,11 +384,9 @@ describe.each([
|
|||
|
||||
isInternal &&
|
||||
it("doesn't allow creating in user table", async () => {
|
||||
const userTableId = InternalTable.USER_METADATA
|
||||
const response = await config.api.row.save(
|
||||
userTableId,
|
||||
InternalTable.USER_METADATA,
|
||||
{
|
||||
tableId: userTableId,
|
||||
firstName: "Joe",
|
||||
lastName: "Joe",
|
||||
email: "joe@joe.com",
|
||||
|
@ -462,7 +461,6 @@ describe.each([
|
|||
table = await config.api.table.save(defaultTable())
|
||||
otherTable = await config.api.table.save(
|
||||
defaultTable({
|
||||
name: "a",
|
||||
schema: {
|
||||
relationship: {
|
||||
name: "relationship",
|
||||
|
@ -898,8 +896,8 @@ describe.each([
|
|||
let o2mTable: Table
|
||||
let m2mTable: Table
|
||||
beforeAll(async () => {
|
||||
o2mTable = await config.api.table.save(defaultTable({ name: "o2m" }))
|
||||
m2mTable = await config.api.table.save(defaultTable({ name: "m2m" }))
|
||||
o2mTable = await config.api.table.save(defaultTable())
|
||||
m2mTable = await config.api.table.save(defaultTable())
|
||||
})
|
||||
|
||||
describe.each([
|
||||
|
@ -1256,7 +1254,6 @@ describe.each([
|
|||
otherTable = await config.api.table.save(defaultTable())
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: "b",
|
||||
schema: {
|
||||
links: {
|
||||
name: "links",
|
||||
|
@ -1298,7 +1295,7 @@ describe.each([
|
|||
|
||||
describe("Formula JS protection", () => {
|
||||
it("should time out JS execution if a single cell takes too long", async () => {
|
||||
await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => {
|
||||
await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
|
||||
const js = Buffer.from(
|
||||
`
|
||||
let i = 0;
|
||||
|
@ -1338,8 +1335,8 @@ describe.each([
|
|||
it("should time out JS execution if a multiple cells take too long", async () => {
|
||||
await config.withEnv(
|
||||
{
|
||||
JS_PER_INVOCATION_TIMEOUT_MS: 20,
|
||||
JS_PER_REQUEST_TIMEOUT_MS: 40,
|
||||
JS_PER_INVOCATION_TIMEOUT_MS: 40,
|
||||
JS_PER_REQUEST_TIMEOUT_MS: 80,
|
||||
},
|
||||
async () => {
|
||||
const js = Buffer.from(
|
||||
|
@ -1354,7 +1351,6 @@ describe.each([
|
|||
|
||||
const table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: "table",
|
||||
schema: {
|
||||
text: {
|
||||
name: "text",
|
||||
|
|
|
@ -3,8 +3,6 @@ import { checkPermissionsEndpoint } from "./utilities/TestFunctions"
|
|||
import * as setup from "./utilities"
|
||||
import { UserMetadata } from "@budibase/types"
|
||||
|
||||
jest.setTimeout(30000)
|
||||
|
||||
jest.mock("../../../utilities/workerRequests", () => ({
|
||||
getGlobalUsers: jest.fn(() => {
|
||||
return {}
|
||||
|
|
|
@ -19,21 +19,19 @@ import {
|
|||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import { generator, mocks } from "@budibase/backend-core/tests"
|
||||
import * as uuid from "uuid"
|
||||
import { databaseTestProviders } from "../../../integrations/tests/utils"
|
||||
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
|
||||
import merge from "lodash/merge"
|
||||
import { quotas } from "@budibase/pro"
|
||||
import { roles } from "@budibase/backend-core"
|
||||
|
||||
jest.unmock("mssql")
|
||||
jest.unmock("pg")
|
||||
|
||||
describe.each([
|
||||
["internal", undefined],
|
||||
["postgres", databaseTestProviders.postgres],
|
||||
["mysql", databaseTestProviders.mysql],
|
||||
["mssql", databaseTestProviders.mssql],
|
||||
["mariadb", databaseTestProviders.mariadb],
|
||||
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
|
||||
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
|
||||
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
|
||||
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
|
||||
])("/v2/views (%s)", (_, dsProvider) => {
|
||||
const config = setup.getConfig()
|
||||
const isInternal = !dsProvider
|
||||
|
@ -42,10 +40,10 @@ describe.each([
|
|||
let datasource: Datasource
|
||||
|
||||
function saveTableRequest(
|
||||
...overrides: Partial<SaveTableRequest>[]
|
||||
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
|
||||
): SaveTableRequest {
|
||||
const req: SaveTableRequest = {
|
||||
name: uuid.v4().substring(0, 16),
|
||||
name: generator.guid().replaceAll("-", "").substring(0, 16),
|
||||
type: "table",
|
||||
sourceType: datasource
|
||||
? TableSourceType.EXTERNAL
|
||||
|
@ -90,16 +88,13 @@ describe.each([
|
|||
|
||||
if (dsProvider) {
|
||||
datasource = await config.createDatasource({
|
||||
datasource: await dsProvider.datasource(),
|
||||
datasource: await dsProvider,
|
||||
})
|
||||
}
|
||||
table = await config.api.table.save(priceTable())
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
if (dsProvider) {
|
||||
await dsProvider.stop()
|
||||
}
|
||||
setup.afterAll()
|
||||
})
|
||||
|
||||
|
@ -231,7 +226,7 @@ describe.each([
|
|||
|
||||
view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: "View A",
|
||||
name: generator.guid(),
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -307,12 +302,13 @@ describe.each([
|
|||
|
||||
it("can update an existing view name", async () => {
|
||||
const tableId = table._id!
|
||||
await config.api.viewV2.update({ ...view, name: "View B" })
|
||||
const newName = generator.guid()
|
||||
await config.api.viewV2.update({ ...view, name: newName })
|
||||
|
||||
expect(await config.api.table.get(tableId)).toEqual(
|
||||
expect.objectContaining({
|
||||
views: {
|
||||
"View B": { ...view, name: "View B", schema: expect.anything() },
|
||||
[newName]: { ...view, name: newName, schema: expect.anything() },
|
||||
},
|
||||
})
|
||||
)
|
||||
|
@ -507,7 +503,6 @@ describe.each([
|
|||
it("views have extra data trimmed", async () => {
|
||||
const table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: "orders",
|
||||
schema: {
|
||||
Country: {
|
||||
type: FieldType.STRING,
|
||||
|
@ -523,7 +518,7 @@ describe.each([
|
|||
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: uuid.v4(),
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
Country: {
|
||||
visible: true,
|
||||
|
@ -853,7 +848,6 @@ describe.each([
|
|||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
name: `users_${uuid.v4()}`,
|
||||
type: "table",
|
||||
schema: {
|
||||
name: {
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
const setup = require("./utilities")
|
||||
|
||||
describe("test the execute query action", () => {
|
||||
let query
|
||||
let config = setup.getConfig()
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
|
||||
await config.createDatasource()
|
||||
query = await config.createQuery()
|
||||
})
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
it("should be able to execute a query", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: { queryId: query._id },
|
||||
})
|
||||
expect(res.response).toEqual([{ a: "string", b: 1 }])
|
||||
expect(res.success).toEqual(true)
|
||||
})
|
||||
|
||||
it("should handle a null query value", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: null,
|
||||
})
|
||||
expect(res.response.message).toEqual("Invalid inputs")
|
||||
expect(res.success).toEqual(false)
|
||||
})
|
||||
|
||||
it("should handle an error executing a query", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: { queryId: "wrong_id" },
|
||||
})
|
||||
expect(res.response).toEqual("Error: missing")
|
||||
expect(res.success).toEqual(false)
|
||||
})
|
||||
})
|
|
@ -0,0 +1,94 @@
|
|||
import { Datasource, Query, SourceName } from "@budibase/types"
|
||||
import * as setup from "./utilities"
|
||||
import { DatabaseName, getDatasource } from "../../integrations/tests/utils"
|
||||
import knex, { Knex } from "knex"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
|
||||
function getKnexClientName(source: SourceName) {
|
||||
switch (source) {
|
||||
case SourceName.MYSQL:
|
||||
return "mysql2"
|
||||
case SourceName.SQL_SERVER:
|
||||
return "mssql"
|
||||
case SourceName.POSTGRES:
|
||||
return "pg"
|
||||
}
|
||||
throw new Error(`Unsupported source: ${source}`)
|
||||
}
|
||||
|
||||
describe.each(
|
||||
[
|
||||
DatabaseName.POSTGRES,
|
||||
DatabaseName.MYSQL,
|
||||
DatabaseName.SQL_SERVER,
|
||||
DatabaseName.MARIADB,
|
||||
].map(name => [name, getDatasource(name)])
|
||||
)("execute query action (%s)", (_, dsProvider) => {
|
||||
let tableName: string
|
||||
let client: Knex
|
||||
let datasource: Datasource
|
||||
let query: Query
|
||||
let config = setup.getConfig()
|
||||
|
||||
beforeAll(async () => {
|
||||
await config.init()
|
||||
|
||||
const ds = await dsProvider
|
||||
datasource = await config.api.datasource.create(ds)
|
||||
client = knex({
|
||||
client: getKnexClientName(ds.source),
|
||||
connection: ds.config,
|
||||
})
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
tableName = generator.guid()
|
||||
await client.schema.createTable(tableName, table => {
|
||||
table.string("a")
|
||||
table.integer("b")
|
||||
})
|
||||
await client(tableName).insert({ a: "string", b: 1 })
|
||||
query = await config.api.query.save({
|
||||
name: "test query",
|
||||
datasourceId: datasource._id!,
|
||||
parameters: [],
|
||||
fields: {
|
||||
sql: client(tableName).select("*").toSQL().toNative().sql,
|
||||
},
|
||||
transformer: "",
|
||||
schema: {},
|
||||
readable: true,
|
||||
queryVerb: "read",
|
||||
})
|
||||
})
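The SQL for the saved query comes from knex's toSQL().toNative(), which renders the statement in the dialect selected by getKnexClientName. Roughly:

// Sketch: toNative() returns the SQL string plus its bindings in the
// dialect's placeholder style ($1 for pg, ? for mysql2).
const { sql, bindings } = client(tableName).where({ b: 1 }).toSQL().toNative()
// pg:     select * from "<tableName>" where "b" = $1   bindings: [1]
// mysql2: select * from `<tableName>` where `b` = ?    bindings: [1]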
|
||||
|
||||
afterEach(async () => {
|
||||
await client.schema.dropTable(tableName)
|
||||
})
|
||||
|
||||
afterAll(setup.afterAll)
|
||||
|
||||
it("should be able to execute a query", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: { queryId: query._id },
|
||||
})
|
||||
expect(res.response).toEqual([{ a: "string", b: 1 }])
|
||||
expect(res.success).toEqual(true)
|
||||
})
|
||||
|
||||
it("should handle a null query value", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: null,
|
||||
})
|
||||
expect(res.response.message).toEqual("Invalid inputs")
|
||||
expect(res.success).toEqual(false)
|
||||
})
|
||||
|
||||
it("should handle an error executing a query", async () => {
|
||||
let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
|
||||
query: { queryId: "wrong_id" },
|
||||
})
|
||||
expect(res.response).toEqual("Error: missing")
|
||||
expect(res.success).toEqual(false)
|
||||
})
|
||||
})
|
|
@ -3,7 +3,6 @@ import {
|
|||
generateMakeRequest,
|
||||
MakeRequestResponse,
|
||||
} from "../api/routes/public/tests/utils"
|
||||
import { v4 as uuidv4 } from "uuid"
|
||||
import * as setup from "../api/routes/tests/utilities"
|
||||
import {
|
||||
Datasource,
|
||||
|
@ -12,12 +11,23 @@ import {
|
|||
TableRequest,
|
||||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
import { databaseTestProviders } from "../integrations/tests/utils"
|
||||
import mysql from "mysql2/promise"
|
||||
import {
|
||||
DatabaseName,
|
||||
getDatasource,
|
||||
rawQuery,
|
||||
} from "../integrations/tests/utils"
|
||||
import { builderSocket } from "../websockets"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
// @ts-ignore
|
||||
fetch.mockSearch()
|
||||
|
||||
function uniqueTableName(length?: number): string {
|
||||
return generator
|
||||
.guid()
|
||||
.replaceAll("-", "_")
|
||||
.substring(0, length || 10)
|
||||
}
|
||||
|
||||
const config = setup.getConfig()!
|
||||
|
||||
jest.mock("../websockets", () => ({
|
||||
|
@ -37,7 +47,8 @@ jest.mock("../websockets", () => ({
|
|||
|
||||
describe("mysql integrations", () => {
|
||||
let makeRequest: MakeRequestResponse,
|
||||
mysqlDatasource: Datasource,
|
||||
rawDatasource: Datasource,
|
||||
datasource: Datasource,
|
||||
primaryMySqlTable: Table
|
||||
|
||||
beforeAll(async () => {
|
||||
|
@ -46,18 +57,13 @@ describe("mysql integrations", () => {
|
|||
|
||||
makeRequest = generateMakeRequest(apiKey, true)
|
||||
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.mysql.stop()
|
||||
rawDatasource = await getDatasource(DatabaseName.MYSQL)
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
primaryMySqlTable = await config.createTable({
|
||||
name: uuidv4(),
|
||||
name: uniqueTableName(),
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
|
@ -79,7 +85,7 @@ describe("mysql integrations", () => {
|
|||
type: FieldType.NUMBER,
|
||||
},
|
||||
},
|
||||
sourceId: mysqlDatasource._id,
|
||||
sourceId: datasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
})
|
||||
|
@ -87,18 +93,15 @@ describe("mysql integrations", () => {
|
|||
afterAll(config.end)
|
||||
|
||||
it("validate table schema", async () => {
|
||||
const res = await makeRequest(
|
||||
"get",
|
||||
`/api/datasources/${mysqlDatasource._id}`
|
||||
)
|
||||
const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
|
||||
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual({
|
||||
config: {
|
||||
database: "mysql",
|
||||
host: mysqlDatasource.config!.host,
|
||||
database: expect.any(String),
|
||||
host: datasource.config!.host,
|
||||
password: "--secret-value--",
|
||||
port: mysqlDatasource.config!.port,
|
||||
port: datasource.config!.port,
|
||||
user: "root",
|
||||
},
|
||||
plus: true,
|
||||
|
@ -117,7 +120,7 @@ describe("mysql integrations", () => {
|
|||
it("should be able to verify the connection", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.mysql.datasource(),
|
||||
datasource: rawDatasource,
|
||||
},
|
||||
{
|
||||
body: {
|
||||
|
@ -128,13 +131,12 @@ describe("mysql integrations", () => {
|
|||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.mysql.datasource()
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
...dbConfig,
|
||||
...rawDatasource,
|
||||
config: {
|
||||
...dbConfig.config,
|
||||
...rawDatasource.config,
|
||||
password: "wrongpassword",
|
||||
},
|
||||
},
|
||||
|
@ -154,7 +156,7 @@ describe("mysql integrations", () => {
|
|||
it("should fetch information about mysql datasource", async () => {
|
||||
const primaryName = primaryMySqlTable.name
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: mysqlDatasource,
|
||||
datasource: datasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -163,40 +165,38 @@ describe("mysql integrations", () => {
|
|||
})
|
||||
|
||||
describe("Integration compatibility with mysql search_path", () => {
|
||||
let client: mysql.Connection, pathDatasource: Datasource
|
||||
const database = "test1"
|
||||
const database2 = "test-2"
|
||||
let datasource: Datasource, rawDatasource: Datasource
|
||||
const database = generator.guid()
|
||||
const database2 = generator.guid()
|
||||
|
||||
beforeAll(async () => {
|
||||
const dsConfig = await databaseTestProviders.mysql.datasource()
|
||||
const dbConfig = dsConfig.config!
|
||||
rawDatasource = await getDatasource(DatabaseName.MYSQL)
|
||||
|
||||
client = await mysql.createConnection(dbConfig)
|
||||
await client.query(`CREATE DATABASE \`${database}\`;`)
|
||||
await client.query(`CREATE DATABASE \`${database2}\`;`)
|
||||
await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
|
||||
await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
|
||||
|
||||
const pathConfig: any = {
|
||||
...dsConfig,
|
||||
...rawDatasource,
|
||||
config: {
|
||||
...dbConfig,
|
||||
...rawDatasource.config!,
|
||||
database,
|
||||
},
|
||||
}
|
||||
pathDatasource = await config.api.datasource.create(pathConfig)
|
||||
datasource = await config.api.datasource.create(pathConfig)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await client.query(`DROP DATABASE \`${database}\`;`)
|
||||
await client.query(`DROP DATABASE \`${database2}\`;`)
|
||||
await client.end()
|
||||
await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
|
||||
await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
|
||||
})
|
||||
|
||||
it("discovers tables from any schema in search path", async () => {
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: pathDatasource,
|
||||
datasource: datasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -207,15 +207,17 @@ describe("mysql integrations", () => {
|
|||
|
||||
it("does not mix columns from different tables", async () => {
|
||||
const repeated_table_name = "table_same_name"
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
|
||||
)
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
|
||||
)
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${pathDatasource._id}/schema`,
|
||||
`/api/datasources/${datasource._id}/schema`,
|
||||
{
|
||||
tablesFilter: [repeated_table_name],
|
||||
}
|
||||
|
@ -231,30 +233,14 @@ describe("mysql integrations", () => {
|
|||
})
|
||||
|
||||
describe("POST /api/tables/", () => {
|
||||
let client: mysql.Connection
|
||||
const emitDatasourceUpdateMock = jest.fn()
|
||||
|
||||
beforeEach(async () => {
|
||||
client = await mysql.createConnection(
|
||||
(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
).config!
|
||||
)
|
||||
mysqlDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.mysql.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.end()
|
||||
})
|
||||
|
||||
it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
|
||||
const addColumnToTable: TableRequest = {
|
||||
type: "table",
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
name: "table",
|
||||
sourceId: mysqlDatasource._id!,
|
||||
name: uniqueTableName(),
|
||||
sourceId: datasource._id!,
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
id: {
|
||||
|
@ -301,14 +287,16 @@ describe("mysql integrations", () => {
|
|||
},
|
||||
},
|
||||
created: true,
|
||||
_id: `${mysqlDatasource._id}__table`,
|
||||
_id: `${datasource._id}__${addColumnToTable.name}`,
|
||||
}
|
||||
delete expectedTable._add
|
||||
|
||||
expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
|
||||
const emittedDatasource: Datasource =
|
||||
emitDatasourceUpdateMock.mock.calls[0][1]
|
||||
expect(emittedDatasource.entities!["table"]).toEqual(expectedTable)
|
||||
expect(emittedDatasource.entities![expectedTable.name]).toEqual(
|
||||
expectedTable
|
||||
)
|
||||
})
|
||||
|
||||
it("will rename a column", async () => {
|
||||
|
@ -346,17 +334,18 @@ describe("mysql integrations", () => {
|
|||
"/api/tables/",
|
||||
renameColumnOnTable
|
||||
)
|
||||
mysqlDatasource = (
|
||||
await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${mysqlDatasource._id}/schema`
|
||||
)
|
||||
|
||||
const ds = (
|
||||
await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
|
||||
).body.datasource
|
||||
|
||||
expect(response.status).toEqual(200)
|
||||
expect(
|
||||
Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema)
|
||||
).toEqual(["id", "name", "description", "age"])
|
||||
expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
|
||||
"id",
|
||||
"name",
|
||||
"description",
|
||||
"age",
|
||||
])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -16,19 +16,23 @@ import {
|
|||
import _ from "lodash"
|
||||
import { generator } from "@budibase/backend-core/tests"
|
||||
import { utils } from "@budibase/backend-core"
|
||||
import { databaseTestProviders } from "../integrations/tests/utils"
|
||||
import { Client } from "pg"
|
||||
import {
|
||||
DatabaseName,
|
||||
getDatasource,
|
||||
rawQuery,
|
||||
} from "../integrations/tests/utils"
|
||||
|
||||
// @ts-ignore
|
||||
fetch.mockSearch()
|
||||
|
||||
const config = setup.getConfig()!
|
||||
|
||||
jest.unmock("pg")
|
||||
jest.mock("../websockets")
|
||||
|
||||
describe("postgres integrations", () => {
|
||||
let makeRequest: MakeRequestResponse,
|
||||
postgresDatasource: Datasource,
|
||||
rawDatasource: Datasource,
|
||||
datasource: Datasource,
|
||||
primaryPostgresTable: Table,
|
||||
oneToManyRelationshipInfo: ForeignTableInfo,
|
||||
manyToOneRelationshipInfo: ForeignTableInfo,
|
||||
|
@ -40,19 +44,17 @@ describe("postgres integrations", () => {
|
|||
|
||||
makeRequest = generateMakeRequest(apiKey, true)
|
||||
|
||||
postgresDatasource = await config.api.datasource.create(
|
||||
await databaseTestProviders.postgres.datasource()
|
||||
)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await databaseTestProviders.postgres.stop()
|
||||
rawDatasource = await getDatasource(DatabaseName.POSTGRES)
|
||||
datasource = await config.api.datasource.create(rawDatasource)
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
async function createAuxTable(prefix: string) {
|
||||
return await config.createTable({
|
||||
name: `${prefix}_${generator.word({ length: 6 })}`,
|
||||
name: `${prefix}_${generator
|
||||
.guid()
|
||||
.replaceAll("-", "")
|
||||
.substring(0, 6)}`,
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
primaryDisplay: "title",
|
||||
|
@ -67,7 +69,7 @@ describe("postgres integrations", () => {
|
|||
type: FieldType.STRING,
|
||||
},
|
||||
},
|
||||
sourceId: postgresDatasource._id,
|
||||
sourceId: datasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
}
|
||||
|
@ -89,7 +91,7 @@ describe("postgres integrations", () => {
|
|||
}
|
||||
|
||||
primaryPostgresTable = await config.createTable({
|
||||
name: `p_${generator.word({ length: 6 })}`,
|
||||
name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`,
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
|
@ -144,7 +146,7 @@ describe("postgres integrations", () => {
|
|||
main: true,
|
||||
},
|
||||
},
|
||||
sourceId: postgresDatasource._id,
|
||||
sourceId: datasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
})
|
||||
|
@ -251,7 +253,7 @@ describe("postgres integrations", () => {
|
|||
|
||||
async function createDefaultPgTable() {
|
||||
return await config.createTable({
|
||||
name: generator.word({ length: 10 }),
|
||||
name: generator.guid().replaceAll("-", "").substring(0, 10),
|
||||
type: "table",
|
||||
primary: ["id"],
|
||||
schema: {
|
||||
|
@ -261,7 +263,7 @@ describe("postgres integrations", () => {
|
|||
autocolumn: true,
|
||||
},
|
||||
},
|
||||
sourceId: postgresDatasource._id,
|
||||
sourceId: datasource._id,
|
||||
sourceType: TableSourceType.EXTERNAL,
|
||||
})
|
||||
}
|
||||
|
@ -299,19 +301,16 @@ describe("postgres integrations", () => {
|
|||
}
|
||||
|
||||
it("validate table schema", async () => {
|
||||
const res = await makeRequest(
|
||||
"get",
|
||||
`/api/datasources/${postgresDatasource._id}`
|
||||
)
|
||||
const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
|
||||
|
||||
expect(res.status).toBe(200)
|
||||
expect(res.body).toEqual({
|
||||
config: {
|
||||
ca: false,
|
||||
database: "postgres",
|
||||
host: postgresDatasource.config!.host,
|
||||
database: expect.any(String),
|
||||
host: datasource.config!.host,
|
||||
password: "--secret-value--",
|
||||
port: postgresDatasource.config!.port,
|
||||
port: datasource.config!.port,
|
||||
rejectUnauthorized: false,
|
||||
schema: "public",
|
||||
ssl: false,
|
||||
|
@ -1043,7 +1042,7 @@ describe("postgres integrations", () => {
|
|||
it("should be able to verify the connection", async () => {
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: await databaseTestProviders.postgres.datasource(),
|
||||
datasource: await getDatasource(DatabaseName.POSTGRES),
|
||||
},
|
||||
{
|
||||
body: {
|
||||
|
@ -1054,7 +1053,7 @@ describe("postgres integrations", () => {
|
|||
})
|
||||
|
||||
it("should state an invalid datasource cannot connect", async () => {
|
||||
const dbConfig = await databaseTestProviders.postgres.datasource()
|
||||
const dbConfig = await getDatasource(DatabaseName.POSTGRES)
|
||||
await config.api.datasource.verify(
|
||||
{
|
||||
datasource: {
|
||||
|
@ -1079,7 +1078,7 @@ describe("postgres integrations", () => {
|
|||
it("should fetch information about postgres datasource", async () => {
|
||||
const primaryName = primaryPostgresTable.name
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: postgresDatasource,
|
||||
datasource: datasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -1088,86 +1087,88 @@ describe("postgres integrations", () => {
|
|||
})
|
||||
|
||||
describe("POST /api/datasources/:datasourceId/schema", () => {
|
||||
let client: Client
|
||||
let tableName: string
|
||||
|
||||
beforeEach(async () => {
|
||||
client = new Client(
|
||||
(await databaseTestProviders.postgres.datasource()).config!
|
||||
)
|
||||
await client.connect()
|
||||
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await client.query(`DROP TABLE IF EXISTS "table"`)
|
||||
await client.end()
|
||||
await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
|
||||
})
|
||||
|
||||
it("recognises when a table has no primary key", async () => {
|
||||
await client.query(`CREATE TABLE "table" (id SERIAL)`)
|
||||
await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${postgresDatasource._id}/schema`
|
||||
`/api/datasources/${datasource._id}/schema`
|
||||
)
|
||||
|
||||
expect(response.body.errors).toEqual({
|
||||
table: "Table must have a primary key.",
|
||||
[tableName]: "Table must have a primary key.",
|
||||
})
|
||||
})
|
||||
|
||||
it("recognises when a table is using a reserved column name", async () => {
|
||||
await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
|
||||
)
|
||||
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${postgresDatasource._id}/schema`
|
||||
`/api/datasources/${datasource._id}/schema`
|
||||
)
|
||||
|
||||
expect(response.body.errors).toEqual({
|
||||
table: "Table contains invalid columns.",
|
||||
[tableName]: "Table contains invalid columns.",
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Integration compatibility with postgres search_path", () => {
|
||||
let client: Client, pathDatasource: Datasource
|
||||
const schema1 = "test1",
|
||||
schema2 = "test-2"
|
||||
let rawDatasource: Datasource,
|
||||
datasource: Datasource,
|
||||
schema1: string,
|
||||
schema2: string
|
||||
|
||||
beforeAll(async () => {
|
||||
const dsConfig = await databaseTestProviders.postgres.datasource()
|
||||
const dbConfig = dsConfig.config!
|
||||
beforeEach(async () => {
|
||||
schema1 = generator.guid().replaceAll("-", "")
|
||||
schema2 = generator.guid().replaceAll("-", "")
|
||||
|
||||
client = new Client(dbConfig)
|
||||
await client.connect()
|
||||
await client.query(`CREATE SCHEMA "${schema1}";`)
|
||||
await client.query(`CREATE SCHEMA "${schema2}";`)
|
||||
rawDatasource = await getDatasource(DatabaseName.POSTGRES)
|
||||
const dbConfig = rawDatasource.config!
|
||||
|
||||
await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
|
||||
await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
|
||||
|
||||
const pathConfig: any = {
|
||||
...dsConfig,
|
||||
...rawDatasource,
|
||||
config: {
|
||||
...dbConfig,
|
||||
schema: `${schema1}, ${schema2}`,
|
||||
},
|
||||
}
|
||||
pathDatasource = await config.api.datasource.create(pathConfig)
|
||||
datasource = await config.api.datasource.create(pathConfig)
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
await client.query(`DROP SCHEMA "${schema1}" CASCADE;`)
|
||||
await client.query(`DROP SCHEMA "${schema2}" CASCADE;`)
|
||||
await client.end()
|
||||
afterEach(async () => {
|
||||
await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
|
||||
await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
|
||||
})
|
||||
|
||||
it("discovers tables from any schema in search path", async () => {
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
|
||||
)
|
||||
const response = await makeRequest("post", "/api/datasources/info", {
|
||||
datasource: pathDatasource,
|
||||
datasource: datasource,
|
||||
})
|
||||
expect(response.status).toBe(200)
|
||||
expect(response.body.tableNames).toBeDefined()
|
||||
|
@ -1178,15 +1179,17 @@ describe("postgres integrations", () => {
|
|||
|
||||
it("does not mix columns from different tables", async () => {
|
||||
const repeated_table_name = "table_same_name"
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
|
||||
)
|
||||
await client.query(
|
||||
await rawQuery(
|
||||
rawDatasource,
|
||||
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
|
||||
)
|
||||
const response = await makeRequest(
|
||||
"post",
|
||||
`/api/datasources/${pathDatasource._id}/schema`,
|
||||
`/api/datasources/${datasource._id}/schema`,
|
||||
{
|
||||
tablesFilter: [repeated_table_name],
|
||||
}
|
||||
|
|
|
@@ -1,25 +1,88 @@
jest.unmock("pg")

import { Datasource } from "@budibase/types"
import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import { StartedTestContainer } from "testcontainers"
import { GenericContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"

jest.setTimeout(30000)
export type DatasourceProvider = () => Promise<Datasource>

export interface DatabaseProvider {
  start(): Promise<StartedTestContainer>
  stop(): Promise<void>
  datasource(): Promise<Datasource>
export enum DatabaseName {
  POSTGRES = "postgres",
  MONGODB = "mongodb",
  MYSQL = "mysql",
  SQL_SERVER = "mssql",
  MARIADB = "mariadb",
}

export const databaseTestProviders = {
  postgres,
  mongodb,
  mysql,
  mssql,
  mariadb,
const providers: Record<DatabaseName, DatasourceProvider> = {
  [DatabaseName.POSTGRES]: postgres.getDatasource,
  [DatabaseName.MONGODB]: mongodb.getDatasource,
  [DatabaseName.MYSQL]: mysql.getDatasource,
  [DatabaseName.SQL_SERVER]: mssql.getDatasource,
  [DatabaseName.MARIADB]: mariadb.getDatasource,
}

export function getDatasourceProviders(
  ...sourceNames: DatabaseName[]
): Promise<Datasource>[] {
  return sourceNames.map(sourceName => providers[sourceName]())
}

export function getDatasourceProvider(
  sourceName: DatabaseName
): DatasourceProvider {
  return providers[sourceName]
}

export function getDatasource(sourceName: DatabaseName): Promise<Datasource> {
  return providers[sourceName]()
}

export async function getDatasources(
  ...sourceNames: DatabaseName[]
): Promise<Datasource[]> {
  return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
}

export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
  switch (ds.source) {
    case SourceName.POSTGRES: {
      return postgres.rawQuery(ds, sql)
    }
    case SourceName.MYSQL: {
      return mysql.rawQuery(ds, sql)
    }
    case SourceName.SQL_SERVER: {
      return mssql.rawQuery(ds, sql)
    }
    default: {
      throw new Error(`Unsupported source: ${ds.source}`)
    }
  }
}

export async function startContainer(container: GenericContainer) {
  if (process.env.REUSE_CONTAINERS) {
    container = container.withReuse()
  }

  const startedContainer = await container.start()

  const info = testContainerUtils.getContainerById(startedContainer.getId())
  if (!info) {
    throw new Error("Container not found")
  }

  // Some Docker runtimes, when you expose a port, will bind it to both
  // 127.0.0.1 and ::1, so ipv4 and ipv6. The port spaces of ipv4 and ipv6
  // addresses are not shared, and testcontainers will sometimes give you back
  // the ipv6 port. There's no way to know that this has happened, and if you
  // try to then connect to `localhost:port` you may attempt to bind to the v4
  // address which could be unbound or even an entirely different container. For
  // that reason, we don't use testcontainers' `getExposedPort` function,
  // preferring instead our own method that guarantees v4 ports.
  return testContainerUtils.getExposedV4Ports(info)
}
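A sketch of how these helpers are presumably consumed from a test file. DatabaseName, getDatasource and rawQuery come from the module above; the import path "./utils" and the surrounding test body are illustrative assumptions only:

import { Datasource } from "@budibase/types"
import { DatabaseName, getDatasource, rawQuery } from "./utils"

describe("example datasource test", () => {
  let datasource: Datasource

  beforeAll(async () => {
    // Each call returns a datasource pointing at a freshly created database
    // inside a shared (and, with REUSE_CONTAINERS set, reused) container.
    datasource = await getDatasource(DatabaseName.POSTGRES)
  })

  it("can talk to the container", async () => {
    const rows = await rawQuery(datasource, "SELECT 1 AS one")
    expect(rows[0].one).toBe(1)
  })
})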
@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { rawQuery } from "./mysql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MariaDBWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {

@@ -21,38 +24,38 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mariadb:lts")
    .withExposedPorts(3306)
    .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MariaDBWaitStrategy())
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mariadb:lts")
        .withExposedPorts(3306)
        .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MariaDBWaitStrategy())
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host
  if (!port) {
    throw new Error("MariaDB port not found")
  }

  const config = {
    host: "127.0.0.1",
    port,
    user: "root",
    password: "password",
    database: "mysql",
  }

  const datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
    config,
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config.database = database
  return datasource
}
@@ -1,43 +1,39 @@
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mongo:7.0-jammy")
    .withExposedPorts(27017)
    .withEnvironment({
      MONGO_INITDB_ROOT_USERNAME: "mongo",
      MONGO_INITDB_ROOT_PASSWORD: "password",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        `mongosh --eval "db.version()"`
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mongo:7.0-jammy")
        .withExposedPorts(27017)
        .withEnvironment({
          MONGO_INITDB_ROOT_USERNAME: "mongo",
          MONGO_INITDB_ROOT_PASSWORD: "password",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            `mongosh --eval "db.version()"`
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(27017)

  const port = (await ports).find(x => x.container === 27017)
  if (!port) {
    throw new Error("MongoDB port not found")
  }

  return {
    type: "datasource",
    source: SourceName.MONGODB,
    plus: false,
    config: {
      connectionString: `mongodb://mongo:password@${host}:${port}`,
      db: "mongo",
      connectionString: `mongodb://mongo:password@127.0.0.1:${port.host}`,
      db: generator.guid(),
    },
  }
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
  }
}
@@ -1,43 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import mssql from "mssql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer(
    "mcr.microsoft.com/mssql/server:2022-latest"
  )
    .withExposedPorts(1433)
    .withEnvironment({
      ACCEPT_EULA: "Y",
      MSSQL_SA_PASSWORD: "Password_123",
      // This is important, as Microsoft allow us to use the "Developer" edition
      // of SQL Server for development and testing purposes. We can't use other
      // versions without a valid license, and we cannot use the Developer
      // version in production.
      MSSQL_PID: "Developer",
    })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
      )
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
        .withExposedPorts(1433)
        .withEnvironment({
          ACCEPT_EULA: "Y",
          MSSQL_SA_PASSWORD: "Password_123",
          // This is important, as Microsoft allow us to use the "Developer" edition
          // of SQL Server for development and testing purposes. We can't use other
          // versions without a valid license, and we cannot use the Developer
          // version in production.
          MSSQL_PID: "Developer",
        })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
          )
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(1433)

  return {
  const port = (await ports).find(x => x.container === 1433)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.SQL_SERVER,
    plus: true,
    config: {
      server: host,
      server: "127.0.0.1",
      port,
      user: "sa",
      password: "Password_123",

@@ -46,11 +44,28 @@ export async function datasource(): Promise<Datasource> {
    },
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.SQL_SERVER) {
    throw new Error("Datasource source is not SQL Server")
  }

  const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
  const client = await pool.connect()
  try {
    const { recordset } = await client.query(sql)
    return recordset
  } finally {
    await pool.close()
  }
}
@@ -1,8 +1,11 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import mysql from "mysql2/promise"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

class MySQLWaitStrategy extends AbstractWaitStrategy {
  async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {

@@ -24,38 +27,50 @@ class MySQLWaitStrategy extends AbstractWaitStrategy {
  }
}

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("mysql:8.3")
    .withExposedPorts(3306)
    .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
    .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("mysql:8.3")
        .withExposedPorts(3306)
        .withEnvironment({ MYSQL_ROOT_PASSWORD: "password" })
        .withWaitStrategy(new MySQLWaitStrategy().withStartupTimeout(10000))
    )
  }
  const host = container.getHost()
  const port = container.getMappedPort(3306)

  return {
  const port = (await ports).find(x => x.container === 3306)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.MYSQL,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      user: "root",
      password: "password",
      database: "mysql",
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
  datasource.config!.database = database
  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.MYSQL) {
    throw new Error("Datasource source is not MySQL")
  }

  const connection = await mysql.createConnection(ds.config)
  try {
    const [rows] = await connection.query(sql)
    return rows
  } finally {
    connection.end()
  }
}
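Because getDatasource() creates a brand new database (named from a GUID) on every call, two datasources handed out by the same provider never share state even though they point at the same container. A small sketch of that isolation, assuming the helpers in this file; the table name and logging are illustrative only:

import { getDatasource, rawQuery } from "./mysql"

// Illustrative only: two datasources from one container are isolated by database.
async function demonstrateIsolation() {
  const dsA = await getDatasource()
  const dsB = await getDatasource()

  await rawQuery(dsA, "CREATE TABLE widgets (id INT PRIMARY KEY)")
  // dsB points at a different `database`, so the table is not visible there.
  const tables = await rawQuery(dsB, "SHOW TABLES")
  console.log(dsA.config!.database, dsB.config!.database, tables) // different names, empty list
}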
@@ -1,33 +1,33 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { GenericContainer, Wait } from "testcontainers"
import pg from "pg"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."

let container: StartedTestContainer | undefined
let ports: Promise<testContainerUtils.Port[]>

export async function start(): Promise<StartedTestContainer> {
  return await new GenericContainer("postgres:16.1-bullseye")
    .withExposedPorts(5432)
    .withEnvironment({ POSTGRES_PASSWORD: "password" })
    .withWaitStrategy(
      Wait.forSuccessfulCommand(
        "pg_isready -h localhost -p 5432"
      ).withStartupTimeout(10000)
export async function getDatasource(): Promise<Datasource> {
  if (!ports) {
    ports = startContainer(
      new GenericContainer("postgres:16.1-bullseye")
        .withExposedPorts(5432)
        .withEnvironment({ POSTGRES_PASSWORD: "password" })
        .withWaitStrategy(
          Wait.forSuccessfulCommand(
            "pg_isready -h localhost -p 5432"
          ).withStartupTimeout(10000)
        )
    )
    .start()
}

export async function datasource(): Promise<Datasource> {
  if (!container) {
    container = await start()
  }
  const host = container.getHost()
  const port = container.getMappedPort(5432)

  return {
  const port = (await ports).find(x => x.container === 5432)?.host

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.POSTGRES,
    plus: true,
    config: {
      host,
      host: "127.0.0.1",
      port,
      database: "postgres",
      user: "postgres",

@@ -38,11 +38,28 @@ export async function datasource(): Promise<Datasource> {
      ca: false,
    },
  }

  const database = generator.guid().replaceAll("-", "")
  await rawQuery(datasource, `CREATE DATABASE "${database}"`)
  datasource.config!.database = database

  return datasource
}

export async function stop() {
  if (container) {
    await container.stop()
    container = undefined
export async function rawQuery(ds: Datasource, sql: string) {
  if (!ds.config) {
    throw new Error("Datasource config is missing")
  }
  if (ds.source !== SourceName.POSTGRES) {
    throw new Error("Datasource source is not Postgres")
  }

  const client = new pg.Client(ds.config)
  await client.connect()
  try {
    const { rows } = await client.query(sql)
    return rows
  } finally {
    await client.end()
  }
}
@@ -25,8 +25,6 @@ const clearMigrations = async () => {
  }
}

jest.setTimeout(10000)

describe("migrations", () => {
  const config = new TestConfig()

@@ -17,8 +17,6 @@ import {
  generator,
} from "@budibase/backend-core/tests"

jest.setTimeout(30000)

describe("external search", () => {
  const config = new TestConfiguration()

@@ -1,4 +1,12 @@
import newid from "../../db/newid"
import TestConfig from "../../tests/utilities/TestConfiguration"
import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"
import {
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,
  TableSourceType,
} from "@budibase/types"
import { FIND_LIMIT } from "../app/rows/attachments"

const attachment = {
  size: 73479,

@@ -8,69 +16,48 @@ const attachment = {
  key: "app_bbb/attachments/a.png",
}

const row = {
  _id: "ro_ta_aaa",
  photo: [attachment],
  otherCol: "string",
}

const table = {
  _id: "ta_aaa",
  name: "photos",
  schema: {
    photo: {
      type: "attachment",
      name: "photo",
    },
    otherCol: {
      type: "string",
      name: "otherCol",
    },
  },
}

jest.mock("@budibase/backend-core", () => {
  const core = jest.requireActual("@budibase/backend-core")
  return {
    ...core,
    db: {
      ...core.db,
      directCouchFind: jest.fn(),
    },
  }
})

import { db as dbCore } from "@budibase/backend-core"
import sdk from "../index"

describe("should be able to re-write attachment URLs", () => {
  const config = new TestConfig()

  beforeAll(async () => {
    await config.init()
  })

  it("should update URLs on a number of rows over the limit", async () => {
    const db = dbCore.getDB("app_aaa")
    await db.put(table)
    const limit = 30
    let rows = []
    for (let i = 0; i < limit; i++) {
      const rowToWrite = {
        ...row,
        _id: `${row._id}_${newid()}`,
      }
      const { rev } = await db.put(rowToWrite)
      rows.push({
        ...rowToWrite,
        _rev: rev,
    const table = await config.api.table.save({
      name: "photos",
      type: "table",
      sourceId: INTERNAL_TABLE_SOURCE_ID,
      sourceType: TableSourceType.INTERNAL,
      schema: {
        photo: {
          type: FieldType.ATTACHMENT,
          name: "photo",
        },
        otherCol: {
          type: FieldType.STRING,
          name: "otherCol",
        },
      },
    })

    for (let i = 0; i < FIND_LIMIT * 4; i++) {
      await config.api.row.save(table._id!, {
        photo: [attachment],
        otherCol: "string",
      })
    }

    dbCore.directCouchFind
      // @ts-ignore
      .mockReturnValueOnce({ rows: rows.slice(0, 25), bookmark: "aaa" })
      .mockReturnValueOnce({ rows: rows.slice(25, limit), bookmark: "bbb" })
    const db = dbCore.getDB(config.getAppId())
    await sdk.backups.updateAttachmentColumns(db.name, db)
    const finalRows = await sdk.rows.getAllInternalRows(db.name)
    for (let rowToCheck of finalRows) {
      expect(rowToCheck.otherCol).toBe(row.otherCol)
      expect(rowToCheck.photo[0].url).toBe("")
      expect(rowToCheck.photo[0].key).toBe(`${db.name}/attachments/a.png`)

    const rows = (await sdk.rows.getAllInternalRows(db.name)).filter(
      row => row.tableId === table._id
    )
    for (const row of rows) {
      expect(row.otherCol).toBe("string")
      expect(row.photo[0].url).toBe("")
      expect(row.photo[0].key).toBe(`${db.name}/attachments/a.png`)
    }
  })
})
@@ -35,11 +35,20 @@ describe("syncGlobalUsers", () => {
        builder: { global: true },
      })
      await config.doInContext(config.appId, async () => {
        expect(await rawUserMetadata()).toHaveLength(1)
        let metadata = await rawUserMetadata()
        expect(metadata).not.toContainEqual(
          expect.objectContaining({
            _id: db.generateUserMetadataID(user1._id!),
          })
        )
        expect(metadata).not.toContainEqual(
          expect.objectContaining({
            _id: db.generateUserMetadataID(user2._id!),
          })
        )
        await syncGlobalUsers()

        const metadata = await rawUserMetadata()
        expect(metadata).toHaveLength(3)
        metadata = await rawUserMetadata()
        expect(metadata).toContainEqual(
          expect.objectContaining({
            _id: db.generateUserMetadataID(user1._id!),

@@ -62,7 +71,6 @@ describe("syncGlobalUsers", () => {
        await syncGlobalUsers()

        const metadata = await rawUserMetadata()
        expect(metadata).toHaveLength(1)
        expect(metadata).not.toContainEqual(
          expect.objectContaining({
            _id: db.generateUserMetadataID(user._id!),
@@ -2,17 +2,11 @@ import env from "../environment"
import { env as coreEnv, timers } from "@budibase/backend-core"
import { testContainerUtils } from "@budibase/backend-core/tests"

if (!process.env.DEBUG) {
  global.console.log = jest.fn() // console.log calls are ignored in tests
  global.console.warn = jest.fn() // console.warn calls are ignored in tests
}

if (!process.env.CI) {
  // set a longer timeout in dev for debugging
  // 100 seconds
  // set a longer timeout (100 seconds) in dev for debugging
  jest.setTimeout(100 * 1000)
} else {
  jest.setTimeout(10 * 1000)
  jest.setTimeout(30 * 1000)
}

testContainerUtils.setupEnv(env, coreEnv)
@@ -1,6 +1,7 @@
import TestConfiguration from "../TestConfiguration"
import { SuperTest, Test, Response } from "supertest"
import request, { SuperTest, Test, Response } from "supertest"
import { ReadStream } from "fs"
import { getServer } from "../../../app"

type Headers = Record<string, string | string[] | undefined>
type Method = "get" | "post" | "put" | "patch" | "delete"

@@ -76,7 +77,8 @@ export abstract class TestAPI {
  protected _requestRaw = async (
    method: "get" | "post" | "put" | "patch" | "delete",
    url: string,
    opts?: RequestOpts
    opts?: RequestOpts,
    attempt = 0
  ): Promise<Response> => {
    const {
      headers = {},

@@ -107,26 +109,29 @@ export abstract class TestAPI {
    const headersFn = publicUser
      ? this.config.publicHeaders.bind(this.config)
      : this.config.defaultHeaders.bind(this.config)
    let request = this.request[method](url).set(

    const app = getServer()
    let req = request(app)[method](url)
    req = req.set(
      headersFn({
        "x-budibase-include-stacktrace": "true",
      })
    )
    if (headers) {
      request = request.set(headers)
      req = req.set(headers)
    }
    if (body) {
      request = request.send(body)
      req = req.send(body)
    }
    for (const [key, value] of Object.entries(fields)) {
      request = request.field(key, value)
      req = req.field(key, value)
    }

    for (const [key, value] of Object.entries(files)) {
      if (isAttachedFile(value)) {
        request = request.attach(key, value.file, value.name)
        req = req.attach(key, value.file, value.name)
      } else {
        request = request.attach(key, value as any)
        req = req.attach(key, value as any)
      }
    }
    if (expectations?.headers) {

@@ -136,11 +141,25 @@ export abstract class TestAPI {
          `Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent`
        )
      }
      request = request.expect(key, value as any)
      req = req.expect(key, value as any)
    }
  }

  return await request
  try {
    return await req
  } catch (e: any) {
    // We've found that occasionally the connection between supertest and the
    // server that supertest starts gets reset. Not sure why, but retrying it
    // appears to work. I don't particularly like this, but it's better than
    // flakiness.
    if (e.code === "ECONNRESET") {
      if (attempt > 2) {
        throw e
      }
      return await this._requestRaw(method, url, opts, attempt + 1)
    }
    throw e
  }
}

protected _checkResponse = (

@@ -170,7 +189,18 @@ export abstract class TestAPI {
    }
  }

  throw new Error(message)
  if (response.error) {
    // Sometimes the error can be between supertest and the app, and when
    // that happens response.error is sometimes populated with `text` that
    // gives more detail about the error. The `message` is almost always
    // useless from what I've seen.
    if (response.error.text) {
      response.error.message = response.error.text
    }
    throw new Error(message, { cause: response.error })
  } else {
    throw new Error(message)
  }
}

if (expectations?.headersNotPresent) {
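The ECONNRESET handling above is effectively a small bounded retry around the request. A generic sketch of the same idea, separated from supertest; the function name and attempt count are illustrative, not part of the codebase:

// Illustrative only: retry an async operation a fixed number of times on ECONNRESET.
async function withConnResetRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: any
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn()
    } catch (e: any) {
      lastError = e
      // Only a reset connection is treated as transient; anything else rethrows immediately.
      if (e.code !== "ECONNRESET") {
        throw e
      }
    }
  }
  throw lastError
}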
@@ -4,6 +4,7 @@ import {
  CreateDatasourceResponse,
  UpdateDatasourceResponse,
  UpdateDatasourceRequest,
  QueryJson,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -45,4 +46,24 @@ export class DatasourceAPI extends TestAPI {
      expectations,
    })
  }

  delete = async (datasource: Datasource, expectations?: Expectations) => {
    return await this._delete(
      `/api/datasources/${datasource._id!}/${datasource._rev!}`,
      { expectations }
    )
  }

  get = async (id: string, expectations?: Expectations) => {
    return await this._get<Datasource>(`/api/datasources/${id}`, {
      expectations,
    })
  }

  query = async (query: QueryJson, expectations?: Expectations) => {
    return await this._post<any>(`/api/datasources/query`, {
      body: query,
      expectations,
    })
  }
}
@@ -6,10 +6,11 @@ import {
  PreviewQueryResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
import { constants } from "@budibase/backend-core"

export class QueryAPI extends TestAPI {
  save = async (body: Query): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body })
  save = async (body: Query, expectations?: Expectations): Promise<Query> => {
    return await this._post<Query>(`/api/queries`, { body, expectations })
  }

  execute = async (

@@ -26,9 +27,36 @@ export class QueryAPI extends TestAPI {
    )
  }

  previewQuery = async (queryPreview: PreviewQueryRequest) => {
  preview = async (
    queryPreview: PreviewQueryRequest,
    expectations?: Expectations
  ) => {
    return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
      body: queryPreview,
      expectations,
    })
  }

  delete = async (query: Query, expectations?: Expectations) => {
    return await this._delete(`/api/queries/${query._id!}/${query._rev!}`, {
      expectations,
    })
  }

  get = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, { expectations })
  }

  getProd = async (queryId: string, expectations?: Expectations) => {
    return await this._get<Query>(`/api/queries/${queryId}`, {
      expectations,
      headers: {
        [constants.Header.APP_ID]: this.config.getProdAppId(),
      },
    })
  }

  fetch = async (expectations?: Expectations) => {
    return await this._get<Query[]>(`/api/queries`, { expectations })
  }
}
@@ -167,7 +167,7 @@ class QueryRunner {
      this.hasRerun = true
    }

    await threadUtils.invalidateDynamicVariables(this.cachedVariables)
    await threadUtils.invalidateCachedVariable(this.cachedVariables)
    return this.execute()
  }

@@ -254,7 +254,7 @@ class QueryRunner {
    let { parameters } = this
    const queryId = variable.queryId,
      name = variable.name
    let value = await threadUtils.checkCacheForDynamicVariable(queryId, name)
    let value = await threadUtils.getCachedVariable(queryId, name)
    if (!value) {
      value = this.queryResponse[queryId]
        ? this.queryResponse[queryId]
@@ -5,7 +5,7 @@ import { redis, db as dbCore } from "@budibase/backend-core"
import * as jsRunner from "../jsRunner"

const VARIABLE_TTL_SECONDS = 3600
let client: any
let client: redis.Client | null = null

async function getClient() {
  if (!client) {

@@ -36,23 +36,15 @@ export function threadSetup() {
  db.init()
}

export async function checkCacheForDynamicVariable(
  queryId: string,
  variable: string
) {
  const cache = await getClient()
  return cache.get(makeVariableKey(queryId, variable))
export async function getCachedVariable(queryId: string, variable: string) {
  return (await getClient()).get(makeVariableKey(queryId, variable))
}

export async function invalidateDynamicVariables(cachedVars: QueryVariable[]) {
export async function invalidateCachedVariable(vars: QueryVariable[]) {
  const cache = await getClient()
  let promises = []
  for (let variable of cachedVars) {
    promises.push(
      cache.delete(makeVariableKey(variable.queryId, variable.name))
    )
  }
  await Promise.all(promises)
  await Promise.all(
    vars.map(v => cache.delete(makeVariableKey(v.queryId, v.name)))
  )
}

export async function storeDynamicVariable(

@@ -93,7 +85,7 @@ export default {
  hasExtraData,
  formatResponse,
  storeDynamicVariable,
  invalidateDynamicVariables,
  checkCacheForDynamicVariable,
  invalidateCachedVariable,
  getCachedVariable,
  threadSetup,
}
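The renamed helpers keep the old shape: look a cached variable up by queryId and name, and drop a batch of them in one go (the deletes now run concurrently through Promise.all). A usage sketch, assuming a QueryVariable carries queryId and name as used above; the import path and function name below are illustrative only:

import { getCachedVariable, invalidateCachedVariable } from "./utils"

// Illustrative only: inspect some cached dynamic variables, then invalidate them as a batch.
async function refreshVariables(vars: { queryId: string; name: string }[]) {
  for (const v of vars) {
    const current = await getCachedVariable(v.queryId, v.name)
    console.log(`cached value for ${v.name}:`, current)
  }
  await invalidateCachedVariable(vars) // subsequent lookups will miss the cache
}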
@@ -54,7 +54,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
    type: columnType,
    subtype: columnSubtype,
    autocolumn: isAutoColumn,
  } = schema[columnName]
  } = schema[columnName] || {}

  // If the column had an invalid value we don't want to override it
  if (results.schemaValidation[columnName] === false) {
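The `|| {}` fallback matters because destructuring from undefined throws; with the fallback, a column that is missing from the schema simply yields undefined fields instead of crashing validation. A minimal illustration (standalone, not taken from the codebase):

// Illustrative only: why the `|| {}` guard is needed.
const schema: Record<string, { type?: string }> = {}

// const { type } = schema["missing"]       // TypeError: cannot destructure from undefined
const { type } = schema["missing"] || {}    // type === undefined, no throw
console.log(type)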
yarn.lock (300)
|
@ -5098,6 +5098,15 @@
|
|||
resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf"
|
||||
integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==
|
||||
|
||||
"@trendyol/jest-testcontainers@2.1.1":
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/@trendyol/jest-testcontainers/-/jest-testcontainers-2.1.1.tgz#dced95cf9c37b75efe0a65db9b75ae8912f2f14a"
|
||||
integrity sha512-4iAc2pMsev4BTUzoA7jO1VvbTOU2N3juQUYa8TwiSPXPuQtxKwV9WB9ZEP+JQ+Pj15YqfGOXp5H0WNMPtapjiA==
|
||||
dependencies:
|
||||
cwd "^0.10.0"
|
||||
node-duration "^1.0.4"
|
||||
testcontainers "4.7.0"
|
||||
|
||||
"@trysound/sax@0.2.0":
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/@trysound/sax/-/sax-0.2.0.tgz#cccaab758af56761eb7bf37af6f03f326dd798ad"
|
||||
|
@ -5287,6 +5296,13 @@
|
|||
"@types/node" "*"
|
||||
"@types/ssh2" "*"
|
||||
|
||||
"@types/dockerode@^2.5.34":
|
||||
version "2.5.34"
|
||||
resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-2.5.34.tgz#9adb884f7cc6c012a6eb4b2ad794cc5d01439959"
|
||||
integrity sha512-LcbLGcvcBwBAvjH9UrUI+4qotY+A5WCer5r43DR5XHv2ZIEByNXFdPLo1XxR+v/BjkGjlggW8qUiXuVEhqfkpA==
|
||||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/dockerode@^3.3.24":
|
||||
version "3.3.24"
|
||||
resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-3.3.24.tgz#bea354a4fcd0824a80fd5ea5ede3e8cda71137a7"
|
||||
|
@ -5875,6 +5891,13 @@
|
|||
"@types/pouchdb-node" "*"
|
||||
"@types/pouchdb-replication" "*"
|
||||
|
||||
"@types/proper-lockfile@^4.1.4":
|
||||
version "4.1.4"
|
||||
resolved "https://registry.yarnpkg.com/@types/proper-lockfile/-/proper-lockfile-4.1.4.tgz#cd9fab92bdb04730c1ada542c356f03620f84008"
|
||||
integrity sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==
|
||||
dependencies:
|
||||
"@types/retry" "*"
|
||||
|
||||
"@types/qs@*":
|
||||
version "6.9.7"
|
||||
resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"
|
||||
|
@ -5937,6 +5960,11 @@
|
|||
dependencies:
|
||||
"@types/node" "*"
|
||||
|
||||
"@types/retry@*":
|
||||
version "0.12.5"
|
||||
resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.5.tgz#f090ff4bd8d2e5b940ff270ab39fd5ca1834a07e"
|
||||
integrity sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==
|
||||
|
||||
"@types/rimraf@^3.0.2":
|
||||
version "3.0.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/rimraf/-/rimraf-3.0.2.tgz#a63d175b331748e5220ad48c901d7bbf1f44eef8"
|
||||
|
@ -7249,37 +7277,7 @@ axios-retry@^3.1.9:
|
|||
"@babel/runtime" "^7.15.4"
|
||||
is-retry-allowed "^2.2.0"
|
||||
|
||||
axios@0.24.0:
|
||||
version "0.24.0"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
|
||||
integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
|
||||
dependencies:
|
||||
follow-redirects "^1.14.4"
|
||||
|
||||
axios@1.1.3:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35"
|
||||
integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA==
|
||||
dependencies:
|
||||
follow-redirects "^1.15.0"
|
||||
form-data "^4.0.0"
|
||||
proxy-from-env "^1.1.0"
|
||||
|
||||
axios@^0.21.1, axios@^0.21.4:
|
||||
version "0.21.4"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
|
||||
integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
|
||||
dependencies:
|
||||
follow-redirects "^1.14.0"
|
||||
|
||||
axios@^0.26.0:
|
||||
version "0.26.1"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
|
||||
integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
|
||||
dependencies:
|
||||
follow-redirects "^1.14.8"
|
||||
|
||||
axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
|
||||
axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^0.21.4, axios@^0.26.0, axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
|
||||
version "1.6.3"
|
||||
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
|
||||
integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
|
||||
|
@ -9154,6 +9152,14 @@ curlconverter@3.21.0:
|
|||
string.prototype.startswith "^1.0.0"
|
||||
yamljs "^0.3.0"
|
||||
|
||||
cwd@^0.10.0:
|
||||
version "0.10.0"
|
||||
resolved "https://registry.yarnpkg.com/cwd/-/cwd-0.10.0.tgz#172400694057c22a13b0cf16162c7e4b7a7fe567"
|
||||
integrity sha512-YGZxdTTL9lmLkCUTpg4j0zQ7IhRB5ZmqNBbGCl3Tg6MP/d5/6sY7L5mmTjzbc6JKgVZYiqTQTNhPFsbXNGlRaA==
|
||||
dependencies:
|
||||
find-pkg "^0.1.2"
|
||||
fs-exists-sync "^0.1.0"
|
||||
|
||||
dargs@^7.0.0:
|
||||
version "7.0.0"
|
||||
resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc"
|
||||
|
@ -9775,7 +9781,7 @@ docker-compose@0.24.0:
|
|||
dependencies:
|
||||
yaml "^1.10.2"
|
||||
|
||||
docker-compose@^0.23.6:
|
||||
docker-compose@^0.23.5, docker-compose@^0.23.6:
|
||||
version "0.23.19"
|
||||
resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.19.tgz#9947726e2fe67bdfa9e8efe1ff15aa0de2e10eb8"
|
||||
integrity sha512-v5vNLIdUqwj4my80wxFDkNH+4S85zsRuH29SO7dCWVWPCMt/ohZBsGN6g6KXWifT0pzQ7uOxqEKCYCDPJ8Vz4g==
|
||||
|
@ -9799,7 +9805,7 @@ docker-modem@^3.0.0:
|
|||
split-ca "^1.0.1"
|
||||
ssh2 "^1.11.0"
|
||||
|
||||
dockerode@^3.3.5:
|
||||
dockerode@^3.2.1, dockerode@^3.3.5:
|
||||
version "3.3.5"
|
||||
resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629"
|
||||
integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA==
|
||||
|
@ -10824,6 +10830,13 @@ expand-template@^2.0.3:
|
|||
resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c"
|
||||
integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==
|
||||
|
||||
expand-tilde@^1.2.2:
|
||||
version "1.2.2"
|
||||
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449"
|
||||
integrity sha512-rtmc+cjLZqnu9dSYosX9EWmSJhTwpACgJQTfj4hgg2JjOD/6SIQalZrt4a3aQeh++oNxkazcaxrhPUj6+g5G/Q==
|
||||
dependencies:
|
||||
os-homedir "^1.0.1"
|
||||
|
||||
expand-tilde@^2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-2.0.2.tgz#97e801aa052df02454de46b02bf621642cdc8502"
|
||||
|
@ -11162,11 +11175,26 @@ filter-obj@^1.1.0:
|
|||
resolved "https://registry.yarnpkg.com/filter-obj/-/filter-obj-1.1.0.tgz#9b311112bc6c6127a16e016c6c5d7f19e0805c5b"
|
||||
integrity sha512-8rXg1ZnX7xzy2NGDVkBVaAy+lSlPNwad13BtgSlLuxfIslyt5Vg64U7tFcCt4WS1R0hvtnQybT/IyCkGZ3DpXQ==
|
||||
|
||||
find-file-up@^0.1.2:
|
||||
version "0.1.3"
|
||||
resolved "https://registry.yarnpkg.com/find-file-up/-/find-file-up-0.1.3.tgz#cf68091bcf9f300a40da411b37da5cce5a2fbea0"
|
||||
integrity sha512-mBxmNbVyjg1LQIIpgO8hN+ybWBgDQK8qjht+EbrTCGmmPV/sc7RF1i9stPTD6bpvXZywBdrwRYxhSdJv867L6A==
|
||||
dependencies:
|
||||
fs-exists-sync "^0.1.0"
|
||||
resolve-dir "^0.1.0"
|
||||
|
||||
find-free-port@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/find-free-port/-/find-free-port-2.0.0.tgz#4b22e5f6579eb1a38c41ac6bcb3efed1b6da9b1b"
|
||||
integrity sha512-J1j8gfEVf5FN4PR5w5wrZZ7NYs2IvqsHcd03cAeQx3Ec/mo+lKceaVNhpsRKoZpZKbId88o8qh+dwUwzBV6WCg==
|
||||
|
||||
find-pkg@^0.1.2:
|
||||
version "0.1.2"
|
||||
resolved "https://registry.yarnpkg.com/find-pkg/-/find-pkg-0.1.2.tgz#1bdc22c06e36365532e2a248046854b9788da557"
|
||||
integrity sha512-0rnQWcFwZr7eO0513HahrWafsc3CTFioEB7DRiEYCUM/70QXSY8f3mCST17HXLcPvEhzH/Ty/Bxd72ZZsr/yvw==
|
||||
dependencies:
|
||||
find-file-up "^0.1.2"
|
||||
|
||||
find-up@^2.0.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
|
||||
|
@ -11230,11 +11258,6 @@ fn.name@1.x.x:
|
|||
resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc"
|
||||
integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==
|
||||
|
||||
follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.8:
|
||||
version "1.15.6"
|
||||
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
|
||||
integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
|
||||
|
||||
follow-redirects@^1.15.0:
|
||||
version "1.15.2"
|
||||
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
|
||||
|
@ -11339,6 +11362,11 @@ fs-constants@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
|
||||
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
|
||||
|
||||
fs-exists-sync@^0.1.0:
|
||||
version "0.1.0"
|
||||
resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add"
|
||||
integrity sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg==
|
||||
|
||||
fs-extra@^10.0.0:
|
||||
version "10.1.0"
|
||||
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf"
|
||||
|
@ -11835,6 +11863,24 @@ global-dirs@^3.0.0:
|
|||
dependencies:
|
||||
ini "2.0.0"
|
||||
|
||||
global-modules@^0.2.3:
|
||||
version "0.2.3"
|
||||
resolved "https://registry.yarnpkg.com/global-modules/-/global-modules-0.2.3.tgz#ea5a3bed42c6d6ce995a4f8a1269b5dae223828d"
|
||||
integrity sha512-JeXuCbvYzYXcwE6acL9V2bAOeSIGl4dD+iwLY9iUx2VBJJ80R18HCn+JCwHM9Oegdfya3lEkGCdaRkSyc10hDA==
|
||||
dependencies:
|
||||
global-prefix "^0.1.4"
|
||||
is-windows "^0.2.0"
|
||||
|
||||
global-prefix@^0.1.4:
|
||||
version "0.1.5"
|
||||
resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-0.1.5.tgz#8d3bc6b8da3ca8112a160d8d496ff0462bfef78f"
|
||||
integrity sha512-gOPiyxcD9dJGCEArAhF4Hd0BAqvAe/JzERP7tYumE4yIkmIedPUVXcJFWbV3/p/ovIIvKjkrTk+f1UVkq7vvbw==
|
||||
dependencies:
|
||||
homedir-polyfill "^1.0.0"
|
||||
ini "^1.3.4"
|
||||
is-windows "^0.2.0"
|
||||
which "^1.2.12"
|
||||
|
||||
global@~4.4.0:
|
||||
version "4.4.0"
|
||||
resolved "https://registry.yarnpkg.com/global/-/global-4.4.0.tgz#3e7b105179006a323ed71aafca3e9c57a5cc6406"
|
||||
|
@ -12265,7 +12311,7 @@ hmac-drbg@^1.0.1:
|
|||
minimalistic-assert "^1.0.0"
|
||||
minimalistic-crypto-utils "^1.0.1"
|
||||
|
||||
homedir-polyfill@^1.0.1:
|
||||
homedir-polyfill@^1.0.0, homedir-polyfill@^1.0.1:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8"
|
||||
integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==
|
||||
|
@ -12353,12 +12399,7 @@ http-assert@^1.3.0:
|
|||
deep-equal "~1.0.1"
|
||||
http-errors "~1.8.0"
|
||||
|
||||
http-cache-semantics@3.8.1:
|
||||
version "3.8.1"
|
||||
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2"
|
||||
integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==
|
||||
|
||||
http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
|
||||
http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
|
||||
integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
|
||||
|
@ -13244,6 +13285,11 @@ is-whitespace@^0.3.0:
|
|||
resolved "https://registry.yarnpkg.com/is-whitespace/-/is-whitespace-0.3.0.tgz#1639ecb1be036aec69a54cbb401cfbed7114ab7f"
|
||||
integrity sha512-RydPhl4S6JwAyj0JJjshWJEFG6hNye3pZFBRZaTUfZFwGHxzppNaNOVgQuS/E/SlhrApuMXrpnK1EEIXfdo3Dg==
|
||||
|
||||
is-windows@^0.2.0:
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c"
|
||||
integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q==
|
||||
|
||||
is-wsl@^2.1.1, is-wsl@^2.2.0:
|
||||
version "2.2.0"
|
||||
resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271"
|
||||
|
@ -13303,6 +13349,11 @@ isobject@^3.0.1:
|
|||
resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
|
||||
integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==
|
||||
|
||||
isobject@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0"
|
||||
integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==
|
||||
|
||||
isolated-vm@^4.7.2:
|
||||
version "4.7.2"
|
||||
resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65"
|
||||
|
@ -15897,7 +15948,7 @@ msgpackr-extract@^3.0.2:
|
|||
"@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2"
|
||||
"@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2"
|
||||
|
||||
msgpackr@^1.5.2:
|
||||
msgpackr@1.10.1, msgpackr@^1.5.2:
|
||||
version "1.10.1"
|
||||
resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555"
|
||||
integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ==
|
||||
|
@ -16101,25 +16152,18 @@ node-addon-api@^6.1.0:
|
|||
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
|
||||
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
|
||||
|
||||
node-fetch@2.6.0:
|
||||
version "2.6.0"
|
||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
|
||||
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
|
||||
node-duration@^1.0.4:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/node-duration/-/node-duration-1.0.4.tgz#3e94ecc0e473691c89c4560074503362071cecac"
|
||||
integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA==
|
||||
|
||||
node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7:
|
||||
node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0:
|
||||
version "2.6.7"
|
||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
|
||||
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
|
||||
dependencies:
|
||||
whatwg-url "^5.0.0"
|
||||
|
||||
node-fetch@^2.6.9, node-fetch@^2.7.0:
|
||||
version "2.7.0"
|
||||
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
|
||||
integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
|
||||
dependencies:
|
||||
whatwg-url "^5.0.0"
|
||||
|
||||
node-forge@^1.3.1:
|
||||
version "1.3.1"
|
||||
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
|
||||
|
@ -16859,6 +16903,11 @@ oracledb@5.3.0:
|
|||
resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.3.0.tgz#a15e6cd16757d8711a2c006a28bd7ecd3b8466f7"
|
||||
integrity sha512-HMJzQ6lCf287ztvvehTEmjCWA21FQ3RMvM+mgoqd4i8pkREuqFWO+y3ovsGR9moJUg4T0xjcwS8rl4mggWPxmg==
|
||||
|
||||
os-homedir@^1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
|
||||
integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==
|
||||
|
||||
os-locale@^3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
|
||||
|
@ -17264,15 +17313,7 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4"
|
||||
integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA==
|
||||
|
||||
passport@^0.4.0:
|
||||
version "0.4.1"
|
||||
resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270"
|
||||
integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg==
|
||||
dependencies:
|
||||
passport-strategy "1.x.x"
|
||||
pause "0.0.1"
|
||||
|
||||
passport@^0.6.0:
|
||||
passport@0.6.0, passport@^0.4.0, passport@^0.6.0:
|
||||
version "0.6.0"
|
||||
resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d"
|
||||
integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==
|
||||
|
@ -17991,9 +18032,9 @@ postgres-interval@^1.1.0:
|
|||
xtend "^4.0.0"
|
||||
|
||||
posthog-js@^1.116.6:
|
||||
version "1.116.6"
|
||||
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.116.6.tgz#9a5c9f49230a76642f4c44d93b96710f886c2880"
|
||||
integrity sha512-rvt8HxzJD4c2B/xsUa4jle8ApdqljeBI2Qqjp4XJMohQf18DXRyM6b96H5/UMs8jxYuZG14Er0h/kEIWeU6Fmw==
|
||||
version "1.117.0"
|
||||
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.117.0.tgz#59c3e520f6269f76ea82dce8760fbc33cdd7f48f"
|
||||
integrity sha512-+I8q5G9YG6r6wOLKPT+C+AV7MRhyVFJMTJS7dfwLmmT+mkVxQ5bfC59hBkJUObOR+YRn5jn2JT/sgIslU94EZg==
|
||||
dependencies:
|
||||
fflate "^0.4.8"
|
||||
preact "^10.19.3"
|
||||
|
@ -18573,7 +18614,7 @@ pseudomap@^1.0.2:
|
|||
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
|
||||
integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==
|
||||
|
||||
psl@^1.1.28, psl@^1.1.33:
|
||||
psl@^1.1.33:
|
||||
version "1.9.0"
|
||||
resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7"
|
||||
integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==
|
||||
|
@ -19186,6 +19227,14 @@ resolve-dependency-path@^2.0.0:
|
|||
resolved "https://registry.yarnpkg.com/resolve-dependency-path/-/resolve-dependency-path-2.0.0.tgz#11700e340717b865d216c66cabeb4a2a3c696736"
|
||||
integrity sha512-DIgu+0Dv+6v2XwRaNWnumKu7GPufBBOr5I1gRPJHkvghrfCGOooJODFvgFimX/KRxk9j0whD2MnKHzM1jYvk9w==
|
||||
|
||||
resolve-dir@^0.1.0:
|
||||
version "0.1.1"
|
||||
resolved "https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-0.1.1.tgz#b219259a5602fac5c5c496ad894a6e8cc430261e"
|
||||
integrity sha512-QxMPqI6le2u0dCLyiGzgy92kjkkL6zO0XyvHzjdTNH3zM6e5Hz3BwG6+aEyNgiQ5Xz6PwTwgQEj3U50dByPKIA==
|
||||
dependencies:
|
||||
expand-tilde "^1.2.2"
|
||||
global-modules "^0.2.3"
|
||||
|
||||
resolve-from@5.0.0, resolve-from@^5.0.0:
|
||||
version "5.0.0"
|
||||
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
|
||||
|
@ -19590,11 +19639,6 @@ sax@1.2.1:
|
|||
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
|
||||
integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==
|
||||
|
||||
sax@>=0.1.1:
|
||||
version "1.3.0"
|
||||
resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0"
|
||||
integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==
|
||||
|
||||
sax@>=0.6.0:
|
||||
version "1.2.4"
|
||||
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
|
||||
|
@ -19676,40 +19720,13 @@ semver-diff@^3.1.1:
|
|||
dependencies:
|
||||
semver "^6.3.0"
|
||||
|
||||
"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
|
||||
version "5.7.2"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
|
||||
integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
|
||||
|
||||
semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3:
|
||||
"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~2.3.1, semver@~7.0.0:
|
||||
version "7.5.3"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e"
|
||||
integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==
|
||||
dependencies:
|
||||
lru-cache "^6.0.0"
|
||||
|
||||
semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1:
|
||||
version "6.3.1"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
|
||||
integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
|
||||
|
||||
semver@^7.5.4:
|
||||
version "7.6.0"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d"
|
||||
integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==
|
||||
dependencies:
|
||||
lru-cache "^6.0.0"
|
||||
|
||||
semver@~2.3.1:
|
||||
version "2.3.2"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52"
|
||||
integrity sha512-abLdIKCosKfpnmhS52NCTjO4RiLspDfsn37prjzGrp9im5DPJOgh82Os92vtwGh6XdQryKI/7SREZnV+aqiXrA==
|
||||
|
||||
semver@~7.0.0:
|
||||
version "7.0.0"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
|
||||
integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
|
||||
|
||||
seq-queue@^0.0.5:
|
||||
version "0.0.5"
|
||||
resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e"
|
||||
|
@ -20907,7 +20924,7 @@ tapable@^2.1.1, tapable@^2.2.0:
  resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0"
  integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==

tar-fs@2.1.1, tar-fs@^2.0.0:
tar-fs@2.1.1, tar-fs@^2.0.0, tar-fs@^2.1.0:
  version "2.1.1"
  resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784"
  integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==
@ -21085,6 +21102,23 @@ testcontainers@10.7.2, testcontainers@^10.7.2:
    tar-fs "^3.0.5"
    tmp "^0.2.1"

testcontainers@4.7.0:
  version "4.7.0"
  resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-4.7.0.tgz#5a9a864b1b0cc86984086dcc737c2f5e73490cf3"
  integrity sha512-5SrG9RMfDRRZig34fDZeMcGD5i3lHCOJzn0kjouyK4TiEWjZB3h7kCk8524lwNRHROFE1j6DGjceonv/5hl5ag==
  dependencies:
    "@types/dockerode" "^2.5.34"
    byline "^5.0.0"
    debug "^4.1.1"
    docker-compose "^0.23.5"
    dockerode "^3.2.1"
    get-port "^5.1.1"
    glob "^7.1.6"
    node-duration "^1.0.4"
    slash "^3.0.0"
    stream-to-array "^2.3.0"
    tar-fs "^2.1.0"

text-extensions@^1.0.0:
  version "1.9.0"
  resolved "https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26"
@ -21295,7 +21329,7 @@ touch@^3.1.0:
  dependencies:
    nopt "~1.0.10"

"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2:
tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0:
  version "4.1.3"
  resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf"
  integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==
@ -21305,14 +21339,6 @@ touch@^3.1.0:
    universalify "^0.2.0"
    url-parse "^1.5.3"

tough-cookie@~2.5.0:
  version "2.5.0"
  resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
  integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
  dependencies:
    psl "^1.1.28"
    punycode "^2.1.1"

tr46@^2.1.0:
  version "2.1.0"
  resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240"
@ -21789,6 +21815,14 @@ unpipe@1.0.0:
  resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
  integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==

unset-value@2.0.1:
  version "2.0.1"
  resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3"
  integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg==
  dependencies:
    has-value "^2.0.2"
    isobject "^4.0.0"

untildify@^4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b"
@ -22323,7 +22357,7 @@ which-typed-array@^1.1.11, which-typed-array@^1.1.13, which-typed-array@^1.1.9:
    gopd "^1.0.1"
    has-tostringtag "^1.0.0"

which@^1.2.9:
which@^1.2.12, which@^1.2.9:
  version "1.3.1"
  resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
  integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
@ -22559,33 +22593,10 @@ xml-parse-from-string@^1.0.0:
  resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28"
  integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g==

xml2js@0.1.x:
  version "0.1.14"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c"
  integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA==
  dependencies:
    sax ">=0.1.1"

xml2js@0.4.19:
  version "0.4.19"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
  integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
  dependencies:
    sax ">=0.6.0"
    xmlbuilder "~9.0.1"

xml2js@0.5.0:
  version "0.5.0"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7"
  integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==
  dependencies:
    sax ">=0.6.0"
    xmlbuilder "~11.0.0"

xml2js@^0.4.19, xml2js@^0.4.5:
  version "0.4.23"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
  integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
xml2js@0.1.x, xml2js@0.4.19, xml2js@0.5.0, xml2js@0.6.2, xml2js@^0.4.19, xml2js@^0.4.5:
  version "0.6.2"
  resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499"
  integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==
  dependencies:
    sax ">=0.6.0"
    xmlbuilder "~11.0.0"
@ -22595,11 +22606,6 @@ xmlbuilder@~11.0.0:
  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
  integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==

xmlbuilder@~9.0.1:
  version "9.0.7"
  resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
  integrity sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==

xmlchars@^2.2.0:
  version "2.2.0"
  resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"