Merge branch 'master' into BUDI-8270/validation-for-search-api

# Conflicts:
#	packages/server/package.json
#	packages/server/src/api/controllers/row/index.ts
#	packages/server/src/api/routes/tests/search.spec.ts
#	yarn.lock

commit 9c460424a9
@@ -114,9 +114,11 @@ jobs:
       - name: Test
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --no-prefix
           fi

   test-worker:
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.9",
+  "version": "3.2.10",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {
@@ -109,7 +109,7 @@
     "semver": "7.5.3",
     "http-cache-semantics": "4.1.1",
     "msgpackr": "1.10.1",
-    "axios": "1.6.3",
+    "axios": "1.7.7",
     "xml2js": "0.6.2",
     "unset-value": "2.0.1",
     "passport": "0.6.0",
@@ -119,6 +119,5 @@
   },
   "engines": {
     "node": ">=20.0.0 <21.0.0"
-  },
-  "dependencies": {}
+  }
 }
@@ -33,14 +33,17 @@
     "@budibase/pouchdb-replication-stream": "1.2.11",
     "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",
+    "@techpass/passport-openidconnect": "0.3.3",
     "aws-cloudfront-sign": "3.0.2",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
     "correlation-id": "4.0.0",
-    "dd-trace": "5.2.0",
+    "dd-trace": "5.23.0",
     "dotenv": "16.0.1",
+    "google-auth-library": "^8.0.1",
+    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
     "joi": "17.6.0",
     "jsonwebtoken": "9.0.2",
@@ -55,17 +58,14 @@
     "pino": "8.11.0",
     "pino-http": "8.3.3",
     "posthog-node": "4.0.1",
-    "pouchdb": "7.3.0",
-    "pouchdb-find": "7.2.2",
+    "pouchdb": "9.0.0",
+    "pouchdb-find": "9.0.0",
     "redlock": "4.2.0",
     "rotating-file-stream": "3.1.0",
     "sanitize-s3-objectkey": "0.0.1",
     "semver": "^7.5.4",
     "tar-fs": "2.1.1",
-    "uuid": "^8.3.2",
-    "@techpass/passport-openidconnect": "0.3.3",
-    "google-auth-library": "^8.0.1",
-    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5"
+    "uuid": "^8.3.2"
   },
   "devDependencies": {
     "@jest/types": "^29.6.3",
@@ -78,7 +78,7 @@
     "@types/lodash": "4.14.200",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",
-    "@types/pouchdb": "6.4.0",
+    "@types/pouchdb": "6.4.2",
     "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
@@ -1 +1 @@
-Subproject commit bfeece324a03a3a5f25137bf3f8c66d5ed6103d8
+Subproject commit 4facf6a44ee52a405794845f71584168b9db652c
@@ -63,13 +63,13 @@
     "@bull-board/koa": "5.10.2",
     "@elastic/elasticsearch": "7.10.0",
     "@google-cloud/firestore": "7.8.0",
-    "@koa/router": "8.0.8",
+    "@koa/router": "13.1.0",
     "@socket.io/redis-adapter": "^8.2.1",
     "@types/xml2js": "^0.4.14",
     "airtable": "0.12.2",
     "arangojs": "7.2.0",
     "archiver": "7.0.1",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bson": "^6.9.0",
@@ -80,8 +80,8 @@
     "cookies": "0.8.0",
     "csvtojson": "2.0.10",
     "curlconverter": "3.21.0",
+    "dd-trace": "5.23.0",
     "dayjs": "^1.10.8",
-    "dd-trace": "5.2.0",
     "dotenv": "8.2.0",
     "form-data": "4.0.0",
     "global-agent": "3.0.0",
@@ -89,7 +89,7 @@
     "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
     "isolated-vm": "^4.7.2",
-    "jimp": "0.22.12",
+    "jimp": "1.1.4",
     "joi": "17.6.0",
     "js-yaml": "4.1.0",
     "jsonschema": "1.4.0",
@@ -104,7 +104,7 @@
     "lodash": "4.17.21",
     "memorystream": "0.3.1",
     "mongodb": "6.7.0",
-    "mssql": "10.0.1",
+    "mssql": "11.0.1",
     "mysql2": "3.9.8",
     "node-fetch": "2.6.7",
     "object-sizeof": "2.6.1",
@@ -112,15 +112,15 @@
     "openapi-types": "9.3.1",
     "oracledb": "6.5.1",
     "pg": "8.10.0",
-    "pouchdb": "7.3.0",
+    "pouchdb": "9.0.0",
     "pouchdb-all-dbs": "1.1.1",
-    "pouchdb-find": "7.2.2",
+    "pouchdb-find": "9.0.0",
     "redis": "4",
     "semver": "^7.5.4",
     "serialize-error": "^7.0.1",
     "server-destroy": "1.0.1",
-    "snowflake-promise": "^4.5.0",
-    "socket.io": "4.7.5",
+    "snowflake-sdk": "^1.15.0",
+    "socket.io": "4.8.1",
     "svelte": "^4.2.10",
     "tar": "6.2.1",
     "tmp": "0.2.3",
@@ -128,7 +128,7 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0",
+    "xml2js": "0.6.2",
     "zod-validation-error": "^3.4.0"
   },
   "devDependencies": {
@@ -142,13 +142,14 @@
     "@types/jest": "29.5.5",
     "@types/koa": "2.13.4",
     "@types/koa-send": "^4.1.6",
-    "@types/koa__router": "8.0.8",
+    "@types/koa__router": "12.0.4",
     "@types/lodash": "4.14.200",
-    "@types/mssql": "9.1.4",
+    "@types/mssql": "9.1.5",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",
+    "@types/oracledb": "6.5.1",
     "@types/pg": "8.6.6",
     "@types/pouchdb": "6.4.2",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",
@ -4,7 +4,7 @@ import { URL } from "url"
|
|||
|
||||
const curlconverter = require("curlconverter")
|
||||
|
||||
const parseCurl = (data: string): any => {
|
||||
const parseCurl = (data: string): Promise<any> => {
|
||||
const curlJson = curlconverter.toJsonString(data)
|
||||
return JSON.parse(curlJson)
|
||||
}
|
||||
|
@@ -53,8 +53,7 @@ export class Curl extends ImportSource {

   isSupported = async (data: string): Promise<boolean> => {
     try {
-      const curl = parseCurl(data)
-      this.curl = curl
+      this.curl = parseCurl(data)
     } catch (err) {
       return false
     }
@ -164,9 +164,12 @@ describe("/datasources", () => {
|
|||
})
|
||||
})
|
||||
|
||||
datasourceDescribe(
|
||||
{ name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
|
||||
({ config, dsProvider }) => {
|
||||
const descriptions = datasourceDescribe({
|
||||
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
|
||||
})
|
||||
|
||||
if (descriptions.length) {
|
||||
describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
|
||||
let datasource: Datasource
|
||||
let rawDatasource: Datasource
|
||||
let client: Knex
|
||||
|
@@ -492,5 +495,5 @@ datasourceDescribe(
         )
       })
     })
-  }
-)
+  })
+}
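The tail above closes out the refactor pattern that recurs throughout this merge: datasourceDescribe no longer takes a callback, it returns an array of descriptions that callers hand to describe.each, guarded by a length check so Jest is never given an empty table. A minimal sketch of the new convention, reusing the imports from the test file above (the it-block body is illustrative, not from the diff):

const descriptions = datasourceDescribe({
  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
})

if (descriptions.length) {
  describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
    let datasource: Datasource | undefined

    beforeAll(async () => {
      // each description carries a provider for its own database
      const ds = await dsProvider()
      datasource = ds.datasource
    })

    it("only runs when a matching datasource is configured", () => {
      expect(datasource).toBeDefined()
    })
  })
}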
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -977,63 +977,69 @@ describe("/rowsActions", () => {
|
|||
})
|
||||
})
|
||||
|
||||
datasourceDescribe(
|
||||
{ name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
|
||||
({ config, dsProvider, isInternal }) => {
|
||||
let datasource: Datasource | undefined
|
||||
const descriptions = datasourceDescribe({
|
||||
only: [DatabaseName.SQS, DatabaseName.POSTGRES],
|
||||
})
|
||||
|
||||
beforeAll(async () => {
|
||||
const ds = await dsProvider()
|
||||
datasource = ds.datasource
|
||||
})
|
||||
if (descriptions.length) {
|
||||
describe.each(descriptions)(
|
||||
"row actions ($dbName)",
|
||||
({ config, dsProvider, isInternal }) => {
|
||||
let datasource: Datasource | undefined
|
||||
|
||||
async function getTable(): Promise<Table> {
|
||||
if (isInternal) {
|
||||
await config.api.application.addSampleData(config.getAppId())
|
||||
const tables = await config.api.table.fetch()
|
||||
return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
|
||||
} else {
|
||||
const table = await config.api.table.save(
|
||||
setup.structures.tableForDatasource(datasource!)
|
||||
)
|
||||
return table
|
||||
}
|
||||
}
|
||||
beforeAll(async () => {
|
||||
const ds = await dsProvider()
|
||||
datasource = ds.datasource
|
||||
})
|
||||
|
||||
it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
|
||||
async function getRowActionsFromDb(tableId: string) {
|
||||
return await context.doInAppContext(config.getAppId(), async () => {
|
||||
const db = context.getAppDB()
|
||||
const tableDoc = await db.tryGet<TableRowActions>(
|
||||
generateRowActionsID(tableId)
|
||||
async function getTable(): Promise<Table> {
|
||||
if (isInternal) {
|
||||
await config.api.application.addSampleData(config.getAppId())
|
||||
const tables = await config.api.table.fetch()
|
||||
return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
|
||||
} else {
|
||||
const table = await config.api.table.save(
|
||||
setup.structures.tableForDatasource(datasource!)
|
||||
)
|
||||
return tableDoc
|
||||
})
|
||||
return table
|
||||
}
|
||||
}
|
||||
|
||||
const table = await getTable()
|
||||
const tableId = table._id!
|
||||
it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
|
||||
async function getRowActionsFromDb(tableId: string) {
|
||||
return await context.doInAppContext(config.getAppId(), async () => {
|
||||
const db = context.getAppDB()
|
||||
const tableDoc = await db.tryGet<TableRowActions>(
|
||||
generateRowActionsID(tableId)
|
||||
)
|
||||
return tableDoc
|
||||
})
|
||||
}
|
||||
|
||||
await config.api.rowAction.save(tableId, {
|
||||
name: generator.guid(),
|
||||
const table = await getTable()
|
||||
const tableId = table._id!
|
||||
|
||||
await config.api.rowAction.save(tableId, {
|
||||
name: generator.guid(),
|
||||
})
|
||||
await config.api.rowAction.save(tableId, {
|
||||
name: generator.guid(),
|
||||
})
|
||||
|
||||
const { actions } = (await getRowActionsFromDb(tableId))!
|
||||
expect(Object.entries(actions)).toHaveLength(2)
|
||||
|
||||
const { automations } = await config.api.automation.fetch()
|
||||
expect(automations).toHaveLength(2)
|
||||
|
||||
const datasource = await config.api.datasource.get(table.sourceId)
|
||||
await config.api.datasource.delete(datasource)
|
||||
|
||||
const automationsResp = await config.api.automation.fetch()
|
||||
expect(automationsResp.automations).toHaveLength(0)
|
||||
|
||||
expect(await getRowActionsFromDb(tableId)).toBeUndefined()
|
||||
})
|
||||
await config.api.rowAction.save(tableId, {
|
||||
name: generator.guid(),
|
||||
})
|
||||
|
||||
const { actions } = (await getRowActionsFromDb(tableId))!
|
||||
expect(Object.entries(actions)).toHaveLength(2)
|
||||
|
||||
const { automations } = await config.api.automation.fetch()
|
||||
expect(automations).toHaveLength(2)
|
||||
|
||||
const datasource = await config.api.datasource.get(table.sourceId)
|
||||
await config.api.datasource.delete(datasource)
|
||||
|
||||
const automationsResp = await config.api.automation.fetch()
|
||||
expect(automationsResp.automations).toHaveLength(0)
|
||||
|
||||
expect(await getRowActionsFromDb(tableId)).toBeUndefined()
|
||||
})
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -7,71 +7,74 @@ import {
 import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"

-datasourceDescribe(
-  {
-    name: "execute query action",
-    exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
-  },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({
+  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "execute query action ($dbName)",
+    ({ config, dsProvider }) => {
       let tableName: string
       let client: Knex
       let datasource: Datasource
       let query: Query

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       beforeEach(async () => {
         tableName = generator.guid()
         await client.schema.createTable(tableName, table => {
           table.string("a")
           table.integer("b")
         })
         await client(tableName).insert({ a: "string", b: 1 })
         query = await setup.saveTestQuery(config, client, tableName, datasource)
       })

       afterEach(async () => {
         await client.schema.dropTable(tableName)
       })

       it("should be able to execute a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: query._id },
           }
         )
         expect(res.response).toEqual([{ a: "string", b: 1 }])
         expect(res.success).toEqual(true)
       })

       it("should handle a null query value", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: null,
           }
         )
         expect(res.response.message).toEqual("Invalid inputs")
         expect(res.success).toEqual(false)
       })

       it("should handle an error executing a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: "wrong_id" },
           }
         )
         expect(res.response).toBeDefined()
         expect(res.success).toEqual(false)
       })
-  }
-)
+    }
+  )
+}
@ -433,9 +433,10 @@ describe("Automation Scenarios", () => {
|
|||
})
|
||||
})
|
||||
|
||||
datasourceDescribe(
|
||||
{ name: "", only: [DatabaseName.MYSQL] },
|
||||
({ config, dsProvider }) => {
|
||||
const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
|
||||
|
||||
if (descriptions.length) {
|
||||
describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => {
|
||||
let datasource: Datasource
|
||||
let client: Knex
|
||||
|
||||
|
@ -531,5 +532,5 @@ datasourceDescribe(
|
|||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
)
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -10,119 +10,123 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }

-datasourceDescribe(
-  {
-    name: "Integration compatibility with mysql search_path",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (mainDescriptions.length) {
+  describe.each(mainDescriptions)(
+    "/Integration compatibility with mysql search_path ($dbName)",
+    ({ config, dsProvider }) => {
       let rawDatasource: Datasource
       let datasource: Datasource
       let client: Knex

       const database = generator.guid()
       const database2 = generator.guid()

       beforeAll(async () => {
         const ds = await dsProvider()
         rawDatasource = ds.rawDatasource!
         datasource = ds.datasource!
         client = ds.client!

         await client.raw(`CREATE DATABASE \`${database}\`;`)
         await client.raw(`CREATE DATABASE \`${database2}\`;`)

         rawDatasource.config!.database = database
         datasource = await config.api.datasource.create(rawDatasource)
       })

       afterAll(async () => {
         await client.raw(`DROP DATABASE \`${database}\`;`)
         await client.raw(`DROP DATABASE \`${database2}\`;`)
       })

       it("discovers tables from any schema in search path", async () => {
         await client.schema.createTable(`${database}.table1`, table => {
           table.increments("id1").primary()
         })
         const res = await config.api.datasource.info(datasource)
         expect(res.tableNames).toBeDefined()
         expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
       })

       it("does not mix columns from different tables", async () => {
         const repeated_table_name = "table_same_name"
         await client.schema.createTable(
           `${database}.${repeated_table_name}`,
           table => {
             table.increments("id").primary()
             table.string("val1")
           }
         )
         await client.schema.createTable(
           `${database2}.${repeated_table_name}`,
           table => {
             table.increments("id2").primary()
             table.string("val2")
           }
         )

         const res = await config.api.datasource.fetchSchema({
           datasourceId: datasource._id!,
           tablesFilter: [repeated_table_name],
         })
-        expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
+        expect(
+          res.datasource.entities![repeated_table_name].schema
+        ).toBeDefined()
         const schema = res.datasource.entities![repeated_table_name].schema
         expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
       })
-  }
-)
+    }
+  )
+}

-datasourceDescribe(
-  {
-    name: "POST /api/datasources/:datasourceId/schema",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "POST /api/datasources/:datasourceId/schema ($dbName)",
+    ({ config, dsProvider }) => {
       let datasource: Datasource
       let client: Knex

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       let tableName: string
       beforeEach(async () => {
         tableName = uniqueTableName()
       })

       afterEach(async () => {
         await client.schema.dropTableIfExists(tableName)
       })

       it("recognises enum columns as options", async () => {
         const enumColumnName = "status"

         await client.schema.createTable(tableName, table => {
           table.increments("order_id").primary()
           table.string("customer_name", 100).notNullable()
           table.enum(
             enumColumnName,
             ["pending", "processing", "shipped", "delivered", "cancelled"],
             { useNative: true, enumName: `${tableName}_${enumColumnName}` }
           )
         })

         const res = await config.api.datasource.fetchSchema({
           datasourceId: datasource._id!,
         })

         const table = res.datasource.entities![tableName]

         expect(table).toBeDefined()
         expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
       })
-  }
-)
+    }
+  )
+}
@@ -8,283 +8,292 @@ import {
 } from "../integrations/tests/utils"
 import { Knex } from "knex"

-datasourceDescribe(
-  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
-  ({ config, dsProvider }) => {
+const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
+
+if (mainDescriptions.length) {
+  describe.each(mainDescriptions)(
+    "/postgres integrations",
+    ({ config, dsProvider }) => {
       let datasource: Datasource
       let client: Knex

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       afterAll(config.end)

       describe("POST /api/datasources/:datasourceId/schema", () => {
         let tableName: string

         beforeEach(async () => {
           tableName = generator.guid().replaceAll("-", "").substring(0, 10)
         })

         afterEach(async () => {
           await client.schema.dropTableIfExists(tableName)
         })

         it("recognises when a table has no primary key", async () => {
           await client.schema.createTable(tableName, table => {
             table.increments("id", { primaryKey: false })
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           expect(response.errors).toEqual({
             [tableName]: "Table must have a primary key.",
           })
         })

         it("recognises when a table is using a reserved column name", async () => {
           await client.schema.createTable(tableName, table => {
             table.increments("_id").primary()
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           expect(response.errors).toEqual({
             [tableName]: "Table contains invalid columns.",
           })
         })

         it("recognises enum columns as options", async () => {
           const tableName = `orders_${generator
             .guid()
             .replaceAll("-", "")
             .substring(0, 6)}`

           await client.schema.createTable(tableName, table => {
             table.increments("order_id").primary()
             table.string("customer_name").notNullable()
             table.enum("status", ["pending", "processing", "shipped"], {
               useNative: true,
               enumName: `${tableName}_status`,
             })
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           const table = response.datasource.entities?.[tableName]

           expect(table).toBeDefined()
           expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
         })
       })

       describe("check custom column types", () => {
         beforeAll(async () => {
           await client.schema.createTable("binaryTable", table => {
             table.binary("id").primary()
             table.string("column1")
             table.integer("column2")
           })
         })

         it("should handle binary columns", async () => {
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           expect(response.datasource.entities).toBeDefined()
           const table = response.datasource.entities?.["binaryTable"]
           expect(table).toBeDefined()
           expect(table?.schema.id.externalType).toBe("bytea")
           const row = await config.api.row.save(table?._id!, {
             id: "1111",
             column1: "hello",
             column2: 222,
           })
           expect(row._id).toBeDefined()
           const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
           expect(JSON.parse(decoded)[0]).toBe("1111")
         })
       })

       describe("check fetching null/not null table", () => {
         beforeAll(async () => {
           await client.schema.createTable("nullableTable", table => {
             table.increments("order_id").primary()
             table.integer("order_number").notNullable()
           })
         })

         it("should be able to change the table to allow nullable and refetch this", async () => {
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           const entities = response.datasource.entities
           expect(entities).toBeDefined()
           const nullableTable = entities?.["nullableTable"]
           expect(nullableTable).toBeDefined()
           expect(
             nullableTable?.schema["order_number"].constraints?.presence
           ).toEqual(true)

           // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
           // is aware of - therefore we can try to fetch and make sure BB updates correctly
           await client.schema.alterTable("nullableTable", table => {
             table.setNullable("order_number")
           })

           const responseAfter = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           const entitiesAfter = responseAfter.datasource.entities
           expect(entitiesAfter).toBeDefined()
           const nullableTableAfter = entitiesAfter?.["nullableTable"]
           expect(nullableTableAfter).toBeDefined()
           expect(
             nullableTableAfter?.schema["order_number"].constraints?.presence
           ).toBeUndefined()
         })
       })

       describe("money field 💰", () => {
         const tableName = "moneytable"
         let table: Table

         beforeAll(async () => {
           await client.raw(`
             CREATE TABLE ${tableName} (
               id serial PRIMARY KEY,
               price money
             )
           `)
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           table = response.datasource.entities![tableName]
         })

         it("should be able to import a money field", async () => {
           expect(table).toBeDefined()
           expect(table?.schema.price.type).toBe(FieldType.NUMBER)
         })

         it("should be able to search a money field", async () => {
           await config.api.row.bulkImport(table._id!, {
             rows: [{ price: 200 }, { price: 300 }],
           })

           const { rows } = await config.api.row.search(table._id!, {
             query: {
               equal: {
                 price: 200,
               },
             },
           })
           expect(rows).toHaveLength(1)
           expect(rows[0].price).toBe("200.00")
         })

         it("should be able to update a money field", async () => {
           let row = await config.api.row.save(table._id!, { price: 200 })
           expect(row.price).toBe("200.00")

           row = await config.api.row.save(table._id!, { ...row, price: 300 })
           expect(row.price).toBe("300.00")

-          row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
+          row = await config.api.row.save(table._id!, {
+            ...row,
+            price: "400.00",
+          })
           expect(row.price).toBe("400.00")
         })
       })
-  }
-)
+    }
+  )
+}

-datasourceDescribe(
-  {
-    name: "Integration compatibility with postgres search_path",
-    only: [DatabaseName.POSTGRES],
-  },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "Integration compatibility with postgres search_path",
+    ({ config, dsProvider }) => {
       let datasource: Datasource
       let client: Knex
       let schema1: string
       let schema2: string

       beforeEach(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         const rawDatasource = ds.rawDatasource!

         schema1 = generator.guid().replaceAll("-", "")
         schema2 = generator.guid().replaceAll("-", "")

         client = await knexClient(rawDatasource)

         await client.schema.createSchema(schema1)
         await client.schema.createSchema(schema2)

         rawDatasource.config!.schema = `${schema1}, ${schema2}`

         client = await knexClient(rawDatasource)
         datasource = await config.api.datasource.create(rawDatasource)
       })

       afterEach(async () => {
         await client.schema.dropSchema(schema1, true)
         await client.schema.dropSchema(schema2, true)
       })

       it("discovers tables from any schema in search path", async () => {
         await client.schema.createTable(`${schema1}.table1`, table => {
           table.increments("id1").primary()
         })

         await client.schema.createTable(`${schema2}.table2`, table => {
           table.increments("id2").primary()
         })

         const response = await config.api.datasource.info(datasource)
         expect(response.tableNames).toBeDefined()
         expect(response.tableNames).toEqual(
           expect.arrayContaining(["table1", "table2"])
         )
       })

       it("does not mix columns from different tables", async () => {
         const repeated_table_name = "table_same_name"

         await client.schema.createTable(
           `${schema1}.${repeated_table_name}`,
           table => {
             table.increments("id").primary()
             table.string("val1")
           }
         )

         await client.schema.createTable(
           `${schema2}.${repeated_table_name}`,
           table => {
             table.increments("id2").primary()
             table.string("val2")
           }
         )

         const response = await config.api.datasource.fetchSchema({
           datasourceId: datasource._id!,
           tablesFilter: [repeated_table_name],
         })
         expect(
           response.datasource.entities?.[repeated_table_name].schema
         ).toBeDefined()
-        const schema = response.datasource.entities?.[repeated_table_name].schema
+        const schema =
+          response.datasource.entities?.[repeated_table_name].schema
         expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
       })
-  }
-)
+    }
+  )
+}
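One detail worth noting from the binary-column test above: Budibase encodes an external table row's _id as a URI-encoded, single-quoted JSON array of its primary-key values, which is why the test decodes with decodeURIComponent and swaps the quotes before parsing. A worked sketch of that round trip (the literal value is hypothetical, mirroring the "1111" key in the test):

// what the assertion above effectively relies on
const _id = encodeURIComponent("['1111']")
const decoded = decodeURIComponent(_id).replace(/'/g, '"')
console.log(JSON.parse(decoded)[0]) // "1111"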
@@ -281,8 +281,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       case MSSQLConfigAuthType.NTLM: {
         const { domain, trustServerCertificate } =
           this.config.ntlmConfig || {}
+
+        if (!domain) {
+          throw Error("Domain must be provided for NTLM config")
+        }
+
         clientCfg.authentication = {
           type: "ntlm",
+          // @ts-expect-error - username and password not required for NTLM
           options: {
             domain,
           },
@@ -6,7 +6,8 @@ import {
   QueryType,
   SqlQuery,
 } from "@budibase/types"
-import { Snowflake } from "snowflake-promise"
+import snowflakeSdk, { SnowflakeError } from "snowflake-sdk"
+import { promisify } from "util"

 interface SnowflakeConfig {
   account: string

@@ -71,11 +72,52 @@ const SCHEMA: Integration = {
   },
 }

-class SnowflakeIntegration {
-  private client: Snowflake
+class SnowflakePromise {
+  config: SnowflakeConfig
+  client?: snowflakeSdk.Connection

   constructor(config: SnowflakeConfig) {
-    this.client = new Snowflake(config)
+    this.config = config
+  }
+
+  async connect() {
+    if (this.client?.isUp()) return
+
+    this.client = snowflakeSdk.createConnection(this.config)
+    const connectAsync = promisify(this.client.connect.bind(this.client))
+    return connectAsync()
+  }
+
+  async execute(sql: string) {
+    return new Promise((resolve, reject) => {
+      if (!this.client) {
+        throw Error(
+          "No snowflake client present to execute query. Run connect() first to initialise."
+        )
+      }
+
+      this.client.execute({
+        sqlText: sql,
+        complete: function (
+          err: SnowflakeError | undefined,
+          statementExecuted: any,
+          rows: any
+        ) {
+          if (err) {
+            return reject(err)
+          }
+          resolve(rows)
+        },
+      })
+    })
+  }
+}
+
+class SnowflakeIntegration {
+  private client: SnowflakePromise
+
+  constructor(config: SnowflakeConfig) {
+    this.client = new SnowflakePromise(config)
   }

   async testConnection(): Promise<ConnectionInfo> {
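For context on the class added above: the unmaintained snowflake-promise dependency is replaced by an in-repo SnowflakePromise adapter over the official snowflake-sdk, promisifying connect and wrapping execute's callback API in a Promise. A hypothetical call site, assuming minimal credentials (the field names beyond account are illustrative, since the SnowflakeConfig interface is truncated in this diff):

const client = new SnowflakePromise({
  account: "my-account", // illustrative credentials
  username: "my-user",
  password: "my-password",
})
await client.connect()
const rows = await client.execute("SELECT CURRENT_TIMESTAMP")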
@@ -35,7 +35,6 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
 }

 export interface DatasourceDescribeOpts {
-  name: string
   only?: DatabaseName[]
   exclude?: DatabaseName[]
 }

@@ -102,16 +101,12 @@ function createDummyTest() {
   })
 }

-export function datasourceDescribe(
-  opts: DatasourceDescribeOpts,
-  cb: (args: DatasourceDescribeReturn) => void
-) {
+export function datasourceDescribe(opts: DatasourceDescribeOpts) {
   if (process.env.DATASOURCE === "none") {
     createDummyTest()
     return
   }

-  const { name, only, exclude } = opts
+  const { only, exclude } = opts

   if (only && exclude) {
     throw new Error("you can only supply one of 'only' or 'exclude'")

@@ -130,36 +125,28 @@
   if (databases.length === 0) {
     createDummyTest()
     return
   }

-  describe.each(databases)(name, name => {
-    const config = new TestConfiguration()
-
-    afterAll(() => {
-      config.end()
-    })
-
-    cb({
-      name,
-      config,
-      dsProvider: () => createDatasources(config, name),
-      isInternal: name === DatabaseName.SQS,
-      isExternal: name !== DatabaseName.SQS,
-      isSql: [
-        DatabaseName.MARIADB,
-        DatabaseName.MYSQL,
-        DatabaseName.POSTGRES,
-        DatabaseName.SQL_SERVER,
-        DatabaseName.ORACLE,
-      ].includes(name),
-      isMySQL: name === DatabaseName.MYSQL,
-      isPostgres: name === DatabaseName.POSTGRES,
-      isMongodb: name === DatabaseName.MONGODB,
-      isMSSQL: name === DatabaseName.SQL_SERVER,
-      isOracle: name === DatabaseName.ORACLE,
-    })
-  })
+  const config = new TestConfiguration()
+  return databases.map(dbName => ({
+    dbName,
+    config,
+    dsProvider: () => createDatasources(config, dbName),
+    isInternal: dbName === DatabaseName.SQS,
+    isExternal: dbName !== DatabaseName.SQS,
+    isSql: [
+      DatabaseName.MARIADB,
+      DatabaseName.MYSQL,
+      DatabaseName.POSTGRES,
+      DatabaseName.SQL_SERVER,
+      DatabaseName.ORACLE,
+    ].includes(dbName),
+    isMySQL: dbName === DatabaseName.MYSQL,
+    isPostgres: dbName === DatabaseName.POSTGRES,
+    isMongodb: dbName === DatabaseName.MONGODB,
+    isMSSQL: dbName === DatabaseName.SQL_SERVER,
+    isOracle: dbName === DatabaseName.ORACLE,
+  }))
 }

 function getDatasource(
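Each description returned by the reworked datasourceDescribe above carries capability flags (isSql, isInternal, isMySQL and so on) alongside dbName, config and dsProvider, so suites can branch per database without re-deriving the type. A short sketch of consuming those flags (the assertion body is illustrative):

const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
  describe.each(descriptions)(
    "capability flags ($dbName)",
    ({ dsProvider, isSql, isInternal }) => {
      it("exposes a Knex client for external SQL sources", async () => {
        const ds = await dsProvider()
        if (isSql && !isInternal) {
          // the SQL-backed tests in this commit all rely on ds.client
          expect(ds.client).toBeDefined()
        }
      })
    }
  )
}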
@@ -19,202 +19,206 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-datasourceDescribe(
-  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
-  ({ config, dsProvider, isInternal }) => {
+const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "search sdk ($dbName)",
+    ({ config, dsProvider, isInternal }) => {
       let datasource: Datasource | undefined
       let table: Table

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource
       })

       beforeEach(async () => {
         const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
           isInternal
             ? {
                 name: "id",
                 type: FieldType.AUTO,
                 subtype: AutoFieldSubType.AUTO_ID,
                 autocolumn: true,
               }
             : {
                 name: "id",
                 type: FieldType.NUMBER,
                 autocolumn: true,
               }

         table = await config.api.table.save(
           tableForDatasource(datasource, {
             primary: ["id"],
             schema: {
               id: idFieldSchema,
               name: {
                 name: "name",
                 type: FieldType.STRING,
               },
               surname: {
                 name: "surname",
                 type: FieldType.STRING,
               },
               age: {
                 name: "age",
                 type: FieldType.NUMBER,
               },
               address: {
                 name: "address",
                 type: FieldType.STRING,
               },
             },
           })
         )

         for (let i = 0; i < 10; i++) {
           await config.api.row.save(table._id!, {
             name: generator.first(),
             surname: generator.last(),
             age: generator.age(),
             address: generator.address(),
           })
         }
       })

       afterAll(async () => {
         config.end()
       })

       it("querying by fields will always return data attribute columns", async () => {
         await config.doInContext(config.appId, async () => {
           const { rows } = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })

           expect(rows).toHaveLength(10)
           for (const row of rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       !isInternal &&
         it("will decode _id in oneOf query", async () => {
           await config.doInContext(config.appId, async () => {
             const result = await search({
               tableId: table._id!,
               query: {
                 oneOf: {
                   _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
                 },
               },
             })

             expect(result.rows).toHaveLength(3)
             expect(result.rows.map(row => row.id)).toEqual(
               expect.arrayContaining([1, 4, 8])
             )
           })
         })

       it("does not allow accessing hidden fields", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("surname")
             expect(keys).toContain("address")
             expect(keys).not.toContain("age")
           }
         })
       })

       it("does not allow accessing hidden fields even if requested", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).not.toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       it.each([
         [["id", "name", "age"], 3],
         [["name", "age"], 10],
       ])(
         "cannot query by non search fields (fields: %s)",
         async (queryFields, expectedRows) => {
           await config.doInContext(config.appId, async () => {
             const { rows } = await search({
               tableId: table._id!,
               query: {
                 $or: {
                   conditions: [
                     {
                       $and: {
                         conditions: [
                           { range: { id: { low: 2, high: 4 } } },
                           { range: { id: { low: 3, high: 5 } } },
                         ],
                       },
                     },
                     { equal: { id: 7 } },
                   ],
                 },
               },
               fields: queryFields,
             })

             expect(rows).toHaveLength(expectedRows)
           })
         }
       )
-  }
-)
+    }
+  )
+}
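The last test above leans on Budibase's grouped search filters, where $or and $and each take a conditions array rather than flat operators. Reduced to its shape, the query from that test reads as follows (the trailing comment spells out the arithmetic):

const { rows } = await search({
  tableId: table._id!,
  query: {
    $or: {
      conditions: [
        {
          $and: {
            conditions: [
              { range: { id: { low: 2, high: 4 } } },
              { range: { id: { low: 3, high: 5 } } },
            ],
          },
        },
        { equal: { id: 7 } },
      ],
    },
  },
  fields: ["id", "name", "age"],
})
// the two ranges overlap on ids 3 and 4; OR-ing in { equal: { id: 7 } }
// gives 3 matching rows when "id" is a queryable field, while ["name", "age"]
// cannot filter on id at all and so returns all 10 rows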
@@ -1,4 +1,4 @@
-import jimp from "jimp"
+import { Jimp } from "jimp"

 const FORMATS = {
   IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
@@ -6,8 +6,8 @@ const FORMATS = {

 function processImage(file: { path: string }) {
   // this will overwrite the temp file
-  return jimp.read(file.path).then(img => {
-    return img.resize(300, jimp.AUTO).write(file.path)
+  return Jimp.read(file.path).then(img => {
+    return img.resize({ w: 256 }).write(file.path as `${string}.${string}`)
   })
 }
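Jimp's 1.x API (bumped from 0.22 in the server package.json above) moves to a named Jimp export, an options object for resize (omitting the height auto-scales, replacing the old jimp.AUTO constant), and template-literal-typed write paths. A standalone sketch of the migrated call, with a hypothetical file path argument:

import { Jimp } from "jimp"

async function shrinkInPlace(path: string) {
  const img = await Jimp.read(path)
  // fix the width at 256px; the height is derived to keep the aspect ratio
  await img.resize({ w: 256 }).write(path as `${string}.${string}`)
}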
@@ -40,17 +40,17 @@
   "dependencies": {
     "@budibase/backend-core": "0.0.0",
     "@budibase/pro": "0.0.0",
+    "@budibase/shared-core": "0.0.0",
     "@budibase/string-templates": "0.0.0",
     "@budibase/types": "0.0.0",
-    "@budibase/shared-core": "0.0.0",
-    "@koa/router": "8.0.8",
+    "@koa/router": "13.1.0",
     "@techpass/passport-openidconnect": "0.3.3",
     "@types/global-agent": "2.1.1",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bull": "4.10.1",
-    "dd-trace": "5.2.0",
+    "dd-trace": "5.23.0",
    "dotenv": "8.6.0",
    "global-agent": "3.0.0",
    "ical-generator": "4.1.0",
@@ -82,7 +82,7 @@
     "@types/jest": "29.5.5",
     "@types/jsonwebtoken": "9.0.3",
     "@types/koa": "2.13.4",
-    "@types/koa__router": "8.0.8",
+    "@types/koa__router": "12.0.4",
     "@types/lodash": "4.14.200",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",
@@ -40,6 +40,7 @@ import {
 import { checkAnyUserExists } from "../../../utilities/users"
 import { isEmailConfigured } from "../../../utilities/email"
 import { BpmStatusKey, BpmStatusValue, utils } from "@budibase/shared-core"
+import crypto from "crypto"

 const MAX_USERS_UPLOAD_LIMIT = 1000