Merge branch 'master' into BUDI-8270/validation-for-search-api

# Conflicts:
#	packages/server/package.json
#	packages/server/src/api/controllers/row/index.ts
#	packages/server/src/api/routes/tests/search.spec.ts
#	yarn.lock
Adria Navarro 2024-11-20 18:11:49 +01:00
commit 9c460424a9
27 changed files with 16550 additions and 15825 deletions

View File

@@ -114,9 +114,11 @@ jobs:
       - name: Test
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --since=${{ env.NX_BASE_BRANCH }}
           else
-            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix -- --verbose --reporters=default --reporters=github-actions
+            yarn test -- --scope=@budibase/builder --no-prefix
           fi
   test-worker:

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.9",
+  "version": "3.2.10",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

View File

@@ -109,7 +109,7 @@
     "semver": "7.5.3",
     "http-cache-semantics": "4.1.1",
     "msgpackr": "1.10.1",
-    "axios": "1.6.3",
+    "axios": "1.7.7",
     "xml2js": "0.6.2",
     "unset-value": "2.0.1",
     "passport": "0.6.0",
@@ -119,6 +119,5 @@
   },
   "engines": {
     "node": ">=20.0.0 <21.0.0"
-  },
-  "dependencies": {}
+  }
 }

View File

@@ -33,14 +33,17 @@
     "@budibase/pouchdb-replication-stream": "1.2.11",
     "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",
+    "@techpass/passport-openidconnect": "0.3.3",
     "aws-cloudfront-sign": "3.0.2",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
     "correlation-id": "4.0.0",
-    "dd-trace": "5.2.0",
+    "dd-trace": "5.23.0",
     "dotenv": "16.0.1",
+    "google-auth-library": "^8.0.1",
+    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
     "joi": "17.6.0",
     "jsonwebtoken": "9.0.2",
@@ -55,17 +58,14 @@
     "pino": "8.11.0",
     "pino-http": "8.3.3",
     "posthog-node": "4.0.1",
-    "pouchdb": "7.3.0",
-    "pouchdb-find": "7.2.2",
+    "pouchdb": "9.0.0",
+    "pouchdb-find": "9.0.0",
     "redlock": "4.2.0",
     "rotating-file-stream": "3.1.0",
     "sanitize-s3-objectkey": "0.0.1",
     "semver": "^7.5.4",
     "tar-fs": "2.1.1",
-    "uuid": "^8.3.2",
-    "@techpass/passport-openidconnect": "0.3.3",
-    "google-auth-library": "^8.0.1",
-    "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5"
+    "uuid": "^8.3.2"
   },
   "devDependencies": {
     "@jest/types": "^29.6.3",
@@ -78,7 +78,7 @@
     "@types/lodash": "4.14.200",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",
-    "@types/pouchdb": "6.4.0",
+    "@types/pouchdb": "6.4.2",
     "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",

@@ -1 +1 @@
-Subproject commit bfeece324a03a3a5f25137bf3f8c66d5ed6103d8
+Subproject commit 4facf6a44ee52a405794845f71584168b9db652c

View File

@@ -63,13 +63,13 @@
     "@bull-board/koa": "5.10.2",
     "@elastic/elasticsearch": "7.10.0",
     "@google-cloud/firestore": "7.8.0",
-    "@koa/router": "8.0.8",
+    "@koa/router": "13.1.0",
     "@socket.io/redis-adapter": "^8.2.1",
     "@types/xml2js": "^0.4.14",
     "airtable": "0.12.2",
     "arangojs": "7.2.0",
     "archiver": "7.0.1",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bson": "^6.9.0",
@@ -80,8 +80,8 @@
     "cookies": "0.8.0",
     "csvtojson": "2.0.10",
     "curlconverter": "3.21.0",
+    "dd-trace": "5.23.0",
     "dayjs": "^1.10.8",
-    "dd-trace": "5.2.0",
     "dotenv": "8.2.0",
     "form-data": "4.0.0",
     "global-agent": "3.0.0",
@@ -89,7 +89,7 @@
     "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
     "ioredis": "5.3.2",
     "isolated-vm": "^4.7.2",
-    "jimp": "0.22.12",
+    "jimp": "1.1.4",
     "joi": "17.6.0",
     "js-yaml": "4.1.0",
     "jsonschema": "1.4.0",
@@ -104,7 +104,7 @@
     "lodash": "4.17.21",
     "memorystream": "0.3.1",
     "mongodb": "6.7.0",
-    "mssql": "10.0.1",
+    "mssql": "11.0.1",
     "mysql2": "3.9.8",
     "node-fetch": "2.6.7",
     "object-sizeof": "2.6.1",
@@ -112,15 +112,15 @@
     "openapi-types": "9.3.1",
     "oracledb": "6.5.1",
     "pg": "8.10.0",
-    "pouchdb": "7.3.0",
+    "pouchdb": "9.0.0",
     "pouchdb-all-dbs": "1.1.1",
-    "pouchdb-find": "7.2.2",
+    "pouchdb-find": "9.0.0",
     "redis": "4",
     "semver": "^7.5.4",
     "serialize-error": "^7.0.1",
     "server-destroy": "1.0.1",
-    "snowflake-promise": "^4.5.0",
-    "socket.io": "4.7.5",
+    "snowflake-sdk": "^1.15.0",
+    "socket.io": "4.8.1",
     "svelte": "^4.2.10",
     "tar": "6.2.1",
     "tmp": "0.2.3",
@@ -128,7 +128,7 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0",
+    "xml2js": "0.6.2",
     "zod-validation-error": "^3.4.0"
   },
   "devDependencies": {
@@ -142,13 +142,14 @@
     "@types/jest": "29.5.5",
     "@types/koa": "2.13.4",
     "@types/koa-send": "^4.1.6",
-    "@types/koa__router": "8.0.8",
+    "@types/koa__router": "12.0.4",
     "@types/lodash": "4.14.200",
-    "@types/mssql": "9.1.4",
+    "@types/mssql": "9.1.5",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",
     "@types/oracledb": "6.5.1",
     "@types/pg": "8.6.6",
+    "@types/pouchdb": "6.4.2",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",

View File

@ -4,7 +4,7 @@ import { URL } from "url"
const curlconverter = require("curlconverter") const curlconverter = require("curlconverter")
const parseCurl = (data: string): any => { const parseCurl = (data: string): Promise<any> => {
const curlJson = curlconverter.toJsonString(data) const curlJson = curlconverter.toJsonString(data)
return JSON.parse(curlJson) return JSON.parse(curlJson)
} }
@ -53,8 +53,7 @@ export class Curl extends ImportSource {
isSupported = async (data: string): Promise<boolean> => { isSupported = async (data: string): Promise<boolean> => {
try { try {
const curl = parseCurl(data) this.curl = parseCurl(data)
this.curl = curl
} catch (err) { } catch (err) {
return false return false
} }

View File

@@ -164,9 +164,12 @@ describe("/datasources", () => {
   })
 })

-datasourceDescribe(
-  { name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({
+  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
     let datasource: Datasource
     let rawDatasource: Datasource
     let client: Knex
@@ -492,5 +495,5 @@ datasourceDescribe(
         )
       })
     })
-  }
-)
+  })
+}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -977,63 +977,69 @@ describe("/rowsActions", () => {
   })
 })

-datasourceDescribe(
-  { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
-  ({ config, dsProvider, isInternal }) => {
+const descriptions = datasourceDescribe({
+  only: [DatabaseName.SQS, DatabaseName.POSTGRES],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "row actions ($dbName)",
+    ({ config, dsProvider, isInternal }) => {
       let datasource: Datasource | undefined

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource
       })

       async function getTable(): Promise<Table> {
         if (isInternal) {
           await config.api.application.addSampleData(config.getAppId())
           const tables = await config.api.table.fetch()
           return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
         } else {
           const table = await config.api.table.save(
             setup.structures.tableForDatasource(datasource!)
           )
           return table
         }
       }

       it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
         async function getRowActionsFromDb(tableId: string) {
           return await context.doInAppContext(config.getAppId(), async () => {
             const db = context.getAppDB()
             const tableDoc = await db.tryGet<TableRowActions>(
               generateRowActionsID(tableId)
             )
             return tableDoc
           })
         }

         const table = await getTable()
         const tableId = table._id!

         await config.api.rowAction.save(tableId, {
           name: generator.guid(),
         })
         await config.api.rowAction.save(tableId, {
           name: generator.guid(),
         })

         const { actions } = (await getRowActionsFromDb(tableId))!
         expect(Object.entries(actions)).toHaveLength(2)

         const { automations } = await config.api.automation.fetch()
         expect(automations).toHaveLength(2)

         const datasource = await config.api.datasource.get(table.sourceId)
         await config.api.datasource.delete(datasource)

         const automationsResp = await config.api.automation.fetch()
         expect(automationsResp.automations).toHaveLength(0)

         expect(await getRowActionsFromDb(tableId)).toBeUndefined()
       })
     }
   )
+}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -7,71 +7,74 @@ import {
 import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"

-datasourceDescribe(
-  {
-    name: "execute query action",
-    exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
-  },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({
+  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "execute query action ($dbName)",
+    ({ config, dsProvider }) => {
       let tableName: string
       let client: Knex
       let datasource: Datasource
       let query: Query

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       beforeEach(async () => {
         tableName = generator.guid()
         await client.schema.createTable(tableName, table => {
           table.string("a")
           table.integer("b")
         })
         await client(tableName).insert({ a: "string", b: 1 })
         query = await setup.saveTestQuery(config, client, tableName, datasource)
       })

       afterEach(async () => {
         await client.schema.dropTable(tableName)
       })

       it("should be able to execute a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: query._id },
           }
         )
         expect(res.response).toEqual([{ a: "string", b: 1 }])
         expect(res.success).toEqual(true)
       })

       it("should handle a null query value", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: null,
           }
         )
         expect(res.response.message).toEqual("Invalid inputs")
         expect(res.success).toEqual(false)
       })

       it("should handle an error executing a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: "wrong_id" },
           }
         )
         expect(res.response).toBeDefined()
         expect(res.success).toEqual(false)
       })
     }
   )
+}

View File

@@ -433,9 +433,10 @@ describe("Automation Scenarios", () => {
   })
 })

-datasourceDescribe(
-  { name: "", only: [DatabaseName.MYSQL] },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (descriptions.length) {
+  describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => {
     let datasource: Datasource
     let client: Knex
@@ -531,5 +532,5 @@ datasourceDescribe(
         )
       })
     })
-  }
-)
+  })
+}

View File

@@ -10,119 +10,123 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }

-datasourceDescribe(
-  {
-    name: "Integration compatibility with mysql search_path",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (mainDescriptions.length) {
+  describe.each(mainDescriptions)(
+    "/Integration compatibility with mysql search_path ($dbName)",
+    ({ config, dsProvider }) => {
       let rawDatasource: Datasource
       let datasource: Datasource
       let client: Knex

       const database = generator.guid()
       const database2 = generator.guid()

       beforeAll(async () => {
         const ds = await dsProvider()
         rawDatasource = ds.rawDatasource!
         datasource = ds.datasource!
         client = ds.client!

         await client.raw(`CREATE DATABASE \`${database}\`;`)
         await client.raw(`CREATE DATABASE \`${database2}\`;`)

         rawDatasource.config!.database = database
         datasource = await config.api.datasource.create(rawDatasource)
       })

       afterAll(async () => {
         await client.raw(`DROP DATABASE \`${database}\`;`)
         await client.raw(`DROP DATABASE \`${database2}\`;`)
       })

       it("discovers tables from any schema in search path", async () => {
         await client.schema.createTable(`${database}.table1`, table => {
           table.increments("id1").primary()
         })
         const res = await config.api.datasource.info(datasource)
         expect(res.tableNames).toBeDefined()
         expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
       })

       it("does not mix columns from different tables", async () => {
         const repeated_table_name = "table_same_name"
         await client.schema.createTable(
           `${database}.${repeated_table_name}`,
           table => {
             table.increments("id").primary()
             table.string("val1")
           }
         )
         await client.schema.createTable(
           `${database2}.${repeated_table_name}`,
           table => {
             table.increments("id2").primary()
             table.string("val2")
           }
         )
         const res = await config.api.datasource.fetchSchema({
           datasourceId: datasource._id!,
           tablesFilter: [repeated_table_name],
         })
-        expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
+        expect(
+          res.datasource.entities![repeated_table_name].schema
+        ).toBeDefined()
         const schema = res.datasource.entities![repeated_table_name].schema
         expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
       })
     }
   )

-datasourceDescribe(
-  {
-    name: "POST /api/datasources/:datasourceId/schema",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+  const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+  if (descriptions.length) {
+    describe.each(descriptions)(
+      "POST /api/datasources/:datasourceId/schema ($dbName)",
+      ({ config, dsProvider }) => {
         let datasource: Datasource
         let client: Knex

         beforeAll(async () => {
           const ds = await dsProvider()
           datasource = ds.datasource!
           client = ds.client!
         })

         let tableName: string
         beforeEach(async () => {
           tableName = uniqueTableName()
         })

         afterEach(async () => {
           await client.schema.dropTableIfExists(tableName)
         })

         it("recognises enum columns as options", async () => {
           const enumColumnName = "status"
           await client.schema.createTable(tableName, table => {
             table.increments("order_id").primary()
             table.string("customer_name", 100).notNullable()
             table.enum(
               enumColumnName,
               ["pending", "processing", "shipped", "delivered", "cancelled"],
               { useNative: true, enumName: `${tableName}_${enumColumnName}` }
             )
           })

           const res = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           const table = res.datasource.entities![tableName]

           expect(table).toBeDefined()
           expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
         })
       }
     )
+  }
+}

View File

@@ -8,283 +8,292 @@ import {
 } from "../integrations/tests/utils"
 import { Knex } from "knex"

-datasourceDescribe(
-  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
-  ({ config, dsProvider }) => {
+const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
+
+if (mainDescriptions.length) {
+  describe.each(mainDescriptions)(
+    "/postgres integrations",
+    ({ config, dsProvider }) => {
       let datasource: Datasource
       let client: Knex

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       afterAll(config.end)

       describe("POST /api/datasources/:datasourceId/schema", () => {
         let tableName: string

         beforeEach(async () => {
           tableName = generator.guid().replaceAll("-", "").substring(0, 10)
         })

         afterEach(async () => {
           await client.schema.dropTableIfExists(tableName)
         })

         it("recognises when a table has no primary key", async () => {
           await client.schema.createTable(tableName, table => {
             table.increments("id", { primaryKey: false })
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           expect(response.errors).toEqual({
             [tableName]: "Table must have a primary key.",
           })
         })

         it("recognises when a table is using a reserved column name", async () => {
           await client.schema.createTable(tableName, table => {
             table.increments("_id").primary()
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           expect(response.errors).toEqual({
             [tableName]: "Table contains invalid columns.",
           })
         })

         it("recognises enum columns as options", async () => {
           const tableName = `orders_${generator
             .guid()
             .replaceAll("-", "")
             .substring(0, 6)}`

           await client.schema.createTable(tableName, table => {
             table.increments("order_id").primary()
             table.string("customer_name").notNullable()
             table.enum("status", ["pending", "processing", "shipped"], {
               useNative: true,
               enumName: `${tableName}_status`,
             })
           })

           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           const table = response.datasource.entities?.[tableName]

           expect(table).toBeDefined()
           expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
         })
       })

       describe("check custom column types", () => {
         beforeAll(async () => {
           await client.schema.createTable("binaryTable", table => {
             table.binary("id").primary()
             table.string("column1")
             table.integer("column2")
           })
         })

         it("should handle binary columns", async () => {
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           expect(response.datasource.entities).toBeDefined()
           const table = response.datasource.entities?.["binaryTable"]
           expect(table).toBeDefined()
           expect(table?.schema.id.externalType).toBe("bytea")
           const row = await config.api.row.save(table?._id!, {
             id: "1111",
             column1: "hello",
             column2: 222,
           })
           expect(row._id).toBeDefined()
           const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
           expect(JSON.parse(decoded)[0]).toBe("1111")
         })
       })

       describe("check fetching null/not null table", () => {
         beforeAll(async () => {
           await client.schema.createTable("nullableTable", table => {
             table.increments("order_id").primary()
             table.integer("order_number").notNullable()
           })
         })

         it("should be able to change the table to allow nullable and refetch this", async () => {
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           const entities = response.datasource.entities
           expect(entities).toBeDefined()
           const nullableTable = entities?.["nullableTable"]
           expect(nullableTable).toBeDefined()
           expect(
             nullableTable?.schema["order_number"].constraints?.presence
           ).toEqual(true)

           // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
           // is aware of - therefore we can try to fetch and make sure BB updates correctly
           await client.schema.alterTable("nullableTable", table => {
             table.setNullable("order_number")
           })

           const responseAfter = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           const entitiesAfter = responseAfter.datasource.entities
           expect(entitiesAfter).toBeDefined()
           const nullableTableAfter = entitiesAfter?.["nullableTable"]
           expect(nullableTableAfter).toBeDefined()
           expect(
             nullableTableAfter?.schema["order_number"].constraints?.presence
           ).toBeUndefined()
         })
       })

       describe("money field 💰", () => {
         const tableName = "moneytable"
         let table: Table

         beforeAll(async () => {
           await client.raw(`
             CREATE TABLE ${tableName} (
               id serial PRIMARY KEY,
               price money
             )
           `)
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })
           table = response.datasource.entities![tableName]
         })

         it("should be able to import a money field", async () => {
           expect(table).toBeDefined()
           expect(table?.schema.price.type).toBe(FieldType.NUMBER)
         })

         it("should be able to search a money field", async () => {
           await config.api.row.bulkImport(table._id!, {
             rows: [{ price: 200 }, { price: 300 }],
           })

           const { rows } = await config.api.row.search(table._id!, {
             query: {
               equal: {
                 price: 200,
               },
             },
           })
           expect(rows).toHaveLength(1)
           expect(rows[0].price).toBe("200.00")
         })

         it("should be able to update a money field", async () => {
           let row = await config.api.row.save(table._id!, { price: 200 })
           expect(row.price).toBe("200.00")

           row = await config.api.row.save(table._id!, { ...row, price: 300 })
           expect(row.price).toBe("300.00")

-          row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
+          row = await config.api.row.save(table._id!, {
+            ...row,
+            price: "400.00",
+          })
           expect(row.price).toBe("400.00")
         })
       })
     }
   )

-datasourceDescribe(
-  {
-    name: "Integration compatibility with postgres search_path",
-    only: [DatabaseName.POSTGRES],
-  },
-  ({ config, dsProvider }) => {
+  const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
+
+  if (descriptions.length) {
+    describe.each(descriptions)(
+      "Integration compatibility with postgres search_path",
+      ({ config, dsProvider }) => {
         let datasource: Datasource
         let client: Knex
         let schema1: string
         let schema2: string

         beforeEach(async () => {
           const ds = await dsProvider()
           datasource = ds.datasource!
           const rawDatasource = ds.rawDatasource!

           schema1 = generator.guid().replaceAll("-", "")
           schema2 = generator.guid().replaceAll("-", "")

           client = await knexClient(rawDatasource)

           await client.schema.createSchema(schema1)
           await client.schema.createSchema(schema2)

           rawDatasource.config!.schema = `${schema1}, ${schema2}`

           client = await knexClient(rawDatasource)
           datasource = await config.api.datasource.create(rawDatasource)
         })

         afterEach(async () => {
           await client.schema.dropSchema(schema1, true)
           await client.schema.dropSchema(schema2, true)
         })

         it("discovers tables from any schema in search path", async () => {
           await client.schema.createTable(`${schema1}.table1`, table => {
             table.increments("id1").primary()
           })
           await client.schema.createTable(`${schema2}.table2`, table => {
             table.increments("id2").primary()
           })
           const response = await config.api.datasource.info(datasource)
           expect(response.tableNames).toBeDefined()
           expect(response.tableNames).toEqual(
             expect.arrayContaining(["table1", "table2"])
           )
         })

         it("does not mix columns from different tables", async () => {
           const repeated_table_name = "table_same_name"
           await client.schema.createTable(
             `${schema1}.${repeated_table_name}`,
             table => {
               table.increments("id").primary()
               table.string("val1")
             }
           )
           await client.schema.createTable(
             `${schema2}.${repeated_table_name}`,
             table => {
               table.increments("id2").primary()
               table.string("val2")
             }
           )
           const response = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
             tablesFilter: [repeated_table_name],
           })
           expect(
             response.datasource.entities?.[repeated_table_name].schema
           ).toBeDefined()
-          const schema = response.datasource.entities?.[repeated_table_name].schema
+          const schema =
+            response.datasource.entities?.[repeated_table_name].schema
           expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
         })
       }
     )
+  }
+}

View File

@@ -281,8 +281,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       case MSSQLConfigAuthType.NTLM: {
         const { domain, trustServerCertificate } =
           this.config.ntlmConfig || {}
+        if (!domain) {
+          throw Error("Domain must be provided for NTLM config")
+        }
+
         clientCfg.authentication = {
           type: "ntlm",
+          // @ts-expect-error - username and password not required for NTLM
           options: {
             domain,
           },

View File

@@ -6,7 +6,8 @@ import {
   QueryType,
   SqlQuery,
 } from "@budibase/types"
-import { Snowflake } from "snowflake-promise"
+import snowflakeSdk, { SnowflakeError } from "snowflake-sdk"
+import { promisify } from "util"

 interface SnowflakeConfig {
   account: string
@@ -71,11 +72,52 @@ const SCHEMA: Integration = {
   },
 }

-class SnowflakeIntegration {
-  private client: Snowflake
+class SnowflakePromise {
+  config: SnowflakeConfig
+  client?: snowflakeSdk.Connection

   constructor(config: SnowflakeConfig) {
-    this.client = new Snowflake(config)
+    this.config = config
+  }
+
+  async connect() {
+    if (this.client?.isUp()) return
+
+    this.client = snowflakeSdk.createConnection(this.config)
+
+    const connectAsync = promisify(this.client.connect.bind(this.client))
+    return connectAsync()
+  }
+
+  async execute(sql: string) {
+    return new Promise((resolve, reject) => {
+      if (!this.client) {
+        throw Error(
+          "No snowflake client present to execute query. Run connect() first to initialise."
+        )
+      }
+
+      this.client.execute({
+        sqlText: sql,
+        complete: function (
+          err: SnowflakeError | undefined,
+          statementExecuted: any,
+          rows: any
+        ) {
+          if (err) {
+            return reject(err)
+          }
+          resolve(rows)
+        },
+      })
+    })
+  }
+}
+
+class SnowflakeIntegration {
+  private client: SnowflakePromise
+
+  constructor(config: SnowflakeConfig) {
+    this.client = new SnowflakePromise(config)
   }

   async testConnection(): Promise<ConnectionInfo> {
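
Note: a minimal usage sketch of the SnowflakePromise wrapper introduced above, not part of this commit; the SQL statement is illustrative only, and the config handling simply reuses the SnowflakeConfig interface from this file:

// Hypothetical helper showing how the promisified wrapper is driven.
async function runSnowflakeQuery(config: SnowflakeConfig) {
  const snowflake = new SnowflakePromise(config)
  // connect() wraps snowflake-sdk's callback-based connect() via promisify
  await snowflake.connect()
  // execute() resolves with the result rows, or rejects with a SnowflakeError
  return snowflake.execute("SELECT CURRENT_DATE")
}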

View File

@@ -35,7 +35,6 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
 }

 export interface DatasourceDescribeOpts {
-  name: string
   only?: DatabaseName[]
   exclude?: DatabaseName[]
 }
@@ -102,16 +101,12 @@ function createDummyTest()
   })
 }

-export function datasourceDescribe(
-  opts: DatasourceDescribeOpts,
-  cb: (args: DatasourceDescribeReturn) => void
-) {
+export function datasourceDescribe(opts: DatasourceDescribeOpts) {
   if (process.env.DATASOURCE === "none") {
     createDummyTest()
-    return
   }

-  const { name, only, exclude } = opts
+  const { only, exclude } = opts

   if (only && exclude) {
     throw new Error("you can only supply one of 'only' or 'exclude'")
@@ -130,36 +125,28 @@ export function datasourceDescribe(
   if (databases.length === 0) {
     createDummyTest()
-    return
   }

-  describe.each(databases)(name, name => {
-    const config = new TestConfiguration()
-
-    afterAll(() => {
-      config.end()
-    })
-
-    cb({
-      name,
-      config,
-      dsProvider: () => createDatasources(config, name),
-      isInternal: name === DatabaseName.SQS,
-      isExternal: name !== DatabaseName.SQS,
-      isSql: [
-        DatabaseName.MARIADB,
-        DatabaseName.MYSQL,
-        DatabaseName.POSTGRES,
-        DatabaseName.SQL_SERVER,
-        DatabaseName.ORACLE,
-      ].includes(name),
-      isMySQL: name === DatabaseName.MYSQL,
-      isPostgres: name === DatabaseName.POSTGRES,
-      isMongodb: name === DatabaseName.MONGODB,
-      isMSSQL: name === DatabaseName.SQL_SERVER,
-      isOracle: name === DatabaseName.ORACLE,
-    })
-  })
+  const config = new TestConfiguration()
+  return databases.map(dbName => ({
+    dbName,
+    config,
+    dsProvider: () => createDatasources(config, dbName),
+    isInternal: dbName === DatabaseName.SQS,
+    isExternal: dbName !== DatabaseName.SQS,
+    isSql: [
+      DatabaseName.MARIADB,
+      DatabaseName.MYSQL,
+      DatabaseName.POSTGRES,
+      DatabaseName.SQL_SERVER,
+      DatabaseName.ORACLE,
+    ].includes(dbName),
+    isMySQL: dbName === DatabaseName.MYSQL,
+    isPostgres: dbName === DatabaseName.POSTGRES,
+    isMongodb: dbName === DatabaseName.MONGODB,
+    isMSSQL: dbName === DatabaseName.SQL_SERVER,
+    isOracle: dbName === DatabaseName.ORACLE,
+  }))
 }

 function getDatasource(
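
Note: after this change datasourceDescribe no longer takes a callback; it returns an array of descriptions (one per matching database, carrying config, dsProvider and the capability flags above) that spec files pass to describe.each, guarding against an empty result. A minimal consumption sketch mirroring the updated spec files in this commit; the test body itself is illustrative only:

const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })

if (descriptions.length) {
  describe.each(descriptions)("$dbName", ({ config, dsProvider, isSql }) => {
    let datasource: Datasource

    beforeAll(async () => {
      const ds = await dsProvider()
      datasource = ds.datasource!
    })

    // Illustrative assertion only - real specs exercise the datasource APIs
    it("provides a datasource and capability flags", () => {
      expect(datasource).toBeDefined()
      expect(isSql).toBe(true)
    })
  })
}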

View File

@@ -19,202 +19,206 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.

-datasourceDescribe(
-  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
-  ({ config, dsProvider, isInternal }) => {
+const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "search sdk ($dbName)",
+    ({ config, dsProvider, isInternal }) => {
       let datasource: Datasource | undefined
       let table: Table

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource
       })

       beforeEach(async () => {
         const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
           isInternal
             ? {
                 name: "id",
                 type: FieldType.AUTO,
                 subtype: AutoFieldSubType.AUTO_ID,
                 autocolumn: true,
               }
             : {
                 name: "id",
                 type: FieldType.NUMBER,
                 autocolumn: true,
               }

         table = await config.api.table.save(
           tableForDatasource(datasource, {
             primary: ["id"],
             schema: {
               id: idFieldSchema,
               name: {
                 name: "name",
                 type: FieldType.STRING,
               },
               surname: {
                 name: "surname",
                 type: FieldType.STRING,
               },
               age: {
                 name: "age",
                 type: FieldType.NUMBER,
               },
               address: {
                 name: "address",
                 type: FieldType.STRING,
               },
             },
           })
         )

         for (let i = 0; i < 10; i++) {
           await config.api.row.save(table._id!, {
             name: generator.first(),
             surname: generator.last(),
             age: generator.age(),
             address: generator.address(),
           })
         }
       })

       afterAll(async () => {
         config.end()
       })

       it("querying by fields will always return data attribute columns", async () => {
         await config.doInContext(config.appId, async () => {
           const { rows } = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })

           expect(rows).toHaveLength(10)
           for (const row of rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       !isInternal &&
         it("will decode _id in oneOf query", async () => {
           await config.doInContext(config.appId, async () => {
             const result = await search({
               tableId: table._id!,
               query: {
                 oneOf: {
                   _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
                 },
               },
             })

             expect(result.rows).toHaveLength(3)
             expect(result.rows.map(row => row.id)).toEqual(
               expect.arrayContaining([1, 4, 8])
             )
           })
         })

       it("does not allow accessing hidden fields", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("surname")
             expect(keys).toContain("address")
             expect(keys).not.toContain("age")
           }
         })
       })

       it("does not allow accessing hidden fields even if requested", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).not.toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       it.each([
         [["id", "name", "age"], 3],
         [["name", "age"], 10],
       ])(
         "cannot query by non search fields (fields: %s)",
         async (queryFields, expectedRows) => {
           await config.doInContext(config.appId, async () => {
             const { rows } = await search({
               tableId: table._id!,
               query: {
                 $or: {
                   conditions: [
                     {
                       $and: {
                         conditions: [
                           { range: { id: { low: 2, high: 4 } } },
                           { range: { id: { low: 3, high: 5 } } },
                         ],
                       },
                     },
                     { equal: { id: 7 } },
                   ],
                 },
               },
               fields: queryFields,
             })
             expect(rows).toHaveLength(expectedRows)
           })
         }
       )
     }
   )
+}

View File

@@ -1,4 +1,4 @@
-import jimp from "jimp"
+import { Jimp } from "jimp"

 const FORMATS = {
   IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"],
@@ -6,8 +6,8 @@ const FORMATS = {

 function processImage(file: { path: string }) {
   // this will overwrite the temp file
-  return jimp.read(file.path).then(img => {
-    return img.resize(300, jimp.AUTO).write(file.path)
+  return Jimp.read(file.path).then(img => {
+    return img.resize({ w: 256 }).write(file.path as `${string}.${string}`)
   })
 }

View File

@@ -40,17 +40,17 @@
   "dependencies": {
     "@budibase/backend-core": "0.0.0",
     "@budibase/pro": "0.0.0",
+    "@budibase/shared-core": "0.0.0",
     "@budibase/string-templates": "0.0.0",
     "@budibase/types": "0.0.0",
-    "@budibase/shared-core": "0.0.0",
-    "@koa/router": "8.0.8",
+    "@koa/router": "13.1.0",
     "@techpass/passport-openidconnect": "0.3.3",
     "@types/global-agent": "2.1.1",
-    "aws-sdk": "2.1030.0",
+    "aws-sdk": "2.1692.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
     "bull": "4.10.1",
-    "dd-trace": "5.2.0",
+    "dd-trace": "5.23.0",
     "dotenv": "8.6.0",
     "global-agent": "3.0.0",
     "ical-generator": "4.1.0",
@@ -82,7 +82,7 @@
     "@types/jest": "29.5.5",
     "@types/jsonwebtoken": "9.0.3",
     "@types/koa": "2.13.4",
-    "@types/koa__router": "8.0.8",
+    "@types/koa__router": "12.0.4",
     "@types/lodash": "4.14.200",
     "@types/node": "^22.9.0",
     "@types/node-fetch": "2.6.4",

View File

@@ -40,6 +40,7 @@ import {
 import { checkAnyUserExists } from "../../../utilities/users"
 import { isEmailConfigured } from "../../../utilities/email"
 import { BpmStatusKey, BpmStatusValue, utils } from "@budibase/shared-core"
+import crypto from "crypto"

 const MAX_USERS_UPLOAD_LIMIT = 1000

yarn.lock (3668 lines changed)

File diff suppressed because it is too large