Merge branch 'master' into security-updates

This commit is contained in:
Martin McKeaveney 2024-11-20 16:13:58 +00:00 committed by GitHub
commit e5e0563546
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 14384 additions and 14226 deletions

View File

@@ -114,9 +114,9 @@ jobs:
       - name: Test
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
           else
-            yarn test --ignore=@budibase/worker --ignore=@budibase/server
+            yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions
           fi

   test-worker:
@@ -138,11 +138,16 @@ jobs:
       - name: Test worker
         run: |
           if ${{ env.ONLY_AFFECTED_TASKS }}; then
-            node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
-          else
-            yarn test --scope=@budibase/worker
+            AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker)
+            if [ -z "$AFFECTED" ]; then
+              echo "No affected tests to run"
+              exit 0
+            fi
           fi
+          cd packages/worker
+          yarn test --verbose --reporters=default --reporters=github-actions

   test-server:
     runs-on: ubuntu-latest
     strategy:
@ -211,7 +216,7 @@ jobs:
fi fi
cd packages/server cd packages/server
yarn test --filter $FILTER --passWithNoTests yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions
check-pro-submodule: check-pro-submodule:
runs-on: ubuntu-latest runs-on: ubuntu-latest
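
For context: the `--reporters=default --reporters=github-actions` flags added across these jobs enable Jest's built-in GitHub Actions annotation reporter alongside the normal console output. A minimal sketch of the equivalent configuration, assuming Jest 28+ (the `GITHUB_ACTIONS` conditional is illustrative and not part of this commit):

// jest.config.ts: a hypothetical config-file equivalent of the CLI flags above
import type { Config } from "jest"

const config: Config = {
  verbose: true,
  // "github-actions" annotates failing tests directly in the workflow run;
  // "default" keeps the usual terminal reporter alongside it.
  reporters: process.env.GITHUB_ACTIONS
    ? ["default", "github-actions"]
    : ["default"],
}

export default config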

View File

@@ -57,7 +57,7 @@
     "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "./scripts/devDocker.sh",
-    "test": "lerna run --concurrency 1 --stream test --stream",
+    "test": "lerna run --concurrency 1 --stream test",
     "test:containers:kill": "./scripts/killTestcontainers.sh",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",

View File

@@ -164,9 +164,12 @@ describe("/datasources", () => {
   })
 })

-datasourceDescribe(
-  { name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({
+  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)("$dbName", ({ config, dsProvider }) => {
     let datasource: Datasource
     let rawDatasource: Datasource
     let client: Knex
@@ -492,5 +495,5 @@
         )
       })
     })
-  }
-)
+  })
+}
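
This is the pattern the rest of the commit repeats in every datasource spec: `datasourceDescribe` no longer takes a callback, it returns an array of per-database descriptions, and the suite is registered with Jest's `describe.each` (which injects `$dbName` into the title). The `length` guard matters because `describe.each([])` throws, and the helper returns nothing to run when `DATASOURCE=none`. A minimal sketch of a consumer, with illustrative import paths and test body:

import { Knex } from "knex"
import { Datasource } from "@budibase/types"
import {
  DatabaseName,
  datasourceDescribe,
} from "../../integrations/tests/utils"

const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
  describe.each(descriptions)("example suite ($dbName)", ({ dsProvider }) => {
    let datasource: Datasource
    let client: Knex

    beforeAll(async () => {
      // dsProvider lazily provisions the datasource for this database
      const ds = await dsProvider()
      datasource = ds.datasource!
      client = ds.client!
    })

    it("provisions a datasource", async () => {
      expect(datasource._id).toBeDefined()
    })
  })
}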

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -977,63 +977,69 @@ describe("/rowsActions", () => {
   })
 })

-datasourceDescribe(
-  { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] },
-  ({ config, dsProvider, isInternal }) => {
+const descriptions = datasourceDescribe({
+  only: [DatabaseName.SQS, DatabaseName.POSTGRES],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "row actions ($dbName)",
+    ({ config, dsProvider, isInternal }) => {
       let datasource: Datasource | undefined

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource
       })

       async function getTable(): Promise<Table> {
         if (isInternal) {
           await config.api.application.addSampleData(config.getAppId())
           const tables = await config.api.table.fetch()
           return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)!
         } else {
           const table = await config.api.table.save(
             setup.structures.tableForDatasource(datasource!)
           )
           return table
         }
       }

       it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => {
         async function getRowActionsFromDb(tableId: string) {
           return await context.doInAppContext(config.getAppId(), async () => {
             const db = context.getAppDB()
             const tableDoc = await db.tryGet<TableRowActions>(
               generateRowActionsID(tableId)
             )
             return tableDoc
           })
         }

         const table = await getTable()
         const tableId = table._id!

         await config.api.rowAction.save(tableId, {
           name: generator.guid(),
         })

         await config.api.rowAction.save(tableId, {
           name: generator.guid(),
         })

         const { actions } = (await getRowActionsFromDb(tableId))!
         expect(Object.entries(actions)).toHaveLength(2)

         const { automations } = await config.api.automation.fetch()
         expect(automations).toHaveLength(2)

         const datasource = await config.api.datasource.get(table.sourceId)
         await config.api.datasource.delete(datasource)

         const automationsResp = await config.api.automation.fetch()
         expect(automationsResp.automations).toHaveLength(0)

         expect(await getRowActionsFromDb(tableId)).toBeUndefined()
       })
-  }
-)
+    }
+  )
+}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -7,71 +7,74 @@ import {
 import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"

-datasourceDescribe(
-  {
-    name: "execute query action",
-    exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
-  },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({
+  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+})
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "execute query action ($dbName)",
+    ({ config, dsProvider }) => {
       let tableName: string
       let client: Knex
       let datasource: Datasource
       let query: Query

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource!
         client = ds.client!
       })

       beforeEach(async () => {
         tableName = generator.guid()
         await client.schema.createTable(tableName, table => {
           table.string("a")
           table.integer("b")
         })
         await client(tableName).insert({ a: "string", b: 1 })
         query = await setup.saveTestQuery(config, client, tableName, datasource)
       })

       afterEach(async () => {
         await client.schema.dropTable(tableName)
       })

       it("should be able to execute a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: query._id },
           }
         )
         expect(res.response).toEqual([{ a: "string", b: 1 }])
         expect(res.success).toEqual(true)
       })

       it("should handle a null query value", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: null,
           }
         )
         expect(res.response.message).toEqual("Invalid inputs")
         expect(res.success).toEqual(false)
       })

       it("should handle an error executing a query", async () => {
         let res = await setup.runStep(
           config,
           setup.actions.EXECUTE_QUERY.stepId,
           {
             query: { queryId: "wrong_id" },
           }
         )
         expect(res.response).toBeDefined()
         expect(res.success).toEqual(false)
       })
-  }
-)
+    }
+  )
+}

View File

@@ -433,9 +433,10 @@ describe("Automation Scenarios", () => {
   })
 })

-datasourceDescribe(
-  { name: "", only: [DatabaseName.MYSQL] },
-  ({ config, dsProvider }) => {
+const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (descriptions.length) {
+  describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => {
     let datasource: Datasource
     let client: Knex
@@ -531,5 +532,5 @@
       )
     })
   })
-  }
-)
+  })
+}

View File

@@ -10,119 +10,123 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }

-datasourceDescribe(
-  {
-    name: "Integration compatibility with mysql search_path",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+if (mainDescriptions.length) {
+  describe.each(mainDescriptions)(
+    "/Integration compatibility with mysql search_path ($dbName)",
+    ({ config, dsProvider }) => {
       let rawDatasource: Datasource
       let datasource: Datasource
       let client: Knex

       const database = generator.guid()
       const database2 = generator.guid()

       beforeAll(async () => {
         const ds = await dsProvider()
         rawDatasource = ds.rawDatasource!
         datasource = ds.datasource!
         client = ds.client!

         await client.raw(`CREATE DATABASE \`${database}\`;`)
         await client.raw(`CREATE DATABASE \`${database2}\`;`)

         rawDatasource.config!.database = database
         datasource = await config.api.datasource.create(rawDatasource)
       })

       afterAll(async () => {
         await client.raw(`DROP DATABASE \`${database}\`;`)
         await client.raw(`DROP DATABASE \`${database2}\`;`)
       })

       it("discovers tables from any schema in search path", async () => {
         await client.schema.createTable(`${database}.table1`, table => {
           table.increments("id1").primary()
         })
         const res = await config.api.datasource.info(datasource)
         expect(res.tableNames).toBeDefined()
         expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
       })

       it("does not mix columns from different tables", async () => {
         const repeated_table_name = "table_same_name"
         await client.schema.createTable(
           `${database}.${repeated_table_name}`,
           table => {
             table.increments("id").primary()
             table.string("val1")
           }
         )
         await client.schema.createTable(
           `${database2}.${repeated_table_name}`,
           table => {
             table.increments("id2").primary()
             table.string("val2")
           }
         )
         const res = await config.api.datasource.fetchSchema({
           datasourceId: datasource._id!,
           tablesFilter: [repeated_table_name],
         })
-        expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
+        expect(
+          res.datasource.entities![repeated_table_name].schema
+        ).toBeDefined()
         const schema = res.datasource.entities![repeated_table_name].schema
         expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
       })
-  }
-)
-
-datasourceDescribe(
-  {
-    name: "POST /api/datasources/:datasourceId/schema",
-    only: [DatabaseName.MYSQL],
-  },
-  ({ config, dsProvider }) => {
+    }
+  )
+
+  const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] })
+
+  if (descriptions.length) {
+    describe.each(descriptions)(
+      "POST /api/datasources/:datasourceId/schema ($dbName)",
+      ({ config, dsProvider }) => {
         let datasource: Datasource
         let client: Knex

         beforeAll(async () => {
           const ds = await dsProvider()
           datasource = ds.datasource!
           client = ds.client!
         })

         let tableName: string
         beforeEach(async () => {
           tableName = uniqueTableName()
         })

         afterEach(async () => {
           await client.schema.dropTableIfExists(tableName)
         })

         it("recognises enum columns as options", async () => {
           const enumColumnName = "status"

           await client.schema.createTable(tableName, table => {
             table.increments("order_id").primary()
             table.string("customer_name", 100).notNullable()
             table.enum(
               enumColumnName,
               ["pending", "processing", "shipped", "delivered", "cancelled"],
               { useNative: true, enumName: `${tableName}_${enumColumnName}` }
             )
           })

           const res = await config.api.datasource.fetchSchema({
             datasourceId: datasource._id!,
           })

           const table = res.datasource.entities![tableName]

           expect(table).toBeDefined()
           expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
         })
-  }
-)
+      }
+    )
+  }
+}

View File

@ -8,283 +8,292 @@ import {
} from "../integrations/tests/utils" } from "../integrations/tests/utils"
import { Knex } from "knex" import { Knex } from "knex"
datasourceDescribe( const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
{ name: "postgres integrations", only: [DatabaseName.POSTGRES] },
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
beforeAll(async () => { if (mainDescriptions.length) {
const ds = await dsProvider() describe.each(mainDescriptions)(
datasource = ds.datasource! "/postgres integrations",
client = ds.client! ({ config, dsProvider }) => {
}) let datasource: Datasource
let client: Knex
afterAll(config.end) beforeAll(async () => {
const ds = await dsProvider()
describe("POST /api/datasources/:datasourceId/schema", () => { datasource = ds.datasource!
let tableName: string client = ds.client!
beforeEach(async () => {
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
}) })
afterEach(async () => { afterAll(config.end)
await client.schema.dropTableIfExists(tableName)
})
it("recognises when a table has no primary key", async () => { describe("POST /api/datasources/:datasourceId/schema", () => {
await client.schema.createTable(tableName, table => { let tableName: string
table.increments("id", { primaryKey: false })
beforeEach(async () => {
tableName = generator.guid().replaceAll("-", "").substring(0, 10)
}) })
const response = await config.api.datasource.fetchSchema({ afterEach(async () => {
datasourceId: datasource._id!, await client.schema.dropTableIfExists(tableName)
}) })
expect(response.errors).toEqual({ it("recognises when a table has no primary key", async () => {
[tableName]: "Table must have a primary key.", await client.schema.createTable(tableName, table => {
}) table.increments("id", { primaryKey: false })
}) })
it("recognises when a table is using a reserved column name", async () => { const response = await config.api.datasource.fetchSchema({
await client.schema.createTable(tableName, table => { datasourceId: datasource._id!,
table.increments("_id").primary() })
})
const response = await config.api.datasource.fetchSchema({ expect(response.errors).toEqual({
datasourceId: datasource._id!, [tableName]: "Table must have a primary key.",
})
expect(response.errors).toEqual({
[tableName]: "Table contains invalid columns.",
})
})
it("recognises enum columns as options", async () => {
const tableName = `orders_${generator
.guid()
.replaceAll("-", "")
.substring(0, 6)}`
await client.schema.createTable(tableName, table => {
table.increments("order_id").primary()
table.string("customer_name").notNullable()
table.enum("status", ["pending", "processing", "shipped"], {
useNative: true,
enumName: `${tableName}_status`,
}) })
}) })
const response = await config.api.datasource.fetchSchema({ it("recognises when a table is using a reserved column name", async () => {
datasourceId: datasource._id!, await client.schema.createTable(tableName, table => {
table.increments("_id").primary()
})
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
expect(response.errors).toEqual({
[tableName]: "Table contains invalid columns.",
})
}) })
const table = response.datasource.entities?.[tableName] it("recognises enum columns as options", async () => {
const tableName = `orders_${generator
.guid()
.replaceAll("-", "")
.substring(0, 6)}`
expect(table).toBeDefined() await client.schema.createTable(tableName, table => {
expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) table.increments("order_id").primary()
}) table.string("customer_name").notNullable()
}) table.enum("status", ["pending", "processing", "shipped"], {
useNative: true,
enumName: `${tableName}_status`,
})
})
describe("check custom column types", () => { const response = await config.api.datasource.fetchSchema({
beforeAll(async () => { datasourceId: datasource._id!,
await client.schema.createTable("binaryTable", table => { })
table.binary("id").primary()
table.string("column1") const table = response.datasource.entities?.[tableName]
table.integer("column2")
expect(table).toBeDefined()
expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
}) })
}) })
it("should handle binary columns", async () => { describe("check custom column types", () => {
const response = await config.api.datasource.fetchSchema({ beforeAll(async () => {
datasourceId: datasource._id!, await client.schema.createTable("binaryTable", table => {
table.binary("id").primary()
table.string("column1")
table.integer("column2")
})
}) })
expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binaryTable"]
expect(table).toBeDefined()
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
id: "1111",
column1: "hello",
column2: 222,
})
expect(row._id).toBeDefined()
const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
expect(JSON.parse(decoded)[0]).toBe("1111")
})
})
describe("check fetching null/not null table", () => { it("should handle binary columns", async () => {
beforeAll(async () => { const response = await config.api.datasource.fetchSchema({
await client.schema.createTable("nullableTable", table => { datasourceId: datasource._id!,
table.increments("order_id").primary() })
table.integer("order_number").notNullable() expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binaryTable"]
expect(table).toBeDefined()
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
id: "1111",
column1: "hello",
column2: 222,
})
expect(row._id).toBeDefined()
const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
expect(JSON.parse(decoded)[0]).toBe("1111")
}) })
}) })
it("should be able to change the table to allow nullable and refetch this", async () => { describe("check fetching null/not null table", () => {
const response = await config.api.datasource.fetchSchema({ beforeAll(async () => {
datasourceId: datasource._id!, await client.schema.createTable("nullableTable", table => {
}) table.increments("order_id").primary()
const entities = response.datasource.entities table.integer("order_number").notNullable()
expect(entities).toBeDefined() })
const nullableTable = entities?.["nullableTable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)
// need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
// is aware of - therefore we can try to fetch and make sure BB updates correctly
await client.schema.alterTable("nullableTable", table => {
table.setNullable("order_number")
}) })
const responseAfter = await config.api.datasource.fetchSchema({ it("should be able to change the table to allow nullable and refetch this", async () => {
datasourceId: datasource._id!, const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entities = response.datasource.entities
expect(entities).toBeDefined()
const nullableTable = entities?.["nullableTable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)
// need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
// is aware of - therefore we can try to fetch and make sure BB updates correctly
await client.schema.alterTable("nullableTable", table => {
table.setNullable("order_number")
})
const responseAfter = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullableTable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence
).toBeUndefined()
}) })
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullableTable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence
).toBeUndefined()
}) })
})
describe("money field 💰", () => { describe("money field 💰", () => {
const tableName = "moneytable" const tableName = "moneytable"
let table: Table let table: Table
beforeAll(async () => { beforeAll(async () => {
await client.raw(` await client.raw(`
CREATE TABLE ${tableName} ( CREATE TABLE ${tableName} (
id serial PRIMARY KEY, id serial PRIMARY KEY,
price money price money
) )
`) `)
const response = await config.api.datasource.fetchSchema({ const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!, datasourceId: datasource._id!,
}) })
table = response.datasource.entities![tableName] table = response.datasource.entities![tableName]
})
it("should be able to import a money field", async () => {
expect(table).toBeDefined()
expect(table?.schema.price.type).toBe(FieldType.NUMBER)
})
it("should be able to search a money field", async () => {
await config.api.row.bulkImport(table._id!, {
rows: [{ price: 200 }, { price: 300 }],
}) })
const { rows } = await config.api.row.search(table._id!, { it("should be able to import a money field", async () => {
query: { expect(table).toBeDefined()
equal: { expect(table?.schema.price.type).toBe(FieldType.NUMBER)
price: 200, })
it("should be able to search a money field", async () => {
await config.api.row.bulkImport(table._id!, {
rows: [{ price: 200 }, { price: 300 }],
})
const { rows } = await config.api.row.search(table._id!, {
query: {
equal: {
price: 200,
},
}, },
}, })
expect(rows).toHaveLength(1)
expect(rows[0].price).toBe("200.00")
})
it("should be able to update a money field", async () => {
let row = await config.api.row.save(table._id!, { price: 200 })
expect(row.price).toBe("200.00")
row = await config.api.row.save(table._id!, { ...row, price: 300 })
expect(row.price).toBe("300.00")
row = await config.api.row.save(table._id!, {
...row,
price: "400.00",
})
expect(row.price).toBe("400.00")
}) })
expect(rows).toHaveLength(1)
expect(rows[0].price).toBe("200.00")
}) })
}
)
it("should be able to update a money field", async () => { const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] })
let row = await config.api.row.save(table._id!, { price: 200 })
expect(row.price).toBe("200.00")
row = await config.api.row.save(table._id!, { ...row, price: 300 }) if (descriptions.length) {
expect(row.price).toBe("300.00") describe.each(descriptions)(
"Integration compatibility with postgres search_path",
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
let schema1: string
let schema2: string
row = await config.api.row.save(table._id!, { ...row, price: "400.00" }) beforeEach(async () => {
expect(row.price).toBe("400.00") const ds = await dsProvider()
}) datasource = ds.datasource!
}) const rawDatasource = ds.rawDatasource!
schema1 = generator.guid().replaceAll("-", "")
schema2 = generator.guid().replaceAll("-", "")
client = await knexClient(rawDatasource)
await client.schema.createSchema(schema1)
await client.schema.createSchema(schema2)
rawDatasource.config!.schema = `${schema1}, ${schema2}`
client = await knexClient(rawDatasource)
datasource = await config.api.datasource.create(rawDatasource)
})
afterEach(async () => {
await client.schema.dropSchema(schema1, true)
await client.schema.dropSchema(schema2, true)
})
it("discovers tables from any schema in search path", async () => {
await client.schema.createTable(`${schema1}.table1`, table => {
table.increments("id1").primary()
})
await client.schema.createTable(`${schema2}.table2`, table => {
table.increments("id2").primary()
})
const response = await config.api.datasource.info(datasource)
expect(response.tableNames).toBeDefined()
expect(response.tableNames).toEqual(
expect.arrayContaining(["table1", "table2"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.schema.createTable(
`${schema1}.${repeated_table_name}`,
table => {
table.increments("id").primary()
table.string("val1")
}
)
await client.schema.createTable(
`${schema2}.${repeated_table_name}`,
table => {
table.increments("id2").primary()
table.string("val2")
}
)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
tablesFilter: [repeated_table_name],
})
expect(
response.datasource.entities?.[repeated_table_name].schema
).toBeDefined()
const schema =
response.datasource.entities?.[repeated_table_name].schema
expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
})
}
)
} }
) }
datasourceDescribe(
{
name: "Integration compatibility with postgres search_path",
only: [DatabaseName.POSTGRES],
},
({ config, dsProvider }) => {
let datasource: Datasource
let client: Knex
let schema1: string
let schema2: string
beforeEach(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
const rawDatasource = ds.rawDatasource!
schema1 = generator.guid().replaceAll("-", "")
schema2 = generator.guid().replaceAll("-", "")
client = await knexClient(rawDatasource)
await client.schema.createSchema(schema1)
await client.schema.createSchema(schema2)
rawDatasource.config!.schema = `${schema1}, ${schema2}`
client = await knexClient(rawDatasource)
datasource = await config.api.datasource.create(rawDatasource)
})
afterEach(async () => {
await client.schema.dropSchema(schema1, true)
await client.schema.dropSchema(schema2, true)
})
it("discovers tables from any schema in search path", async () => {
await client.schema.createTable(`${schema1}.table1`, table => {
table.increments("id1").primary()
})
await client.schema.createTable(`${schema2}.table2`, table => {
table.increments("id2").primary()
})
const response = await config.api.datasource.info(datasource)
expect(response.tableNames).toBeDefined()
expect(response.tableNames).toEqual(
expect.arrayContaining(["table1", "table2"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await client.schema.createTable(
`${schema1}.${repeated_table_name}`,
table => {
table.increments("id").primary()
table.string("val1")
}
)
await client.schema.createTable(
`${schema2}.${repeated_table_name}`,
table => {
table.increments("id2").primary()
table.string("val2")
}
)
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
tablesFilter: [repeated_table_name],
})
expect(
response.datasource.entities?.[repeated_table_name].schema
).toBeDefined()
const schema = response.datasource.entities?.[repeated_table_name].schema
expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
})
}
)

View File

@@ -35,7 +35,6 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
 }

 export interface DatasourceDescribeOpts {
-  name: string
   only?: DatabaseName[]
   exclude?: DatabaseName[]
 }
@@ -102,16 +101,12 @@ function createDummyTest() {
   })
 }

-export function datasourceDescribe(
-  opts: DatasourceDescribeOpts,
-  cb: (args: DatasourceDescribeReturn) => void
-) {
+export function datasourceDescribe(opts: DatasourceDescribeOpts) {
   if (process.env.DATASOURCE === "none") {
     createDummyTest()
-    return
   }

-  const { name, only, exclude } = opts
+  const { only, exclude } = opts

   if (only && exclude) {
     throw new Error("you can only supply one of 'only' or 'exclude'")
@@ -130,36 +125,28 @@ export function datasourceDescribe(
   if (databases.length === 0) {
     createDummyTest()
-    return
   }

-  describe.each(databases)(name, name => {
-    const config = new TestConfiguration()
-
-    afterAll(() => {
-      config.end()
-    })
-
-    cb({
-      name,
-      config,
-      dsProvider: () => createDatasources(config, name),
-      isInternal: name === DatabaseName.SQS,
-      isExternal: name !== DatabaseName.SQS,
-      isSql: [
-        DatabaseName.MARIADB,
-        DatabaseName.MYSQL,
-        DatabaseName.POSTGRES,
-        DatabaseName.SQL_SERVER,
-        DatabaseName.ORACLE,
-      ].includes(name),
-      isMySQL: name === DatabaseName.MYSQL,
-      isPostgres: name === DatabaseName.POSTGRES,
-      isMongodb: name === DatabaseName.MONGODB,
-      isMSSQL: name === DatabaseName.SQL_SERVER,
-      isOracle: name === DatabaseName.ORACLE,
-    })
-  })
+  const config = new TestConfiguration()
+  return databases.map(dbName => ({
+    dbName,
+    config,
+    dsProvider: () => createDatasources(config, dbName),
+    isInternal: dbName === DatabaseName.SQS,
+    isExternal: dbName !== DatabaseName.SQS,
+    isSql: [
+      DatabaseName.MARIADB,
+      DatabaseName.MYSQL,
+      DatabaseName.POSTGRES,
+      DatabaseName.SQL_SERVER,
+      DatabaseName.ORACLE,
+    ].includes(dbName),
+    isMySQL: dbName === DatabaseName.MYSQL,
+    isPostgres: dbName === DatabaseName.POSTGRES,
+    isMongodb: dbName === DatabaseName.MONGODB,
+    isMSSQL: dbName === DatabaseName.SQL_SERVER,
+    isOracle: dbName === DatabaseName.ORACLE,
+  }))
 }

 function getDatasource(
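
With the callback removed, `datasourceDescribe` now returns one description per database, all sharing a single `TestConfiguration`; tearing it down via `config.end()` consequently moves into the consuming suites' `afterAll`. Each returned entry has roughly this shape (a sketch inferred from the diff; the interface name and the provider's exact return type are assumptions):

interface DatasourceDescription {
  dbName: DatabaseName
  config: TestConfiguration
  // lazily provisions the datasource, plus a knex client for SQL databases
  dsProvider: () => Promise<{
    datasource?: Datasource
    rawDatasource?: Datasource
    client?: Knex
  }>
  isInternal: boolean // true only for SQS, the internal DB
  isExternal: boolean
  isSql: boolean
  isMySQL: boolean
  isPostgres: boolean
  isMongodb: boolean
  isMSSQL: boolean
  isOracle: boolean
}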

View File

@@ -19,202 +19,206 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures"
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-datasourceDescribe(
-  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
-  ({ config, dsProvider, isInternal }) => {
+const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+
+if (descriptions.length) {
+  describe.each(descriptions)(
+    "search sdk ($dbName)",
+    ({ config, dsProvider, isInternal }) => {
       let datasource: Datasource | undefined
       let table: Table

       beforeAll(async () => {
         const ds = await dsProvider()
         datasource = ds.datasource
       })

       beforeEach(async () => {
         const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
           isInternal
             ? {
                 name: "id",
                 type: FieldType.AUTO,
                 subtype: AutoFieldSubType.AUTO_ID,
                 autocolumn: true,
               }
             : {
                 name: "id",
                 type: FieldType.NUMBER,
                 autocolumn: true,
               }
         table = await config.api.table.save(
           tableForDatasource(datasource, {
             primary: ["id"],
             schema: {
               id: idFieldSchema,
               name: {
                 name: "name",
                 type: FieldType.STRING,
               },
               surname: {
                 name: "surname",
                 type: FieldType.STRING,
               },
               age: {
                 name: "age",
                 type: FieldType.NUMBER,
               },
               address: {
                 name: "address",
                 type: FieldType.STRING,
               },
             },
           })
         )

         for (let i = 0; i < 10; i++) {
           await config.api.row.save(table._id!, {
             name: generator.first(),
             surname: generator.last(),
             age: generator.age(),
             address: generator.address(),
           })
         }
       })

       afterAll(async () => {
         config.end()
       })

       it("querying by fields will always return data attribute columns", async () => {
         await config.doInContext(config.appId, async () => {
           const { rows } = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })

           expect(rows).toHaveLength(10)
           for (const row of rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       !isInternal &&
         it("will decode _id in oneOf query", async () => {
           await config.doInContext(config.appId, async () => {
             const result = await search({
               tableId: table._id!,
               query: {
                 oneOf: {
                   _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
                 },
               },
             })

             expect(result.rows).toHaveLength(3)
             expect(result.rows.map(row => row.id)).toEqual(
               expect.arrayContaining([1, 4, 8])
             )
           })
         })

       it("does not allow accessing hidden fields", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).toContain("surname")
             expect(keys).toContain("address")
             expect(keys).not.toContain("age")
           }
         })
       })

       it("does not allow accessing hidden fields even if requested", async () => {
         await config.doInContext(config.appId, async () => {
           await config.api.table.save({
             ...table,
             schema: {
               ...table.schema,
               name: {
                 ...table.schema.name,
                 visible: true,
               },
               age: {
                 ...table.schema.age,
                 visible: false,
               },
             },
           })
           const result = await search({
             tableId: table._id!,
             query: {},
             fields: ["name", "age"],
           })
           expect(result.rows).toHaveLength(10)
           for (const row of result.rows) {
             const keys = Object.keys(row)
             expect(keys).toContain("name")
             expect(keys).not.toContain("age")
             expect(keys).not.toContain("surname")
             expect(keys).not.toContain("address")
           }
         })
       })

       it.each([
         [["id", "name", "age"], 3],
         [["name", "age"], 10],
       ])(
         "cannot query by non search fields (fields: %s)",
         async (queryFields, expectedRows) => {
           await config.doInContext(config.appId, async () => {
             const { rows } = await search({
               tableId: table._id!,
               query: {
                 $or: {
                   conditions: [
                     {
                       $and: {
                         conditions: [
                           { range: { id: { low: 2, high: 4 } } },
                           { range: { id: { low: 3, high: 5 } } },
                         ],
                       },
                     },
                     { equal: { id: 7 } },
                   ],
                 },
               },
               fields: queryFields,
             })
             expect(rows).toHaveLength(expectedRows)
           })
         }
       )
-  }
-)
+    }
+  )
+}

View File

@@ -1,34 +0,0 @@
-/***
- * Running lerna with since and scope is not working as expected.
- * For example, running the command `yarn test --scope=@budibase/worker --since=master`, with changes only on `@budibase/backend-core` will not work as expected, as it does not analyse the dependencies properly. The actual `@budibase/worker` task will not be triggered.
- *
- * This script is using `lerna ls` to detect all the affected projects from a given commit, and if the scoped package is affected, the actual command will be executed.
- *
- * The current version of the script only supports a single project in the scope.
- */
-
-const { execSync } = require("child_process")
-
-const argv = require("yargs").demandOption(["task", "since", "scope"]).argv
-const { task, since, scope } = argv
-
-const affectedPackages = execSync(
-  `yarn --silent nx show projects --affected -t ${task} --base=${since} --json`,
-  {
-    encoding: "utf-8",
-  }
-)
-
-const packages = JSON.parse(affectedPackages)
-
-const isAffected = packages.includes(scope)
-
-if (isAffected) {
-  console.log(`${scope} is affected. Running task "${task}"`)
-  execSync(`yarn ${task} --scope=${scope}`, {
-    stdio: "inherit",
-  })
-} else {
-  console.log(`${scope} is not affected. Skipping task "${task}"`)
-}
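
The deleted script's job (running a package's tests only when nx reports it as affected) now lives inline in the workflow, as shown in the `test-worker` hunk above. For illustration, a rough TypeScript equivalent of the new inline shell check; the function name is hypothetical and only the nx invocation from the diff is assumed:

import { execSync } from "child_process"

// Ask nx whether the project is affected relative to the base branch;
// -p filters the affected list down to the one project we care about.
function isAffected(project: string, base: string): boolean {
  const output = execSync(
    `yarn --silent nx show projects --affected -t test --base=${base} -p ${project}`,
    { encoding: "utf-8" }
  )
  return output.trim().length > 0
}

if (!isAffected("@budibase/worker", process.env.NX_BASE_BRANCH ?? "master")) {
  console.log("No affected tests to run")
  process.exit(0)
}
// otherwise: cd packages/worker && yarn test ...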