From 602faf1c67477ddfb44a2aee28d982c0f9e1ad09 Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Wed, 19 Jun 2024 17:52:48 +0100
Subject: [PATCH 1/7] Add test for composite primary keys for external
 datasource imports.

---
 packages/backend-core/src/sql/sql.ts          |  3 +-
 .../ExistingTableDataImport.svelte            | 20 ++--
 .../api/controllers/row/ExternalRequest.ts    |  1 +
 .../src/api/controllers/table/external.ts     | 14 +++-
 .../server/src/api/routes/tests/row.spec.ts   | 81 ++++++++++++++++++-
 5 files changed, 106 insertions(+), 13 deletions(-)

diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts
index 3224fc043e..7d52665f39 100644
--- a/packages/backend-core/src/sql/sql.ts
+++ b/packages/backend-core/src/sql/sql.ts
@@ -587,7 +587,8 @@ class InternalBuilder {
       if (!primary) {
         throw new Error("Primary key is required for upsert")
       }
-      return query.insert(parsedBody).onConflict(primary).merge()
+      const ret = query.insert(parsedBody).onConflict(primary).merge()
+      return ret
     } else if (this.client === SqlClient.MS_SQL) {
       // No upsert or onConflict support in MSSQL yet, see:
       // https://github.com/knex/knex/pull/6050
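[Annotation] The upsert branch above leans on knex's onConflict().merge(), which compiles to INSERT ... ON CONFLICT (...) DO UPDATE on Postgres and INSERT ... ON DUPLICATE KEY UPDATE on MySQL. A minimal sketch of the behaviour being exercised, assuming a configured knex instance and an illustrative "tasks" table whose primary key is "id" (neither name comes from the patch):

    import knex from "knex"

    const db = knex({ client: "pg", connection: process.env.DATABASE_URL })

    async function upsertExample() {
      // Inserts the row, or updates "name" when a row with id = 1 already
      // exists. The conflict target must be backed by a unique index.
      await db("tasks")
        .insert({ id: 1, name: "Task 1" })
        .onConflict("id")
        .merge()
    }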
diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
index de56fa8ce5..cd6b5aeb2e 100644
--- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
@@ -185,20 +185,22 @@
       {/each}
     </div>
   {/if}
-  {#if tableType === DB_TYPE_INTERNAL}
-    <br />
-    <Checkbox bind:value={updateExistingRows} on:change={() => (identifierFields = [])} thin text="Update existing rows" />
-    {#if updateExistingRows}
+  <br />
+  <Checkbox
+    bind:value={updateExistingRows}
+    on:change={() => (identifierFields = [])}
+    thin
+    text="Update existing rows"
+  />
+  {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         bind:value={identifierFields}
       />
+    {:else}
+      <Body size="S">
+        Rows will be updated based on the table's primary key.
+      </Body>
+    {/if}
   {/if}
   {#if invalidColumns.length > 0}
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index b30c97e289..121a1b6b9b 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -653,6 +653,7 @@ export class ExternalRequest<T extends Operation> {
       },
       meta: {
         table,
+        id: config.id,
       },
     }
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index f1b186c233..db4343b405 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -16,6 +16,7 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import _ from "lodash"
 
 function getDatasourceId(table: Table) {
   if (!table) {
@@ -82,9 +83,20 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
+  if (identifierFields && !_.isEqual(identifierFields, table.primary)) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on at the moment in Budibase
+    // is the primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }
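[Annotation] The 400 above is a hard guard, not a validation nicety: ON CONFLICT only fires when the inserted row collides with a unique index, and identifier fields that don't line up with one make the database reject the statement outright. A sketch of the failure mode on Postgres, assuming an illustrative "users" table with primary key "id" and a non-unique "email" column:

    import { Knex } from "knex"

    async function conflictTargets(db: Knex) {
      // Works: "id" is the primary key, so a unique index backs the target.
      await db("users")
        .insert({ id: 1, email: "a@example.com" })
        .onConflict("id")
        .merge()

      // Rejected by Postgres: there is no unique index on "email", so the
      // statement fails with "there is no unique or exclusion constraint
      // matching the ON CONFLICT specification".
      await db("users")
        .insert({ id: 2, email: "a@example.com" })
        .onConflict("email")
        .merge()
    }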
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 4d1b4f028b..96eec95921 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 import tk from "timekeeper"
 import emitter from "../../../../src/events"
 
@@ -31,6 +35,7 @@ import {
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"
 
 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([
   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined
 
   beforeAll(async () => {
     await config.init()
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })
@@ -1077,6 +1085,75 @@ describe.each([
       const rows = await config.api.row.fetch(table._id!)
       expect(rows.length).toEqual(3)
 
+      rows.sort((a, b) => a.name.localeCompare(b.name))
+      expect(rows[0].name).toEqual("Row 1 updated")
+      expect(rows[0].description).toEqual("Row 1 description updated")
+      expect(rows[1].name).toEqual("Row 2 updated")
+      expect(rows[1].description).toEqual("Row 2 description updated")
+      expect(rows[2].name).toEqual("Row 3")
+      expect(rows[2].description).toEqual("Row 3 description")
+    })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
       rows.sort((a, b) => a.name.localeCompare(b.name))
       expect(rows[0].name).toEqual("Row 1 updated")
       expect(rows[0].description).toEqual("Row 1 description updated")
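[Annotation] For the composite-key path added in patch 1, the generated statement is a single batched upsert, roughly as follows (a sketch of what knex emits on Postgres, not text from the patch):

    import { Knex } from "knex"

    async function compositeUpsert(db: Knex, tableName: string, rows: object[]) {
      // Compiles to roughly:
      //   INSERT INTO "<table>" ("companyId", "userId", "name", "description")
      //   VALUES ... ON CONFLICT ("companyId", "userId") DO UPDATE SET ...
      await db(tableName)
        .insert(rows)
        .onConflict(["companyId", "userId"])
        .merge()
    }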
From c9fecbaa776e69d2f25da4d21897fc12153e7f07 Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Fri, 21 Jun 2024 15:12:02 +0100
Subject: [PATCH 2/7] Fix updating rows in external tables where the primary
 key is an autocolumn.

---
 .../api/controllers/row/ExternalRequest.ts    |  8 +-
 .../src/api/controllers/table/external.ts     |  8 +-
 .../server/src/api/controllers/table/utils.ts |  2 +-
 .../server/src/api/routes/tests/row.spec.ts   | 87 +++++++++++++------
 .../src/utilities/rowProcessor/index.ts       |  1 +
 packages/server/src/utilities/schema.ts       | 15 +++-
 6 files changed, 89 insertions(+), 32 deletions(-)

diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index 0ad55a78c3..c2f51decc2 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -289,7 +289,13 @@ export class ExternalRequest<T extends Operation> {
     manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
+      if (row[key] === undefined || newRow[key]) {
+        continue
+      }
+      if (
+        !(this.operation === Operation.BULK_UPSERT) &&
+        !isEditableColumn(field)
+      ) {
         continue
       }
       // parse floats/numbers
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index db4343b405..15768d73bb 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -86,7 +86,11 @@ export async function bulkImport(
   const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
-  if (identifierFields && !_.isEqual(identifierFields, table.primary)) {
+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !_.isEqual(identifierFields, table.primary)
+  ) {
     // This is because we make use of the ON CONFLICT functionality in SQL
     // databases, which only triggers when there's a conflict against a unique
     // index. The only unique index we can count on at the moment in Budibase
@@ -102,7 +106,7 @@ export async function bulkImport(
   }
 
   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts
index a42cfc43c3..0e9a32b294 100644
--- a/packages/server/src/api/controllers/table/utils.ts
+++ b/packages/server/src/api/controllers/table/utils.ts
@@ -178,7 +178,7 @@ export async function handleDataImport(
   }
 
   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)
 
   let finalData: any = await importToRows(data, table, user)
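[Annotation] parse() now receives the whole Table rather than just its schema so the import path can consult table.primary. Condensed, the rule the schema.ts hunk below implements is (a sketch using the types from @budibase/types; the helper name is illustrative, not from the patch):

    import { Table } from "@budibase/types"

    // A value survives parsing unless its column is an autocolumn outside the
    // primary key; primary-key autocolumns must survive so that bulkImport can
    // address the row it is meant to update.
    function keepColumn(table: Table, columnName: string): boolean {
      const column = table.schema[columnName]
      if (!column) {
        return false // present in the row data but not in the schema
      }
      return !(column.autocolumn && !table.primary?.includes(columnName))
    }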
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 96eec95921..c5f9262861 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -315,13 +315,13 @@ describe.each([
       // as quickly as possible.
       await Promise.all(
         sequence.map(async () => {
-          const attempts = 20
+          const attempts = 30
           for (let attempt = 0; attempt < attempts; attempt++) {
             try {
               await config.api.row.save(table._id!, {})
               return
             } catch (e) {
-              await new Promise(r => setTimeout(r, Math.random() * 15))
+              await new Promise(r => setTimeout(r, Math.random() * 50))
             }
           }
           throw new Error(`Failed to create row after ${attempts} attempts`)
         })
@@ -919,32 +919,21 @@ describe.each([
       await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })
 
-    it("Should ignore malformed/invalid delete requests", async () => {
-      const rowUsage = await getRowUsage()
-
-      await config.api.row.delete(table._id!, { not: "valid" } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, { rows: 123 } as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await config.api.row.delete(table._id!, "invalid" as any, {
-        status: 400,
-        body: {
-          message: "Invalid delete rows request",
-        },
-      })
-
-      await assertRowUsage(rowUsage)
-    })
+    it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
+      "Should ignore malformed/invalid delete request: %s",
+      async (request: any) => {
+        const rowUsage = await getRowUsage()
+
+        await config.api.row.delete(table._id!, request, {
+          status: 400,
+          body: {
+            message: "Invalid delete rows request",
+          },
+        })
+
+        await assertRowUsage(rowUsage)
+      }
+    )
   })
 
   describe("bulkImport", () => {
@@ -1162,6 +1151,52 @@ describe.each([
       expect(rows[2].name).toEqual("Row 3")
       expect(rows[2].description).toEqual("Row 3 description")
     })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with an autoID primary key", async () => {
+        const tableName = uuid.v4()
+        await client!.schema.createTable(tableName, table => {
+          table.increments("userId").primary()
+          table.string("name")
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          name: "Clare",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          name: "Jeff",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["userId"],
+          rows: [
+            {
+              userId: row1.userId,
+              name: "Clare updated",
+            },
+            {
+              userId: row2.userId,
+              name: "Jeff updated",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(2)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Clare updated")
+        expect(rows[1].name).toEqual("Jeff updated")
+      })
   })
 
   describe("enrich", () => {
diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts
index 73176af6d8..97b5332dd4 100644
--- a/packages/server/src/utilities/rowProcessor/index.ts
+++ b/packages/server/src/utilities/rowProcessor/index.ts
@@ -9,6 +9,7 @@ import {
   Row,
   RowAttachment,
   Table,
+  TableSourceType,
 } from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 8e6cd34c7c..e473675633 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }
 
-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}
 
     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
 
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
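[Annotation] Net effect of patch 2: when the primary key is an auto-increment column, the generated ID can be round-tripped through bulkImport to update the row it came from. In terms of the test harness used above (config.api is the in-repo test client, not a public API):

    // The database assigns userId on the first save...
    const row = await config.api.row.save(table._id!, { name: "Clare" })

    // ...and that same userId then addresses the row in a bulk upsert.
    await config.api.row.bulkImport(table._id!, {
      identifierFields: ["userId"],
      rows: [{ userId: row.userId, name: "Clare updated" }],
    })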
From d5481312d0e52ec73e569c8a44b724b499eb9c3a Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Fri, 21 Jun 2024 15:31:34 +0100
Subject: [PATCH 3/7] Fix lint, add new update test.

---
 .../server/src/api/routes/tests/row.spec.ts   | 33 ++++++++++++++++++-
 .../src/utilities/rowProcessor/index.ts       |  1 -
 2 files changed, 32 insertions(+), 2 deletions(-)

diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index c5f9262861..b6e3edf5ff 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -606,6 +606,35 @@ describe.each([
       expect(res.name).toEqual("Updated Name")
       await assertRowUsage(rowUsage)
     })
+
+    !isInternal &&
+      it("can update a row on an external table with a primary key", async () => {
+        const tableName = uuid.v4().substring(0, 10)
+        await client!.schema.createTable(tableName, table => {
+          table.increments("id").primary()
+          table.string("name")
+        })
+
+        const res = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = res.datasource.entities![tableName]
+
+        const row = await config.api.row.save(table._id!, {
+          id: 1,
+          name: "Row 1",
+        })
+
+        const updatedRow = await config.api.row.save(table._id!, {
+          _id: row._id!,
+          name: "Row 1 Updated",
+        })
+
+        expect(updatedRow.name).toEqual("Row 1 Updated")
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows).toHaveLength(1)
+      })
   })
 
   describe("patch", () => {
@@ -675,6 +704,7 @@ describe.each([
       expect(event.oldRow.description).toEqual(beforeRow.description)
       expect(event.row.description).toEqual(beforeRow.description)
     })
+
     it("should throw an error when given improper types", async () => {
       const existing = await config.api.row.save(table._id!, {})
       const rowUsage = await getRowUsage()
@@ -766,7 +796,8 @@ describe.each([
     })
 
     !isInternal &&
-      // TODO: SQL is having issues creating composite keys
+      // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
+      // to identity columns. This is not something Budibase does currently.
       providerType !== DatabaseName.SQL_SERVER &&
       it("should support updating fields that are part of a composite key", async () => {
         const tableRequest = saveTableRequest({
diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts
index 97b5332dd4..73176af6d8 100644
--- a/packages/server/src/utilities/rowProcessor/index.ts
+++ b/packages/server/src/utilities/rowProcessor/index.ts
@@ -9,7 +9,6 @@ import {
   Row,
   RowAttachment,
   Table,
-  TableSourceType,
 } from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {

From 2458259093b37fc2c4bd4809c57694d5a58d99a0 Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Mon, 24 Jun 2024 15:50:09 +0100
Subject: [PATCH 4/7] Respond to PR feedback.

---
 .../ExistingTableDataImport.svelte            | 25 +++++++++++++------
 .../integrations/mssql/data/Dockerfile        |  2 +-
 .../src/api/controllers/table/external.ts     |  4 +--
 3 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
index cd6b5aeb2e..80655d1099 100644
--- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
@@ -1,9 +1,14 @@
<form on:submit|preventDefault>