From 602faf1c67477ddfb44a2aee28d982c0f9e1ad09 Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Wed, 19 Jun 2024 17:52:48 +0100
Subject: [PATCH] Add test for composite primary keys for external datasource
 imports.
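
Bulk importing into an external SQL datasource updates existing rows by
upserting: the SQL builder issues

  query.insert(parsedBody).onConflict(primary).merge()

and lets the database's ON CONFLICT handling decide between insert and
update. ON CONFLICT only fires on a conflict against a unique index, and
the only unique index Budibase can currently count on is the table's
primary key, so bulkImport now rejects identifierFields that differ from
the primary key with a 400, and for external tables the builder explains
that rows are updated based on the primary key rather than offering an
identifier field picker. The new test covers a composite (two column)
primary key end to end; MSSQL is excluded because knex has no upsert
support for it yet (knex/knex#6050).

For illustration only (not part of this change), a rough sketch of the
SQL knex produces for such an upsert on PostgreSQL. The column names come
from the new test; the table name here is made up:

  import { knex } from "knex"

  // Build a query without connecting; "pg" selects the PostgreSQL dialect.
  const pg = knex({ client: "pg" })

  const sql = pg("companies_users")
    .insert({ companyId: 1, userId: 3, name: "Row 3" })
    .onConflict(["companyId", "userId"]) // must match a unique index
    .merge() // on conflict, update every inserted column
    .toString()

  // => insert into "companies_users" ("companyId", "userId", "name")
  //    values (1, 3, 'Row 3')
  //    on conflict ("companyId", "userId") do update set
  //    "companyId" = excluded."companyId", "userId" = excluded."userId",
  //    "name" = excluded."name"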
---
 packages/backend-core/src/sql/sql.ts                    |  3 +-
 .../ExistingTableDataImport.svelte                      | 20 ++--
 .../api/controllers/row/ExternalRequest.ts              |  1 +
 .../src/api/controllers/table/external.ts               | 14 +++-
 .../server/src/api/routes/tests/row.spec.ts             | 81 ++++++++++++++++++-
 5 files changed, 106 insertions(+), 13 deletions(-)

diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts
index 3224fc043e..7d52665f39 100644
--- a/packages/backend-core/src/sql/sql.ts
+++ b/packages/backend-core/src/sql/sql.ts
@@ -587,7 +587,8 @@ class InternalBuilder {
       if (!primary) {
         throw new Error("Primary key is required for upsert")
       }
-      return query.insert(parsedBody).onConflict(primary).merge()
+      const ret = query.insert(parsedBody).onConflict(primary).merge()
+      return ret
     } else if (this.client === SqlClient.MS_SQL) {
       // No upsert or onConflict support in MSSQL yet, see:
       // https://github.com/knex/knex/pull/6050
diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
index de56fa8ce5..cd6b5aeb2e 100644
--- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
@@ -185,20 +185,22 @@
       </div>
     {/each}
   </div>
-  {#if tableType === DB_TYPE_INTERNAL}
-    <br />
-    <Toggle
-      bind:value={updateExistingRows}
-      on:change={() => (identifierFields = [])}
-      thin
-      text="Update existing rows"
-    />
-    {#if updateExistingRows}
+  <br />
+  <Toggle
+    bind:value={updateExistingRows}
+    on:change={() => (identifierFields = [])}
+    thin
+    text="Update existing rows"
+  />
+  {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         label="Identifier field(s)"
         options={Object.keys(validation)}
         bind:value={identifierFields}
       />
+    {:else}
+      <p>Rows will be updated based on the table's primary key.</p>
     {/if}
   {/if}
   {#if invalidColumns.length > 0}
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index b30c97e289..121a1b6b9b 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -653,6 +653,7 @@ export class ExternalRequest<T extends Operation> {
       },
       meta: {
         table,
+        id: config.id,
       },
     }
 
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index f1b186c233..db4343b405 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -16,6 +16,7 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import _ from "lodash"
 
 function getDatasourceId(table: Table) {
   if (!table) {
@@ -82,9 +83,20 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
+  if (identifierFields && !_.isEqual(identifierFields, table.primary)) {
+    // This is because we use the ON CONFLICT functionality in SQL databases,
+    // which only triggers when there's a conflict against a unique index.
+    // The only unique index we can count on in Budibase at the moment is the
+    // primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
  }
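
The test below drives this end to end against a real database. Using a
raw connection from the knexClient test helper (which, as its use in
beforeAll implies, resolves a raw datasource to a Knex instance), it
creates a table whose primary key spans companyId and userId directly in
the database, pulls the schema into Budibase via fetchSchema, saves two
rows, then bulk imports three rows with identifierFields set to the
composite key and asserts that the two existing rows were updated in
place while the third was inserted. A rows.sort() is also added to the
existing bulkImport update test so its assertions no longer depend on
the order rows come back in.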
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 4d1b4f028b..96eec95921 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 
 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"
 
 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([
   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined
 
   beforeAll(async () => {
     await config.init()
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })
 
@@ -1077,6 +1085,75 @@
         const rows = await config.api.row.fetch(table._id!)
         expect(rows.length).toEqual(3)
 
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+        rows.sort((a, b) => a.name.localeCompare(b.name))
         expect(rows[0].name).toEqual("Row 1 updated")
         expect(rows[0].description).toEqual("Row 1 description updated")