Add test for composite primary keys for external datasource imports.

Sam Rose 2024-06-19 17:52:48 +01:00
parent e288fc8795
commit 602faf1c67
5 changed files with 106 additions and 13 deletions

View File

@@ -587,7 +587,8 @@ class InternalBuilder {
       if (!primary) {
         throw new Error("Primary key is required for upsert")
       }
-      return query.insert(parsedBody).onConflict(primary).merge()
+      const ret = query.insert(parsedBody).onConflict(primary).merge()
+      return ret
     } else if (this.client === SqlClient.MS_SQL) {
       // No upsert or onConflict support in MSSQL yet, see:
       // https://github.com/knex/knex/pull/6050
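
For context on the hunk above: knex's onConflict().merge() compiles to the dialect's native upsert, keyed on whichever column(s) are passed to onConflict — which is why the bulk-import changes further down insist on the table's primary key. The following is a standalone sketch, not Budibase code, assuming a Postgres client and a hypothetical "users" table with a composite key:

import knex from "knex"

// Illustration only: the statement knex builds for a composite-key upsert.
// No connection is needed just to compile the query.
const db = knex({ client: "pg" })

const { sql, bindings } = db("users")
  .insert({ companyId: 1, userId: 2, name: "Row 2 updated" })
  .onConflict(["companyId", "userId"])
  .merge()
  .toSQL()
  .toNative()

// sql is roughly:
//   insert into "users" (...) values ($1, $2, $3)
//   on conflict ("companyId", "userId") do update set ... = excluded....
console.log(sql, bindings)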

View File

@@ -185,20 +185,22 @@
       </div>
     {/each}
   </div>
-  {#if tableType === DB_TYPE_INTERNAL}
-    <br />
-    <Toggle
-      bind:value={updateExistingRows}
-      on:change={() => (identifierFields = [])}
-      thin
-      text="Update existing rows"
-    />
-    {#if updateExistingRows}
+  <br />
+  <Toggle
+    bind:value={updateExistingRows}
+    on:change={() => (identifierFields = [])}
+    thin
+    text="Update existing rows"
+  />
+  {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         label="Identifier field(s)"
         options={Object.keys(validation)}
         bind:value={identifierFields}
       />
+    {:else}
+      <p>Rows will be updated based on the table's primary key.</p>
     {/if}
   {/if}
   {#if invalidColumns.length > 0}

View File

@@ -653,6 +653,7 @@ export class ExternalRequest<T extends Operation> {
       },
       meta: {
         table,
+        id: config.id,
       },
     }

View File

@@ -16,6 +16,7 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import _ from "lodash"
 
 function getDatasourceId(table: Table) {
   if (!table) {
@@ -82,9 +83,20 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
+  if (identifierFields && !_.isEqual(identifierFields, table.primary)) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on atm in Budibase is the
+    // primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }
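
The guard above hinges on a lodash deep-equality check, so a caller's identifierFields must match table.primary exactly, element order included. A small illustration (not part of the commit) of how the comparison behaves when the primary key is composite:

import _ from "lodash"

// Illustration only: how the _.isEqual guard treats different
// identifierFields values when table.primary is ["companyId", "userId"].
const primary = ["companyId", "userId"]

_.isEqual(["companyId", "userId"], primary) // true  -> import proceeds
_.isEqual(["userId", "companyId"], primary) // false -> ctx.throw(400, ...)
_.isEqual(["name"], primary)                // false -> ctx.throw(400, ...)

// When identifierFields is undefined, the `identifierFields &&` short-circuit
// skips the check and the upsert simply uses the table's primary key.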

View File

@@ -1,4 +1,8 @@
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@ import {
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
 import * as uuid from "uuid"
+import { Knex } from "knex"
 
 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)
@@ -70,13 +75,16 @@ describe.each([
   let table: Table
   let datasource: Datasource | undefined
+  let client: Knex | undefined
 
   beforeAll(async () => {
     await config.init()
 
     if (dsProvider) {
+      const rawDatasource = await dsProvider
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
+      client = await knexClient(rawDatasource)
     }
   })
@@ -1077,6 +1085,75 @@ describe.each([
         const rows = await config.api.row.fetch(table._id!)
         expect(rows.length).toEqual(3)
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
+        const tableName = uuid.v4()
+        await client?.schema.createTable(tableName, table => {
+          table.integer("companyId")
+          table.integer("userId")
+          table.string("name")
+          table.string("description")
+          table.primary(["companyId", "userId"])
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 1,
+          name: "Row 1",
+          description: "Row 1 description",
+        })
+        const row2 = await config.api.row.save(table._id!, {
+          companyId: 1,
+          userId: 2,
+          name: "Row 2",
+          description: "Row 2 description",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["companyId", "userId"],
+          rows: [
+            {
+              companyId: 1,
+              userId: row1.userId,
+              name: "Row 1 updated",
+              description: "Row 1 description updated",
+            },
+            {
+              companyId: 1,
+              userId: row2.userId,
+              name: "Row 2 updated",
+              description: "Row 2 description updated",
+            },
+            {
+              companyId: 1,
+              userId: 3,
+              name: "Row 3",
+              description: "Row 3 description",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(3)
+
         rows.sort((a, b) => a.name.localeCompare(b.name))
         expect(rows[0].name).toEqual("Row 1 updated")
         expect(rows[0].description).toEqual("Row 1 description updated")