Merge pull request #13979 from Budibase/budi-8220-support-updating-existing-rows-for-external-databases-using-2

[BUDI-8220] Support updating existing rows for external databases using CSV import
Sam Rose 2024-06-24 17:32:14 +01:00 committed by GitHub
commit dc2a18d395
8 changed files with 232 additions and 41 deletions

View File

@@ -604,7 +604,8 @@ class InternalBuilder {
if (!primary) {
throw new Error("Primary key is required for upsert")
}
return query.insert(parsedBody).onConflict(primary).merge()
const ret = query.insert(parsedBody).onConflict(primary).merge()
return ret
} else if (this.client === SqlClient.MS_SQL) {
// No upsert or onConflict support in MSSQL yet, see:
// https://github.com/knex/knex/pull/6050
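
For context, knex's `onConflict().merge()` is what turns the insert into an upsert: on Postgres it compiles to `INSERT ... ON CONFLICT (<pk>) DO UPDATE`. A minimal sketch, assuming a Postgres connection and a hypothetical `people` table keyed on `id`:

```ts
import knex from "knex"

// Minimal sketch, assuming a Postgres database and a hypothetical
// "people" table with primary key "id".
const db = knex({ client: "pg", connection: process.env.DATABASE_URL })

async function upsertPerson(row: { id: number; name: string }) {
  // Compiles to roughly:
  //   INSERT INTO people (id, name) VALUES (?, ?)
  //   ON CONFLICT (id) DO UPDATE SET name = excluded.name
  return db("people").insert(row).onConflict("id").merge()
}
```

MSSQL is the exception handled in the branch below, since knex has no `onConflict` support for it yet.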

View File

@@ -1,9 +1,14 @@
<script>
import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
import {
FieldType,
BBReferenceFieldSubType,
SourceName,
} from "@budibase/types"
import { Select, Toggle, Multiselect } from "@budibase/bbui"
import { DB_TYPE_INTERNAL } from "constants/backend"
import { API } from "api"
import { parseFile } from "./utils"
import { tables, datasources } from "stores/builder"
let error = null
let fileName = null
@@ -80,6 +85,9 @@
schema = fetchSchema(tableId)
}
$: table = $tables.list.find(table => table._id === tableId)
$: datasource = $datasources.list.find(ds => ds._id === table?.sourceId)
async function fetchSchema(tableId) {
try {
const definition = await API.fetchTableDefinition(tableId)
@@ -185,20 +193,25 @@
</div>
{/each}
</div>
{#if tableType === DB_TYPE_INTERNAL}
<br />
<!-- SQL Server doesn't yet support overwriting rows by existing keys -->
{#if datasource?.source !== SourceName.SQL_SERVER}
<Toggle
bind:value={updateExistingRows}
on:change={() => (identifierFields = [])}
thin
text="Update existing rows"
/>
{/if}
{#if updateExistingRows}
{#if tableType === DB_TYPE_INTERNAL}
<Multiselect
label="Identifier field(s)"
options={Object.keys(validation)}
bind:value={identifierFields}
/>
{:else}
<p>Rows will be updated based on the table's primary key.</p>
{/if}
{/if}
{#if invalidColumns.length > 0}
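
The template change above keys off the datasource rather than only the table type: internal tables let the user pick identifier fields, external tables always match on the primary key, and SQL Server hides the toggle entirely. The same gating, as a plain TypeScript sketch (`canUpdateExistingRows` is a name invented here; `SourceName` is the real enum imported above):

```ts
import { SourceName } from "@budibase/types"

// Hypothetical helper mirroring the template condition: the
// "Update existing rows" toggle is offered for every datasource
// except SQL Server, which has no upsert support yet.
function canUpdateExistingRows(source?: SourceName): boolean {
  return source !== SourceName.SQL_SERVER
}
```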

View File

@@ -1,4 +1,4 @@
FROM mcr.microsoft.com/mssql/server:2017-latest
FROM mcr.microsoft.com/mssql/server:2022-latest
ENV ACCEPT_EULA=Y
ENV SA_PASSWORD=Passw0rd

View File

@@ -316,7 +316,13 @@ export class ExternalRequest<T extends Operation> {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if absent from the incoming row, or already set on the new row, skip it
if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
if (row[key] === undefined || newRow[key]) {
continue
}
if (
!(this.operation === Operation.BULK_UPSERT) &&
!isEditableColumn(field)
) {
continue
}
// parse floats/numbers
@@ -690,6 +696,7 @@ export class ExternalRequest<T extends Operation> {
},
meta: {
table,
id: config.id,
},
}
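
The split condition means a bulk upsert may now write values for non-editable columns, such as an auto-generated primary key, because those values are needed to match existing rows. Folded into a standalone function, the logic amounts to this (a condensed, illustrative restatement, not the literal source):

```ts
import { Operation, Row } from "@budibase/types"

// Condensed restatement of the two guards above; "isEditable" stands in
// for the isEditableColumn(field) call inside ExternalRequest.
function shouldSkipColumn(
  operation: Operation,
  row: Row,
  newRow: Row,
  key: string,
  isEditable: boolean
): boolean {
  if (row[key] === undefined || newRow[key]) {
    return true // nothing to copy, or already set
  }
  // Non-editable columns (e.g. auto IDs) may only be written during a bulk
  // upsert, where they identify the existing row to update.
  return operation !== Operation.BULK_UPSERT && !isEditable
}
```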

View File

@@ -16,6 +16,7 @@ import {
import sdk from "../../../sdk"
import { builderSocket } from "../../../websockets"
import { inputProcessing } from "../../../utilities/rowProcessor"
import { isEqual } from "lodash"
function getDatasourceId(table: Table) {
if (!table) {
@@ -85,15 +86,30 @@
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
let table = await sdk.tables.getTable(ctx.params.tableId)
const { rows } = ctx.request.body
const { rows, identifierFields } = ctx.request.body
const schema = table.schema
if (
identifierFields &&
identifierFields.length > 0 &&
!isEqual(identifierFields, table.primary)
) {
// This is because we make use of the ON CONFLICT functionality in SQL
// databases, which only triggers when there's a conflict against a unique
// index. The only unique index we can currently count on in Budibase is
// the primary key, so this functionality always uses the primary key.
ctx.throw(
400,
"Identifier fields are not supported for bulk import into an external datasource."
)
}
if (!rows || !isRows(rows) || !isSchema(schema)) {
ctx.throw(400, "Provided data import information is invalid.")
}
const parsedRows = []
for (const row of parse(rows, schema)) {
for (const row of parse(rows, table)) {
const processed = await inputProcessing(ctx.user?._id, table, row, {
noAutoRelationships: true,
})
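
Concretely, the guard accepts identifier fields only when they are exactly the table's primary key. A worked sketch with lodash's `isEqual`, assuming a hypothetical table whose primary key is `["companyId", "userId"]`:

```ts
import { isEqual } from "lodash"

// Hypothetical table.primary value, for illustration.
const primary = ["companyId", "userId"]

console.log(isEqual(["companyId", "userId"], primary)) // true  -> import proceeds
console.log(isEqual(["userId"], primary)) // false -> 400, no matching unique index
// An empty identifierFields array never reaches the isEqual check at all,
// thanks to the identifierFields.length > 0 condition above.
```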

View File

@@ -178,7 +178,7 @@ export async function handleDataImport(
}
const db = context.getAppDB()
const data = parse(importRows, schema)
const data = parse(importRows, table)
let finalData: any = await importToRows(data, table, user)

View File

@@ -1,4 +1,8 @@
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import {
DatabaseName,
getDatasource,
knexClient,
} from "../../../integrations/tests/utils"
import tk from "timekeeper"
import emitter from "../../../../src/events"
@@ -31,6 +35,7 @@ import {
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
import * as uuid from "uuid"
import { Knex } from "knex"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
@@ -70,13 +75,16 @@
let table: Table
let datasource: Datasource | undefined
let client: Knex | undefined
beforeAll(async () => {
await config.init()
if (dsProvider) {
const rawDatasource = await dsProvider
datasource = await config.createDatasource({
datasource: await dsProvider,
datasource: rawDatasource,
})
client = await knexClient(rawDatasource)
}
})
@@ -307,13 +315,13 @@
// as quickly as possible.
await Promise.all(
sequence.map(async () => {
const attempts = 20
const attempts = 30
for (let attempt = 0; attempt < attempts; attempt++) {
try {
await config.api.row.save(table._id!, {})
return
} catch (e) {
await new Promise(r => setTimeout(r, Math.random() * 15))
await new Promise(r => setTimeout(r, Math.random() * 50))
}
}
throw new Error(`Failed to create row after ${attempts} attempts`)
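
The test change widens the retry budget: up to 30 attempts with a random sleep of up to 50ms between them. Extracted as a generic helper, the pattern looks like this (names invented for illustration):

```ts
// Retry with random jitter, in the spirit of the test loop above.
async function retryWithJitter<T>(
  fn: () => Promise<T>,
  attempts = 30,
  maxJitterMs = 50
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await fn()
    } catch (e) {
      lastError = e
      // a random sleep spreads out contending writers before the next attempt
      await new Promise(r => setTimeout(r, Math.random() * maxJitterMs))
    }
  }
  throw new Error(`Failed after ${attempts} attempts: ${lastError}`)
}
```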
@@ -598,6 +606,35 @@
expect(res.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})
!isInternal &&
it("can update a row on an external table with a primary key", async () => {
const tableName = uuid.v4().substring(0, 10)
await client!.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
const res = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = res.datasource.entities![tableName]
const row = await config.api.row.save(table._id!, {
id: 1,
name: "Row 1",
})
const updatedRow = await config.api.row.save(table._id!, {
_id: row._id!,
name: "Row 1 Updated",
})
expect(updatedRow.name).toEqual("Row 1 Updated")
const rows = await config.api.row.fetch(table._id!)
expect(rows).toHaveLength(1)
})
})
describe("patch", () => {
@@ -667,6 +704,7 @@
expect(event.oldRow.description).toEqual(beforeRow.description)
expect(event.row.description).toEqual(beforeRow.description)
})
it("should throw an error when given improper types", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
@@ -758,7 +796,8 @@
})
!isInternal &&
// TODO: SQL is having issues creating composite keys
// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
providerType !== DatabaseName.SQL_SERVER &&
it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({
@@ -911,24 +950,12 @@
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("Should ignore malformed/invalid delete requests", async () => {
it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
"Should ignore malformed/invalid delete request: %s",
async (request: any) => {
const rowUsage = await getRowUsage()
await config.api.row.delete(table._id!, { not: "valid" } as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
await config.api.row.delete(table._id!, { rows: 123 } as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
await config.api.row.delete(table._id!, "invalid" as any, {
await config.api.row.delete(table._id!, request, {
status: 400,
body: {
message: "Invalid delete rows request",
@@ -936,7 +963,8 @@
})
await assertRowUsage(rowUsage)
})
}
)
})
describe("bulkImport", () => {
@@ -1085,6 +1113,121 @@
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4()
await client?.schema.createTable(tableName, table => {
table.integer("companyId")
table.integer("userId")
table.string("name")
table.string("description")
table.primary(["companyId", "userId"])
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 1,
name: "Row 1",
description: "Row 1 description",
})
const row2 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 2,
name: "Row 2",
description: "Row 2 description",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["companyId", "userId"],
rows: [
{
companyId: 1,
userId: row1.userId,
name: "Row 1 updated",
description: "Row 1 description updated",
},
{
companyId: 1,
userId: row2.userId,
name: "Row 2 updated",
description: "Row 2 description updated",
},
{
companyId: 1,
userId: 3,
name: "Row 3",
description: "Row 3 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1 updated")
expect(rows[0].description).toEqual("Row 1 description updated")
expect(rows[1].name).toEqual("Row 2 updated")
expect(rows[1].description).toEqual("Row 2 description updated")
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
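
For a composite key, the ON CONFLICT target is the pair of columns, so the import above should resolve to roughly this knex call (a sketch of the expected statement, not the exact code path Budibase takes):

```ts
import knex from "knex"

// "db" and "rows" stand in for the test's connection and payload.
const db = knex({ client: "pg", connection: process.env.DATABASE_URL })

async function bulkUpsert(tableName: string, rows: Record<string, any>[]) {
  // ON CONFLICT targets the composite primary key (companyId, userId)
  return db(tableName).insert(rows).onConflict(["companyId", "userId"]).merge()
}
```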
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isInternal &&
it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4()
await client!.schema.createTable(tableName, table => {
table.increments("userId").primary()
table.string("name")
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
name: "Clare",
})
const row2 = await config.api.row.save(table._id!, {
name: "Jeff",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["userId"],
rows: [
{
userId: row1.userId,
name: "Clare updated",
},
{
userId: row2.userId,
name: "Jeff updated",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Clare updated")
expect(rows[1].name).toEqual("Jeff updated")
})
})
describe("enrich", () => {

View File

@@ -4,6 +4,7 @@ import {
TableSchema,
FieldSchema,
Row,
Table,
} from "@budibase/types"
import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
return results
}
export function parse(rows: Rows, schema: TableSchema): Rows {
export function parse(rows: Rows, table: Table): Rows {
return rows.map(row => {
const parsedRow: Row = {}
Object.entries(row).forEach(([columnName, columnData]) => {
if (!(columnName in schema) || schema[columnName]?.autocolumn) {
const schema = table.schema
if (!(columnName in schema)) {
// Columns can be present in the row data but not in the schema, so make sure we skip those
return
}
if (
schema[columnName].autocolumn &&
!table.primary?.includes(columnName)
) {
// Don't want the user specifying values for autocolumns unless they're updating
// a row through its primary key.
return
}
const columnSchema = schema[columnName]
const { type: columnType } = columnSchema
if (columnType === FieldType.NUMBER) {
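
Taken together, the new `parse` keeps a column only if it exists in the schema and is either a normal column or a primary-key autocolumn (so upserts can match existing rows). A condensed restatement of that per-column rule (illustrative only):

```ts
import { Table } from "@budibase/types"

// Illustrative restatement of the rule parse() applies to each column.
function keepColumn(table: Table, columnName: string): boolean {
  const column = table.schema[columnName]
  if (!column) {
    return false // not in the schema at all
  }
  if (column.autocolumn && !table.primary?.includes(columnName)) {
    return false // autocolumn users shouldn't set...
  }
  return true // ...unless it's part of the primary key used for matching
}
```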