Make sure table gets saved after bulkImport if it has changed. This fixes auto ID columns having the wrong lastID.

This commit is contained in:
Sam Rose 2023-10-11 12:29:43 +01:00
parent 4bda97d70f
commit 7f2ab8b1ae
2 changed files with 13 additions and 3 deletions

View File

@ -376,6 +376,7 @@ export async function destroy(ctx: UserCtx) {
export async function bulkImport(ctx: UserCtx) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows }: { rows: unknown } = ctx.request.body
const schema: unknown = table.schema

View File

@ -6,7 +6,7 @@ import {
validate as validateSchema,
} from "../../../utilities/schema"
import { isExternalTable, isSQL } from "../../../integrations/utils"
import { events } from "@budibase/backend-core"
import { context, events } from "@budibase/backend-core"
import {
FetchTablesResponse,
SaveTableRequest,
@ -18,7 +18,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep } from "lodash"
import { cloneDeep, isEqual } from "lodash"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) {
@ -99,7 +99,16 @@ export async function destroy(ctx: UserCtx) {
export async function bulkImport(ctx: UserCtx) {
const tableId = ctx.params.tableId
await pickApi({ tableId }).bulkImport(ctx)
let db = context.getAppDB()
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await db.put(tableAfter)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, ctx.appId, tableAfter)
}
// if the import changed the table (e.g. auto ID columns advancing their
// lastID), persist it and notify builder clients via the table:save event
// above; otherwise no table save or event is needed for bulk import