Handle inputProcessing on bulk row import
parent 74ac68d72d
commit 25d86d179d
@@ -265,7 +265,10 @@ export class ExternalRequest<T extends Operation> {
     }
   }
 
-  inputProcessing(row: Row | undefined, table: Table) {
+  inputProcessing<T extends Row | undefined>(
+    row: T,
+    table: Table
+  ): { row: T; manyRelationships: ManyRelationship[] } {
     if (!row) {
       return { row, manyRelationships: [] }
     }
@@ -346,7 +349,7 @@ export class ExternalRequest<T extends Operation> {
     // we return the relationships that may need to be created in the through table
     // we do this so that if the ID is generated by the DB it can be inserted
     // after the fact
-    return { row: newRow, manyRelationships }
+    return { row: newRow as T, manyRelationships }
   }
 
   /**
@@ -598,6 +601,18 @@ export class ExternalRequest<T extends Operation> {
     // clean up row on ingress using schema
     const processed = this.inputProcessing(row, table)
     row = processed.row
     let manyRelationships = processed.manyRelationships
+
+    if (!row && rows) {
+      manyRelationships = []
+      for (let i = 0; i < rows.length; i++) {
+        const processed = this.inputProcessing(rows[i], table)
+        rows[i] = processed.row
+        if (processed.manyRelationships.length) {
+          manyRelationships.push(...processed.manyRelationships)
+        }
+      }
+    }
     if (
       operation === Operation.DELETE &&
       (filters == null || Object.keys(filters).length === 0)
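Below is a standalone sketch of what the new bulk branch does when an array of rows is supplied instead of a single row: each entry is run through the same per-row processing and the resulting many-to-many relationship records are merged into one list. The helper name and the `unknown[]` relationship type are illustrative only, not part of the actual class:

// Process every row in a bulk payload and collect the relationship
// records that may need to be written to through tables afterwards.
function processBulkRows<R extends Record<string, any>>(
  rows: R[],
  process: (row: R) => { row: R; manyRelationships: unknown[] }
): { rows: R[]; manyRelationships: unknown[] } {
  const manyRelationships: unknown[] = []
  const processedRows = rows.map(row => {
    const processed = process(row)
    if (processed.manyRelationships.length) {
      manyRelationships.push(...processed.manyRelationships)
    }
    return processed.row
  })
  return { rows: processedRows, manyRelationships }
}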
@@ -15,6 +15,7 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
+import { inputProcessing } from "../../../utilities/rowProcessor"
 
 function getDatasourceId(table: Table) {
   if (!table) {
@@ -80,7 +81,7 @@ export async function destroy(ctx: UserCtx) {
 export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
-  const table = await sdk.tables.getTable(ctx.params.tableId)
+  let table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows } = ctx.request.body
   const schema = table.schema
 
@@ -88,7 +89,15 @@ export async function bulkImport(
     ctx.throw(400, "Provided data import information is invalid.")
   }
 
-  const parsedRows = parse(rows, schema)
+  const parsedRows = []
+  for (const row of parse(rows, schema)) {
+    const processed = await inputProcessing(ctx.user?._id, table, row, {
+      noAutoRelationships: true,
+    })
+    parsedRows.push(processed.row)
+    table = processed.table
+  }
+
   await handleRequest(Operation.BULK_CREATE, table._id!, {
     rows: parsedRows,
   })
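To tie the two files together, here is a self-contained sketch of the new bulkImport flow. `parse`, `inputProcessing` and `handleRequest` below are stubs standing in for the real Budibase helpers so the control flow can be read in isolation; only the shape of the loop comes from the diff:

// Stub types and helpers standing in for the real Budibase code.
type Row = Record<string, any>
type Table = { _id: string; schema: Record<string, any> }

const parse = (rows: Row[], _schema: Table["schema"]): Row[] => rows
const inputProcessing = async (
  _userId: string | undefined,
  table: Table,
  row: Row,
  _opts: { noAutoRelationships: boolean }
) => ({ row, table })
const handleRequest = async (
  _operation: "BULK_CREATE",
  _tableId: string,
  _body: { rows: Row[] }
) => undefined

async function bulkImport(userId: string | undefined, table: Table, rows: Row[]) {
  const parsedRows: Row[] = []
  for (const row of parse(rows, table.schema)) {
    // Each parsed row is cleaned up on ingress; automatic relationship
    // handling is skipped via noAutoRelationships, as in the diff.
    const processed = await inputProcessing(userId, table, row, {
      noAutoRelationships: true,
    })
    parsedRows.push(processed.row)
    // inputProcessing hands the table back and the controller reassigns it,
    // which is why `const table` became `let table` above.
    table = processed.table
  }
  // All processed rows go to the external handler in a single BULK_CREATE.
  await handleRequest("BULK_CREATE", table._id, { rows: parsedRows })
}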