Update table data via CSV import (#10313)
* Add identifierFields select for import
* Update rows on import (internal DB)
* Only allow internal DB to upsert imported CSV rows
* Clear identifierFields when turning off update
* Pass table instead of tableId
* Pass table
* Pass tableType
parent 82ac46e5eb
commit b5c98871ad
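At the HTTP level, the change extends the existing import endpoint rather than adding a new one: the request body now carries an optional identifierFields array alongside rows. A minimal sketch of the new payload, assuming a hypothetical table ID and a hypothetical "email" column used as the match key:

POST /api/tables/ta_123abc/import
{
  "rows": [
    { "email": "alice@example.com", "name": "Alice" },
    { "email": "bob@example.com", "name": "Bob" }
  ],
  "identifierFields": ["email"]
}

When identifierFields is non-empty (internal DB tables only), each imported row that matches an existing row on every listed field updates that row in place; all other rows are appended as new rows.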
@@ -32,6 +32,7 @@
 <Grid
   {API}
   tableId={id}
+  tableType={$tables.selected?.type}
   allowAddRows={!isUsersTable}
   allowDeleteRows={!isUsersTable}
   schemaOverrides={isUsersTable ? userSchemaOverrides : null}

@@ -3,6 +3,7 @@
   import ImportModal from "../modals/ImportModal.svelte"

   export let tableId
+  export let tableType
   export let disabled

   let modal
@@ -12,5 +13,5 @@
     Import
   </ActionButton>
   <Modal bind:this={modal}>
-    <ImportModal {tableId} on:importrows />
+    <ImportModal {tableId} {tableType} on:importrows />
   </Modal>

@@ -4,11 +4,12 @@

   export let disabled = false

-  const { rows, tableId } = getContext("grid")
+  const { rows, tableId, tableType } = getContext("grid")
 </script>

 <ImportButton
   {disabled}
   tableId={$tableId}
+  {tableType}
   on:importrows={rows.actions.refreshData}
 />

@@ -13,15 +13,18 @@
   const dispatch = createEventDispatcher()

   export let tableId
+  export let tableType
   let rows = []
   let allValid = false
   let displayColumn = null
+  let identifierFields = []

   async function importData() {
     try {
       await API.importTableData({
         tableId,
         rows,
+        identifierFields,
       })
       notifications.success("Rows successfully imported")
     } catch (error) {
@@ -45,6 +48,13 @@
   </Body>
   <Layout gap="XS" noPadding>
     <Label grey extraSmall>CSV or JSON file to import</Label>
-    <TableDataImport {tableId} bind:rows bind:allValid bind:displayColumn />
+    <TableDataImport
+      {tableId}
+      {tableType}
+      bind:rows
+      bind:allValid
+      bind:displayColumn
+      bind:identifierFields
+    />
   </Layout>
 </ModalContent>

@@ -1,5 +1,5 @@
 <script>
-  import { Select } from "@budibase/bbui"
+  import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
@@ -9,14 +9,17 @@
   let fileType = null

   let loading = false
+  let updateExistingRows = false
   let validation = {}
   let validateHash = ""
   let schema = null
   let invalidColumns = []

   export let tableId = null
+  export let tableType
   export let rows = []
   export let allValid = false
+  export let identifierFields = []

   const typeOptions = [
     {
@@ -159,6 +162,22 @@
       </div>
     {/each}
   </div>
+  {#if tableType === "internal"}
+    <br />
+    <Toggle
+      bind:value={updateExistingRows}
+      on:change={() => (identifierFields = [])}
+      thin
+      text="Update existing rows"
+    />
+    {#if updateExistingRows}
+      <Multiselect
+        label="Identifier field(s)"
+        options={Object.keys(validation)}
+        bind:value={identifierFields}
+      />
+    {/if}
+  {/if}
   {#if invalidColumns.length > 0}
     <p class="spectrum-FieldLabel spectrum-FieldLabel--sizeM">
       The following columns are present in the data you wish to import, but do

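Note that the Toggle's on:change handler above resets identifierFields to an empty array whenever the toggle flips, so switching "Update existing rows" off discards any previously selected identifier columns rather than submitting them silently (the "Clear identifierFields when turning off update" item in the commit message).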
@@ -62,13 +62,15 @@ export const buildTableEndpoints = API => ({
   /**
    * Imports data into an existing table
    * @param tableId the table ID to import to
-   * @param data the data import object
+   * @param rows the data import object
+   * @param identifierFields column names to be used as keys for overwriting existing rows
    */
-  importTableData: async ({ tableId, rows }) => {
+  importTableData: async ({ tableId, rows, identifierFields }) => {
     return await API.post({
       url: `/api/tables/${tableId}/import`,
       body: {
         rows,
         identifierFields,
       },
     })
   },

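For illustration, a sketch of calling the updated helper from component code; the "email" identifier is a hypothetical example, and rows is assumed to hold the parsed CSV data:

// Upsert semantics: imported rows whose "email" matches an existing
// row update that row in place; non-matching rows are created as new.
await API.importTableData({
  tableId,
  rows,
  identifierFields: ["email"],
})

Omitting identifierFields (or passing an empty array) keeps the previous behaviour of appending every imported row.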
@@ -33,6 +33,7 @@

   export let API = null
   export let tableId = null
+  export let tableType = null
   export let schemaOverrides = null
   export let allowAddRows = true
   export let allowAddColumns = true
@@ -62,6 +63,7 @@
     rand,
     config,
     tableId: tableIdStore,
+    tableType,
     schemaOverrides: schemaOverridesStore,
   }
   context = { ...context, ...createEventManagers() }

@@ -186,11 +186,7 @@ export async function destroy(ctx: any) {
 export async function bulkImport(ctx: any) {
-  const db = context.getAppDB()
   const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
-  await handleDataImport(ctx.user, table, rows)
-
-  // Ensure auto id and other table updates are persisted
-  await db.put(table)
-
+  const { rows, identifierFields } = ctx.request.body
+  await handleDataImport(ctx.user, table, rows, identifierFields)

   return table
 }

@@ -149,7 +149,12 @@ export function importToRows(
   return finalData
 }

-export async function handleDataImport(user: any, table: any, rows: any) {
+export async function handleDataImport(
+  user: any,
+  table: any,
+  rows: any,
+  identifierFields: Array<string> = []
+) {
   const schema: unknown = table.schema

   if (!rows || !isRows(rows) || !isSchema(schema)) {
@@ -161,6 +166,32 @@ export async function handleDataImport(user: any, table: any, rows: any) {

   let finalData: any = importToRows(data, table, user)

+  // Set IDs of finalData to match existing row if an update is expected
+  if (identifierFields.length > 0) {
+    const allDocs = await db.allDocs(
+      getRowParams(table._id, null, {
+        include_docs: true,
+      })
+    )
+    allDocs.rows
+      .map(existingRow => existingRow.doc)
+      .forEach((doc: any) => {
+        finalData.forEach((finalItem: any) => {
+          let match = true
+          for (const field of identifierFields) {
+            if (finalItem[field] !== doc[field]) {
+              match = false
+              break
+            }
+          }
+          if (match) {
+            finalItem._id = doc._id
+            finalItem._rev = doc._rev
+          }
+        })
+      })
+  }
+
   await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
     tableId: table._id,
   })
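A note on the matching step above: it is a nested scan comparing every existing document against every imported row (O(existing × imported)), and an imported row only inherits a document's _id and _rev when all identifier fields are strictly equal, which makes the subsequent db.bulkDocs call update the matched rows instead of inserting duplicates. For large tables, indexing existing documents by a composite key would avoid the quadratic scan; a hypothetical sketch under the same inputs, not part of this commit:

// Hypothetical optimization: build a Map keyed on the identifier
// fields so each imported row is matched in O(1) instead of O(n).
const keyOf = (row: any) =>
  identifierFields.map(field => String(row[field])).join("|")
const existingByKey = new Map(
  allDocs.rows.map((r: any) => [keyOf(r.doc), r.doc])
)
for (const finalItem of finalData) {
  const doc = existingByKey.get(keyOf(finalItem))
  if (doc) {
    finalItem._id = doc._id
    finalItem._rev = doc._rev
  }
}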