Adding all required controls for data import to internal tables; external table data import still needs to be implemented.
commit 6c46c119a6 (parent 04cfca8b8e)
Component styles:
@@ -47,5 +47,6 @@
   --spectrum-semantic-positive-border-color: #2d9d78;
   --spectrum-semantic-positive-icon-color: #2d9d78;
   --spectrum-semantic-negative-icon-color: #e34850;
+  min-width: 150px !important;
 }
 </style>

Table controls — button imports:
@@ -6,6 +6,7 @@
   import CreateViewButton from "./buttons/CreateViewButton.svelte"
   import ExistingRelationshipButton from "./buttons/ExistingRelationshipButton.svelte"
   import ExportButton from "./buttons/ExportButton.svelte"
+  import ImportButton from "./buttons/ImportButton.svelte"
   import EditRolesButton from "./buttons/EditRolesButton.svelte"
   import ManageAccessButton from "./buttons/ManageAccessButton.svelte"
   import HideAutocolumnButton from "./buttons/HideAutocolumnButton.svelte"

Table controls — toolbar markup:
@@ -124,6 +125,10 @@
       <HideAutocolumnButton bind:hideAutocolumns />
       <!-- always have the export last -->
       <ExportButton view={$tables.selected?._id} />
+      <ImportButton
+        tableId={$tables.selected?._id}
+        on:updaterows={onUpdateRows}
+      />
       {#key id}
         <TableFilterButton {schema} on:change={onFilter} />
       {/key}
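
The on:updaterows={onUpdateRows} binding above assumes a handler in this same component that refreshes the grid once the modal finishes. The handler itself is outside this diff; a minimal sketch of the assumed shape, where fetchTableData is a hypothetical stand-in for the builder's row-fetching helper:

    // Hypothetical: re-fetch the selected table's rows after an import.
    async function onUpdateRows() {
      await fetchTableData($tables.selected?._id)
    }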

ExportButton.svelte:
@@ -7,7 +7,7 @@
   let modal
 </script>
 
-<ActionButton icon="Download" size="S" quiet on:click={modal.show}>
+<ActionButton icon="DataDownload" size="S" quiet on:click={modal.show}>
   Export
 </ActionButton>
 <Modal bind:this={modal}>

ImportButton.svelte (new file):
@@ -0,0 +1,15 @@
+<script>
+  import { ActionButton, Modal } from "@budibase/bbui"
+  import ImportModal from "../modals/ImportModal.svelte"
+
+  export let tableId
+
+  let modal
+</script>
+
+<ActionButton icon="DataUpload" size="S" quiet on:click={modal.show}>
+  Import
+</ActionButton>
+<Modal bind:this={modal}>
+  <ImportModal {tableId} on:updaterows />
+</Modal>
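
Note the bare on:updaterows on ImportModal: in Svelte, naming an event with no handler forwards it to this component's parent, which is how the toolbar's onUpdateRows (wired earlier in this commit) ends up receiving the modal's event. A usage sketch, where refresh is an illustrative handler name:

    <!-- Parent listens for the forwarded event -->
    <ImportButton tableId={table._id} on:updaterows={() => refresh()} />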

ImportModal.svelte (new file):
@@ -0,0 +1,36 @@
+<script>
+  import { ModalContent, Label, notifications } from "@budibase/bbui"
+  import TableDataImport from "../../TableNavigator/TableDataImport.svelte"
+  import api from "builderStore/api"
+  import { createEventDispatcher } from "svelte"
+
+  const dispatch = createEventDispatcher()
+
+  export let tableId
+  let dataImport
+
+  $: valid = dataImport?.csvString != null && dataImport?.valid
+
+  async function importData() {
+    const response = await api.post(`/api/tables/${tableId}/import`, {
+      dataImport,
+    })
+    if (response.status !== 200) {
+      const error = await response.text()
+      notifications.error(`Unable to import data - ${error}`)
+    } else {
+      notifications.success("Rows successfully imported.")
+    }
+    dispatch("updaterows")
+  }
+</script>
+
+<ModalContent
+  title="Import Data"
+  confirmText="Import"
+  onConfirm={importData}
+  disabled={!valid}
+>
+  <Label grey extraSmall>CSV to import</Label>
+  <TableDataImport bind:dataImport bind:existingTableId={tableId} />
+</ModalContent>
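
The api.post helper from builderStore/api is not shown in this diff; the modal treats its result like a WHATWG fetch Response, checking .status and reading .text(). A minimal sketch of the assumed wrapper:

    // Assumed shape of the builderStore/api helper used above: a thin
    // fetch wrapper that posts JSON and returns the raw Response.
    async function post(url, body) {
      return fetch(url, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body),
      })
    }

Also note that dispatch("updaterows") runs after both branches, so listeners refresh the grid whether the import succeeded or failed.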

TableDataImport.svelte:
@@ -1,6 +1,5 @@
 <script>
-  import { Select } from "@budibase/bbui"
-  import { notifications } from "@budibase/bbui"
+  import { Select, InlineAlert, notifications } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
   import api from "builderStore/api"
 

@@ -12,11 +11,13 @@
     valid: true,
     schema: {},
   }
+  export let existingTableId
 
-  let csvString
-  let primaryDisplay
+  let csvString = undefined
+  let primaryDisplay = undefined
   let schema = {}
   let fields = []
+  let hasValidated = false
 
   $: valid = !schema || fields.every(column => schema[column].success)
   $: dataImport = {

@@ -25,6 +26,9 @@
     csvString,
     primaryDisplay,
   }
+  $: noFieldsError = existingTableId
+    ? "No columns in CSV match existing table schema"
+    : "Could not find any columns to import"
 
   function buildTableSchema(schema) {
     const tableSchema = {}

@@ -46,6 +50,7 @@
     const response = await api.post("/api/tables/csv/validate", {
       csvString,
       schema: schema || {},
+      tableId: existingTableId,
     })
 
     const parseResult = await response.json()

@@ -63,6 +68,7 @@
       notifications.error("CSV Invalid, please try another CSV file")
       return []
     }
+    hasValidated = true
   }
 
   async function handleFile(evt) {

@@ -138,6 +144,7 @@
             placeholder={null}
             getOptionLabel={option => option.label}
             getOptionValue={option => option.value}
+            disabled={!!existingTableId}
           />
           <span class="field-status" class:error={!schema[columnName].success}>
             {schema[columnName].success ? "Success" : "Failure"}

@@ -149,9 +156,6 @@
         </div>
       {/each}
     </div>
-  {/if}
-
-  {#if fields.length}
     <div class="display-column">
       <Select
         label="Display Column"

@@ -160,6 +164,14 @@
         sort
       />
     </div>
+{:else if hasValidated}
+  <div>
+    <InlineAlert
+      header="Invalid CSV"
+      bind:message={noFieldsError}
+      type="error"
+    />
+  </div>
 {/if}
 
 <style>

Server — table controller, external (stub):
@@ -279,3 +279,8 @@ exports.destroy = async function (ctx) {
 
   return tableToDelete
 }
+
+exports.bulkImport = async function (ctx) {
+  ctx.status = 200
+  ctx.body = {}
+}

Server — table controller, shared entry point:
@@ -81,8 +81,26 @@ exports.destroy = async function (ctx) {
   ctx.body = { message: `Table ${tableId} deleted.` }
 }
 
+exports.bulkImport = async function (ctx) {
+  const tableId = ctx.params.tableId
+  await pickApi({ tableId }).bulkImport(ctx)
+  // right now we don't trigger anything for bulk import because it
+  // can only be done in the builder, but in the future we may need to
+  // think about events for bulk items
+  ctx.status = 200
+  ctx.body = { message: `Bulk rows created.` }
+}
+
 exports.validateCSVSchema = async function (ctx) {
-  const { csvString, schema = {} } = ctx.request.body
-  const result = await csvParser.parse(csvString, schema)
+  // tableId being specified means it's an import to an existing table
+  const { csvString, schema = {}, tableId } = ctx.request.body
+  let existingTable
+  if (tableId) {
+    existingTable = await getTable(ctx.appId, tableId)
+  }
+  let result = await csvParser.parse(csvString, schema)
+  if (existingTable) {
+    result = csvParser.updateSchema({ schema: result, existingTable })
+  }
   ctx.body = { schema: result }
 }
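
pickApi is defined elsewhere in this controller and is not part of the diff; it dispatches to the internal implementation or to the external stub added in this commit. A plausible sketch, assuming external tables are recognised by datasource-prefixed table IDs (the exact check is an assumption):

    // Hypothetical dispatcher: table IDs that reference a datasource go
    // to the external controller, everything else to the internal one.
    function pickApi({ tableId }) {
      return tableId.includes("datasource") ? external : internal
    }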

Server — table controller, internal:
@@ -2,7 +2,12 @@ const CouchDB = require("../../../db")
 const linkRows = require("../../../db/linkedRows")
 const { getRowParams, generateTableID } = require("../../../db/utils")
 const { FieldTypes } = require("../../../constants")
-const { TableSaveFunctions, hasTypeChanged } = require("./utils")
+const {
+  TableSaveFunctions,
+  hasTypeChanged,
+  getTable,
+  handleDataImport,
+} = require("./utils")
 
 exports.save = async function (ctx) {
   const appId = ctx.appId

@@ -140,3 +145,10 @@ exports.destroy = async function (ctx) {
 
   return tableToDelete
 }
+
+exports.bulkImport = async function (ctx) {
+  const appId = ctx.appId
+  const table = await getTable(appId, ctx.params.tableId)
+  const { dataImport } = ctx.request.body
+  await handleDataImport(appId, ctx.user, table, dataImport)
+}

Server — table controller utils:
@@ -72,43 +72,47 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
 }
 
 exports.handleDataImport = async (appId, user, table, dataImport) => {
+  if (!dataImport || !dataImport.csvString) {
+    return table
+  }
   const db = new CouchDB(appId)
-  if (dataImport && dataImport.csvString) {
-    // Populate the table with rows imported from CSV in a bulk update
-    const data = await csvParser.transform(dataImport)
+  // Populate the table with rows imported from CSV in a bulk update
+  const data = await csvParser.transform({
+    ...dataImport,
+    existingTable: table,
+  })
 
-    let finalData = []
-    for (let i = 0; i < data.length; i++) {
-      let row = data[i]
-      row._id = generateRowID(table._id)
-      row.tableId = table._id
-      const processed = inputProcessing(user, table, row, {
-        noAutoRelationships: true,
-      })
-      table = processed.table
-      row = processed.row
+  let finalData = []
+  for (let i = 0; i < data.length; i++) {
+    let row = data[i]
+    row._id = generateRowID(table._id)
+    row.tableId = table._id
+    const processed = inputProcessing(user, table, row, {
+      noAutoRelationships: true,
+    })
+    table = processed.table
+    row = processed.row
 
-      for (let [fieldName, schema] of Object.entries(table.schema)) {
-        // check whether the options need to be updated for inclusion as part of the data import
-        if (
-          schema.type === FieldTypes.OPTIONS &&
-          (!schema.constraints.inclusion ||
-            schema.constraints.inclusion.indexOf(row[fieldName]) === -1)
-        ) {
-          schema.constraints.inclusion = [
-            ...schema.constraints.inclusion,
-            row[fieldName],
-          ]
-        }
-      }
+    for (let [fieldName, schema] of Object.entries(table.schema)) {
+      // check whether the options need to be updated for inclusion as part of the data import
+      if (
+        schema.type === FieldTypes.OPTIONS &&
+        (!schema.constraints.inclusion ||
+          schema.constraints.inclusion.indexOf(row[fieldName]) === -1)
+      ) {
+        schema.constraints.inclusion = [
+          ...(schema.constraints.inclusion || []),
+          row[fieldName],
+        ]
+      }
+    }
 
-      finalData.push(row)
-    }
-
-    await db.bulkDocs(finalData)
-    let response = await db.put(table)
-    table._rev = response._rev
-  }
+    finalData.push(row)
+  }
+
+  await db.bulkDocs(finalData)
+  let response = await db.put(table)
+  table._rev = response._rev
   return table
 }
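
A sketch of how the internal controller above exercises this helper, with an illustrative dataImport payload (column names and values invented for the example):

    // Example payload as assembled by the builder UI:
    const dataImport = {
      csvString: "name,age\nAlice,30\nBob,25",
      schema: {
        name: { type: "string" },
        age: { type: "number" },
      },
    }
    // Bulk-inserts the rows and returns the (possibly updated) table.
    const updatedTable = await handleDataImport(appId, ctx.user, table, dataImport)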

Server — table routes:
@@ -53,5 +53,16 @@ router
     authorized(BUILDER),
     tableController.destroy
   )
+  // this is currently builder only, but in the future
+  // it could be carried out by an end user in app,
+  // however some thought will need to be had about
+  // implications for automations (triggers)
+  // new trigger type, bulk rows created
+  .post(
+    "/api/tables/:tableId/import",
+    paramResource("tableId"),
+    authorized(BUILDER),
+    tableController.bulkImport
+  )
 
 module.exports = router
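
With the route registered, the endpoint can be exercised directly; a usage sketch equivalent to what ImportModal.svelte does (authentication headers omitted):

    // POST the parsed CSV payload to import rows into an existing table.
    const res = await fetch(`/api/tables/${tableId}/import`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ dataImport }),
    })
    // 200 -> { message: "Bulk rows created." }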

Server — csvParser utility:
@@ -51,7 +51,7 @@ function parse(csvString, parsers) {
   })
   result.subscribe(row => {
     // For each CSV row parse all the columns that need parsed
-    for (let key in parsers) {
+    for (let key of Object.keys(parsers)) {
       if (!schema[key] || schema[key].success) {
         // get the validator for the column type
         const validator = VALIDATORS[parsers[key].type]

@@ -76,16 +76,53 @@ function parse(csvString, parsers) {
   })
 }
 
-async function transform({ schema, csvString }) {
+function updateSchema({ schema, existingTable }) {
+  const finalSchema = {}
+  const schemaKeyMap = {}
+  Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
+  for (let [key, field] of Object.entries(existingTable.schema)) {
+    const lcKey = key.toLowerCase()
+    const foundKey = schemaKeyMap[lcKey]
+    if (foundKey) {
+      finalSchema[key] = schema[foundKey]
+      finalSchema[key].type = field.type
+    }
+  }
+  return finalSchema
+}
+
+async function transform({ schema, csvString, existingTable }) {
   const colParser = {}
 
-  for (let key in schema) {
+  // make sure the table has all the columns required for import
+  schema = updateSchema({ schema, existingTable })
+
+  for (let key of Object.keys(schema)) {
     colParser[key] = PARSERS[schema[key].type] || schema[key].type
   }
 
   try {
-    const json = await csv({ colParser }).fromString(csvString)
-    return json
+    const data = await csv({ colParser }).fromString(csvString)
+    const schemaKeyMap = {}
+    Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
+    for (let element of data) {
+      if (!element) {
+        continue
+      }
+      for (let key of Object.keys(element)) {
+        const mappedKey = schemaKeyMap[key.toLowerCase()]
+        // isn't a column in the table, remove it
+        if (mappedKey == null) {
+          delete element[key]
+        }
+        // casing is different, fix it in row
+        else if (key !== mappedKey) {
+          element[mappedKey] = element[key]
+          delete element[key]
+        }
+      }
+    }
+    return data
   } catch (err) {
     console.error(`Error transforming CSV to JSON for data import`, err)
     throw err
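
A worked example of the case-insensitive reconciliation updateSchema performs (data invented for illustration): CSV headers validated in lower case are matched against an existing table whose keys are title case, and the table's column types win:

    const existingTable = {
      schema: {
        Name: { type: "string" },
        Age: { type: "number" },
      },
    }
    const csvSchema = {
      name: { type: "string", success: true },
      age: { type: "string", success: true }, // parser guessed string
    }
    const result = updateSchema({ schema: csvSchema, existingTable })
    // result: {
    //   Name: { type: "string", success: true },
    //   Age: { type: "number", success: true },  <- type forced from table
    // }
    // CSV columns with no match in the table schema are dropped.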

@@ -95,4 +132,5 @@ async function transform({ schema, csvString }) {
 module.exports = {
   parse,
   transform,
+  updateSchema,
 }