Merge pull request #3356 from Budibase/feature/existing-table-import
Import CSV to existing table from builder
Commit: 716864b38f
@@ -47,5 +47,6 @@
   --spectrum-semantic-positive-border-color: #2d9d78;
   --spectrum-semantic-positive-icon-color: #2d9d78;
   --spectrum-semantic-negative-icon-color: #e34850;
+  min-width: 100px;
 }
 </style>
@@ -6,6 +6,7 @@
   import CreateViewButton from "./buttons/CreateViewButton.svelte"
   import ExistingRelationshipButton from "./buttons/ExistingRelationshipButton.svelte"
   import ExportButton from "./buttons/ExportButton.svelte"
+  import ImportButton from "./buttons/ImportButton.svelte"
   import EditRolesButton from "./buttons/EditRolesButton.svelte"
   import ManageAccessButton from "./buttons/ManageAccessButton.svelte"
   import HideAutocolumnButton from "./buttons/HideAutocolumnButton.svelte"
@@ -124,6 +125,10 @@
   <HideAutocolumnButton bind:hideAutocolumns />
   <!-- always have the export last -->
   <ExportButton view={$tables.selected?._id} />
+  <ImportButton
+    tableId={$tables.selected?._id}
+    on:updaterows={onUpdateRows}
+  />
   {#key id}
     <TableFilterButton {schema} on:change={onFilter} />
   {/key}
@@ -7,7 +7,7 @@
   let modal
 </script>

-<ActionButton icon="Download" size="S" quiet on:click={modal.show}>
+<ActionButton icon="DataDownload" size="S" quiet on:click={modal.show}>
   Export
 </ActionButton>
 <Modal bind:this={modal}>
@@ -0,0 +1,15 @@
+<script>
+  import { ActionButton, Modal } from "@budibase/bbui"
+  import ImportModal from "../modals/ImportModal.svelte"
+
+  export let tableId
+
+  let modal
+</script>
+
+<ActionButton icon="DataUpload" size="S" quiet on:click={modal.show}>
+  Import
+</ActionButton>
+<Modal bind:this={modal}>
+  <ImportModal {tableId} on:updaterows />
+</Modal>
@@ -0,0 +1,43 @@
+<script>
+  import { ModalContent, Label, notifications, Body } from "@budibase/bbui"
+  import TableDataImport from "../../TableNavigator/TableDataImport.svelte"
+  import api from "builderStore/api"
+  import { createEventDispatcher } from "svelte"
+
+  const dispatch = createEventDispatcher()
+
+  export let tableId
+  let dataImport
+
+  $: valid = dataImport?.csvString != null && dataImport?.valid
+
+  async function importData() {
+    const response = await api.post(`/api/tables/${tableId}/import`, {
+      dataImport,
+    })
+    if (response.status !== 200) {
+      const error = await response.text()
+      notifications.error(`Unable to import data - ${error}`)
+    } else {
+      notifications.success("Rows successfully imported.")
+    }
+    dispatch("updaterows")
+  }
+</script>
+
+<ModalContent
+  title="Import Data"
+  confirmText="Import"
+  onConfirm={importData}
+  disabled={!valid}
+>
+  <Body
+    >Import rows to an existing table from a CSV. Only columns from the CSV
+    which exist in the table will be imported.</Body
+  >
+  <Label grey extraSmall>CSV to import</Label>
+  <TableDataImport bind:dataImport bind:existingTableId={tableId} />
+</ModalContent>
+
+<style>
+</style>
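For reference, the request this modal issues can be reproduced outside the builder. A minimal sketch using plain fetch (the builder's api.post is a thin JSON wrapper around fetch; the function name, table id, and CSV content below are illustrative, and the dataImport shape — csvString, schema, and optionally primaryDisplay — is the object assembled by TableDataImport, diffed below):

// Sketch only: mirrors ImportModal's importData() call with made-up values.
async function importToTable() {
  const tableId = "ta_example" // hypothetical table id
  const dataImport = {
    csvString: "Name,Age\nBert,4324\nErnie,34",
    schema: {
      Name: { type: "string" },
      Age: { type: "number" },
    },
  }
  const response = await fetch(`/api/tables/${tableId}/import`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ dataImport }),
  })
  // on success the server responds 200 with { message: "Bulk rows created." }
  return response.json()
}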
@@ -1,6 +1,5 @@
 <script>
-  import { Select } from "@budibase/bbui"
-  import { notifications } from "@budibase/bbui"
+  import { Select, InlineAlert, notifications } from "@budibase/bbui"
   import { FIELDS } from "constants/backend"
   import api from "builderStore/api"

@@ -12,11 +11,13 @@
     valid: true,
     schema: {},
   }
+  export let existingTableId

-  let csvString
-  let primaryDisplay
+  let csvString = undefined
+  let primaryDisplay = undefined
   let schema = {}
   let fields = []
+  let hasValidated = false

   $: valid = !schema || fields.every(column => schema[column].success)
   $: dataImport = {

@@ -25,6 +26,9 @@
     csvString,
     primaryDisplay,
   }
+  $: noFieldsError = existingTableId
+    ? "No columns in CSV match existing table schema"
+    : "Could not find any columns to import"

   function buildTableSchema(schema) {
     const tableSchema = {}

@@ -46,6 +50,7 @@
     const response = await api.post("/api/tables/csv/validate", {
       csvString,
       schema: schema || {},
+      tableId: existingTableId,
     })

     const parseResult = await response.json()

@@ -63,6 +68,7 @@
       notifications.error("CSV Invalid, please try another CSV file")
       return []
     }
+    hasValidated = true
   }

   async function handleFile(evt) {

@@ -138,6 +144,7 @@
             placeholder={null}
             getOptionLabel={option => option.label}
             getOptionValue={option => option.value}
+            disabled={!!existingTableId}
           />
           <span class="field-status" class:error={!schema[columnName].success}>
             {schema[columnName].success ? "Success" : "Failure"}

@@ -149,15 +156,22 @@
         </div>
       {/each}
     </div>
-  {/if}
-
-  {#if fields.length}
-    <div class="display-column">
-      <Select
-        label="Display Column"
-        bind:value={primaryDisplay}
-        options={fields}
-        sort
+    {#if !existingTableId}
+      <div class="display-column">
+        <Select
+          label="Display Column"
+          bind:value={primaryDisplay}
+          options={fields}
+          sort
+        />
+      </div>
+    {/if}
+  {:else if hasValidated}
+    <div>
+      <InlineAlert
+        header="Invalid CSV"
+        bind:message={noFieldsError}
+        type="error"
       />
     </div>
   {/if}
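Validation now has two modes, keyed on whether existingTableId is set. A sketch of the round trip (a rough assembly of the hunks above, not code from the PR; api is the builder's builderStore/api wrapper and the function name is illustrative):

// Illustrative: how TableDataImport validates a CSV against an existing table.
import api from "builderStore/api"

async function validateCSV(csvString, schema, existingTableId) {
  const response = await api.post("/api/tables/csv/validate", {
    csvString,
    schema: schema || {},
    tableId: existingTableId, // undefined when creating a new table
  })
  const parseResult = (await response.json()).schema
  // with a tableId the server narrows parseResult to columns that exist
  // on the table, so an empty object means "no matching columns" — that is
  // the state the new hasValidated flag and InlineAlert (bound to
  // noFieldsError) surface in the UI
  return parseResult
}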
@@ -33,6 +33,7 @@ interface RunConfig {
   sort?: SortJson
   paginate?: PaginationJson
   row?: Row
+  rows?: Row[]
 }

 module External {
@@ -600,7 +601,10 @@ module External {
       throw `Unable to process query, table "${tableName}" not defined.`
     }
     // look for specific components of config which may not be considered acceptable
-    let { id, row, filters, sort, paginate } = cleanupConfig(config, table)
+    let { id, row, filters, sort, paginate, rows } = cleanupConfig(
+      config,
+      table
+    )
     filters = buildFilters(id, filters || {}, table)
     const relationships = this.buildRelationships(table)
     // clean up row on ingress using schema
@@ -626,7 +630,7 @@ module External {
       sort,
       paginate,
       relationships,
-      body: row,
+      body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
         idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
@@ -30,6 +30,8 @@ async function handleRequest(appId, operation, tableId, opts = {}) {
   )
 }

+exports.handleRequest = handleRequest
+
 exports.patch = async ctx => {
   const appId = ctx.appId
   const inputs = ctx.request.body
@@ -17,6 +17,8 @@ const {
 } = require("../../../constants")
 const { makeExternalQuery } = require("../../../integrations/base/utils")
 const { cloneDeep } = require("lodash/fp")
+const csvParser = require("../../../utilities/csvParser")
+const { handleRequest } = require("../row/external")

 async function makeTableRequest(
   datasource,

@@ -279,3 +281,20 @@ exports.destroy = async function (ctx) {

   return tableToDelete
 }
+
+exports.bulkImport = async function (ctx) {
+  const appId = ctx.appId
+  const table = await getTable(appId, ctx.params.tableId)
+  const { dataImport } = ctx.request.body
+  if (!dataImport || !dataImport.schema || !dataImport.csvString) {
+    ctx.throw(400, "Provided data import information is invalid.")
+  }
+  const rows = await csvParser.transform({
+    ...dataImport,
+    existingTable: table,
+  })
+  await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, {
+    rows,
+  })
+  return table
+}
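For external datasources the import therefore becomes a single BULK_CREATE request. Roughly, the query JSON that reaches the SQL layer looks like the sketch below (field names follow the QueryJson/endpoint definitions diffed further down; the datasource id, table, and row values are placeholders):

// Illustrative QueryJson for a bulk import against an external table.
// body may now be Row | Row[] (see the definitions change below).
const queryJson = {
  endpoint: {
    datasourceId: "datasource_plus_example", // hypothetical
    entityId: "people",
    operation: "BULK_CREATE",
  },
  body: [
    { name: "Bert", age: 4324 },
    { name: "Ernie", age: 34 },
  ],
}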
@@ -81,8 +81,26 @@ exports.destroy = async function (ctx) {
   ctx.body = { message: `Table ${tableId} deleted.` }
 }

+exports.bulkImport = async function (ctx) {
+  const tableId = ctx.params.tableId
+  await pickApi({ tableId }).bulkImport(ctx)
+  // right now we don't trigger anything for bulk import because it
+  // can only be done in the builder, but in the future we may need to
+  // think about events for bulk items
+  ctx.status = 200
+  ctx.body = { message: `Bulk rows created.` }
+}
+
 exports.validateCSVSchema = async function (ctx) {
-  const { csvString, schema = {} } = ctx.request.body
-  const result = await csvParser.parse(csvString, schema)
+  // tableId being specified means it's an import to an existing table
+  const { csvString, schema = {}, tableId } = ctx.request.body
+  let existingTable
+  if (tableId) {
+    existingTable = await getTable(ctx.appId, tableId)
+  }
+  let result = await csvParser.parse(csvString, schema)
+  if (existingTable) {
+    result = csvParser.updateSchema({ schema: result, existingTable })
+  }
   ctx.body = { schema: result }
 }
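The two behaviours of validateCSVSchema are easiest to see from the response bodies. The shapes below are inferred from the handler above and from how the builder reads schema[column].success; the column entries are illustrative:

// Inferred: per-column results from csvParser.parse carry the detected
// type plus a success flag the builder renders as "Success"/"Failure".
const newTableResponse = {
  schema: {
    Name: { type: "string", success: true },
    Age: { type: "number", success: true },
  },
}
// With tableId set, updateSchema keeps only columns present on the existing
// table, so a CSV with no matching columns yields an empty schema — the
// state that drives the "No columns in CSV match existing table schema"
// alert in the builder.
const existingTableNoMatch = { schema: {} }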
@@ -2,7 +2,12 @@ const CouchDB = require("../../../db")
 const linkRows = require("../../../db/linkedRows")
 const { getRowParams, generateTableID } = require("../../../db/utils")
 const { FieldTypes } = require("../../../constants")
-const { TableSaveFunctions, hasTypeChanged } = require("./utils")
+const {
+  TableSaveFunctions,
+  hasTypeChanged,
+  getTable,
+  handleDataImport,
+} = require("./utils")

 exports.save = async function (ctx) {
   const appId = ctx.appId

@@ -140,3 +145,11 @@ exports.destroy = async function (ctx) {

   return tableToDelete
 }
+
+exports.bulkImport = async function (ctx) {
+  const appId = ctx.appId
+  const table = await getTable(appId, ctx.params.tableId)
+  const { dataImport } = ctx.request.body
+  await handleDataImport(appId, ctx.user, table, dataImport)
+  return table
+}
@@ -72,43 +72,47 @@ exports.makeSureTableUpToDate = (table, tableToSave) => {
 }

 exports.handleDataImport = async (appId, user, table, dataImport) => {
+  if (!dataImport || !dataImport.csvString) {
+    return table
+  }
   const db = new CouchDB(appId)
-  if (dataImport && dataImport.csvString) {
   // Populate the table with rows imported from CSV in a bulk update
-  const data = await csvParser.transform(dataImport)
+  const data = await csvParser.transform({
+    ...dataImport,
+    existingTable: table,
+  })

   let finalData = []
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id)
     row.tableId = table._id
     const processed = inputProcessing(user, table, row, {
       noAutoRelationships: true,
     })
     table = processed.table
     row = processed.row

     for (let [fieldName, schema] of Object.entries(table.schema)) {
       // check whether the options need to be updated for inclusion as part of the data import
       if (
         schema.type === FieldTypes.OPTIONS &&
         (!schema.constraints.inclusion ||
           schema.constraints.inclusion.indexOf(row[fieldName]) === -1)
       ) {
         schema.constraints.inclusion = [
           ...schema.constraints.inclusion,
           row[fieldName],
         ]
       }
     }

     finalData.push(row)
   }

   await db.bulkDocs(finalData)
   let response = await db.put(table)
   table._rev = response._rev
-  }
   return table
 }
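One subtlety in handleDataImport worth calling out: as rows are imported, options columns grow their allowed-values list. A standalone sketch of that inclusion logic with concrete values (plain JS; FieldTypes.OPTIONS is the string "options", and the column/value here are made up):

// Mirrors the inclusion update above: imported values not yet in an
// options column's inclusion list get appended to it.
const schema = { type: "options", constraints: { inclusion: ["red"] } }
const row = { colour: "blue" }
const fieldName = "colour"
if (
  schema.type === "options" &&
  (!schema.constraints.inclusion ||
    schema.constraints.inclusion.indexOf(row[fieldName]) === -1)
) {
  schema.constraints.inclusion = [
    ...schema.constraints.inclusion,
    row[fieldName],
  ]
}
console.log(schema.constraints.inclusion) // ["red", "blue"]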
@@ -53,5 +53,16 @@ router
     authorized(BUILDER),
     tableController.destroy
   )
+  // this is currently builder only, but in the future
+  // it could be carried out by an end user in app,
+  // however some thought will need to be had about
+  // implications for automations (triggers)
+  // new trigger type, bulk rows created
+  .post(
+    "/api/tables/:tableId/import",
+    paramResource("tableId"),
+    authorized(BUILDER),
+    tableController.bulkImport
+  )

 module.exports = router
@@ -75,7 +75,11 @@ describe("run misc tests", () => {
       },
     })
     const dataImport = {
-      csvString: "a,b,c,d\n1,2,3,4"
+      csvString: "a,b,c,d\n1,2,3,4",
+      schema: {},
+    }
+    for (let col of ["a", "b", "c", "d"]) {
+      dataImport.schema[col] = { type: "string" }
     }
     await tableUtils.handleDataImport(
       config.getAppId(),
@@ -69,6 +69,7 @@ exports.DataSourceOperation = {
   READ: "READ",
   UPDATE: "UPDATE",
   DELETE: "DELETE",
+  BULK_CREATE: "BULK_CREATE",
   CREATE_TABLE: "CREATE_TABLE",
   UPDATE_TABLE: "UPDATE_TABLE",
   DELETE_TABLE: "DELETE_TABLE",
@@ -1,10 +1,11 @@
-import { Table } from "./common"
+import { Row, Table } from "./common"

 export enum Operation {
   CREATE = "CREATE",
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  BULK_CREATE = "BULK_CREATE",
   CREATE_TABLE = "CREATE_TABLE",
   UPDATE_TABLE = "UPDATE_TABLE",
   DELETE_TABLE = "DELETE_TABLE",

@@ -144,7 +145,7 @@ export interface QueryJson {
   filters?: SearchFilters
   sort?: SortJson
   paginate?: PaginationJson
-  body?: object
+  body?: Row | Row[]
   table?: Table
   meta?: {
     table?: Table
@@ -179,6 +179,16 @@ class InternalBuilder {
     }
   }

+  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+    const { endpoint, body } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    if (!Array.isArray(body)) {
+      return query
+    }
+    const parsedBody = body.map(row => parseBody(row))
+    return query.insert(parsedBody)
+  }
+
   read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
     let { endpoint, resource, filters, sort, paginate, relationships } = json
     const tableName = endpoint.entityId

@@ -294,6 +304,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       case Operation.DELETE:
         query = builder.delete(client, json, opts)
         break
+      case Operation.BULK_CREATE:
+        query = builder.bulkCreate(client, json)
+        break
       case Operation.CREATE_TABLE:
       case Operation.UPDATE_TABLE:
       case Operation.DELETE_TABLE:
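The bulkCreate builder leans on knex's array form of insert, which emits a single multi-row INSERT statement. A standalone sketch (plain knex, no database connection needed to build SQL; the table name and rows are illustrative):

// Requires the knex package; toString() renders the SQL without connecting.
const knex = require("knex")({ client: "pg" })
const sql = knex("people")
  .insert([
    { name: "Bert", age: 4324 },
    { name: "Ernie", age: 34 },
  ])
  .toString()
console.log(sql)
// insert into "people" ("age", "name") values (4324, 'Bert'), (34, 'Ernie')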
@@ -29,10 +29,7 @@ function generateSchema(
   for (let [key, column] of Object.entries(table.schema)) {
     // skip things that are already correct
     const oldColumn = oldTable ? oldTable.schema[key] : null
-    if (
-      (oldColumn && oldColumn.type) ||
-      (primaryKey === key && !isJunction)
-    ) {
+    if ((oldColumn && oldColumn.type) || (primaryKey === key && !isJunction)) {
       continue
     }
     switch (column.type) {
@@ -165,11 +165,11 @@ module PostgresModule {

   setSchema() {
     if (!this.config.schema) {
-      this.config.schema = 'public'
+      this.config.schema = "public"
     }
-    this.client.on('connect', (client: any) => {
-      client.query(`SET search_path TO ${this.config.schema}`);
-    });
+    this.client.on("connect", (client: any) => {
+      client.query(`SET search_path TO ${this.config.schema}`)
+    })
     this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'`
   }
@@ -51,7 +51,7 @@ function parse(csvString, parsers) {
   })
   result.subscribe(row => {
     // For each CSV row parse all the columns that need parsed
-    for (let key in parsers) {
+    for (let key of Object.keys(parsers)) {
       if (!schema[key] || schema[key].success) {
         // get the validator for the column type
         const validator = VALIDATORS[parsers[key].type]
@@ -76,16 +76,58 @@ function parse(csvString, parsers) {
   })
 }

+function updateSchema({ schema, existingTable }) {
+  if (!schema) {
+    return schema
+  }
+  const finalSchema = {}
+  const schemaKeyMap = {}
+  Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
+  for (let [key, field] of Object.entries(existingTable.schema)) {
+    const lcKey = key.toLowerCase()
+    const foundKey = schemaKeyMap[lcKey]
+    if (foundKey) {
+      finalSchema[key] = schema[foundKey]
+      finalSchema[key].type = field.type
+    }
+  }
+  return finalSchema
+}
+
-async function transform({ schema, csvString }) {
+async function transform({ schema, csvString, existingTable }) {
   const colParser = {}

+  // make sure the table has all the columns required for import
+  if (existingTable) {
+    schema = updateSchema({ schema, existingTable })
+  }
+
-  for (let key in schema) {
+  for (let key of Object.keys(schema)) {
     colParser[key] = PARSERS[schema[key].type] || schema[key].type
   }

   try {
-    const json = await csv({ colParser }).fromString(csvString)
-    return json
+    const data = await csv({ colParser }).fromString(csvString)
+    const schemaKeyMap = {}
+    Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
+    for (let element of data) {
+      if (!data) {
+        continue
+      }
+      for (let key of Object.keys(element)) {
+        const mappedKey = schemaKeyMap[key.toLowerCase()]
+        // isn't a column in the table, remove it
+        if (mappedKey == null) {
+          delete element[key]
+        }
+        // casing is different, fix it in row
+        else if (key !== mappedKey) {
+          element[mappedKey] = element[key]
+          delete element[key]
+        }
+      }
+    }
+    return data
   } catch (err) {
     console.error(`Error transforming CSV to JSON for data import`, err)
     throw err
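The key behaviour added to transform is the case-insensitive column mapping: CSV columns are matched to schema keys regardless of casing, renamed to the schema's casing, and anything unmatched is dropped. A standalone worked example of that loop (plain JS, same logic as above with made-up values):

// An existing table defines Name/Age; the CSV used name/AGE plus an
// extra column. The mapping drops the extra column and fixes the casing.
const existingSchema = { Name: { type: "string" }, Age: { type: "number" } }
const element = { name: "Bert", AGE: 4324, Address: "5 Sesame Street" }

const schemaKeyMap = {}
Object.keys(existingSchema).forEach(
  key => (schemaKeyMap[key.toLowerCase()] = key)
)
for (let key of Object.keys(element)) {
  const mappedKey = schemaKeyMap[key.toLowerCase()]
  if (mappedKey == null) {
    delete element[key] // Address is not a table column: removed
  } else if (key !== mappedKey) {
    element[mappedKey] = element[key] // name -> Name, AGE -> Age
    delete element[key]
  }
}
console.log(element) // { Name: "Bert", Age: 4324 }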
@ -95,4 +137,5 @@ async function transform({ schema, csvString }) {
|
||||||
module.exports = {
|
module.exports = {
|
||||||
parse,
|
parse,
|
||||||
transform,
|
transform,
|
||||||
|
updateSchema,
|
||||||
}
|
}
|
||||||
|
|
|
@@ -3,19 +3,13 @@
 exports[`CSV Parser transformation transforms a CSV file into JSON 1`] = `
 Array [
   Object {
-    "Address": "5 Sesame Street",
     "Age": 4324,
-    "Name": "Bertå",
   },
   Object {
-    "Address": "1 World Trade Center",
     "Age": 34,
-    "Name": "Ernie",
   },
   Object {
-    "Address": "44 Second Avenue",
     "Age": 23423,
-    "Name": "Big Bird",
   },
 ]
 `;
@@ -24,6 +24,9 @@ const SCHEMAS = {
   Age: {
     type: "omit",
   },
+  Name: {
+    type: "string",
+  },
 },
 BROKEN: {
   Address: {