Some UI work, move SQL row import over to a proper bulk insert method, and fix an issue found in CSV parsing when removing invalid columns.
parent 0095f470e3
commit d9d5391a40
@@ -47,6 +47,6 @@
   --spectrum-semantic-positive-border-color: #2d9d78;
   --spectrum-semantic-positive-icon-color: #2d9d78;
   --spectrum-semantic-negative-icon-color: #e34850;
-  min-width: 150px !important;
+  min-width: 100px;
 }
 </style>
@@ -1,5 +1,5 @@
 <script>
-  import { ModalContent, Label, notifications } from "@budibase/bbui"
+  import { ModalContent, Label, notifications, Body } from "@budibase/bbui"
   import TableDataImport from "../../TableNavigator/TableDataImport.svelte"
   import api from "builderStore/api"
   import { createEventDispatcher } from "svelte"
@@ -31,6 +31,13 @@
   onConfirm={importData}
   disabled={!valid}
 >
+  <Body
+    >Import rows to an existing table from a CSV. Only columns from the CSV
+    which exist in the table will be imported.</Body
+  >
   <Label grey extraSmall>CSV to import</Label>
   <TableDataImport bind:dataImport bind:existingTableId={tableId} />
 </ModalContent>
+
+<style>
+</style>
@@ -33,6 +33,7 @@ interface RunConfig {
   sort?: SortJson
   paginate?: PaginationJson
   row?: Row
+  rows?: Row[]
 }

 module External {
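The new `rows` field lets a single RunConfig carry a whole batch instead of one row per call. A minimal sketch of what that enables, with `Row` stubbed and the unrelated RunConfig fields omitted (only `rows` is new in this commit):

```ts
// stub types for illustration only; the real definitions live in the server
type Row = Record<string, any>

interface RunConfig {
  row?: Row // single-row operations (create/update)
  rows?: Row[] // new: batch payload for bulk create
}

// a CSV import can now hand over every parsed row in one config
const config: RunConfig = {
  rows: [
    { name: "Alice", age: 30 },
    { name: "Bob", age: 25 },
  ],
}
```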
@@ -600,7 +601,7 @@ module External {
       throw `Unable to process query, table "${tableName}" not defined.`
     }
     // look for specific components of config which may not be considered acceptable
-    let { id, row, filters, sort, paginate } = cleanupConfig(config, table)
+    let { id, row, filters, sort, paginate, rows } = cleanupConfig(config, table)
     filters = buildFilters(id, filters || {}, table)
     const relationships = this.buildRelationships(table)
     // clean up row on ingress using schema
@@ -626,7 +627,7 @@ module External {
       sort,
       paginate,
       relationships,
-      body: row,
+      body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
         idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
@@ -30,6 +30,8 @@ async function handleRequest(appId, operation, tableId, opts = {}) {
   )
 }

+exports.handleRequest = handleRequest
+
 exports.patch = async ctx => {
   const appId = ctx.appId
   const inputs = ctx.request.body
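Exporting `handleRequest` lets other controllers drive external datasource operations directly; the table controller's `bulkImport` below does exactly this. A hedged usage sketch (the `importRows` wrapper is a stand-in, not code from the commit):

```ts
const { handleRequest } = require("../row/external")

// hypothetical helper: one datasource round-trip for the whole batch,
// instead of one save call per row
async function importRows(appId: string, tableId: string, rows: object[]) {
  return handleRequest(appId, "BULK_CREATE", tableId, { rows })
}
```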
@@ -18,7 +18,7 @@ const {
 const { makeExternalQuery } = require("../../../integrations/base/utils")
 const { cloneDeep } = require("lodash/fp")
 const csvParser = require("../../../utilities/csvParser")
-const { save: rowSave } = require("../row/external")
+const { handleRequest } = require("../row/external")

 async function makeTableRequest(
   datasource,
@@ -293,23 +293,8 @@ exports.bulkImport = async function (ctx) {
     ...dataImport,
     existingTable: table,
   })
-  const promises = []
-  for (let row of rows) {
-    const rowSaveCtx = {
-      appId,
-      params: {
-        tableId: table._id,
-      },
-      request: {
-        body: {
-          ...row,
-          tableId: table._id,
-        },
-      },
-    }
-    promises.push(rowSave(rowSaveCtx))
-  }
-  // don't error if some error, as some will have been imported
-  await Promise.allSettled(promises)
+  await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, {
+    rows,
+  })
   return table
 }
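This hunk is the heart of the change: instead of fabricating a koa-style ctx per CSV row and issuing one save per row (with `Promise.allSettled` hiding partial failures), the import now makes a single BULK_CREATE request. A stubbed sketch of the before/after control flow (`saveRow` and `bulkCreate` stand in for the real controller calls):

```ts
declare function saveRow(row: object): Promise<void>
declare function bulkCreate(rows: object[]): Promise<void>

// before: N requests, and allSettled meant per-row errors were swallowed
async function importBefore(rows: object[]) {
  await Promise.allSettled(rows.map(row => saveRow(row)))
}

// after: a single multi-row insert covering the whole CSV
async function importAfter(rows: object[]) {
  await bulkCreate(rows)
}
```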
@@ -69,6 +69,7 @@ exports.DataSourceOperation = {
   READ: "READ",
   UPDATE: "UPDATE",
   DELETE: "DELETE",
+  BULK_CREATE: "BULK_CREATE",
   CREATE_TABLE: "CREATE_TABLE",
   UPDATE_TABLE: "UPDATE_TABLE",
   DELETE_TABLE: "DELETE_TABLE",
@@ -1,10 +1,11 @@
-import { Table } from "./common"
+import { Row, Table } from "./common"

 export enum Operation {
   CREATE = "CREATE",
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  BULK_CREATE = "BULK_CREATE",
   CREATE_TABLE = "CREATE_TABLE",
   UPDATE_TABLE = "UPDATE_TABLE",
   DELETE_TABLE = "DELETE_TABLE",
@@ -144,7 +145,7 @@ export interface QueryJson {
   filters?: SearchFilters
   sort?: SortJson
   paginate?: PaginationJson
-  body?: object
+  body?: Row | Row[]
   table?: Table
   meta?: {
     table?: Table
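Widening `body` from `object` to `Row | Row[]` means consumers must narrow the type before use. One plausible normalization helper (not from the commit, just an illustration of the pattern):

```ts
type Row = Record<string, any>

// normalize the query body to an array, whatever shape the caller sent
function bodyAsArray(body?: Row | Row[]): Row[] {
  if (!body) {
    return []
  }
  return Array.isArray(body) ? body : [body]
}
```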
@@ -179,6 +179,16 @@ class InternalBuilder {
     }
   }

+  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+    const { endpoint, body } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    if (!Array.isArray(body)) {
+      return query
+    }
+    const parsedBody = body.map(row => parseBody(row))
+    return query.insert(parsedBody)
+  }
+
   read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
     let { endpoint, resource, filters, sort, paginate, relationships } = json
     const tableName = endpoint.entityId
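The new builder leans on standard Knex behavior: `insert()` accepts an array of records and compiles a single multi-row INSERT statement. A standalone demonstration (table name and rows are made up; no database connection is needed just to compile the SQL):

```ts
import knex from "knex"

// query builder only; no connection config required for .toString()
const db = knex({ client: "pg" })

const query = db("people").insert([
  { name: "Alice", age: 30 },
  { name: "Bob", age: 25 },
])

// → e.g. insert into "people" ("age", "name") values (30, 'Alice'), (25, 'Bob')
console.log(query.toString())
```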
@@ -294,6 +304,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       case Operation.DELETE:
         query = builder.delete(client, json, opts)
         break
+      case Operation.BULK_CREATE:
+        query = builder.bulkCreate(client, json)
+        break
       case Operation.CREATE_TABLE:
       case Operation.UPDATE_TABLE:
       case Operation.DELETE_TABLE:
@@ -116,7 +116,7 @@ async function transform({ schema, csvString, existingTable }) {
       delete element[key]
     }
     // casing is different, fix it in row
-    if (key !== mappedKey) {
+    else if (key !== mappedKey) {
       element[mappedKey] = element[key]
       delete element[key]
     }
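The `else` matters because both branches could fire for the same key: without it, a key just deleted as an invalid column still hit the casing branch, which copied the now-missing value back under the mapped name. A loose reconstruction of the loop (the case-insensitive lookup is an assumption; only the if/else-if structure is taken from the diff):

```ts
function fixKeys(element: Record<string, any>, schema: Record<string, any>) {
  for (const key of Object.keys(element)) {
    // assumed: match the CSV header against schema columns case-insensitively
    const mappedKey = Object.keys(schema).find(
      col => col.toLowerCase() === key.toLowerCase()
    )
    if (!mappedKey) {
      // invalid column: remove it from the row
      delete element[key]
    } else if (key !== mappedKey) {
      // casing differs: move the value under the schema's column name.
      // as a plain `if`, this branch also ran for keys deleted above,
      // re-adding the removed column with an undefined value
      element[mappedKey] = element[key]
      delete element[key]
    }
  }
}
```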