Some UI work, along with moving SQL to a proper bulk insert method and fixing an issue found in CSV parsing around the removal of invalid columns.

This commit is contained in:
mike12345567 2021-11-12 19:24:56 +00:00
parent 0095f470e3
commit d9d5391a40
9 changed files with 36 additions and 26 deletions
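
For context, the "proper bulk insert method" means handing knex an array of rows so it compiles a single multi-row INSERT rather than issuing one statement per row, which is what the new bulkCreate builder below relies on. A minimal sketch of the technique (illustrative names, assuming a configured knex instance; not code from this commit):

import { Knex } from "knex"

// Sketch only: when .insert() receives an array, knex builds one
// multi-row INSERT statement instead of one statement per row.
function bulkInsert(knex: Knex, tableName: string, rows: Record<string, any>[]) {
  return knex(tableName).insert(rows)
}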

@@ -47,6 +47,6 @@
   --spectrum-semantic-positive-border-color: #2d9d78;
   --spectrum-semantic-positive-icon-color: #2d9d78;
   --spectrum-semantic-negative-icon-color: #e34850;
-  min-width: 150px !important;
+  min-width: 100px;
 }
 </style>

@@ -1,5 +1,5 @@
 <script>
-  import { ModalContent, Label, notifications } from "@budibase/bbui"
+  import { ModalContent, Label, notifications, Body } from "@budibase/bbui"
   import TableDataImport from "../../TableNavigator/TableDataImport.svelte"
   import api from "builderStore/api"
   import { createEventDispatcher } from "svelte"
@@ -30,7 +30,14 @@
   confirmText="Import"
   onConfirm={importData}
   disabled={!valid}
+>
+  <Body
+    >Import rows to an existing table from a CSV. Only columns from the CSV
+    which exist in the table will be imported.</Body
 >
   <Label grey extraSmall>CSV to import</Label>
   <TableDataImport bind:dataImport bind:existingTableId={tableId} />
 </ModalContent>
+
+<style>
+</style>

@@ -33,6 +33,7 @@ interface RunConfig {
   sort?: SortJson
   paginate?: PaginationJson
   row?: Row
+  rows?: Row[]
 }

 module External {
@@ -600,7 +601,7 @@ module External {
       throw `Unable to process query, table "${tableName}" not defined.`
     }
     // look for specific components of config which may not be considered acceptable
-    let { id, row, filters, sort, paginate } = cleanupConfig(config, table)
+    let { id, row, filters, sort, paginate, rows } = cleanupConfig(config, table)
     filters = buildFilters(id, filters || {}, table)
     const relationships = this.buildRelationships(table)
     // clean up row on ingress using schema
@@ -626,7 +627,7 @@ module External {
       sort,
       paginate,
       relationships,
-      body: row,
+      body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
         idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),

@@ -30,6 +30,8 @@ async function handleRequest(appId, operation, tableId, opts = {}) {
   )
 }

+exports.handleRequest = handleRequest
+
 exports.patch = async ctx => {
   const appId = ctx.appId
   const inputs = ctx.request.body

@@ -18,7 +18,7 @@ const {
 const { makeExternalQuery } = require("../../../integrations/base/utils")
 const { cloneDeep } = require("lodash/fp")
 const csvParser = require("../../../utilities/csvParser")
-const { save: rowSave } = require("../row/external")
+const { handleRequest } = require("../row/external")

 async function makeTableRequest(
   datasource,
@@ -293,23 +293,8 @@ exports.bulkImport = async function (ctx) {
     ...dataImport,
     existingTable: table,
   })
-  const promises = []
-  for (let row of rows) {
-    const rowSaveCtx = {
-      appId,
-      params: {
-        tableId: table._id,
-      },
-      request: {
-        body: {
-          ...row,
-          tableId: table._id,
-        },
-      },
-    }
-    promises.push(rowSave(rowSaveCtx))
-  }
-  // don't error if some error, as some will have been imported
-  await Promise.allSettled(promises)
+  await handleRequest(appId, DataSourceOperation.BULK_CREATE, table._id, {
+    rows,
+  })
   return table
 }

@@ -69,6 +69,7 @@ exports.DataSourceOperation = {
   READ: "READ",
   UPDATE: "UPDATE",
   DELETE: "DELETE",
+  BULK_CREATE: "BULK_CREATE",
   CREATE_TABLE: "CREATE_TABLE",
   UPDATE_TABLE: "UPDATE_TABLE",
   DELETE_TABLE: "DELETE_TABLE",

@@ -1,10 +1,11 @@
-import { Table } from "./common"
+import { Row, Table } from "./common"

 export enum Operation {
   CREATE = "CREATE",
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  BULK_CREATE = "BULK_CREATE",
   CREATE_TABLE = "CREATE_TABLE",
   UPDATE_TABLE = "UPDATE_TABLE",
   DELETE_TABLE = "DELETE_TABLE",
@@ -144,7 +145,7 @@ export interface QueryJson {
   filters?: SearchFilters
   sort?: SortJson
   paginate?: PaginationJson
-  body?: object
+  body?: Row | Row[]
   table?: Table
   meta?: {
     table?: Table

@@ -179,6 +179,16 @@ class InternalBuilder {
     }
   }

+  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+    const { endpoint, body } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    if (!Array.isArray(body)) {
+      return query
+    }
+    const parsedBody = body.map(row => parseBody(row))
+    return query.insert(parsedBody)
+  }
+
   read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
     let { endpoint, resource, filters, sort, paginate, relationships } = json
     const tableName = endpoint.entityId
@@ -294,6 +304,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       case Operation.DELETE:
         query = builder.delete(client, json, opts)
         break
+      case Operation.BULK_CREATE:
+        query = builder.bulkCreate(client, json)
+        break
       case Operation.CREATE_TABLE:
       case Operation.UPDATE_TABLE:
       case Operation.DELETE_TABLE:

@@ -116,7 +116,7 @@ async function transform({ schema, csvString, existingTable }) {
       delete element[key]
     }
     // casing is different, fix it in row
-    if (key !== mappedKey) {
+    else if (key !== mappedKey) {
       element[mappedKey] = element[key]
       delete element[key]
     }
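
A note on the CSV fix (a hedged reconstruction; the helper names below are assumed, not shown in the diff): with a plain "if", a key that had just been deleted as an invalid column would still enter the casing branch, copying the deleted value back into the row under an undefined mapped key. The "else if" makes the two fix-ups mutually exclusive:

// Sketch of the corrected loop shape in utilities/csvParser;
// fixRowCasing and mappedKeys are illustrative names.
function fixRowCasing(element: Record<string, any>, mappedKeys: Record<string, string>) {
  for (let key of Object.keys(element)) {
    const mappedKey = mappedKeys[key.toLowerCase()] // assumed schema-casing lookup
    // column does not exist in the table schema, remove it
    if (!mappedKey) {
      delete element[key]
    }
    // casing is different, fix it in row
    else if (key !== mappedKey) {
      element[mappedKey] = element[key]
      delete element[key]
    }
  }
}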