CSV Import end to end

This commit is contained in:
Martin McKeaveney 2020-10-02 16:16:06 +01:00
parent c7a917ce40
commit 5e86076530
9 changed files with 301 additions and 83 deletions

View File

@@ -3,6 +3,7 @@
import { backendUiStore } from "builderStore"
import { notifier } from "builderStore/store/notifications"
import {
Body,
DropdownMenu,
Button,
Heading,
@@ -11,19 +12,20 @@
Select,
Dropzone,
} from "@budibase/bbui"
import TableDataImport from "./TableDataImport.svelte"
import api from "builderStore/api"
import analytics from "analytics"
let anchor
let dropdown
let name
let recordImport = {}
let dataImport
async function saveTable() {
const model = await backendUiStore.actions.models.save({
name,
schema: recordImport.schema || {},
recordImport,
schema: dataImport.schema || {},
dataImport,
})
notifier.success(`Table ${name} created successfully.`)
$goto(`./model/${model._id}`)
@@ -36,38 +38,6 @@
name = ""
dropdown.hide()
}
function handleFileTooLarge(fileSizeLimit) {
notifier.danger(
`Files cannot exceed ${fileSizeLimit /
BYTES_IN_MB}MB. Please try again with smaller files.`
)
}
async function processFiles(fileList) {
const fileArray = Array.from(fileList)
let data = new FormData()
for (var i = 0; i < fileList.length; i++) {
data.append("file", fileList[i])
}
const response = await fetch("/api/csv/validate", {
method: "POST",
body: data,
headers: {
Accept: "application/json",
},
})
recordImport = await response.json()
if (response.status !== 200) {
notifier.danger("CSV Invalid, please try another CSV file")
return []
}
return fileArray.map(file => ({ ...file, extension: "csv" }))
}
</script>
<div bind:this={anchor}>
@@ -76,21 +46,27 @@
<DropdownMenu bind:this={dropdown} {anchor} align="left">
<div class="container">
<h5>Create Table</h5>
<Body grey small>Table Name</Body>
<Input
data-cy="table-name-input"
placeholder="Table Name"
thin
bind:value={name} />
<Heading grey extraSmall>Create Table from CSV (Optional)</Heading>
<Dropzone {handleFileTooLarge} {processFiles} />
<Body grey small>Create Table from CSV (Optional)</Body>
<TableDataImport bind:dataImport />
</div>
<footer>
<div class="button-margin-3">
<Button secondary on:click={onClosed}>Cancel</Button>
</div>
<div class="button-margin-4">
<Button primary on:click={saveTable}>Save</Button>
<Button
disabled={!name || !dataImport.valid}
primary
on:click={saveTable}>
Save
</Button>
</div>
</footer>
</DropdownMenu>
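
With this change the table creation payload carries the parsed CSV metadata alongside the table schema. A minimal sketch of what `backendUiStore.actions.models.save` now receives; the table name, path, and column are hypothetical, and the constraints shape follows the one used by the removed `schemaFromCSV` helper further down:

// hypothetical example of the new save payload
const payload = {
  name: "Contacts",
  schema: dataImport.schema || {},
  dataImport: {
    valid: true,
    path: "/tmp/uploads/contacts.csv", // server-side path returned by validation
    schema: {
      name: {
        name: "name",
        type: "string",
        constraints: { type: "string", length: {}, presence: { allowEmpty: true } },
      },
    },
  },
}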

View File

@@ -0,0 +1,189 @@
<script>
import { Heading, Body, Button, Select } from "@budibase/bbui"
import { notifier } from "builderStore/store/notifications"
import { FIELDS } from "constants/backend"
const BYTES_IN_KB = 1000
const BYTES_IN_MB = 1000000
const FILE_SIZE_LIMIT = BYTES_IN_MB * 1
export let files = []
export let dataImport = {
valid: true,
schema: {},
}
let parseResult
$: schema = parseResult && parseResult.schema
$: valid =
schema && Object.keys(schema).every(column => schema[column].success)
$: dataImport = {
valid,
schema: buildModelSchema(schema),
path: files.length && files[0].path,
}
function buildModelSchema(schema) {
const modelSchema = {}
for (let key in schema) {
const type = schema[key].type
modelSchema[key] = {
name: key,
type,
constraints: FIELDS[type.toUpperCase()].constraints,
}
}
return modelSchema
}
async function validateCSV() {
const response = await fetch("/api/models/csv/validate", {
method: "POST",
body: JSON.stringify({
file: files[0],
schema: schema || {},
}),
headers: {
"Content-Type": "application/json",
Accept: "application/json",
},
})
if (response.status !== 200) {
notifier.danger("CSV Invalid, please try another CSV file")
return
}
parseResult = await response.json()
}
async function handleFile(evt) {
const fileArray = Array.from(evt.target.files)
const filesToProcess = fileArray.map(({ name, path, size }) => ({
name,
path,
size,
}))
if (filesToProcess.some(file => file.size >= FILE_SIZE_LIMIT)) {
notifier.danger(
`Files cannot exceed ${FILE_SIZE_LIMIT /
BYTES_IN_MB}MB. Please try again with smaller files.`
)
return
}
files = filesToProcess
await validateCSV()
}
function omitColumn(columnName) {
// drop the column from the schema so it is excluded from the import
delete schema[columnName]
// reassign to retrigger the reactive `valid` and `dataImport` statements
schema = schema
}
const handleTypeChange = column => evt => {
schema[column].type = evt.target.value
validateCSV()
}
</script>
<div class="dropzone">
<input id="file-upload" accept=".csv" type="file" on:change={handleFile} />
<label for="file-upload">
{#if files[0]}{files[0].name}{:else}Upload{/if}
</label>
</div>
<div class="schema-fields">
{#if schema}
{#each Object.keys(schema) as columnName}
<div class="field">
<span>{columnName}</span>
<Select
secondary
thin
bind:value={schema[columnName].type}
on:change={handleTypeChange(columnName)}>
<option value={'string'}>Text</option>
<option value={'number'}>Number</option>
<option value={'datetime'}>Date</option>
</Select>
<span
class:success={schema[columnName].success}
class:error={!schema[columnName].success}>
{schema[columnName].success ? 'Success' : 'Failure'}
</span>
<i
class="omit-button ri-close-circle-fill"
on:click={() => omitColumn(columnName)} />
</div>
{/each}
{/if}
</div>
<style>
.dropzone {
text-align: center;
display: flex;
align-items: center;
flex-direction: column;
border-radius: 10px;
transition: all 0.3s;
}
.success {
color: green;
}
.error {
color: red;
}
input[type="file"] {
display: none;
}
label {
font-family: var(--font-sans);
cursor: pointer;
font-weight: 600;
box-sizing: border-box;
overflow: hidden;
border-radius: var(--border-radius-s);
color: var(--white);
padding: var(--spacing-s) var(--spacing-l);
transition: all 0.2s ease 0s;
display: inline-flex;
text-rendering: optimizeLegibility;
min-width: auto;
outline: none;
font-feature-settings: "case" 1, "rlig" 1, "calt" 0;
-webkit-box-align: center;
user-select: none;
flex-shrink: 0;
align-items: center;
justify-content: center;
margin-top: 10px;
width: 100%;
border: solid 1.5px var(--ink);
background-color: var(--ink);
}
.omit-button {
font-size: 1.2em;
color: var(--grey-7);
cursor: pointer;
}
.field {
display: grid;
grid-template-columns: repeat(4, 1fr);
margin-top: var(--spacing-m);
align-items: center;
grid-gap: var(--spacing-m);
}
</style>
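
For reference, the component keeps the raw validation result in `parseResult`, whose shape is produced by the `/api/models/csv/validate` endpoint added below; a sketch with hypothetical columns:

// hypothetical validation response stored in parseResult
parseResult = {
  schema: {
    name: { type: "string", success: true },
    age: { type: "number", success: true },
    dob: { type: "datetime", success: false }, // this value failed datetime validation
  },
  path: "/tmp/uploads/contacts.csv",
}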

Binary file not shown (new image, 52 KiB).

View File

@@ -1,5 +1,6 @@
const CouchDB = require("../../db")
const newid = require("../../db/newid")
const csvParser = require("../../utilities/csvParser")
exports.fetch = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
@@ -19,7 +20,7 @@ exports.find = async function(ctx) {
exports.save = async function(ctx) {
const db = new CouchDB(ctx.user.instanceId)
const { recordImport, ...rest } = ctx.request.body
const { dataImport, ...rest } = ctx.request.body
const modelToSave = {
type: "model",
_id: newid(),
@@ -86,15 +87,13 @@ exports.save = async function(ctx) {
}
await db.put(designDoc)
if (recordImport && recordImport.path) {
if (dataImport && dataImport.path) {
// Populate the table with records imported from CSV in a bulk update
const csv = require("csvtojson")
const json = await csv().fromFile(recordImport.path)
const records = json.map(record => ({
...record,
modelId: modelToSave._id,
}))
await db.bulkDocs(records)
const data = await csvParser.transform(dataImport)
for (let row of data) row.modelId = modelToSave._id
await db.bulkDocs(data)
}
ctx.status = 200
@@ -135,3 +134,12 @@ exports.destroy = async function(ctx) {
ctx.status = 200
ctx.message = `Model ${ctx.params.modelId} deleted.`
}
exports.validateCSVSchema = async function(ctx) {
const { file, schema = {} } = ctx.request.body
const result = await csvParser.parse(file.path, schema)
ctx.body = {
schema: result,
path: file.path,
}
}
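
A quick way to exercise the new validation endpoint (registered in the model routes below); the upload path is hypothetical and the request must be authorized as a builder:

// hypothetical smoke test for POST /api/models/csv/validate
const response = await fetch("/api/models/csv/validate", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Accept: "application/json",
  },
  body: JSON.stringify({
    file: { path: "/tmp/uploads/contacts.csv" },
    schema: {}, // no overrides, so every column defaults to "string"
  }),
})
const { schema, path } = await response.json()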

View File

@@ -5,7 +5,6 @@ const fetch = require("node-fetch")
const fs = require("fs")
const uuid = require("uuid")
const AWS = require("aws-sdk")
const csv = require("csvtojson")
const { prepareUploadForS3 } = require("./deploy/aws")
const {
@@ -247,37 +246,3 @@ exports.serveComponentLibrary = async function(ctx) {
await send(ctx, "/index.js", { root: componentLibraryPath })
}
function schemaFromCSV(path) {
const result = csv().fromFile(path)
return new Promise((resolve, reject) => {
result.on("header", headers => {
const schema = {}
for (let header of headers) {
schema[header] = {
type: "string",
constraints: {
type: "string",
length: {},
presence: {
allowEmpty: true,
},
},
name: header,
}
}
resolve(schema)
})
result.on("error", reject)
})
}
exports.validateCSV = async function(ctx) {
const file = ctx.request.files.file
const schema = await schemaFromCSV(file.path)
// if (result.length === 0) ctx.throw(400, "CSV Invalid")
ctx.body = {
schema,
path: file.path,
}
}

View File

@@ -13,6 +13,11 @@ router
modelController.find
)
.post("/api/models", authorized(BUILDER), modelController.save)
.post(
"/api/models/csv/validate",
authorized(BUILDER),
modelController.validateCSVSchema
)
.delete(
"/api/models/:modelId/:revId",
authorized(BUILDER),

View File

@@ -28,7 +28,6 @@ router
authorized(BUILDER),
controller.performLocalFileProcessing
)
.post("/api/csv/validate", authorized(BUILDER), controller.validateCSV)
.post("/api/attachments/upload", controller.uploadFile)
.get("/componentlibrary", controller.serveComponentLibrary)
.get("/assets/:file*", controller.serveAppAsset)

View File

@@ -0,0 +1,76 @@
const csv = require("csvtojson")
const VALIDATORS = {
string: () => true,
number: attribute => !isNaN(Number(attribute)),
datetime: attribute => !isNaN(new Date(attribute).getTime()),
}
const PARSERS = {
string: attribute => attribute.toString(),
number: attribute => Number(attribute),
datetime: attribute => new Date(attribute).toISOString(),
}
function parse(path, parsers) {
const result = csv().fromFile(path)
const schema = {}
return new Promise((resolve, reject) => {
result.on("header", headers => {
for (let header of headers) {
schema[header] = {
type: parsers[header] ? parsers[header].type : "string",
success: true,
}
}
})
result.subscribe(row => {
// for each CSV row, validate every column that has a parser configured
for (let key in parsers) {
// if validation has already failed for this column, skip it
if (!schema[key] || !schema[key].success) continue
// get the validator
const validator = VALIDATORS[parsers[key].type]
try {
schema[key].success = !!validator(row[key])
} catch (err) {
schema[key].success = false
}
}
})
result.on("done", error => {
if (error) {
console.error(error)
reject(error)
}
resolve(schema)
})
})
}
// TODO: significant refactor
async function transform({ schema, path }) {
const colParser = {}
for (let key in schema) {
colParser[key] = PARSERS[schema[key].type]
}
try {
const json = await csv({ colParser }).fromFile(path)
return json
} catch (err) {
console.error(`Error transforming CSV to JSON for data import`, err)
// rethrow so callers do not iterate over undefined
throw err
}
}
module.exports = {
parse,
transform,
}
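
A minimal usage sketch of the two exports, assuming a CSV with name and age columns at a hypothetical path:

// hypothetical round trip through the parser utilities
const csvParser = require("./csvParser")

async function importContacts() {
  // parse: returns per-column { type, success }, defaulting unknown columns to "string"
  const validation = await csvParser.parse("/tmp/contacts.csv", {
    age: { type: "number" },
  })
  if (!Object.values(validation).every(column => column.success)) {
    throw new Error("CSV failed validation")
  }
  // transform: coerces each column with the matching PARSERS entry
  return csvParser.transform({
    schema: { age: { type: "number" } },
    path: "/tmp/contacts.csv",
  })
  // e.g. [{ name: "Ada", age: 36 }, ...]
}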