JSON import changes (#9288)

* JSON import changes

* Fix lockfiles

* PR Feedback, fix tests

* fix tests

* pr feedback
Gerard Burns 2023-01-17 15:07:52 +00:00 committed by GitHub
parent 82cc1ac22b
commit 48e1695318
30 changed files with 1169 additions and 1198 deletions
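At its core this PR swaps the CSV-string "dataImport" payload for plain arrays of row objects across the import endpoints. A rough before/after sketch of the bulk row import body, taken from the changed tests further down (not a full contract):

// Before: POST /api/tables/:tableId/import carried a CSV string plus a pre-built schema
const oldBody = {
  dataImport: {
    csvString: '"name","description"\n"test-name","test-desc"',
    schema: { name: { type: "string" }, description: { type: "string" } },
  },
}

// After: rows are parsed client-side (from CSV or JSON) and sent as objects;
// the server validates them against the table schema before importing
const newBody = {
  rows: [{ name: "test-name", description: "test-desc" }],
}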

View File

@ -3,7 +3,6 @@ import {
Event, Event,
RowsImportedEvent, RowsImportedEvent,
RowsCreatedEvent, RowsCreatedEvent,
RowImportFormat,
Table, Table,
} from "@budibase/types" } from "@budibase/types"
@ -16,14 +15,9 @@ const created = async (count: number, timestamp?: string | number) => {
await publishEvent(Event.ROWS_CREATED, properties, timestamp) await publishEvent(Event.ROWS_CREATED, properties, timestamp)
} }
const imported = async ( const imported = async (table: Table, count: number) => {
table: Table,
format: RowImportFormat,
count: number
) => {
const properties: RowsImportedEvent = { const properties: RowsImportedEvent = {
tableId: table._id as string, tableId: table._id as string,
format,
count, count,
} }
await publishEvent(Event.ROWS_IMPORTED, properties) await publishEvent(Event.ROWS_IMPORTED, properties)

View File

@ -2,7 +2,6 @@ import { publishEvent } from "../events"
import { import {
Event, Event,
TableExportFormat, TableExportFormat,
TableImportFormat,
Table, Table,
TableCreatedEvent, TableCreatedEvent,
TableUpdatedEvent, TableUpdatedEvent,
@ -40,10 +39,9 @@ async function exported(table: Table, format: TableExportFormat) {
await publishEvent(Event.TABLE_EXPORTED, properties) await publishEvent(Event.TABLE_EXPORTED, properties)
} }
async function imported(table: Table, format: TableImportFormat) { async function imported(table: Table) {
const properties: TableImportedEvent = { const properties: TableImportedEvent = {
tableId: table._id as string, tableId: table._id as string,
format,
} }
await publishEvent(Event.TABLE_IMPORTED, properties) await publishEvent(Event.TABLE_IMPORTED, properties)
} }

File diff suppressed because it is too large

View File

@ -12,6 +12,10 @@
name: "JSON", name: "JSON",
key: "json", key: "json",
}, },
{
name: "JSON with Schema",
key: "jsonWithSchema",
},
] ]
export let view export let view
@ -24,7 +28,7 @@
viewName: view, viewName: view,
format: exportFormat, format: exportFormat,
}) })
download(data, `export.${exportFormat}`) download(data, `export.${exportFormat === "csv" ? "csv" : "json"}`)
} catch (error) { } catch (error) {
notifications.error(`Unable to export ${exportFormat.toUpperCase()} data`) notifications.error(`Unable to export ${exportFormat.toUpperCase()} data`)
} }

View File

@ -6,22 +6,22 @@
Body, Body,
Layout, Layout,
} from "@budibase/bbui" } from "@budibase/bbui"
import TableDataImport from "../../TableNavigator/TableDataImport.svelte" import TableDataImport from "../../TableNavigator/ExistingTableDataImport.svelte"
import { API } from "api" import { API } from "api"
import { createEventDispatcher } from "svelte" import { createEventDispatcher } from "svelte"
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
export let tableId export let tableId
let dataImport let rows = []
let allValid = false
$: valid = dataImport?.csvString != null && dataImport?.valid let displayColumn = null
async function importData() { async function importData() {
try { try {
await API.importTableData({ await API.importTableData({
tableId, tableId,
data: dataImport, rows,
}) })
notifications.success("Rows successfully imported") notifications.success("Rows successfully imported")
} catch (error) { } catch (error) {
@ -37,14 +37,14 @@
title="Import Data" title="Import Data"
confirmText="Import" confirmText="Import"
onConfirm={importData} onConfirm={importData}
disabled={!valid} disabled={!allValid}
> >
<Body size="S"> <Body size="S">
Import rows to an existing table from a CSV. Only columns from the CSV which Import rows to an existing table from a CSV or JSON file. Only columns from
exist in the table will be imported. the file which exist in the table will be imported.
</Body> </Body>
<Layout gap="XS" noPadding> <Layout gap="XS" noPadding>
<Label grey extraSmall>CSV to import</Label> <Label grey extraSmall>CSV or JSON file to import</Label>
<TableDataImport bind:dataImport bind:existingTableId={tableId} /> <TableDataImport {tableId} bind:rows bind:allValid bind:displayColumn />
</Layout> </Layout>
</ModalContent> </ModalContent>

View File

@ -0,0 +1,251 @@
<script>
import { Select } from "@budibase/bbui"
import { FIELDS } from "constants/backend"
import { API } from "api"
import { parseFile } from "./utils"
let error = null
let fileName = null
let fileType = null
let loading = false
let validation = {}
let validateHash = ""
let schema = null
let invalidColumns = []
export let tableId = null
export let rows = []
export let allValid = false
const typeOptions = [
{
label: "Text",
value: FIELDS.STRING.type,
},
{
label: "Number",
value: FIELDS.NUMBER.type,
},
{
label: "Date",
value: FIELDS.DATETIME.type,
},
{
label: "Options",
value: FIELDS.OPTIONS.type,
},
{
label: "Multi-select",
value: FIELDS.ARRAY.type,
},
{
label: "Barcode/QR",
value: FIELDS.BARCODEQR.type,
},
{
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
},
]
$: {
schema = fetchSchema(tableId)
}
async function fetchSchema(tableId) {
try {
const definition = await API.fetchTableDefinition(tableId)
schema = definition.schema
} catch (e) {
error = e
}
}
async function handleFile(e) {
loading = true
error = null
validation = {}
try {
const response = await parseFile(e)
rows = response.rows
fileName = response.fileName
fileType = response.fileType
} catch (e) {
loading = false
error = e
}
}
async function validate(rows) {
loading = true
error = null
validation = {}
allValid = false
try {
if (rows.length > 0) {
const response = await API.validateExistingTableImport({
rows,
tableId,
})
validation = response.schemaValidation
invalidColumns = response.invalidColumns
allValid = response.allValid
}
} catch (e) {
error = e.message
}
loading = false
}
$: {
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows)
if (newValidateHash !== validateHash) {
validate(rows)
}
validateHash = newValidateHash
}
</script>
<div class="dropzone">
<input
disabled={!schema || loading}
id="file-upload"
accept="text/csv,application/json"
type="file"
on:change={handleFile}
/>
<label for="file-upload" class:uploaded={rows.length > 0}>
{#if loading}
loading...
{:else if error}
error: {error}
{:else if fileName}
{fileName}
{:else}
Upload
{/if}
</label>
</div>
{#if fileName && Object.keys(validation).length === 0}
<p>No valid fields, try another file</p>
{:else if rows.length > 0 && !error}
<div class="schema-fields">
{#each Object.keys(validation) as name}
<div class="field">
<span>{name}</span>
<Select
value={schema[name]?.type}
options={typeOptions}
placeholder={null}
getOptionLabel={option => option.label}
getOptionValue={option => option.value}
disabled
/>
<span
class={loading || validation[name]
? "fieldStatusSuccess"
: "fieldStatusFailure"}
>
{validation[name] ? "Success" : "Failure"}
</span>
</div>
{/each}
</div>
{#if invalidColumns.length > 0}
<p class="spectrum-FieldLabel spectrum-FieldLabel--sizeM">
The following columns are present in the data you wish to import, but do
not match the schema of this table and will be ignored.
</p>
<ul class="ignoredList">
{#each invalidColumns as column}
<li>{column}</li>
{/each}
</ul>
{/if}
{/if}
<style>
.dropzone {
text-align: center;
display: flex;
align-items: center;
flex-direction: column;
border-radius: 10px;
transition: all 0.3s;
}
input {
display: none;
}
label {
font-family: var(--font-sans);
cursor: pointer;
font-weight: 600;
box-sizing: border-box;
overflow: hidden;
border-radius: var(--border-radius-s);
color: var(--ink);
padding: var(--spacing-m) var(--spacing-l);
transition: all 0.2s ease 0s;
display: inline-flex;
text-rendering: optimizeLegibility;
min-width: auto;
outline: none;
font-feature-settings: "case" 1, "rlig" 1, "calt" 0;
-webkit-box-align: center;
user-select: none;
flex-shrink: 0;
align-items: center;
justify-content: center;
width: 100%;
background-color: var(--grey-2);
font-size: var(--font-size-xs);
line-height: normal;
border: var(--border-transparent);
}
.uploaded {
color: var(--blue);
}
.schema-fields {
margin-top: var(--spacing-xl);
}
.field {
display: grid;
grid-template-columns: 2fr 2fr 1fr auto;
margin-top: var(--spacing-m);
align-items: center;
grid-gap: var(--spacing-m);
font-size: var(--spectrum-global-dimension-font-size-75);
}
.fieldStatusSuccess {
color: var(--green);
justify-self: center;
font-weight: 600;
}
.fieldStatusFailure {
color: var(--red);
justify-self: center;
font-weight: 600;
}
.ignoredList {
margin: 0;
padding: 0;
list-style: none;
font-size: var(--spectrum-global-dimension-font-size-75);
}
</style>
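The validation, invalidColumns and allValid values bound above come back from API.validateExistingTableImport, whose response is the ValidationResults object produced by the validate() helper added later in this commit (utilities/schema.ts). A sketch of the shape this component consumes, with illustrative column names (a table having a string "name" and numeric "age" column):

// e.g. for rows like [{ name: "Ada", age: "not-a-number", city: "Berlin" }]
const response = {
  schemaValidation: { name: true, age: false }, // per-column pass/fail against the table schema
  invalidColumns: ["city"], // present in the file but missing from the table, so ignored on import
  allValid: false, // true only when every validated column passes
}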

View File

@ -1,107 +1,21 @@
<script> <script>
import { Select, InlineAlert, notifications } from "@budibase/bbui" import { Select } from "@budibase/bbui"
import { FIELDS } from "constants/backend" import { FIELDS } from "constants/backend"
import { API } from "api" import { API } from "api"
import { parseFile } from "./utils"
const BYTES_IN_MB = 1000000 let error = null
const FILE_SIZE_LIMIT = BYTES_IN_MB * 5 let fileName = null
let fileType = null
export let files = [] let loading = false
export let dataImport = { let validation = {}
valid: true, let validateHash = ""
schema: {},
}
export let existingTableId
let csvString = undefined export let rows = []
let primaryDisplay = undefined export let schema = {}
let schema = {} export let allValid = false
let fields = [] export let displayColumn = null
let hasValidated = false
$: valid =
!schema ||
(fields.every(column => schema[column].success) &&
(!hasValidated || Object.keys(schema).length > 0))
$: dataImport = {
valid,
schema: buildTableSchema(schema),
csvString,
primaryDisplay,
}
$: noFieldsError = existingTableId
? "No columns in CSV match existing table schema"
: "Could not find any columns to import"
function buildTableSchema(schema) {
const tableSchema = {}
for (let key in schema) {
const type = schema[key].type
if (type === "omit") continue
tableSchema[key] = {
name: key,
type,
constraints: FIELDS[type.toUpperCase()].constraints,
}
}
return tableSchema
}
async function validateCSV() {
try {
const parseResult = await API.validateTableCSV({
csvString,
schema: schema || {},
tableId: existingTableId,
})
schema = parseResult?.schema
fields = Object.keys(schema || {}).filter(
key => schema[key].type !== "omit"
)
// Check primary display is valid
if (!primaryDisplay || fields.indexOf(primaryDisplay) === -1) {
primaryDisplay = fields[0]
}
hasValidated = true
} catch (error) {
notifications.error("CSV Invalid, please try another CSV file")
}
}
async function handleFile(evt) {
const fileArray = Array.from(evt.target.files)
if (fileArray.some(file => file.size >= FILE_SIZE_LIMIT)) {
notifications.error(
`Files cannot exceed ${
FILE_SIZE_LIMIT / BYTES_IN_MB
}MB. Please try again with smaller files.`
)
return
}
// Read CSV as plain text to upload alongside schema
let reader = new FileReader()
reader.addEventListener("load", function (e) {
csvString = e.target.result
files = fileArray
validateCSV()
})
reader.readAsText(fileArray[0])
}
async function omitColumn(columnName) {
schema[columnName].type = "omit"
await validateCSV()
}
const handleTypeChange = column => evt => {
schema[column].type = evt.detail
validateCSV()
}
const typeOptions = [ const typeOptions = [
{ {
@ -133,56 +47,116 @@
value: FIELDS.LONGFORM.type, value: FIELDS.LONGFORM.type,
}, },
] ]
async function handleFile(e) {
loading = true
error = null
validation = {}
try {
const response = await parseFile(e)
rows = response.rows
schema = response.schema
fileName = response.fileName
fileType = response.fileType
} catch (e) {
loading = false
error = e
}
}
async function validate(rows, schema) {
loading = true
error = null
validation = {}
allValid = false
try {
if (rows.length > 0) {
const response = await API.validateNewTableImport({ rows, schema })
validation = response.schemaValidation
allValid = response.allValid
}
} catch (e) {
error = e.message
}
loading = false
}
$: {
// binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
if (newValidateHash !== validateHash) {
validate(rows, schema)
}
validateHash = newValidateHash
}
</script> </script>
<div class="dropzone"> <div class="dropzone">
<input id="file-upload" accept=".csv" type="file" on:change={handleFile} /> <input
<label for="file-upload" class:uploaded={files[0]}> disabled={loading}
{#if files[0]}{files[0].name}{:else}Upload{/if} id="file-upload"
accept="text/csv,application/json"
type="file"
on:change={handleFile}
/>
<label for="file-upload" class:uploaded={rows.length > 0}>
{#if loading}
loading...
{:else if error}
error: {error}
{:else if fileName}
{fileName}
{:else}
Upload
{/if}
</label> </label>
</div> </div>
{#if fields.length} {#if rows.length > 0 && !error}
<div class="schema-fields"> <div class="schema-fields">
{#each fields as columnName} {#each Object.values(schema) as column}
<div class="field"> <div class="field">
<span>{columnName}</span> <span>{column.name}</span>
<Select <Select
bind:value={schema[columnName].type} bind:value={column.type}
on:change={handleTypeChange(columnName)} on:change={e => (column.type = e.detail)}
options={typeOptions} options={typeOptions}
placeholder={null} placeholder={null}
getOptionLabel={option => option.label} getOptionLabel={option => option.label}
getOptionValue={option => option.value} getOptionValue={option => option.value}
disabled={!!existingTableId} disabled={loading}
/> />
<span class="field-status" class:error={!schema[columnName].success}> <span
{schema[columnName].success ? "Success" : "Failure"} class={loading || validation[column.name]
? "fieldStatusSuccess"
: "fieldStatusFailure"}
>
{validation[column.name] ? "Success" : "Failure"}
</span> </span>
<i <i
class="omit-button ri-close-circle-fill" class={`omit-button ri-close-circle-fill ${
on:click={() => omitColumn(columnName)} loading ? "omit-button-disabled" : ""
}`}
on:click={() => {
delete schema[column.name]
schema = schema
}}
/> />
</div> </div>
{/each} {/each}
</div> </div>
{#if !existingTableId}
<div class="display-column"> <div class="display-column">
<Select <Select
label="Display Column" label="Display Column"
bind:value={primaryDisplay} bind:value={displayColumn}
options={fields} options={Object.keys(schema)}
sort sort
/> />
</div> </div>
{/if}
{:else if hasValidated}
<div>
<InlineAlert
header="Invalid CSV"
bind:message={noFieldsError}
type="error"
/>
</div>
{/if} {/if}
<style> <style>
@ -195,28 +169,10 @@
transition: all 0.3s; transition: all 0.3s;
} }
.field-status { input {
color: var(--green);
justify-self: center;
font-weight: 600;
}
.error {
color: var(--red);
}
.uploaded {
color: var(--blue);
}
input[type="file"] {
display: none; display: none;
} }
.schema-fields {
margin-top: var(--spacing-xl);
}
label { label {
font-family: var(--font-sans); font-family: var(--font-sans);
cursor: pointer; cursor: pointer;
@ -244,11 +200,12 @@
border: var(--border-transparent); border: var(--border-transparent);
} }
.omit-button { .uploaded {
font-size: 1.2em; color: var(--blue);
color: var(--grey-7); }
cursor: pointer;
justify-self: flex-end; .schema-fields {
margin-top: var(--spacing-xl);
} }
.field { .field {
@ -260,6 +217,30 @@
font-size: var(--spectrum-global-dimension-font-size-75); font-size: var(--spectrum-global-dimension-font-size-75);
} }
.fieldStatusSuccess {
color: var(--green);
justify-self: center;
font-weight: 600;
}
.fieldStatusFailure {
color: var(--red);
justify-self: center;
font-weight: 600;
}
.omit-button {
font-size: 1.2em;
color: var(--grey-7);
cursor: pointer;
justify-self: flex-end;
}
.omit-button-disabled {
pointer-events: none;
opacity: 70%;
}
.display-column { .display-column {
margin-top: var(--spacing-xl); margin-top: var(--spacing-xl);
} }

View File

@ -29,18 +29,27 @@
: BUDIBASE_INTERNAL_DB_ID : BUDIBASE_INTERNAL_DB_ID
export let name export let name
let dataImport
let error = "" let error = ""
let autoColumns = getAutoColumnInformation() let autoColumns = getAutoColumnInformation()
let schema = {}
let rows = []
let allValid = false
let displayColumn = null
function addAutoColumns(tableName, schema) { function getAutoColumns() {
for (let [subtype, col] of Object.entries(autoColumns)) { const selectedAutoColumns = {}
if (!col.enabled) {
continue Object.entries(autoColumns).forEach(([subtype, column]) => {
if (column.enabled) {
selectedAutoColumns[column.name] = buildAutoColumn(
name,
column.name,
subtype
)
} }
schema[col.name] = buildAutoColumn(tableName, col.name, subtype) })
}
return schema return selectedAutoColumns
} }
function checkValid(evt) { function checkValid(evt) {
@ -55,15 +64,15 @@
async function saveTable() { async function saveTable() {
let newTable = { let newTable = {
name, name,
schema: addAutoColumns(name, dataImport.schema || {}), schema: { ...schema, ...getAutoColumns() },
dataImport, rows,
type: "internal", type: "internal",
sourceId: targetDatasourceId, sourceId: targetDatasourceId,
} }
// Only set primary display if defined // Only set primary display if defined
if (dataImport.primaryDisplay && dataImport.primaryDisplay.length) { if (displayColumn && displayColumn.length) {
newTable.primaryDisplay = dataImport.primaryDisplay newTable.primaryDisplay = displayColumn
} }
// Create table // Create table
@ -90,7 +99,7 @@
title="Create Table" title="Create Table"
confirmText="Create" confirmText="Create"
onConfirm={saveTable} onConfirm={saveTable}
disabled={error || !name || (dataImport && !dataImport.valid)} disabled={error || !name || !allValid}
> >
<Input <Input
data-cy="table-name-input" data-cy="table-name-input"
@ -117,8 +126,10 @@
</div> </div>
<div> <div>
<Layout gap="XS" noPadding> <Layout gap="XS" noPadding>
<Label grey extraSmall>Create Table from CSV (Optional)</Label> <Label grey extraSmall
<TableDataImport bind:dataImport /> >Create a Table from a CSV or JSON file (Optional)</Label
>
<TableDataImport bind:rows bind:schema bind:allValid bind:displayColumn />
</Layout> </Layout>
</div> </div>
</ModalContent> </ModalContent>
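With dataImport gone, saveTable above now posts the edited schema, any enabled auto columns, and the already-parsed rows directly. A minimal sketch of the resulting payload, assuming one imported text column, no auto columns, and "name" chosen as the display column:

const newTable = {
  name: "people",
  schema: {
    name: { name: "name", type: "string", constraints: { /* FIELDS.STRING.constraints */ } },
  },
  rows: [{ name: "test-name" }],
  type: "internal",
  sourceId: targetDatasourceId,
  primaryDisplay: "name", // only set when a display column was chosen
}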

View File

@ -0,0 +1,71 @@
import { API } from "api"
import { FIELDS } from "constants/backend"
const BYTES_IN_MB = 1000000
const FILE_SIZE_LIMIT = BYTES_IN_MB * 5
const getDefaultSchema = rows => {
const newSchema = {}
rows.forEach(row => {
Object.keys(row).forEach(column => {
newSchema[column] = {
name: column,
type: "string",
constraints: FIELDS["STRING"].constraints,
}
})
})
return newSchema
}
export const parseFile = e => {
return new Promise((resolve, reject) => {
const file = Array.from(e.target.files)[0]
if (file.size >= FILE_SIZE_LIMIT) {
reject("file too large")
return
}
let reader = new FileReader()
const resolveRows = (rows, schema = null) => {
resolve({
rows,
schema: schema ?? getDefaultSchema(rows),
fileName: file.name,
fileType: file.type,
})
}
reader.addEventListener("load", function (e) {
const fileData = e.target.result
if (file.type === "text/csv") {
API.csvToJson(fileData)
.then(rows => {
resolveRows(rows)
})
.catch(() => {
reject("can't convert csv to json")
})
} else if (file.type === "application/json") {
const parsedFileData = JSON.parse(fileData)
if (Array.isArray(parsedFileData)) {
resolveRows(parsedFileData)
} else if (typeof parsedFileData === "object") {
resolveRows(parsedFileData.rows, parsedFileData.schema)
} else {
reject("invalid json format")
}
} else {
reject("invalid file type")
}
})
reader.readAsText(file)
})
}
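A short usage sketch for parseFile, wired to a file input's change event the same way both importer components use it. Note the two JSON shapes it accepts: a bare array of row objects, or an object with rows and schema keys (the format produced by the new "JSON with Schema" export). The handler name below is illustrative:

import { parseFile } from "./utils"

async function onFileChosen(e) {
  try {
    const { rows, schema, fileName, fileType } = await parseFile(e)
    // rows: array of plain objects; schema: taken from the file or defaulted to string columns
    console.log(`parsed ${rows.length} rows from ${fileName} (${fileType})`)
  } catch (message) {
    // parseFile rejects with plain strings, e.g. "file too large" or "invalid file type"
    console.error(message)
  }
}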

View File

@ -64,32 +64,22 @@ export const buildTableEndpoints = API => ({
* @param tableId the table ID to import to * @param tableId the table ID to import to
* @param data the data import object * @param data the data import object
*/ */
importTableData: async ({ tableId, data }) => { importTableData: async ({ tableId, rows }) => {
return await API.post({ return await API.post({
url: `/api/tables/${tableId}/import`, url: `/api/tables/${tableId}/import`,
body: { body: {
dataImport: data, rows,
}, },
}) })
}, },
csvToJson: async csvString => {
/**
* Validates a candidate CSV to be imported for a certain table.
* @param tableId the table ID to import to
* @param csvString the CSV contents as a string
* @param schema the proposed schema
*/
validateTableCSV: async ({ tableId, csvString, schema }) => {
return await API.post({ return await API.post({
url: "/api/tables/csv/validate", url: "/api/convert/csvToJson",
body: { body: {
csvString, csvString,
schema,
tableId,
}, },
}) })
}, },
/** /**
* Gets a list of tables. * Gets a list of tables.
*/ */
@ -120,4 +110,22 @@ export const buildTableEndpoints = API => ({
url: `/api/tables/${tableId}/${tableRev}`, url: `/api/tables/${tableId}/${tableRev}`,
}) })
}, },
validateNewTableImport: async ({ rows, schema }) => {
return await API.post({
url: "/api/tables/validateNewTableImport",
body: {
rows,
schema,
},
})
},
validateExistingTableImport: async ({ rows, tableId }) => {
return await API.post({
url: "/api/tables/validateExistingTableImport",
body: {
rows,
tableId,
},
})
},
}) })
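Put together, importing into an existing table from the builder becomes a validate-then-import pair of calls against these endpoints. A hedged sketch (error handling omitted):

// 1. check the parsed rows against the target table's schema
const { allValid, schemaValidation, invalidColumns } =
  await API.validateExistingTableImport({ rows, tableId })

// 2. only push the rows once everything validates
if (allValid) {
  await API.importTableData({ tableId, rows })
}

// for a brand new table the (still editable) schema is sent along instead of a tableId
const result = await API.validateNewTableImport({ rows, schema })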

View File

@ -27,7 +27,7 @@ import {
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { context, db as dbCore } from "@budibase/backend-core" import { context, db as dbCore } from "@budibase/backend-core"
import { finaliseRow, updateRelatedFormula } from "./staticFormula" import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import * as exporters from "../view/exporters" import { csv, json, jsonWithSchema, Format, isFormat } from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem" import { apiFileReturn } from "../../../utilities/fileSystem"
import { import {
Ctx, Ctx,
@ -412,14 +412,15 @@ export async function exportRows(ctx: Ctx) {
rows = result rows = result
} }
let headers = Object.keys(rows[0]) if (format === Format.CSV) {
// @ts-ignore ctx.attachment("export.csv")
const exporter = exporters[format] return apiFileReturn(csv(Object.keys(rows[0]), rows))
const filename = `export.${format}` } else if (format === Format.JSON) {
ctx.attachment("export.json")
// send down the file return apiFileReturn(json(rows))
ctx.attachment(filename) } else {
return apiFileReturn(exporter(headers, rows)) throw "Format not recognised"
}
} }
export async function fetchEnrichedRow(ctx: Ctx) { export async function fetchEnrichedRow(ctx: Ctx) {

View File

@ -10,9 +10,9 @@ import {
} from "./utils" } from "./utils"
import { FieldTypes, RelationshipTypes } from "../../../constants" import { FieldTypes, RelationshipTypes } from "../../../constants"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import * as csvParser from "../../../utilities/csvParser"
import { handleRequest } from "../row/external" import { handleRequest } from "../row/external"
import { events, context } from "@budibase/backend-core" import { events, context } from "@budibase/backend-core"
import { parse, isRows, isSchema } from "../../../utilities/schema"
import { import {
Datasource, Datasource,
Table, Table,
@ -197,7 +197,7 @@ export async function save(ctx: BBContext) {
const table: TableRequest = ctx.request.body const table: TableRequest = ctx.request.body
const renamed = table?._rename const renamed = table?._rename
// can't do this right now // can't do this right now
delete table.dataImport delete table.rows
const datasourceId = getDatasourceId(ctx.request.body)! const datasourceId = getDatasourceId(ctx.request.body)!
// table doesn't exist already, note that it is created // table doesn't exist already, note that it is created
if (!table._id) { if (!table._id) {
@ -338,17 +338,17 @@ export async function destroy(ctx: BBContext) {
export async function bulkImport(ctx: BBContext) { export async function bulkImport(ctx: BBContext) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body const { rows }: { rows: unknown } = ctx.request.body
if (!dataImport || !dataImport.schema || !dataImport.csvString) { const schema: unknown = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
ctx.throw(400, "Provided data import information is invalid.") ctx.throw(400, "Provided data import information is invalid.")
} }
const rows = await csvParser.transform({
...dataImport, const parsedRows = await parse(rows, schema)
existingTable: table,
})
await handleRequest(Operation.BULK_CREATE, table._id!, { await handleRequest(Operation.BULK_CREATE, table._id!, {
rows, rows: parsedRows,
}) })
await events.rows.imported(table, "csv", rows.length) await events.rows.imported(table, parsedRows.length)
return table return table
} }

View File

@ -1,11 +1,16 @@
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
import * as csvParser from "../../../utilities/csvParser" import {
validate as validateSchema,
isSchema,
isRows,
} from "../../../utilities/schema"
import { isExternalTable, isSQL } from "../../../integrations/utils" import { isExternalTable, isSQL } from "../../../integrations/utils"
import { getDatasourceParams } from "../../../db/utils" import { getDatasourceParams } from "../../../db/utils"
import { context, events } from "@budibase/backend-core" import { context, events } from "@budibase/backend-core"
import { Table, BBContext } from "@budibase/types" import { Table, BBContext } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import csv from "csvtojson"
function pickApi({ tableId, table }: { tableId?: string; table?: Table }) { function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
if (table && !tableId) { if (table && !tableId) {
@ -56,16 +61,16 @@ export async function find(ctx: BBContext) {
export async function save(ctx: BBContext) { export async function save(ctx: BBContext) {
const appId = ctx.appId const appId = ctx.appId
const table = ctx.request.body const table = ctx.request.body
const importFormat = const isImport = table.rows
table.dataImport && table.dataImport.csvString ? "csv" : undefined
const savedTable = await pickApi({ table }).save(ctx) const savedTable = await pickApi({ table }).save(ctx)
if (!table._id) { if (!table._id) {
await events.table.created(savedTable) await events.table.created(savedTable)
} else { } else {
await events.table.updated(savedTable) await events.table.updated(savedTable)
} }
if (importFormat) { if (isImport) {
await events.table.imported(savedTable, importFormat) await events.table.imported(savedTable)
} }
ctx.status = 200 ctx.status = 200
ctx.message = `Table ${table.name} saved successfully.` ctx.message = `Table ${table.name} saved successfully.`
@ -96,19 +101,43 @@ export async function bulkImport(ctx: BBContext) {
ctx.body = { message: `Bulk rows created.` } ctx.body = { message: `Bulk rows created.` }
} }
export async function validateCSVSchema(ctx: BBContext) { export async function csvToJson(ctx: BBContext) {
// tableId being specified means its an import to an existing table const { csvString } = ctx.request.body
const { csvString, schema = {}, tableId } = ctx.request.body
let existingTable const result = await csv().fromString(csvString)
if (tableId) {
existingTable = await sdk.tables.getTable(tableId) ctx.status = 200
} ctx.body = result
let result: Record<string, any> | undefined = await csvParser.parse( }
csvString,
schema export async function validateNewTableImport(ctx: BBContext) {
) const { rows, schema }: { rows: unknown; schema: unknown } = ctx.request.body
if (existingTable) {
result = csvParser.updateSchema({ schema: result, existingTable }) if (isRows(rows) && isSchema(schema)) {
} ctx.status = 200
ctx.body = { schema: result } ctx.body = validateSchema(rows, schema)
} else {
ctx.status = 422
}
}
export async function validateExistingTableImport(ctx: BBContext) {
const { rows, tableId }: { rows: unknown; tableId: unknown } =
ctx.request.body
let schema = null
if (tableId) {
const table = await sdk.tables.getTable(tableId)
schema = table.schema
} else {
ctx.status = 422
return
}
if (tableId && isRows(rows) && isSchema(schema)) {
ctx.status = 200
ctx.body = validateSchema(rows, schema)
} else {
ctx.status = 422
}
} }

View File

@ -35,7 +35,7 @@ function checkAutoColumns(table: Table, oldTable: Table) {
export async function save(ctx: any) { export async function save(ctx: any) {
const db = context.getAppDB() const db = context.getAppDB()
const { dataImport, ...rest } = ctx.request.body const { rows, ...rest } = ctx.request.body
let tableToSave = { let tableToSave = {
type: "table", type: "table",
_id: generateTableID(), _id: generateTableID(),
@ -61,7 +61,7 @@ export async function save(ctx: any) {
const tableSaveFunctions = new TableSaveFunctions({ const tableSaveFunctions = new TableSaveFunctions({
user: ctx.user, user: ctx.user,
oldTable, oldTable,
dataImport, importRows: rows,
}) })
tableToSave = await tableSaveFunctions.before(tableToSave) tableToSave = await tableSaveFunctions.before(tableToSave)
@ -185,7 +185,7 @@ export async function destroy(ctx: any) {
export async function bulkImport(ctx: any) { export async function bulkImport(ctx: any) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { dataImport } = ctx.request.body const { rows } = ctx.request.body
await handleDataImport(ctx.user, table, dataImport) await handleDataImport(ctx.user, table, rows)
return table return table
} }

View File

@ -1,4 +1,4 @@
import { transform } from "../../../utilities/csvParser" import { parse, isSchema, isRows } from "../../../utilities/schema"
import { getRowParams, generateRowID, InternalTables } from "../../../db/utils" import { getRowParams, generateRowID, InternalTables } from "../../../db/utils"
import { isEqual } from "lodash" import { isEqual } from "lodash"
import { AutoFieldSubTypes, FieldTypes } from "../../../constants" import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
@ -128,24 +128,23 @@ export function importToRows(data: any, table: any, user: any = {}) {
return finalData return finalData
} }
export async function handleDataImport(user: any, table: any, dataImport: any) { export async function handleDataImport(user: any, table: any, rows: any) {
if (!dataImport || !dataImport.csvString) { const schema: unknown = table.schema
if (!rows || !isRows(rows) || !isSchema(schema)) {
return table return table
} }
const db = context.getAppDB() const db = context.getAppDB()
// Populate the table with rows imported from CSV in a bulk update const data = parse(rows, schema)
const data = await transform({
...dataImport,
existingTable: table,
})
let finalData: any = importToRows(data, table, user) let finalData: any = importToRows(data, table, user)
await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), { await quotas.addRows(finalData.length, () => db.bulkDocs(finalData), {
tableId: table._id, tableId: table._id,
}) })
await events.rows.imported(table, "csv", finalData.length)
await events.rows.imported(table, finalData.length)
return table return table
} }
@ -210,14 +209,14 @@ class TableSaveFunctions {
db: any db: any
user: any user: any
oldTable: any oldTable: any
dataImport: any importRows: any
rows: any rows: any
constructor({ user, oldTable, dataImport }: any) { constructor({ user, oldTable, importRows }: any) {
this.db = context.getAppDB() this.db = context.getAppDB()
this.user = user this.user = user
this.oldTable = oldTable this.oldTable = oldTable
this.dataImport = dataImport this.importRows = importRows
// any rows that need updated // any rows that need updated
this.rows = [] this.rows = []
} }
@ -241,7 +240,7 @@ class TableSaveFunctions {
// after saving // after saving
async after(table: any) { async after(table: any) {
table = await handleSearchIndexes(table) table = await handleSearchIndexes(table)
table = await handleDataImport(this.user, table, this.dataImport) table = await handleDataImport(this.user, table, this.importRows)
return table return table
} }

View File

@ -1,4 +1,4 @@
import { Row } from "@budibase/types" import { Row, TableSchema } from "@budibase/types"
export function csv(headers: string[], rows: Row[]) { export function csv(headers: string[], rows: Row[]) {
let csv = headers.map(key => `"${key}"`).join(",") let csv = headers.map(key => `"${key}"`).join(",")
@ -18,11 +18,26 @@ export function csv(headers: string[], rows: Row[]) {
return csv return csv
} }
export function json(headers: string[], rows: Row[]) { export function json(rows: Row[]) {
return JSON.stringify(rows, undefined, 2) return JSON.stringify(rows, undefined, 2)
} }
export const ExportFormats = { export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
CSV: "csv", const newSchema: TableSchema = {}
JSON: "json", Object.values(schema).forEach(column => {
if (!column.autocolumn) {
newSchema[column.name] = column
}
})
return JSON.stringify({ schema: newSchema, rows }, undefined, 2)
}
export enum Format {
CSV = "csv",
JSON = "json",
JSON_WITH_SCHEMA = "jsonWithSchema",
}
export function isFormat(format: any): format is Format {
return Object.values(Format).includes(format as Format)
} }
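The jsonWithSchema format exists so an export can be re-imported as-is: parseFile (added earlier in this commit) recognises a JSON object carrying rows and schema keys and uses the embedded schema rather than inferring one. A sketch of the file it produces, assuming a single non-auto column (auto columns are filtered out of the exported schema):

// contents of the attachment, i.e. JSON.stringify({ schema, rows }, undefined, 2)
const exportedFile = {
  schema: {
    name: { name: "name", type: "string" },
  },
  rows: [{ name: "test-name" }],
}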

View File

@ -1,6 +1,6 @@
import viewTemplate from "./viewBuilder" import viewTemplate from "./viewBuilder"
import { apiFileReturn } from "../../../utilities/fileSystem" import { apiFileReturn } from "../../../utilities/fileSystem"
import * as exporters from "./exporters" import { csv, json, jsonWithSchema, Format, isFormat } from "./exporters"
import { deleteView, getView, getViews, saveView } from "./utils" import { deleteView, getView, getViews, saveView } from "./utils"
import { fetchView } from "../row" import { fetchView } from "../row"
import { FieldTypes } from "../../../constants" import { FieldTypes } from "../../../constants"
@ -127,9 +127,13 @@ export async function exportView(ctx: BBContext) {
const viewName = decodeURIComponent(ctx.query.view as string) const viewName = decodeURIComponent(ctx.query.view as string)
const view = await getView(viewName) const view = await getView(viewName)
const format = ctx.query.format as string const format = ctx.query.format as unknown
if (!format || !Object.values(exporters.ExportFormats).includes(format)) {
ctx.throw(400, "Format must be specified, either csv or json") if (!isFormat(format)) {
ctx.throw(
400,
"Format must be specified, either csv, json or jsonWithSchema"
)
} }
if (view) { if (view) {
@ -171,7 +175,7 @@ export async function exportView(ctx: BBContext) {
}) })
// make sure no "undefined" entries appear in the CSV // make sure no "undefined" entries appear in the CSV
if (format === exporters.ExportFormats.CSV) { if (format === Format.CSV) {
const schemaKeys = Object.keys(schema) const schemaKeys = Object.keys(schema)
for (let key of schemaKeys) { for (let key of schemaKeys) {
for (let row of rows) { for (let row of rows) {
@ -182,13 +186,18 @@ export async function exportView(ctx: BBContext) {
} }
} }
// Export part if (format === Format.CSV) {
let headers = Object.keys(schema) ctx.attachment(`${viewName}.csv`)
const exporter = format === "csv" ? exporters.csv : exporters.json ctx.body = apiFileReturn(csv(Object.keys(schema), rows))
const filename = `${viewName}.${format}` } else if (format === Format.JSON) {
// send down the file ctx.attachment(`${viewName}.json`)
ctx.attachment(filename) ctx.body = apiFileReturn(json(rows))
ctx.body = apiFileReturn(exporter(headers, rows)) } else if (format === Format.JSON_WITH_SCHEMA) {
ctx.attachment(`${viewName}.json`)
ctx.body = apiFileReturn(jsonWithSchema(schema, rows))
} else {
throw "Format not recognised"
}
if (viewName.startsWith(DocumentType.TABLE)) { if (viewName.startsWith(DocumentType.TABLE)) {
await events.table.exported(table, format as TableExportFormat) await events.table.exported(table, format as TableExportFormat)

View File

@ -67,10 +67,7 @@ router
* structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field * structure, and the "updated", new column name should also be supplied. The schema should also be updated, this field
* lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix * lets the server know that a field hasn't just been deleted, that the data has moved to a new name, this will fix
* the rows in the table. This functionality is only available for internal tables. * the rows in the table. This functionality is only available for internal tables.
* @apiParam (Body) {object} [dataImport] When creating an internal table it can be built from a CSV, by using the * @apiParam (Body) {object[]} [rows] When creating a table using a compatible data source, an array of objects to be imported into the new table can be provided.
* CSV validation endpoint. Send the CSV data to the validation endpoint, then put the results of that call
* into this property, along with the CSV and a table/rows will be built from it. This is not supported when updating
* or for external tables.
* *
* @apiParamExample {json} Example: * @apiParamExample {json} Example:
* { * {
@ -99,15 +96,7 @@ router
* "old": "columnName", * "old": "columnName",
* "updated": "newColumnName", * "updated": "newColumnName",
* }, * },
* "dataImport": { * "rows": []
* "csvString": "column\nvalue",
* "primaryDisplay": "column",
* "schema": {
* "column": {
* "type": "string"
* }
* }
* }
* } * }
* *
* @apiSuccess {object} table The response body will contain the table structure after being cleaned up and * @apiSuccess {object} table The response body will contain the table structure after being cleaned up and
@ -121,30 +110,20 @@ router
tableValidator(), tableValidator(),
tableController.save tableController.save
) )
/**
* @api {post} /api/tables/csv/validate Validate a CSV for a table
* @apiName Validate a CSV for a table
* @apiGroup tables
* @apiPermission builder
* @apiDescription When creating a new table, or importing a CSV to an existing table the CSV must be validated and
* converted into a Budibase schema; this endpoint does this.
*
* @apiParam (Body) {string} csvString The CSV which is to be validated as a string.
* @apiParam (Body) {object} [schema] When a CSV has been validated it is possible to re-validate after changing the
* type of a field, by default everything will be strings as there is no way to infer types. The returned schema can
* be updated and then returned to the endpoint to re-validate and check if the type will work for the CSV, e.g.
* using a number instead of strings.
* @apiParam (Body) {string} [tableId] If importing data to an existing table this will pull the current table and
* remove any fields from the CSV schema which do not exist on the table/don't match the type of the table. When
* importing a CSV to an existing table only fields that are present on the table can be imported.
*
* @apiSuccess {object} schema The response body will contain a "schema" object that represents the schema found for
* the CSV - this will be in the same format used for table schema.s
*/
.post( .post(
"/api/tables/csv/validate", "/api/convert/csvToJson",
authorized(BUILDER), authorized(BUILDER),
tableController.validateCSVSchema tableController.csvToJson
)
.post(
"/api/tables/validateNewTableImport",
authorized(BUILDER),
tableController.validateNewTableImport
)
.post(
"/api/tables/validateExistingTableImport",
authorized(BUILDER),
tableController.validateExistingTableImport
) )
/** /**
* @api {post} /api/tables/:tableId/:revId Delete a table * @api {post} /api/tables/:tableId/:revId Delete a table
@ -177,9 +156,7 @@ router
* *
* @apiParam {string} tableId The ID of the table which the data should be imported to. * @apiParam {string} tableId The ID of the table which the data should be imported to.
* *
* @apiParam (Body) {object} dataImport This is the same as the structure used when creating an internal table with * @apiParam (Body) {object[]} rows An array of objects representing the rows to be imported, key-value pairs not matching the table schema will be ignored.
* a CSV, it will have the "schema" returned from the CSV validation endpoint and the "csvString" which is to be
* turned into rows.
* *
* @apiSuccess {string} message A message stating that the data was imported successfully. * @apiSuccess {string} message A message stating that the data was imported successfully.
*/ */

View File

@ -42,7 +42,7 @@ describe("run misc tests", () => {
}) })
describe("test table utilities", () => { describe("test table utilities", () => {
it("should be able to import a CSV", async () => { it("should be able to import data", async () => {
return config.doInContext(null, async () => { return config.doInContext(null, async () => {
const table = await config.createTable({ const table = await config.createTable({
name: "table", name: "table",
@ -75,17 +75,11 @@ describe("run misc tests", () => {
}, },
}, },
}) })
const dataImport = {
csvString: "a,b,c,d\n1,2,3,4",
schema: {},
}
for (let col of ["a", "b", "c", "d"]) {
dataImport.schema[col] = { type: "string" }
}
await tableUtils.handleDataImport( await tableUtils.handleDataImport(
{ userId: "test" }, { userId: "test" },
table, table,
dataImport [{ a: '1', b: '2', c: '3', d: '4'}]
) )
const rows = await config.getRows() const rows = await config.getRows()
expect(rows[0].a).toEqual("1") expect(rows[0].a).toEqual("1")

View File

@ -43,21 +43,18 @@ describe("/tables", () => {
expect(events.table.created).toBeCalledWith(res.body) expect(events.table.created).toBeCalledWith(res.body)
}) })
it("creates a table via data import CSV", async () => { it("creates a table via data import", async () => {
const table = basicTable() const table = basicTable()
table.dataImport = { table.rows = [{ name: 'test-name', description: 'test-desc' }]
csvString: "\"name\",\"description\"\n\"test-name\",\"test-desc\"",
}
table.dataImport.schema = table.schema
const res = await createTable(table) const res = await createTable(table)
expect(events.table.created).toBeCalledTimes(1) expect(events.table.created).toBeCalledTimes(1)
expect(events.table.created).toBeCalledWith(res.body) expect(events.table.created).toBeCalledWith(res.body)
expect(events.table.imported).toBeCalledTimes(1) expect(events.table.imported).toBeCalledTimes(1)
expect(events.table.imported).toBeCalledWith(res.body, "csv") expect(events.table.imported).toBeCalledWith(res.body)
expect(events.rows.imported).toBeCalledTimes(1) expect(events.rows.imported).toBeCalledTimes(1)
expect(events.rows.imported).toBeCalledWith(res.body, "csv", 1) expect(events.rows.imported).toBeCalledWith(res.body, 1)
}) })
it("should apply authorization to endpoint", async () => { it("should apply authorization to endpoint", async () => {
@ -155,11 +152,10 @@ describe("/tables", () => {
it("imports rows successfully", async () => { it("imports rows successfully", async () => {
const table = await config.createTable() const table = await config.createTable()
const importRequest = { const importRequest = {
dataImport: { schema: table.schema,
csvString: "\"name\",\"description\"\n\"test-name\",\"test-desc\"", rows: [{ name: 'test-name', description: 'test-desc' }]
schema: table.schema
}
} }
jest.clearAllMocks() jest.clearAllMocks()
await request await request
@ -171,7 +167,7 @@ describe("/tables", () => {
expect(events.table.created).not.toHaveBeenCalled() expect(events.table.created).not.toHaveBeenCalled()
expect(events.rows.imported).toBeCalledTimes(1) expect(events.rows.imported).toBeCalledTimes(1)
expect(events.rows.imported).toBeCalledWith(table, "csv", 1) expect(events.rows.imported).toBeCalledWith(table, 1)
}) })
}) })
@ -206,24 +202,6 @@ describe("/tables", () => {
}) })
}) })
describe("validate csv", () => {
it("should be able to validate a CSV layout", async () => {
const res = await request
.post(`/api/tables/csv/validate`)
.send({
csvString: "a,b,c,d\n1,2,3,4"
})
.set(config.defaultHeaders())
.expect('Content-Type', /json/)
.expect(200)
expect(res.body.schema).toBeDefined()
expect(res.body.schema.a).toEqual({
type: "string",
success: true,
})
})
})
describe("indexing", () => { describe("indexing", () => {
it("should be able to create a table with indexes", async () => { it("should be able to create a table with indexes", async () => {
await context.doInAppContext(appId, async () => { await context.doInAppContext(appId, async () => {

View File

@ -18,7 +18,7 @@ export function tableValidator() {
schema: Joi.object().required(), schema: Joi.object().required(),
name: Joi.string().required(), name: Joi.string().required(),
views: Joi.object(), views: Joi.object(),
dataImport: Joi.object(), rows: Joi.array(),
}).unknown(true)) }).unknown(true))
} }

View File

@ -1,161 +0,0 @@
import { FieldSchema, Table } from "@budibase/types"
import csv from "csvtojson"
import { FieldTypes } from "../constants"
type CsvParseOpts = {
schema?: { [key: string]: any }
existingTable: Table
csvString?: string
}
const VALIDATORS: any = {
[FieldTypes.STRING]: () => true,
[FieldTypes.OPTIONS]: () => true,
[FieldTypes.BARCODEQR]: () => true,
[FieldTypes.NUMBER]: (attribute?: string) => {
// allow not to be present
if (!attribute) {
return true
}
return !isNaN(Number(attribute))
},
[FieldTypes.DATETIME]: (attribute?: string) => {
// allow not to be present
if (!attribute) {
return true
}
return !isNaN(new Date(attribute).getTime())
},
}
const PARSERS: any = {
[FieldTypes.NUMBER]: (attribute?: string) => {
if (!attribute) {
return attribute
}
return Number(attribute)
},
[FieldTypes.DATETIME]: (attribute?: string) => {
if (!attribute) {
return attribute
}
return new Date(attribute).toISOString()
},
}
export function parse(csvString: string, parsers: any): Record<string, any> {
const result = csv().fromString(csvString)
const schema: Record<string, any> = {}
return new Promise((resolve, reject) => {
result.on("header", headers => {
for (let header of headers) {
schema[header] = {
type: parsers[header] ? parsers[header].type : "string",
success: true,
}
}
})
result.subscribe(row => {
// For each CSV row parse all the columns that need parsed
for (let key of Object.keys(parsers)) {
if (!schema[key] || schema[key].success) {
// get the validator for the column type
const validator = VALIDATORS[parsers[key].type]
try {
// allow null/undefined values
schema[key].success = !row[key] || validator(row[key])
} catch (err) {
schema[key].success = false
}
}
}
})
result.on("done", error => {
if (error) {
console.error(error)
reject(error)
}
resolve(schema)
})
})
}
export function updateSchema({
schema,
existingTable,
}: {
schema?: Record<string, any>
existingTable?: Table
}) {
if (!schema) {
return schema
}
const finalSchema: Record<string, FieldSchema> = {}
const schemaKeyMap: Record<string, any> = {}
Object.keys(schema).forEach(key => (schemaKeyMap[key.toLowerCase()] = key))
for (let [key, field] of Object.entries(existingTable?.schema || {})) {
const lcKey = key.toLowerCase()
const foundKey: string = schemaKeyMap[lcKey]
if (foundKey) {
finalSchema[key] = schema[foundKey]
finalSchema[key].type = field.type
}
}
return finalSchema
}
export async function transform({
schema,
csvString,
existingTable,
}: CsvParseOpts) {
if (!schema || !csvString) {
throw new Error("Unable to transform CSV without schema")
}
const colParser: any = {}
// make sure the table has all the columns required for import
if (existingTable) {
schema = updateSchema({ schema, existingTable })
}
for (let [key, field] of Object.entries(schema || {})) {
// don't import data to auto columns
if (!field.autocolumn) {
colParser[key] = PARSERS[field.type] || field.type
}
}
try {
const data = await csv({ colParser }).fromString(csvString)
const schemaKeyMap: any = {}
Object.keys(schema || {}).forEach(
key => (schemaKeyMap[key.toLowerCase()] = key)
)
for (let element of data) {
if (!data) {
continue
}
for (let key of Object.keys(element)) {
const mappedKey = schemaKeyMap[key.toLowerCase()]
// isn't a column in the table, remove it
if (mappedKey == null) {
delete element[key]
}
// casing is different, fix it in row
else if (key !== mappedKey) {
element[mappedKey] = element[key]
delete element[key]
}
}
}
return data
} catch (err) {
console.error(`Error transforming CSV to JSON for data import`, err)
throw err
}
}

View File

@ -0,0 +1,141 @@
import { FieldTypes } from "../constants"
interface SchemaColumn {
readonly name: string
readonly type: FieldTypes
readonly autocolumn?: boolean
}
interface Schema {
readonly [index: string]: SchemaColumn
}
interface Row {
[index: string]: any
}
type Rows = Array<Row>
interface SchemaValidation {
[index: string]: boolean
}
interface ValidationResults {
schemaValidation: SchemaValidation
allValid: boolean
invalidColumns: Array<string>
}
const PARSERS: any = {
[FieldTypes.NUMBER]: (attribute?: string) => {
if (!attribute) {
return attribute
}
return Number(attribute)
},
[FieldTypes.DATETIME]: (attribute?: string) => {
if (!attribute) {
return attribute
}
return new Date(attribute).toISOString()
},
}
export function isSchema(schema: any): schema is Schema {
return (
typeof schema === "object" &&
Object.values(schema).every(rawColumn => {
const column = rawColumn as SchemaColumn
return (
column !== null &&
typeof column === "object" &&
typeof column.type === "string" &&
Object.values(FieldTypes).includes(column.type as FieldTypes)
)
})
)
}
export function isRows(rows: any): rows is Rows {
return Array.isArray(rows) && rows.every(row => typeof row === "object")
}
export function validate(rows: Rows, schema: Schema): ValidationResults {
const results: ValidationResults = {
schemaValidation: {},
allValid: false,
invalidColumns: [],
}
rows.forEach(row => {
Object.entries(row).forEach(([columnName, columnData]) => {
const columnType = schema[columnName]?.type
const isAutoColumn = schema[columnName]?.autocolumn
// If the columnType is not a string, then it's not present in the schema, and should be added to the invalid columns array
if (typeof columnType !== "string") {
results.invalidColumns.push(columnName)
} else if (
// If there's no data for this field don't bother with further checks
// If the field is already marked as invalid there's no need for further checks
results.schemaValidation[columnName] === false ||
columnData == null ||
isAutoColumn
) {
return
} else if (
columnType === FieldTypes.NUMBER &&
isNaN(Number(columnData))
) {
// If provided must be a valid number
results.schemaValidation[columnName] = false
} else if (
// If provided must be a valid date
columnType === FieldTypes.DATETIME &&
isNaN(new Date(columnData).getTime())
) {
results.schemaValidation[columnName] = false
} else {
results.schemaValidation[columnName] = true
}
})
})
results.allValid =
Object.values(results.schemaValidation).length > 0 &&
Object.values(results.schemaValidation).every(column => column)
// Select unique values
results.invalidColumns = [...new Set(results.invalidColumns)]
return results
}
export function parse(rows: Rows, schema: Schema): Rows {
return rows.map(row => {
const parsedRow: Row = {}
Object.entries(row).forEach(([columnName, columnData]) => {
if (!(columnName in schema) || schema[columnName]?.autocolumn) {
// Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
return
}
const columnType = schema[columnName].type
if (columnType === FieldTypes.NUMBER) {
// If provided must be a valid number
parsedRow[columnName] = columnData ? Number(columnData) : columnData
} else if (columnType === FieldTypes.DATETIME) {
// If provided must be a valid date
parsedRow[columnName] = columnData
? new Date(columnData).toISOString()
: columnData
} else {
parsedRow[columnName] = columnData
}
})
return parsedRow
})
}
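A small usage sketch of the two helpers above, showing the coercion parse() applies (numbers via Number(), dates to ISO strings) once validate() has passed; the column names and import paths are illustrative:

import { validate, parse } from "../../../utilities/schema"
import { FieldTypes } from "../constants"

const schema = {
  age: { name: "age", type: FieldTypes.NUMBER },
  dob: { name: "dob", type: FieldTypes.DATETIME },
}

const rows = [{ age: "42", dob: "2023-01-17", extra: "dropped" }]

const results = validate(rows, schema)
// results.allValid === true, results.invalidColumns === ["extra"]

const parsed = parse(rows, schema)
// parsed === [{ age: 42, dob: "2023-01-17T00:00:00.000Z" }]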

View File

@ -1,15 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`CSV Parser transformation transforms a CSV file into JSON 1`] = `
Array [
Object {
"Age": 4324,
},
Object {
"Age": 34,
},
Object {
"Age": 23423,
},
]
`;

View File

@ -1,112 +0,0 @@
const { readFileSync } = require("../fileSystem")
const csvParser = require("../csvParser")
const CSV_PATH = __dirname + "/test.csv"
const SCHEMAS = {
VALID: {
Age: {
type: "number",
},
},
INVALID: {
Address: {
type: "number",
},
Age: {
type: "number",
},
},
IGNORE: {
Address: {
type: "omit",
},
Age: {
type: "omit",
},
Name: {
type: "string",
},
},
BROKEN: {
Address: {
type: "datetime",
},
},
}
describe("CSV Parser", () => {
const csvString = readFileSync(CSV_PATH, "utf8")
describe("parsing", () => {
it("returns status and types for a valid CSV transformation", async () => {
expect(await csvParser.parse(csvString, SCHEMAS.VALID)).toEqual({
Address: {
success: true,
type: "string",
},
Age: {
success: true,
type: "number",
},
Name: {
success: true,
type: "string",
},
})
})
it("returns status and types for an invalid CSV transformation", async () => {
expect(await csvParser.parse(csvString, SCHEMAS.INVALID)).toEqual({
Address: {
success: false,
type: "number",
},
Age: {
success: true,
type: "number",
},
Name: {
success: true,
type: "string",
},
})
})
})
describe("transformation", () => {
it("transforms a CSV file into JSON", async () => {
expect(
await csvParser.transform({
schema: SCHEMAS.VALID,
csvString,
})
).toMatchSnapshot()
})
it("transforms a CSV file into JSON ignoring certain fields", async () => {
expect(
await csvParser.transform({
schema: SCHEMAS.IGNORE,
csvString,
})
).toEqual([
{
Name: "Bertå",
},
{
Name: "Ernie",
},
{
Name: "Big Bird",
},
])
})
it("throws an error on invalid schema", async () => {
await expect(
csvParser.transform({ schema: SCHEMAS.BROKEN, csvString })
).rejects.toThrow()
})
})
})

View File

@ -69,7 +69,7 @@ export interface Table extends Document {
constrained?: string[] constrained?: string[]
sql?: boolean sql?: boolean
indexes?: { [key: string]: any } indexes?: { [key: string]: any }
dataImport?: { [key: string]: any } rows?: { [key: string]: any }
} }
export interface TableRequest extends Table { export interface TableRequest extends Table {

View File

@ -185,6 +185,4 @@ export interface BaseEvent {
hosting?: Hosting hosting?: Hosting
} }
export type RowImportFormat = "csv"
export type TableExportFormat = "json" | "csv" export type TableExportFormat = "json" | "csv"
export type TableImportFormat = "csv"

View File

@ -1,8 +1,7 @@
import { BaseEvent, RowImportFormat } from "./event" import { BaseEvent } from "./event"
export interface RowsImportedEvent extends BaseEvent { export interface RowsImportedEvent extends BaseEvent {
tableId: string tableId: string
format: RowImportFormat
count: number count: number
} }

View File

@ -1,4 +1,4 @@
import { BaseEvent, TableExportFormat, TableImportFormat } from "./event" import { BaseEvent, TableExportFormat } from "./event"
export interface TableCreatedEvent extends BaseEvent { export interface TableCreatedEvent extends BaseEvent {
tableId: string tableId: string
@ -19,5 +19,4 @@ export interface TableExportedEvent extends BaseEvent {
export interface TableImportedEvent extends BaseEvent { export interface TableImportedEvent extends BaseEvent {
tableId: string tableId: string
format: TableImportFormat
} }

View File

@ -6,10 +6,6 @@ export const generateTable = (): Table => {
schema: {}, schema: {},
sourceId: "bb_internal", sourceId: "bb_internal",
type: "internal", type: "internal",
dataImport: {
valid: true,
schema: {},
},
} }
} }