commit 2bfb72da2b
@@ -86,10 +86,12 @@ const contextToBindables = (tables, walkResult) => context => {
   }

   const newBindable = ([key, fieldSchema]) => {
-    // Replace link bindings with a new property representing the count
+    // Replace certain bindings with a new property to help display components
     let runtimeBoundKey = key
     if (fieldSchema.type === "link") {
      runtimeBoundKey = `${key}_count`
+    } else if (fieldSchema.type === "attachment") {
+      runtimeBoundKey = `${key}_first`
     }
     return {
       type: "context",
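Note: this hunk extends the builder's bindable-property generation so link columns expose a `<key>_count` binding and attachment columns a `<key>_first` binding. A minimal standalone sketch of the same rule; `runtimeKeyFor` is a hypothetical helper name for illustration, the real code inlines the branches as shown above:

    // Link columns bind to a "<key>_count" number, attachment columns to a
    // "<key>_first" URL; every other column binds to its own key.
    const runtimeKeyFor = (key, fieldSchema) => {
      if (fieldSchema.type === "link") return `${key}_count`
      if (fieldSchema.type === "attachment") return `${key}_first`
      return key
    }

    // e.g. runtimeKeyFor("photos", { type: "attachment" }) === "photos_first"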
@@ -233,7 +233,7 @@ const createLink = store => async (url, title) => {

     // Save page and regenerate all CSS because otherwise weird things happen
     nav._children = [...nav._children, newLink]
-    setCurrentPage("main")
+    state.currentPageName = "main"
     regenerateCssForScreen(state.pages.main)
     for (let screen of state.pages.main._screens) {
       regenerateCssForScreen(screen)
@@ -1,5 +1,5 @@
 <script>
-  import { Heading, Body, Button, Select } from "@budibase/bbui"
+  import { Heading, Body, Button, Select, Label } from "@budibase/bbui"
   import { notifier } from "builderStore/store/notifications"
   import { FIELDS } from "constants/backend"
   import api from "builderStore/api"
@@ -14,15 +14,17 @@
     schema: {},
   }

-  let parseResult
+  let csvString
+  let primaryDisplay
+  let schema = {}
+  let fields = []

-  $: schema = parseResult && parseResult.schema
-  $: valid =
-    !schema || Object.keys(schema).every(column => schema[column].success)
+  $: valid = !schema || fields.every(column => schema[column].success)
   $: dataImport = {
     valid,
     schema: buildTableSchema(schema),
-    path: files[0] && files[0].path,
+    csvString,
+    primaryDisplay,
   }

   function buildTableSchema(schema) {
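Note: the `$:` statements above are Svelte reactive declarations, so `valid` and `dataImport` recompute whenever `schema`, `fields`, `csvString`, or `primaryDisplay` change. The validity rule in plain JavaScript; `isValid` is a hypothetical name for illustration only:

    // A CSV is valid when every non-omitted column parsed successfully.
    const isValid = (schema, fields) =>
      !schema || fields.every(column => schema[column].success)

    // e.g. isValid({ Name: { success: true } }, ["Name"]) === true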
@@ -43,11 +45,20 @@

   async function validateCSV() {
     const response = await api.post("/api/tables/csv/validate", {
-      file: files[0],
+      csvString,
       schema: schema || {},
     })

-    parseResult = await response.json()
+    const parseResult = await response.json()
+    schema = parseResult && parseResult.schema
+    fields = Object.keys(schema || {}).filter(
+      key => schema[key].type !== "omit"
+    )
+
+    // Check primary display is valid
+    if (!primaryDisplay || fields.indexOf(primaryDisplay) === -1) {
+      primaryDisplay = fields[0]
+    }

     if (response.status !== 200) {
       notifier.danger("CSV Invalid, please try another CSV file")
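Note: from the usage above, the validate endpoint returns a per-column report, and columns typed `"omit"` are filtered out when building `fields`. An illustrative response shape implied by this code (example column names, not from the source):

    const exampleValidateResponse = {
      schema: {
        Name: { type: "string", success: true },
        Age: { type: "number", success: true },
        Notes: { type: "omit", success: true }, // dropped when building `fields`
      },
    }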
@@ -57,13 +68,7 @@

   async function handleFile(evt) {
     const fileArray = Array.from(evt.target.files)
-    const filesToProcess = fileArray.map(({ name, path, size }) => ({
-      name,
-      path,
-      size,
-    }))
-
-    if (filesToProcess.some(file => file.size >= FILE_SIZE_LIMIT)) {
+    if (fileArray.some(file => file.size >= FILE_SIZE_LIMIT)) {
       notifier.danger(
         `Files cannot exceed ${FILE_SIZE_LIMIT /
           BYTES_IN_MB}MB. Please try again with smaller files.`
@@ -71,9 +76,14 @@
       return
     }

-    files = filesToProcess
-
-    await validateCSV()
+    // Read CSV as plain text to upload alongside schema
+    let reader = new FileReader()
+    reader.addEventListener("load", function(e) {
+      csvString = e.target.result
+      files = fileArray
+      validateCSV()
+    })
+    reader.readAsBinaryString(fileArray[0])
   }

   async function omitColumn(columnName) {
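Note: the handler now reads the CSV client-side with the standard FileReader API and uploads only the resulting string, so the server never needs the file path. The same read wrapped in a Promise for clarity; `readFileAsString` is a hypothetical helper, the real code uses the callback form above:

    const readFileAsString = file =>
      new Promise((resolve, reject) => {
        const reader = new FileReader()
        reader.addEventListener("load", e => resolve(e.target.result))
        reader.addEventListener("error", () => reject(reader.error))
        reader.readAsBinaryString(file)
      })

    // usage: csvString = await readFileAsString(fileArray[0])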
@@ -94,8 +104,8 @@
     </label>
   </div>
   <div class="schema-fields">
-    {#if schema}
-      {#each Object.keys(schema).filter(key => schema[key].type !== 'omit') as columnName}
+    {#if fields.length}
+      {#each fields as columnName}
         <div class="field">
           <span>{columnName}</span>
           <Select
@@ -117,6 +127,16 @@
       {/each}
     {/if}
   </div>
+  {#if fields.length}
+    <div class="display-column">
+      <Label extraSmall grey>Display Column</Label>
+      <Select thin secondary bind:value={primaryDisplay}>
+        {#each fields as field}
+          <option value={field}>{field}</option>
+        {/each}
+      </Select>
+    </div>
+  {/if}

 <style>
   .dropzone {
@@ -188,4 +208,8 @@
     grid-gap: var(--spacing-m);
     font-size: var(--font-size-xs);
   }
+
+  .display-column {
+    margin-top: var(--spacing-xl);
+  }
 </style>
@@ -38,12 +38,19 @@
   }

   async function saveTable() {
-    // Create table
-    const table = await backendUiStore.actions.tables.save({
+    let newTable = {
       name,
       schema: dataImport.schema || {},
       dataImport,
-    })
+    }
+
+    // Only set primary display if defined
+    if (dataImport.primaryDisplay && dataImport.primaryDisplay.length) {
+      newTable.primaryDisplay = dataImport.primaryDisplay
+    }
+
+    // Create table
+    const table = await backendUiStore.actions.tables.save(newTable)
     notifier.success(`Table ${name} created successfully.`)
     analytics.captureEvent("Table Created", { name })
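Note: `saveTable` now builds the payload first so `primaryDisplay` can be attached conditionally before saving. An illustrative payload (example values, not from the source):

    const newTableExample = {
      name: "People",
      schema: { Name: { type: "string" } },
      dataImport: { valid: true, csvString: "Name\nBert", primaryDisplay: "Name" },
      primaryDisplay: "Name", // only present when the import defined one
    }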
@@ -135,6 +135,9 @@

   .toprightnav {
     display: flex;
+    flex-direction: row;
+    justify-content: flex-end;
+    align-items: center;
   }

   .topleftnav {
@@ -109,7 +109,7 @@ exports.save = async function(ctx) {
   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:save`, instanceId, tableToSave)

-  if (dataImport && dataImport.path) {
+  if (dataImport && dataImport.csvString) {
     // Populate the table with rows imported from CSV in a bulk update
     const data = await csvParser.transform(dataImport)

@@ -156,10 +156,7 @@ exports.destroy = async function(ctx) {
 }

 exports.validateCSVSchema = async function(ctx) {
-  const { file, schema = {} } = ctx.request.body
-  const result = await csvParser.parse(file.path, schema)
-  ctx.body = {
-    schema: result,
-    path: file.path,
-  }
+  const { csvString, schema = {} } = ctx.request.body
+  const result = await csvParser.parse(csvString, schema)
+  ctx.body = { schema: result }
 }
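Note: the endpoint now accepts raw CSV text instead of a server-side file path, so nothing touches disk before validation. A hedged fetch-based equivalent of the builder's `api.post` call shown earlier (example data only):

    async function validateCsvExample() {
      const res = await fetch("/api/tables/csv/validate", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ csvString: "Name\nBert", schema: {} }),
      })
      return (await res.json()).schema // per-column { type, success } report
    }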
@@ -11,8 +11,8 @@ const PARSERS = {
   datetime: attribute => new Date(attribute).toISOString(),
 }

-function parse(path, parsers) {
-  const result = csv().fromFile(path)
+function parse(csvString, parsers) {
+  const result = csv().fromString(csvString)

   const schema = {}

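Note: csvtojson can parse either a file or an in-memory string; this change switches `parse` to the string form. A small usage sketch:

    const csv = require("csvtojson")

    async function demo() {
      // fromString avoids a temp file; cell values arrive as strings by default.
      const rows = await csv().fromString("Name,Age\nBert,42")
      return rows // [{ Name: "Bert", Age: "42" }]
    }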
@@ -52,7 +52,7 @@ function parse(path, parsers) {
   })
 }

-async function transform({ schema, path }) {
+async function transform({ schema, csvString }) {
   const colParser = {}

   for (let key in schema) {
@@ -60,7 +60,7 @@ async function transform({ schema, path }) {
   }

   try {
-    const json = await csv({ colParser }).fromFile(path)
+    const json = await csv({ colParser }).fromString(csvString)
     return json
   } catch (err) {
     console.error(`Error transforming CSV to JSON for data import`, err)
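Note: `transform` feeds the schema into csvtojson's `colParser` option, which can coerce or drop columns: the string value `"omit"` removes a column, while a function transforms each cell. An illustrative sketch with example column names:

    const csv = require("csvtojson")

    async function transformDemo(csvString) {
      const colParser = {
        Age: cell => Number(cell), // coerce each cell to a number
        Secret: "omit",            // drop this column entirely
      }
      return csv({ colParser }).fromString(csvString)
    }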
@@ -1,6 +1,7 @@
-const csvParser = require("../csvParser");
+const fs = require("fs")
+const csvParser = require("../csvParser")

-const CSV_PATH = __dirname + "/test.csv";
+const CSV_PATH = __dirname + "/test.csv"

 const SCHEMAS = {
   VALID: {
@@ -27,16 +28,16 @@ const SCHEMAS = {
   BROKEN: {
     Address: {
       type: "datetime",
-    }
-  },
-};
+    },
+  },
+}

 describe("CSV Parser", () => {
+  const csvString = fs.readFileSync(CSV_PATH, "utf8")
+
   describe("parsing", () => {
     it("returns status and types for a valid CSV transformation", async () => {
-      expect(
-        await csvParser.parse(CSV_PATH, SCHEMAS.VALID)
-      ).toEqual({
+      expect(await csvParser.parse(csvString, SCHEMAS.VALID)).toEqual({
         Address: {
           success: true,
           type: "string",
@@ -49,13 +50,11 @@ describe("CSV Parser", () => {
           success: true,
           type: "string",
         },
-      });
-    });
+      })
+    })

     it("returns status and types for an invalid CSV transformation", async () => {
-      expect(
-        await csvParser.parse(CSV_PATH, SCHEMAS.INVALID)
-      ).toEqual({
+      expect(await csvParser.parse(csvString, SCHEMAS.INVALID)).toEqual({
         Address: {
           success: false,
           type: "number",
@@ -68,41 +67,43 @@ describe("CSV Parser", () => {
           success: true,
           type: "string",
         },
-      });
-    });
-  });
+      })
+    })
+  })

   describe("transformation", () => {
     it("transforms a CSV file into JSON", async () => {
       expect(
         await csvParser.transform({
           schema: SCHEMAS.VALID,
-          path: CSV_PATH,
+          csvString,
         })
-      ).toMatchSnapshot();
-    });
+      ).toMatchSnapshot()
+    })

     it("transforms a CSV file into JSON ignoring certain fields", async () => {
       expect(
         await csvParser.transform({
           schema: SCHEMAS.IGNORE,
-          path: CSV_PATH,
+          csvString,
         })
       ).toEqual([
         {
-          Name: "Bert"
+          Name: "Bert",
         },
         {
-          Name: "Ernie"
+          Name: "Ernie",
         },
         {
-          Name: "Big Bird"
-        }
-      ]);
-    });
+          Name: "Big Bird",
+        },
+      ])
+    })

     it("throws an error on invalid schema", async () => {
-      await expect(csvParser.transform({ schema: SCHEMAS.BROKEN, path: CSV_PATH })).rejects.toThrow()
-    });
-  });
-});
+      await expect(
+        csvParser.transform({ schema: SCHEMAS.BROKEN, csvString })
+      ).rejects.toThrow()
+    })
+  })
+})
@@ -51,8 +51,15 @@
   // Fetch table schema so we can check for linked rows
   const tableObj = await fetchTable(row.tableId)
   for (let key of Object.keys(tableObj.schema)) {
-    if (tableObj.schema[key].type === "link") {
+    const type = tableObj.schema[key].type
+    if (type === "link") {
       row[`${key}_count`] = Array.isArray(row[key]) ? row[key].length : 0
+    } else if (type === "attachment") {
+      let url = null
+      if (Array.isArray(row[key]) && row[key][0] != null) {
+        url = row[key][0].url
+      }
+      row[`${key}_first`] = url
     }
   }

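Note: the effect of this enrichment on a fetched row, with example data (hypothetical column names):

    const row = { friends: ["a", "b"], photo: [{ url: "/x.png" }] }
    row.friends_count = Array.isArray(row.friends) ? row.friends.length : 0
    row.photo_first =
      Array.isArray(row.photo) && row.photo[0] != null ? row.photo[0].url : null
    // row now also carries friends_count: 2 and photo_first: "/x.png"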
@@ -6,11 +6,11 @@ export default async function fetchData(datasource, store) {
   if (name) {
     let rows = []
     if (type === "table") {
-      rows = fetchTableData()
+      rows = await fetchTableData()
     } else if (type === "view") {
-      rows = fetchViewData()
+      rows = await fetchViewData()
     } else if (type === "link") {
-      rows = fetchLinkedRowsData()
+      rows = await fetchLinkedRowsData()
     }

     // Fetch table schema so we can check for linked rows
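Note: the added `await`s fix a real bug class: each fetch helper is async, so without `await` the assignment stores a pending Promise rather than the rows, and the enrichment loop in the next hunk would iterate nothing. A minimal illustration with a hypothetical helper:

    async function fetchRowsExample() {
      return [{ name: "Bert" }]
    }

    async function demo() {
      const wrong = fetchRowsExample()       // Promise { <pending> }, not rows
      const right = await fetchRowsExample() // [{ name: "Bert" }]
      return { wrong, right }
    }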
@@ -19,8 +19,15 @@ export default async function fetchData(datasource, store) {
     const keys = Object.keys(table.schema)
     rows.forEach(row => {
       for (let key of keys) {
-        if (table.schema[key].type === "link") {
+        const type = table.schema[key].type
+        if (type === "link") {
           row[`${key}_count`] = Array.isArray(row[key]) ? row[key].length : 0
+        } else if (type === "attachment") {
+          let url = null
+          if (Array.isArray(row[key]) && row[key][0] != null) {
+            url = row[key][0].url
+          }
+          row[`${key}_first`] = url
         }
       }
     })