Merge pull request #10482 from Budibase/fix/inclusion-row-parsing

Fixes inclusion constraint parsing for array and options columns during row import: imported string values are now parsed into real values and merged into each column's inclusion constraint.
Authored by deanhannigan on 2023-05-12 15:18:49 +01:00, committed by GitHub
commit 411b03162b
10 changed files with 182 additions and 41 deletions
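At a high level, the fix has two parts: string values for array columns are parsed into real arrays on the way in, and imported values are merged into the column's inclusion constraint so the schema stays valid. Below is a minimal sketch of the parsing half, adapted from the parseArrayString helper added further down in this diff; console.warn stands in for the backend-core logger, so this is not the exact Budibase code.

// Sketch of the parsing behaviour for array values coming from imports:
// "" -> [], "['One','Two']" -> ["One", "Two"]; non-strings are returned untouched.
const parseArrayString = (value: unknown): unknown => {
  if (typeof value !== "string") {
    return value
  }
  if (value === "") {
    return []
  }
  try {
    // Imported values may use single quotes, so normalise to valid JSON first.
    return JSON.parse(value.replace(/'/g, '"'))
  } catch (e) {
    console.warn("Could not parse row value", e)
    return value
  }
}

console.log(parseArrayString("['One']")) // ["One"]
console.log(parseArrayString("[]"))      // []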

View File

@@ -22,6 +22,7 @@
export let rowCount
export let disableSorting = false
export let customPlaceholder = false
export let allowEditing = true
const dispatch = createEventDispatcher()
@@ -109,6 +110,7 @@
{rowCount}
{disableSorting}
{customPlaceholder}
allowEditRows={allowEditing}
showAutoColumns={!hideAutocolumns}
on:clickrelationship={e => selectRelationship(e.detail)}
on:sort

View File

@@ -58,6 +58,7 @@
{loading}
{type}
rowCount={10}
allowEditing={false}
bind:hideAutocolumns
>
<ViewFilterButton {view} />

View File

@@ -97,6 +97,7 @@ export async function bulkImport(ctx: UserCtx) {
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to
// think about events for bulk items
ctx.status = 200
ctx.body = { message: `Bulk rows created.` }
}

View File

@@ -184,8 +184,13 @@ export async function destroy(ctx: any) {
}
export async function bulkImport(ctx: any) {
const db = context.getAppDB()
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows } = ctx.request.body
await handleDataImport(ctx.user, table, rows)
// Ensure auto id and other table updates are persisted
await db.put(table)
return table
}

View File

@@ -129,17 +129,17 @@ export function importToRows(
// the real schema of the table passed in, not the clone used for
// incrementing auto IDs
for (const [fieldName, schema] of Object.entries(originalTable.schema)) {
const rowVal = Array.isArray(row[fieldName])
? row[fieldName]
: [row[fieldName]]
if (
(schema.type === FieldTypes.OPTIONS ||
schema.type === FieldTypes.ARRAY) &&
row[fieldName] &&
(!schema.constraints!.inclusion ||
schema.constraints!.inclusion.indexOf(row[fieldName]) === -1)
row[fieldName]
) {
schema.constraints!.inclusion = [
...schema.constraints!.inclusion!,
row[fieldName],
]
let merged = [...schema.constraints!.inclusion!, ...rowVal]
let superSet = new Set(merged)
schema.constraints!.inclusion = Array.from(superSet)
schema.constraints!.inclusion.sort()
}
}
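For reference, here is a worked example of the merge above, matching the expectations in the tests later in this diff. The helper name mergeInclusion is hypothetical; importToRows performs this inline.

// Standalone version of the inclusion merge performed inside importToRows.
const mergeInclusion = (inclusion: string[], rowVal: unknown): string[] => {
  const values = Array.isArray(rowVal) ? rowVal : [rowVal as string]
  // Union the existing inclusion with the imported values, de-duplicate via a Set, then sort.
  return Array.from(new Set([...inclusion, ...values])).sort()
}

console.log(mergeInclusion(["One", "Two", "Three"], ["Two", "Four"]))
// ["Four", "One", "Three", "Two"]
console.log(mergeInclusion(["Alpha", "Beta", "Gamma"], "Omega"))
// ["Alpha", "Beta", "Gamma", "Omega"]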

View File

@@ -73,18 +73,97 @@ describe("run misc tests", () => {
type: "string",
},
},
e: {
name: "Auto ID",
type: "number",
subtype: "autoID",
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: false,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
f: {
type: "array",
constraints: {
type: "array",
presence: {
"allowEmpty": true
},
inclusion: [
"One",
"Two",
"Three",
]
},
name: "Sample Tags",
sortable: false
},
g: {
type: "options",
constraints: {
type: "string",
presence: false,
inclusion: [
"Alpha",
"Beta",
"Gamma"
]
},
name: "Sample Opts"
}
},
})
// Shift specific row tests to the row spec
await tableUtils.handleDataImport(
{ userId: "test" },
table,
[{ a: '1', b: '2', c: '3', d: '4'}]
[
{ a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" },
{ a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined},
{ a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""},
{ a: '13', b: '14', c: '15', d: '16', g: "Omega"}
]
)
// 4 rows imported, the auto ID starts at 1
// We expect the handleDataImport function to update the lastID
expect(table.schema.e.lastID).toEqual(4);
// Array/Multi - should have added a new value to the inclusion.
expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']);
// Options - should have a new value in the inclusion
expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']);
const rows = await config.getRows()
expect(rows[0].a).toEqual("1")
expect(rows[0].b).toEqual("2")
expect(rows[0].c).toEqual("3")
expect(rows.length).toEqual(4);
const rowOne = rows.find(row => row.e === 1)
expect(rowOne.a).toEqual("1")
expect(rowOne.f).toEqual(['One'])
expect(rowOne.g).toEqual('Alpha')
const rowTwo = rows.find(row => row.e === 2)
expect(rowTwo.a).toEqual("5")
expect(rowTwo.f).toEqual([])
expect(rowTwo.g).toEqual(undefined)
const rowThree = rows.find(row => row.e === 3)
expect(rowThree.a).toEqual("9")
expect(rowThree.f).toEqual(['Two','Four'])
expect(rowThree.g).toEqual(null)
const rowFour = rows.find(row => row.e === 4)
expect(rowFour.a).toEqual("13")
expect(rowFour.f).toEqual(undefined)
expect(rowFour.g).toEqual('Omega')
})
})
})

View File

@@ -34,9 +34,9 @@ describe("/rows", () => {
row = basicRow(table._id)
})
const loadRow = async (id, status = 200) =>
const loadRow = async (id, tbl_Id, status = 200) =>
await request
.get(`/api/${table._id}/rows/${id}`)
.get(`/api/${tbl_Id}/rows/${id}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(status)
@@ -182,8 +182,32 @@ describe("/rows", () => {
type: "string",
presence: false,
datetime: { earliest: "", latest: "" },
},
}
}
const arrayField = {
type: "array",
constraints: {
type: "array",
presence: false,
inclusion: [
"One",
"Two",
"Three",
]
},
name: "Sample Tags",
sortable: false
}
const optsField = {
fieldName: "Sample Opts",
name: "Sample Opts",
type: "options",
constraints: {
type: "string",
presence: false,
inclusion: [ "Alpha", "Beta", "Gamma" ]
},
}
table = await config.createTable({
name: "TestTable2",
@@ -212,7 +236,15 @@ describe("/rows", () => {
attachmentNull: attachment,
attachmentUndefined: attachment,
attachmentEmpty: attachment,
attachmentEmptyArrayStr: attachment
attachmentEmptyArrayStr: attachment,
arrayFieldEmptyArrayStr: arrayField,
arrayFieldArrayStrKnown: arrayField,
arrayFieldNull: arrayField,
arrayFieldUndefined: arrayField,
optsFieldEmptyStr: optsField,
optsFieldUndefined: optsField,
optsFieldNull: optsField,
optsFieldStrKnown: optsField
},
})
@@ -241,11 +273,20 @@ describe("/rows", () => {
attachmentUndefined: undefined,
attachmentEmpty: "",
attachmentEmptyArrayStr: "[]",
arrayFieldEmptyArrayStr: "[]",
arrayFieldUndefined: undefined,
arrayFieldNull: null,
arrayFieldArrayStrKnown: "['One']",
optsFieldEmptyStr: "",
optsFieldUndefined: undefined,
optsFieldNull: null,
optsFieldStrKnown: 'Alpha'
}
const id = (await config.createRow(row))._id
const createdRow = await config.createRow(row);
const id = createdRow._id
const saved = (await loadRow(id)).body
const saved = (await loadRow(id, table._id)).body
expect(saved.stringUndefined).toBe(undefined)
expect(saved.stringNull).toBe("")
@@ -270,7 +311,15 @@ describe("/rows", () => {
expect(saved.attachmentNull).toEqual([])
expect(saved.attachmentUndefined).toBe(undefined)
expect(saved.attachmentEmpty).toEqual([])
expect(saved.attachmentEmptyArrayStr).toEqual([])
expect(saved.attachmentEmptyArrayStr).toEqual([])
expect(saved.arrayFieldEmptyArrayStr).toEqual([])
expect(saved.arrayFieldNull).toEqual([])
expect(saved.arrayFieldUndefined).toEqual(undefined)
expect(saved.optsFieldEmptyStr).toEqual(null)
expect(saved.optsFieldUndefined).toEqual(undefined)
expect(saved.optsFieldNull).toEqual(null)
expect(saved.arrayFieldArrayStrKnown).toEqual(['One'])
expect(saved.optsFieldStrKnown).toEqual('Alpha')
})
})
@@ -299,7 +348,7 @@ describe("/rows", () => {
expect(res.body.name).toEqual("Updated Name")
expect(res.body.description).toEqual(existing.description)
const savedRow = await loadRow(res.body._id)
const savedRow = await loadRow(res.body._id, table._id)
expect(savedRow.body.description).toEqual(existing.description)
expect(savedRow.body.name).toEqual("Updated Name")
@@ -401,7 +450,7 @@ describe("/rows", () => {
.expect(200)
expect(res.body.length).toEqual(2)
await loadRow(row1._id, 404)
await loadRow(row1._id, table._id, 404)
await assertRowUsage(rowUsage - 2)
await assertQueryUsage(queryUsage + 1)
})

View File

@@ -167,7 +167,10 @@ describe("/tables", () => {
expect(events.table.created).not.toHaveBeenCalled()
expect(events.rows.imported).toBeCalledTimes(1)
expect(events.rows.imported).toBeCalledWith(table, 1)
expect(events.rows.imported).toBeCalledWith(expect.objectContaining({
name: "TestTable",
_id: table._id
}), 1)
})
})

View File

@@ -137,8 +137,7 @@ export function inputProcessing(
opts?: AutoColumnProcessingOpts
) {
let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out
const copiedTable = cloneDeep(table)
const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
for (let [key, value] of Object.entries(clonedRow)) {
const field = table.schema[key]
@@ -175,7 +174,7 @@
}
// handle auto columns - this returns an object like {table, row}
return processAutoColumn(user, copiedTable, clonedRow, opts)
return processAutoColumn(user, table, clonedRow, opts)
}
/**

View File

@@ -2,6 +2,22 @@
import { FieldTypes } from "../../constants"
import { logging } from "@budibase/backend-core"
const parseArrayString = value => {
if (typeof value === "string") {
if (value === "") {
return []
}
let result
try {
result = JSON.parse(value.replace(/'/g, '"'))
return result
} catch (e) {
logging.logAlert("Could not parse row value", e)
}
}
return value
}
/**
* A map of how we convert various properties in rows to each other based on the row type.
*/
@@ -26,9 +42,9 @@ export const TYPE_TRANSFORM_MAP: any = {
[undefined]: undefined,
},
[FieldTypes.ARRAY]: {
"": [],
[null]: [],
[undefined]: undefined,
parse: parseArrayString,
},
[FieldTypes.STRING]: {
"": "",
@@ -70,21 +86,7 @@ export const TYPE_TRANSFORM_MAP: any = {
[FieldTypes.ATTACHMENT]: {
[null]: [],
[undefined]: undefined,
parse: attachments => {
if (typeof attachments === "string") {
if (attachments === "") {
return []
}
let result
try {
result = JSON.parse(attachments)
} catch (e) {
logging.logAlert("Could not parse attachments", e)
}
return result
}
return attachments
},
parse: parseArrayString,
},
[FieldTypes.BOOLEAN]: {
"": null,