Merge pull request #14302 from Budibase/BUDI-8396/user-column-csv-import-error

User column CSV import error
Adria Navarro 2024-08-02 14:20:30 +02:00, committed by GitHub
commit 88ba546710
7 changed files with 684 additions and 68 deletions
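For context (an editorial note, not part of the diff): exported CSVs serialize object-valued cells, such as user references, with single quotes, so re-importing them through a bare JSON.parse fails. A minimal sketch of the failure this PR addresses, using an illustrative user id:

  const csvCell = "{'_id':'us_abc123','primaryDisplay':'Jane'}"
  JSON.parse(csvCell)                    // throws: Expected property name or '}' in JSON at position 1
  JSON.parse(csvCell.replace(/'/g, '"')) // works: { _id: "us_abc123", primaryDisplay: "Jane" }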

View File

@@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
export function isFormat(format: any): format is RowExportFormat {
  return Object.values(RowExportFormat).includes(format as RowExportFormat)
}
-
-export function parseCsvExport<T>(value: string) {
-  return JSON.parse(value) as T
-}

View File

@@ -33,6 +33,7 @@ import {
  UpdatedRowEventEmitter,
  TableSchema,
  JsonFieldSubType,
+  RowExportFormat,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"

@@ -1811,6 +1812,7 @@ describe.each([
      await config.api.row.exportRows(
        "1234567",
        { rows: [existing._id!] },
+        RowExportFormat.JSON,
        { status: 404 }
      )
    })

@@ -1849,6 +1851,202 @@ describe.each([
      const results = JSON.parse(res)
      expect(results.length).toEqual(3)
    })
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString(),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) =>
JSON.stringify(value).replace(/"/g, "'")
const matchingObject = (key: string, value: any, isArray: boolean) => {
const objectMatcher = `{'${key}':'${value[key]}'.*?}`
if (isArray) {
return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
  })

  let o2mTable: Table
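A note on the CSV assertions above (an editorial aside, not part of the diff): csvToJson returns each object-valued cell as a single string, and the exporter writes those values with single quotes, which is why the test compares against the stringified and matchingObject helpers rather than parsed objects. Roughly, with an illustrative user id:

  const cell = "[{'_id':'us_abc123','primaryDisplay':'Jane'}]"
  new RegExp(`^\\[{'_id':'us_abc123'.*?}\\]$`).test(cell) // true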

View File

@@ -17,8 +17,10 @@ import {
  TableSchema,
  TableSourceType,
  User,
+  ValidateTableImportResponse,
  ViewCalculation,
  ViewV2Enriched,
+  RowExportFormat,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"

@@ -1086,7 +1088,10 @@ describe.each([
    })
  })

-  describe("import validation", () => {
+  describe.each([
+    [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
+    [RowExportFormat.JSON, (val: any) => val],
+  ])("import validation (%s)", (_, userParser) => {
    const basicSchema: TableSchema = {
      id: {
        type: FieldType.NUMBER,

@@ -1098,9 +1103,41 @@
      },
    }

-    describe("validateNewTableImport", () => {
-      it("can validate basic imports", async () => {
-        const result = await config.api.table.validateNewTableImport(
+    const importCases: [
+      string,
+      (rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
][] = [
[
"validateNewTableImport",
async (rows: Row[], schema: TableSchema) => {
const result = await config.api.table.validateNewTableImport({
rows,
schema,
})
return result
},
],
[
"validateExistingTableImport",
async (rows: Row[], schema: TableSchema) => {
const table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema,
})
)
const result = await config.api.table.validateExistingTableImport({
tableId: table._id,
rows,
})
return result
},
],
]
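    // Editorial note, not part of the diff: the userParser from the describe.each above
    // mimics how each format hands user-column data to the importer. The CSV case turns
    // an object into the single-quoted string found in exported CSV cells, e.g.
    //   userParser({ _id: "us_abc123" }) === "{'_id':'us_abc123'}"
    // while the JSON case passes the object through unchanged.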
describe.each(importCases)("%s", (_, testDelegate) => {
it("validates basic imports", async () => {
const result = await testDelegate(
          [{ id: generator.natural(), name: generator.first() }],
          basicSchema
        )

@@ -1119,18 +1156,18 @@
      it.each(
        isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
      )("don't allow protected names in schema (%s)", async columnName => {
-        const result = await config.api.table.validateNewTableImport(
-          [
+        const result = await config.api.table.validateNewTableImport({
+          rows: [
            {
              id: generator.natural(),
              name: generator.first(),
              [columnName]: generator.word(),
            },
          ],
-          {
+          schema: {
            ...basicSchema,
-          }
-        )
+          },
+        })

        expect(result).toEqual({
          allValid: false,

@@ -1146,25 +1183,53 @@
      })
    })
it("does not allow imports without rows", async () => {
const result = await testDelegate([], basicSchema)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {},
})
})
it("validates imports with some empty rows", async () => {
const result = await testDelegate(
[{}, { id: generator.natural(), name: generator.first() }, {}],
basicSchema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
},
})
})
      isInternal &&
        it.each(
          isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
        )("don't allow protected names in the rows (%s)", async columnName => {
-          const result = await config.api.table.validateNewTableImport(
-            [
+          const result = await config.api.table.validateNewTableImport({
+            rows: [
              {
                id: generator.natural(),
                name: generator.first(),
              },
            ],
-            {
+            schema: {
              ...basicSchema,
              [columnName]: {
                name: columnName,
                type: FieldType.STRING,
              },
-            }
-          )
+            },
+          })

          expect(result).toEqual({
            allValid: false,

@@ -1179,20 +1244,24 @@
          },
        })
      })
-  })
-
-  describe("validateExistingTableImport", () => {
-    it("can validate basic imports", async () => {
-      const table = await config.api.table.save(
-        tableForDatasource(datasource, {
-          primary: ["id"],
-          schema: basicSchema,
-        })
-      )
-      const result = await config.api.table.validateExistingTableImport({
-        tableId: table._id,
-        rows: [{ id: generator.natural(), name: generator.first() }],
-      })
+
+      it("validates required fields and valid rows", async () => {
+        const schema: TableSchema = {
+          ...basicSchema,
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { presence: true },
+          },
+        }
+
+        const result = await testDelegate(
+          [
+            { id: generator.natural(), name: generator.first() },
+            { id: generator.natural(), name: generator.first() },
+          ],
+          schema
+        )

        expect(result).toEqual({
          allValid: true,

@@ -1205,6 +1274,154 @@
        })
      })
it("validates required fields and non-valid rows", async () => {
const schema: TableSchema = {
...basicSchema,
name: {
type: FieldType.STRING,
name: "name",
constraints: { presence: true },
},
}
const result = await testDelegate(
[
{ id: generator.natural(), name: generator.first() },
{ id: generator.natural(), name: "" },
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: false,
},
})
})
describe("bb references", () => {
const getUserValues = () => ({
_id: docIds.generateGlobalUserID(),
primaryDisplay: generator.first(),
email: generator.email({}),
})
it("can validate user column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
it("can validate user column imports with invalid data", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser(getUserValues()),
},
{
id: generator.natural(),
name: generator.first(),
user: "no valid user data",
},
],
schema
)
expect(result).toEqual({
allValid: false,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: false,
},
})
})
it("can validate users column imports", async () => {
const schema: TableSchema = {
...basicSchema,
user: {
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
name: "user",
externalType: "array",
},
}
const result = await testDelegate(
[
{
id: generator.natural(),
name: generator.first(),
user: userParser([
getUserValues(),
getUserValues(),
getUserValues(),
]),
},
],
schema
)
expect(result).toEqual({
allValid: true,
errors: {},
invalidColumns: [],
schemaValidation: {
id: true,
name: true,
user: true,
},
})
})
})
})
describe("validateExistingTableImport", () => {
    isInternal &&
      it("can reimport _id fields for internal tables", async () => {
        const table = await config.api.table.save(

View File

@@ -11,6 +11,7 @@ import {
  DeleteRows,
  DeleteRow,
  PaginatedSearchRowResponse,
+  RowExportFormat,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"

@@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
  exportRows = async (
    tableId: string,
    body: ExportRowsRequest,
+    format: RowExportFormat = RowExportFormat.JSON,
    expectations?: Expectations
  ) => {
    const response = await this._requestRaw(

@@ -112,7 +114,7 @@ export class RowAPI extends TestAPI {
      `/api/${tableId}/rows/exportRows`,
      {
        body,
-        query: { format: "json" },
+        query: { format },
        expectations,
      }
    )
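A brief usage note (editorial, not part of the diff): the new format parameter defaults to RowExportFormat.JSON, so existing callers keep their behaviour, while the new round-trip tests opt into CSV explicitly:

  // default format, same behaviour as before this change
  const json = await config.api.row.exportRows(tableId, { query: {} })
  // explicit CSV export, as used by the new export/import tests
  const csv = await config.api.row.exportRows(tableId, { query: {} }, RowExportFormat.CSV)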

View File

@@ -1,13 +1,14 @@
import {
  BulkImportRequest,
  BulkImportResponse,
+  CsvToJsonRequest,
+  CsvToJsonResponse,
  MigrateRequest,
  MigrateResponse,
-  Row,
  SaveTableRequest,
  SaveTableResponse,
  Table,
-  TableSchema,
+  ValidateNewTableImportRequest,
  ValidateTableImportRequest,
  ValidateTableImportResponse,
} from "@budibase/types"

@@ -71,17 +72,13 @@
  }

  validateNewTableImport = async (
-    rows: Row[],
-    schema: TableSchema,
+    body: ValidateNewTableImportRequest,
    expectations?: Expectations
  ): Promise<ValidateTableImportResponse> => {
    return await this._post<ValidateTableImportResponse>(
      `/api/tables/validateNewTableImport`,
      {
-        body: {
-          rows,
-          schema,
-        },
+        body,
        expectations,
      }
    )

@@ -99,4 +96,14 @@
    }
  )
}
csvToJson = async (
body: CsvToJsonRequest,
expectations?: Expectations
): Promise<CsvToJsonResponse> => {
return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
body,
expectations,
})
}
}

View File

@@ -26,6 +26,10 @@ import {
  WebhookActionType,
  AutomationEventType,
  LoopStepType,
+  FieldSchema,
+  BBReferenceFieldSubType,
+  JsonFieldSubType,
+  AutoFieldSubType,
} from "@budibase/types"
import { LoopInput } from "../../definitions/automations"
import { merge } from "lodash"

@@ -573,3 +577,161 @@ export function basicEnvironmentVariable(
    development: dev || prod,
  }
}
export function fullSchemaWithoutLinks({
allRequired,
}: {
allRequired?: boolean
}) {
const schema: {
[type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: allRequired,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
constraints: {
presence: allRequired,
},
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: allRequired,
inclusion: ["option 1", "option 2", "option 3", "option 4"],
},
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
constraints: {
presence: allRequired,
type: JsonFieldSubType.ARRAY,
inclusion: ["options 1", "options 2", "options 3", "options 4"],
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
constraints: {
presence: allRequired,
},
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
constraints: {
presence: allRequired,
},
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
constraints: {
presence: allRequired,
},
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
constraints: {
presence: allRequired,
},
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
constraints: {
presence: allRequired,
},
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE]: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.BB_REFERENCE_SINGLE]: {
name: "users",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENTS]: {
name: "attachments",
type: FieldType.ATTACHMENTS,
constraints: {
presence: allRequired,
},
},
[FieldType.ATTACHMENT_SINGLE]: {
name: "attachment_single",
type: FieldType.ATTACHMENT_SINGLE,
constraints: {
presence: allRequired,
},
},
[FieldType.AUTO]: {
name: "auto",
type: FieldType.AUTO,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
presence: allRequired,
},
},
[FieldType.JSON]: {
name: "json",
type: FieldType.JSON,
constraints: {
presence: allRequired,
},
},
[FieldType.INTERNAL]: {
name: "internal",
type: FieldType.INTERNAL,
constraints: {
presence: allRequired,
},
},
[FieldType.SIGNATURE_SINGLE]: {
name: "signature_single",
type: FieldType.SIGNATURE_SINGLE,
constraints: {
presence: allRequired,
},
},
}
return schema
}
export function basicAttachment() {
return {
key: generator.guid(),
name: generator.word(),
extension: generator.word(),
size: generator.natural(),
url: `/${generator.guid()}`,
}
}

View File

@@ -8,7 +8,6 @@ import {
} from "@budibase/types"
import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
import { db } from "@budibase/backend-core"
-import { parseCsvExport } from "../api/controllers/view/exporters"

type Rows = Array<Row>

@@ -85,7 +84,7 @@
        "Column names can't contain special characters"
    } else if (
      columnData == null &&
-      !schema[columnName].constraints?.presence
+      !helpers.schema.isRequired(constraints)
    ) {
      results.schemaValidation[columnName] = true
    } else if (

@@ -95,6 +94,12 @@
      isAutoColumn
    ) {
      return
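      // (Editorial comment, not in the original diff) The next branch is new: an empty
      // value in a required string column now marks that column as failing validation
      // instead of falling through to the generic checks below.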
} else if (
[FieldType.STRING].includes(columnType) &&
!columnData &&
helpers.schema.isRequired(constraints)
) {
results.schemaValidation[columnName] = false
    } else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
      // If provided must be a valid number
      results.schemaValidation[columnName] = false

@@ -159,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
    const columnSchema = schema[columnName]
    const { type: columnType } = columnSchema
-    if (columnType === FieldType.NUMBER) {
+    if ([FieldType.NUMBER].includes(columnType)) {
      // If provided must be a valid number
      parsedRow[columnName] = columnData ? Number(columnData) : columnData
    } else if (

@@ -171,16 +176,23 @@
      parsedRow[columnName] = columnData
        ? new Date(columnData).toISOString()
        : columnData
} else if (
columnType === FieldType.JSON &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseJsonExport(columnData)
    } else if (columnType === FieldType.BB_REFERENCE) {
      let parsedValues: { _id: string }[] = columnData || []
-      if (columnData) {
-        parsedValues = parseCsvExport<{ _id: string }[]>(columnData)
+      if (columnData && typeof columnData === "string") {
+        parsedValues = parseJsonExport<{ _id: string }[]>(columnData)
      }

      parsedRow[columnName] = parsedValues?.map(u => u._id)
    } else if (columnType === FieldType.BB_REFERENCE_SINGLE) {
-      const parsedValue =
-        columnData && parseCsvExport<{ _id: string }>(columnData)
+      let parsedValue = columnData
+      if (columnData && typeof columnData === "string") {
+        parsedValue = parseJsonExport<{ _id: string }>(columnData)
+      }

      parsedRow[columnName] = parsedValue?._id
    } else if (
      (columnType === FieldType.ATTACHMENTS ||

@@ -188,7 +200,7 @@
        columnType === FieldType.SIGNATURE_SINGLE) &&
      typeof columnData === "string"
    ) {
-      parsedRow[columnName] = parseCsvExport(columnData)
+      parsedRow[columnName] = parseJsonExport(columnData)
    } else {
      parsedRow[columnName] = columnData
    }
@@ -204,32 +216,54 @@
  subtype: BBReferenceFieldSubType,
  isRequired: boolean
): boolean {
-  if (typeof data !== "string") {
-    return false
-  }
-
-  if (type === FieldType.BB_REFERENCE_SINGLE) {
-    if (!data) {
-      return !isRequired
-    }
-    const user = parseCsvExport<{ _id: string }>(data)
-    return db.isGlobalUserID(user._id)
-  }
-
-  switch (subtype) {
-    case BBReferenceFieldSubType.USER:
-    case BBReferenceFieldSubType.USERS: {
-      const userArray = parseCsvExport<{ _id: string }[]>(data)
-      if (!Array.isArray(userArray)) {
-        return false
-      }
-
-      const constainsWrongId = userArray.find(
-        user => !db.isGlobalUserID(user._id)
-      )
-      return !constainsWrongId
-    }
-    default:
-      throw utils.unreachable(subtype)
-  }
-}
+  try {
+    if (type === FieldType.BB_REFERENCE_SINGLE) {
+      if (!data) {
+        return !isRequired
+      }
+      const user = parseJsonExport<{ _id: string }>(data)
+      return db.isGlobalUserID(user._id)
+    }
+
+    switch (subtype) {
+      case BBReferenceFieldSubType.USER:
+      case BBReferenceFieldSubType.USERS: {
+        const userArray = parseJsonExport<{ _id: string }[]>(data)
+        if (!Array.isArray(userArray)) {
+          return false
+        }
+
+        const constainsWrongId = userArray.find(
+          user => !db.isGlobalUserID(user._id)
+        )
+        return !constainsWrongId
+      }
+      default:
+        throw utils.unreachable(subtype)
+    }
+  } catch {
+    return false
+  }
+}
+
+function parseJsonExport<T>(value: any) {
+  if (typeof value !== "string") {
+    return value
+  }
+  try {
+    const parsed = JSON.parse(value)
+    return parsed as T
+  } catch (e: any) {
+    if (
+      e.message.startsWith("Expected property name or '}' in JSON at position ")
+    ) {
+      // This was probably exported as CSV, where double quotes are replaced with single quotes
+      const parsed = JSON.parse(value.replace(/'/g, '"'))
+      return parsed as T
+    }
+    // The value is not valid JSON
+    throw e
+  }
+}
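To make the new parser's behaviour concrete, an editorial sketch (values are illustrative; the single-quote repair relies on V8's "Expected property name" message, which the catch branch checks before retrying):

  parseJsonExport<{ _id: string }>({ _id: "us_abc123" })   // non-strings pass through untouched
  parseJsonExport<{ _id: string }>('{"_id":"us_abc123"}')  // plain JSON parses as before
  parseJsonExport<{ _id: string }>("{'_id':'us_abc123'}")  // CSV-style single quotes are repaired
  parseJsonExport<{ _id: string }>("not json at all")      // still throws, surfacing the original error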