diff --git a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts
index 946a1b346a..3269133d4b 100644
--- a/packages/server/src/api/controllers/view/exporters.ts
+++ b/packages/server/src/api/controllers/view/exporters.ts
@@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) {
 export function isFormat(format: any): format is RowExportFormat {
   return Object.values(RowExportFormat).includes(format as RowExportFormat)
 }
-
-export function parseCsvExport<T>(value: string) {
-  return JSON.parse(value) as T
-}
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index edceb925d6..69a6b981bb 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -33,6 +33,7 @@ import {
   UpdatedRowEventEmitter,
   TableSchema,
   JsonFieldSubType,
+  RowExportFormat,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import _, { merge } from "lodash"
@@ -1811,6 +1812,7 @@ describe.each([
       await config.api.row.exportRows(
         "1234567",
         { rows: [existing._id!] },
+        RowExportFormat.JSON,
         { status: 404 }
       )
     })
@@ -1849,6 +1851,202 @@ describe.each([
       const results = JSON.parse(res)
       expect(results.length).toEqual(3)
     })
+
+    describe("should allow exporting all column types", () => {
+      let tableId: string
+      let expectedRowData: Row
+
+      beforeAll(async () => {
+        const fullSchema = setup.structures.fullSchemaWithoutLinks({
+          allRequired: true,
+        })
+
+        const table = await config.api.table.save(
+          saveTableRequest({
+            ...setup.structures.basicTable(),
+            schema: fullSchema,
+            primary: ["string"],
+          })
+        )
+        tableId = table._id!
+
+        const rowValues: Record<string, any> = {
+          [FieldType.STRING]: generator.guid(),
+          [FieldType.LONGFORM]: generator.paragraph(),
+          [FieldType.OPTIONS]: "option 2",
+          [FieldType.ARRAY]: ["options 2", "options 4"],
+          [FieldType.NUMBER]: generator.natural(),
+          [FieldType.BOOLEAN]: generator.bool(),
+          [FieldType.DATETIME]: generator.date().toISOString(),
+          [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
+          [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
+          [FieldType.FORMULA]: undefined, // generated field
+          [FieldType.AUTO]: undefined, // generated field
+          [FieldType.JSON]: { name: generator.guid() },
+          [FieldType.INTERNAL]: generator.guid(),
+          [FieldType.BARCODEQR]: generator.guid(),
+          [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
+          [FieldType.BIGINT]: generator.integer().toString(),
+          [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
+          [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
+        }
+        const row = await config.api.row.save(table._id!, rowValues)
+        expectedRowData = {
+          _id: row._id,
+          [FieldType.STRING]: rowValues[FieldType.STRING],
+          [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
+          [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
+          [FieldType.ARRAY]: rowValues[FieldType.ARRAY],
+          [FieldType.NUMBER]: rowValues[FieldType.NUMBER],
+          [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
+          [FieldType.DATETIME]: rowValues[FieldType.DATETIME],
+          [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
+            (a: any) =>
+              expect.objectContaining({
+                ...a,
+                url: expect.any(String),
+              })
+          ),
+          [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
+            ...rowValues[FieldType.ATTACHMENT_SINGLE],
+            url: expect.any(String),
+          }),
+          [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
+          [FieldType.AUTO]: expect.any(Number),
+          [FieldType.JSON]: rowValues[FieldType.JSON],
+          [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
+          [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
+          [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
+            ...rowValues[FieldType.SIGNATURE_SINGLE],
+            url: expect.any(String),
+          }),
+          [FieldType.BIGINT]: rowValues[FieldType.BIGINT],
+          [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
+            expect.objectContaining
+          ),
+          [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
+            rowValues[FieldType.BB_REFERENCE_SINGLE]
+          ),
+        }
+      })
+
+      it("as csv", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.CSV
+        )
+
+        const jsonResult = await config.api.table.csvToJson({
+          csvString: exportedValue,
+        })
+
+        const stringified = (value: string) =>
+          JSON.stringify(value).replace(/"/g, "'")
+
+        const matchingObject = (key: string, value: any, isArray: boolean) => {
+          const objectMatcher = `{'${key}':'${value[key]}'.*?}`
+          if (isArray) {
+            return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`))
+          }
+          return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
+        }
+
+        expect(jsonResult).toEqual([
+          {
+            ...expectedRowData,
+            auto: expect.any(String),
+            array: stringified(expectedRowData["array"]),
+            attachment: matchingObject(
+              "key",
+              expectedRowData["attachment"][0].sample,
+              true
+            ),
+            attachment_single: matchingObject(
+              "key",
+              expectedRowData["attachment_single"].sample,
+              false
+            ),
+            boolean: stringified(expectedRowData["boolean"]),
+            json: stringified(expectedRowData["json"]),
+            number: stringified(expectedRowData["number"]),
+            signature_single: matchingObject(
+              "key",
+              expectedRowData["signature_single"].sample,
+              false
+            ),
+            bb_reference: matchingObject(
+              "_id",
+              expectedRowData["bb_reference"][0].sample,
+              true
+            ),
+            bb_reference_single: matchingObject(
+              "_id",
+              expectedRowData["bb_reference_single"].sample,
+              false
+            ),
+          },
+        ])
+      })
+
+      it("as json", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.JSON
+        )
+
+        const json = JSON.parse(exportedValue)
+        expect(json).toEqual([expectedRowData])
+      })
+
+      it("as json with schema", async () => {
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.JSON_WITH_SCHEMA
+        )
+
+        const json = JSON.parse(exportedValue)
+        expect(json).toEqual({
+          schema: expect.any(Object),
+          rows: [expectedRowData],
+        })
+      })
+
+      it("exported data can be re-imported", async () => {
+        // export all
+        const exportedValue = await config.api.row.exportRows(
+          tableId,
+          { query: {} },
+          RowExportFormat.CSV
+        )
+
+        // import all twice
+        const rows = await config.api.table.csvToJson({
+          csvString: exportedValue,
+        })
+        await config.api.row.bulkImport(tableId, {
+          rows,
+        })
+        await config.api.row.bulkImport(tableId, {
+          rows,
+        })
+
+        const { rows: allRows } = await config.api.row.search(tableId)
+
+        const expectedRow = {
+          ...expectedRowData,
+          _id: expect.any(String),
+          _rev: expect.any(String),
+          type: "row",
+          tableId: tableId,
+          createdAt: new Date().toISOString(),
+          updatedAt: new Date().toISOString(),
+        }
+        expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
+      })
+    })
   })
 
   let o2mTable: Table
diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts
index f383fed927..077302f2b7 100644
--- a/packages/server/src/api/routes/tests/table.spec.ts
+++ b/packages/server/src/api/routes/tests/table.spec.ts
@@ -17,8 +17,10 @@ import {
   TableSchema,
   TableSourceType,
   User,
+  ValidateTableImportResponse,
   ViewCalculation,
   ViewV2Enriched,
+  RowExportFormat,
 } from "@budibase/types"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
@@ -1086,7 +1088,10 @@ describe.each([
     })
   })
 
-  describe("import validation", () => {
+  describe.each([
+    [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")],
+    [RowExportFormat.JSON, (val: any) => val],
+  ])("import validation (%s)", (_, userParser) => {
     const basicSchema: TableSchema = {
       id: {
         type: FieldType.NUMBER,
@@ -1098,9 +1103,41 @@ describe.each([
       },
     }
 
-    describe("validateNewTableImport", () => {
-      it("can validate basic imports", async () => {
-        const result = await config.api.table.validateNewTableImport(
+    const importCases: [
+      string,
+      (rows: Row[], schema: TableSchema) => Promise<ValidateTableImportResponse>
+    ][] = [
+      [
+        "validateNewTableImport",
+        async (rows: Row[], schema: TableSchema) => {
+          const result = await config.api.table.validateNewTableImport({
+            rows,
+            schema,
+          })
+          return result
+        },
+      ],
+      [
+        "validateExistingTableImport",
+        async (rows: Row[], schema: TableSchema) => {
+          const table = await config.api.table.save(
+            tableForDatasource(datasource, {
+              primary: ["id"],
+              schema,
+            })
+          )
+          const result = await config.api.table.validateExistingTableImport({
+            tableId: table._id,
+            rows,
+          })
+          return result
+        },
+      ],
+    ]
+
+    describe.each(importCases)("%s", (_, testDelegate) => {
+      it("validates basic imports", async () => {
+        const result = await testDelegate(
           [{ id: generator.natural(), name: generator.first() }],
           basicSchema
         )
@@ -1119,18 +1156,18 @@ describe.each([
       it.each(
         isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
       )("don't allow protected names in schema (%s)", async columnName => {
-        const result = await config.api.table.validateNewTableImport(
-          [
+        const result = await config.api.table.validateNewTableImport({
+          rows: [
             {
               id: generator.natural(),
               name: generator.first(),
               [columnName]: generator.word(),
             },
           ],
-          {
+          schema: {
             ...basicSchema,
-          }
-        )
+          },
+        })
 
         expect(result).toEqual({
           allValid: false,
@@ -1146,25 +1183,53 @@ describe.each([
         })
       })
 
+      it("does not allow imports without rows", async () => {
+        const result = await testDelegate([], basicSchema)
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {},
+        })
+      })
+
+      it("validates imports with some empty rows", async () => {
+        const result = await testDelegate(
+          [{}, { id: generator.natural(), name: generator.first() }, {}],
+          basicSchema
+        )
+
+        expect(result).toEqual({
+          allValid: true,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: true,
+          },
+        })
+      })
+
       isInternal &&
         it.each(
           isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS
        )("don't allow protected names in the rows (%s)", async columnName => {
-          const result = await config.api.table.validateNewTableImport(
-            [
+          const result = await config.api.table.validateNewTableImport({
+            rows: [
              {
                id: generator.natural(),
                name: generator.first(),
              },
            ],
-            {
+            schema: {
              ...basicSchema,
              [columnName]: {
                name: columnName,
                type: FieldType.STRING,
              },
-            }
-          )
+            },
+          })
 
          expect(result).toEqual({
            allValid: false,
@@ -1179,20 +1244,24 @@ describe.each([
            },
          })
        })
-    })
 
-    describe("validateExistingTableImport", () => {
-      it("can validate basic imports", async () => {
-        const table = await config.api.table.save(
-          tableForDatasource(datasource, {
-            primary: ["id"],
-            schema: basicSchema,
-          })
+      it("validates required fields and valid rows", async () => {
+        const schema: TableSchema = {
+          ...basicSchema,
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { presence: true },
+          },
+        }
+
+        const result = await testDelegate(
+          [
+            { id: generator.natural(), name: generator.first() },
+            { id: generator.natural(), name: generator.first() },
+          ],
+          schema
         )
-        const result = await config.api.table.validateExistingTableImport({
-          tableId: table._id,
-          rows: [{ id: generator.natural(), name: generator.first() }],
-        })
 
         expect(result).toEqual({
           allValid: true,
@@ -1205,6 +1274,154 @@ describe.each([
         })
       })
 
+      it("validates required fields and non-valid rows", async () => {
+        const schema: TableSchema = {
+          ...basicSchema,
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: { presence: true },
+          },
+        }
+
+        const result = await testDelegate(
+          [
+            { id: generator.natural(), name: generator.first() },
+            { id: generator.natural(), name: "" },
+          ],
+          schema
+        )
+
+        expect(result).toEqual({
+          allValid: false,
+          errors: {},
+          invalidColumns: [],
+          schemaValidation: {
+            id: true,
+            name: false,
+          },
+        })
+      })
+
+      describe("bb references", () => {
+        const getUserValues = () => ({
+          _id: docIds.generateGlobalUserID(),
+          primaryDisplay: generator.first(),
+          email: generator.email({}),
+        })
+
+        it("can validate user column imports", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE_SINGLE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+
+        it("can validate user column imports with invalid data", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE_SINGLE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: "no valid user data",
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: false,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: false,
+            },
+          })
+        })
+
+        it("can validate users column imports", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+              externalType: "array",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser([
+                  getUserValues(),
+                  getUserValues(),
+                  getUserValues(),
+                ]),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+      })
+    })
+
+    describe("validateExistingTableImport", () => {
       isInternal &&
         it("can reimport _id fields for internal tables", async () => {
           const table = await config.api.table.save(
diff --git a/packages/server/src/tests/utilities/api/row.ts b/packages/server/src/tests/utilities/api/row.ts
index 17d21f0996..c5614d69e7 100644
--- a/packages/server/src/tests/utilities/api/row.ts
+++ b/packages/server/src/tests/utilities/api/row.ts
@@ -11,6 +11,7 @@ import {
   DeleteRows,
   DeleteRow,
   PaginatedSearchRowResponse,
+  RowExportFormat,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
 
@@ -105,6 +106,7 @@ export class RowAPI extends TestAPI {
   exportRows = async (
     tableId: string,
     body: ExportRowsRequest,
+    format: RowExportFormat = RowExportFormat.JSON,
     expectations?: Expectations
   ) => {
     const response = await this._requestRaw(
@@ -112,7 +114,7 @@ export class RowAPI extends TestAPI {
       `/api/${tableId}/rows/exportRows`,
       {
         body,
-        query: { format: "json" },
+        query: { format },
         expectations,
       }
     )
diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts
index 9d4a92250a..baaf890b52 100644
--- a/packages/server/src/tests/utilities/api/table.ts
+++ b/packages/server/src/tests/utilities/api/table.ts
@@ -1,13 +1,14 @@
 import {
   BulkImportRequest,
   BulkImportResponse,
+  CsvToJsonRequest,
+  CsvToJsonResponse,
   MigrateRequest,
   MigrateResponse,
-  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
-  TableSchema,
+  ValidateNewTableImportRequest,
   ValidateTableImportRequest,
   ValidateTableImportResponse,
 } from "@budibase/types"
@@ -71,17 +72,13 @@ export class TableAPI extends TestAPI {
   }
 
   validateNewTableImport = async (
-    rows: Row[],
-    schema: TableSchema,
+    body: ValidateNewTableImportRequest,
     expectations?: Expectations
   ): Promise<ValidateTableImportResponse> => {
     return await this._post<ValidateTableImportResponse>(
       `/api/tables/validateNewTableImport`,
       {
-        body: {
-          rows,
-          schema,
-        },
+        body,
         expectations,
       }
     )
@@ -99,4 +96,14 @@ export class TableAPI extends TestAPI {
       }
     )
   }
+
+  csvToJson = async (
+    body: CsvToJsonRequest,
+    expectations?: Expectations
+  ): Promise<CsvToJsonResponse> => {
+    return await this._post<CsvToJsonResponse>(`/api/convert/csvToJson`, {
+      body,
+      expectations,
+    })
+  }
 }
diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts
index 8f67ad1af9..698f6d8236 100644
--- a/packages/server/src/tests/utilities/structures.ts
+++ b/packages/server/src/tests/utilities/structures.ts
@@ -26,6 +26,10 @@ import {
   WebhookActionType,
   AutomationEventType,
   LoopStepType,
+  FieldSchema,
+  BBReferenceFieldSubType,
+  JsonFieldSubType,
+  AutoFieldSubType,
 } from "@budibase/types"
 import { LoopInput } from "../../definitions/automations"
 import { merge } from "lodash"
@@ -573,3 +577,161 @@ export function basicEnvironmentVariable(
     development: dev || prod,
   }
 }
+
+export function fullSchemaWithoutLinks({
+  allRequired,
+}: {
+  allRequired?: boolean
+}) {
+  const schema: {
+    [type in Exclude<FieldType, FieldType.LINK>]: FieldSchema & { type: type }
+  } = {
+    [FieldType.STRING]: {
+      name: "string",
+      type: FieldType.STRING,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.LONGFORM]: {
+      name: "longform",
+      type: FieldType.LONGFORM,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.OPTIONS]: {
+      name: "options",
+      type: FieldType.OPTIONS,
+      constraints: {
+        presence: allRequired,
+        inclusion: ["option 1", "option 2", "option 3", "option 4"],
+      },
+    },
+    [FieldType.ARRAY]: {
+      name: "array",
+      type: FieldType.ARRAY,
+      constraints: {
+        presence: allRequired,
+        type: JsonFieldSubType.ARRAY,
+        inclusion: ["options 1", "options 2", "options 3", "options 4"],
+      },
+    },
+    [FieldType.NUMBER]: {
+      name: "number",
+      type: FieldType.NUMBER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BOOLEAN]: {
+      name: "boolean",
+      type: FieldType.BOOLEAN,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.DATETIME]: {
+      name: "datetime",
+      type: FieldType.DATETIME,
+      dateOnly: true,
+      timeOnly: false,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.FORMULA]: {
+      name: "formula",
+      type: FieldType.FORMULA,
+      formula: "any formula",
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BARCODEQR]: {
+      name: "barcodeqr",
+      type: FieldType.BARCODEQR,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BIGINT]: {
+      name: "bigint",
+      type: FieldType.BIGINT,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BB_REFERENCE]: {
+      name: "user",
+      type: FieldType.BB_REFERENCE,
+      subtype: BBReferenceFieldSubType.USER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.BB_REFERENCE_SINGLE]: {
+      name: "users",
+      type: FieldType.BB_REFERENCE_SINGLE,
+      subtype: BBReferenceFieldSubType.USER,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.ATTACHMENTS]: {
+      name: "attachments",
+      type: FieldType.ATTACHMENTS,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.ATTACHMENT_SINGLE]: {
+      name: "attachment_single",
+      type: FieldType.ATTACHMENT_SINGLE,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.AUTO]: {
+      name: "auto",
+      type: FieldType.AUTO,
+      subtype: AutoFieldSubType.AUTO_ID,
+      autocolumn: true,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.JSON]: {
+      name: "json",
+      type: FieldType.JSON,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.INTERNAL]: {
+      name: "internal",
+      type: FieldType.INTERNAL,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+    [FieldType.SIGNATURE_SINGLE]: {
+      name: "signature_single",
+      type: FieldType.SIGNATURE_SINGLE,
+      constraints: {
+        presence: allRequired,
+      },
+    },
+  }
+
+  return schema
+}
+export function basicAttachment() {
+  return {
+    key: generator.guid(),
+    name: generator.word(),
+    extension: generator.word(),
+    size: generator.natural(),
+    url: `/${generator.guid()}`,
+  }
+}
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index c6b26b55c8..b398285710 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -8,7 +8,6 @@ import {
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
-import { parseCsvExport } from "../api/controllers/view/exporters"
 
 type Rows = Array<Row>
 
@@ -85,7 +84,7 @@ export function validate(
         "Column names can't contain special characters"
     } else if (
       columnData == null &&
-      !schema[columnName].constraints?.presence
+      !helpers.schema.isRequired(constraints)
     ) {
       results.schemaValidation[columnName] = true
     } else if (
@@ -95,6 +94,12 @@ export function validate(
      isAutoColumn
    ) {
      return
+    } else if (
+      [FieldType.STRING].includes(columnType) &&
+      !columnData &&
+      helpers.schema.isRequired(constraints)
+    ) {
+      results.schemaValidation[columnName] = false
    } else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
      // If provided must be a valid number
      results.schemaValidation[columnName] = false
@@ -159,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
 
-      if (columnType === FieldType.NUMBER) {
+      if ([FieldType.NUMBER].includes(columnType)) {
         // If provided must be a valid number
         parsedRow[columnName] = columnData ? Number(columnData) : columnData
       } else if (
@@ -171,16 +176,23 @@ export function parse(rows: Rows, table: Table): Rows {
         parsedRow[columnName] = columnData
           ? new Date(columnData).toISOString()
           : columnData
+      } else if (
+        columnType === FieldType.JSON &&
+        typeof columnData === "string"
+      ) {
+        parsedRow[columnName] = parseJsonExport(columnData)
       } else if (columnType === FieldType.BB_REFERENCE) {
         let parsedValues: { _id: string }[] = columnData || []
-        if (columnData) {
-          parsedValues = parseCsvExport<{ _id: string }[]>(columnData)
+        if (columnData && typeof columnData === "string") {
+          parsedValues = parseJsonExport<{ _id: string }[]>(columnData)
         }
         parsedRow[columnName] = parsedValues?.map(u => u._id)
       } else if (columnType === FieldType.BB_REFERENCE_SINGLE) {
-        const parsedValue =
-          columnData && parseCsvExport<{ _id: string }>(columnData)
+        let parsedValue = columnData
+        if (columnData && typeof columnData === "string") {
+          parsedValue = parseJsonExport<{ _id: string }>(columnData)
+        }
         parsedRow[columnName] = parsedValue?._id
       } else if (
         (columnType === FieldType.ATTACHMENTS ||
@@ -188,7 +200,7 @@ export function parse(rows: Rows, table: Table): Rows {
           columnType === FieldType.SIGNATURE_SINGLE) &&
         typeof columnData === "string"
       ) {
-        parsedRow[columnName] = parseCsvExport(columnData)
+        parsedRow[columnName] = parseJsonExport(columnData)
       } else {
         parsedRow[columnName] = columnData
       }
@@ -204,32 +216,54 @@ function isValidBBReference(
   data: any,
   subtype: BBReferenceFieldSubType,
   isRequired: boolean
 ): boolean {
-  if (typeof data !== "string") {
+  try {
+    if (type === FieldType.BB_REFERENCE_SINGLE) {
+      if (!data) {
+        return !isRequired
+      }
+      const user = parseJsonExport<{ _id: string }>(data)
+      return db.isGlobalUserID(user._id)
+    }
+
+    switch (subtype) {
+      case BBReferenceFieldSubType.USER:
+      case BBReferenceFieldSubType.USERS: {
+        const userArray = parseJsonExport<{ _id: string }[]>(data)
+        if (!Array.isArray(userArray)) {
+          return false
+        }
+
+        const constainsWrongId = userArray.find(
+          user => !db.isGlobalUserID(user._id)
+        )
+        return !constainsWrongId
+      }
+      default:
+        throw utils.unreachable(subtype)
+    }
+  } catch {
     return false
   }
+}
 
-  if (type === FieldType.BB_REFERENCE_SINGLE) {
-    if (!data) {
-      return !isRequired
-    }
-    const user = parseCsvExport<{ _id: string }>(data)
-    return db.isGlobalUserID(user._id)
+function parseJsonExport<T>(value: any) {
+  if (typeof value !== "string") {
+    return value
   }
-
-  switch (subtype) {
-    case BBReferenceFieldSubType.USER:
-    case BBReferenceFieldSubType.USERS: {
-      const userArray = parseCsvExport<{ _id: string }[]>(data)
-      if (!Array.isArray(userArray)) {
-        return false
-      }
-
-      const constainsWrongId = userArray.find(
-        user => !db.isGlobalUserID(user._id)
-      )
-      return !constainsWrongId
+  try {
+    const parsed = JSON.parse(value)
+
+    return parsed as T
+  } catch (e: any) {
+    if (
+      e.message.startsWith("Expected property name or '}' in JSON at position ")
+    ) {
+      // This was probably converted as CSV and it has single quotes instead of double ones
+      const parsed = JSON.parse(value.replace(/'/g, '"'))
+      return parsed as T
    }
-    default:
-      throw utils.unreachable(subtype)
+
+    // It is not valid JSON
+    throw e
  }
 }