From 78554bdc7c1926306428f7d913787366f4929f1c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 31 Oct 2024 17:16:06 +0000 Subject: [PATCH 1/5] Fix flaky export test. --- packages/server/package.json | 1 + .../src/api/controllers/view/exporters.ts | 30 +++++++++------- .../server/src/api/routes/tests/row.spec.ts | 34 +++++++++++++++++++ packages/server/src/utilities/csv.ts | 6 ++-- 4 files changed, 56 insertions(+), 15 deletions(-) diff --git a/packages/server/package.json b/packages/server/package.json index 76dd03b5a8..da770a3ed9 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -75,6 +75,7 @@ "content-disposition": "^0.5.4", "cookies": "0.8.0", "csvtojson": "2.0.10", + "csv": "6.3.10", "curlconverter": "3.21.0", "dd-trace": "5.2.0", "dotenv": "8.2.0", diff --git a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts index 3269133d4b..87e4bfc056 100644 --- a/packages/server/src/api/controllers/view/exporters.ts +++ b/packages/server/src/api/controllers/view/exporters.ts @@ -9,29 +9,33 @@ function getHeaders( return headers.map(header => `"${customHeaders[header] || header}"`) } +function escapeCsvString(str: string) { + return str.replace(/"/g, '""') +} + export function csv( headers: string[], rows: Row[], delimiter: string = ",", customHeaders: { [key: string]: string } = {} ) { - let csv = getHeaders(headers, customHeaders).join(delimiter) + let csvRows = [getHeaders(headers, customHeaders)] for (let row of rows) { - csv = `${csv}\n${headers - .map(header => { - let val = row[header] - val = - typeof val === "object" && !(val instanceof Date) - ? `"${JSON.stringify(val).replace(/"/g, "'")}"` - : val !== undefined - ? 
`"${val}"` - : "" - return val.trim() + csvRows.push( + headers.map(header => { + const val = row[header] + if (typeof val === "object" && !(val instanceof Date)) { + return `"${JSON.stringify(val).replace(/"/g, "'")}"` + } + if (val !== undefined) { + return `"${escapeCsvString(val.toString())}"` + } + return "" }) - .join(delimiter)}` + ) } - return csv + return csvRows.map(row => row.join(delimiter)).join("\n") } export function json(rows: Row[]) { diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index cf3c87c430..252731bc16 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -2597,6 +2597,40 @@ describe.each([ }) }) + it("can handle csv-special characters in strings", async () => { + const badString = 'test":, wow", "test": "wow"' + const table = await config.api.table.save( + saveTableRequest({ + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + }, + }) + ) + + await config.api.row.save(table._id!, { string: badString }) + + const exportedValue = await config.api.row.exportRows( + table._id!, + { query: {} }, + RowExportFormat.CSV + ) + + const json = await config.api.table.csvToJson( + { + csvString: exportedValue, + }, + { + status: 200, + } + ) + + expect(json).toHaveLength(1) + expect(json[0].string).toEqual(badString) + }) + it("exported data can be re-imported", async () => { // export all const exportedValue = await config.api.row.exportRows( diff --git a/packages/server/src/utilities/csv.ts b/packages/server/src/utilities/csv.ts index 2fab1d11a4..43d712165a 100644 --- a/packages/server/src/utilities/csv.ts +++ b/packages/server/src/utilities/csv.ts @@ -5,8 +5,10 @@ export async function jsonFromCsvString(csvString: string) { csvString ) - // By default the csvtojson library casts empty values as empty strings. This is causing issues on conversion. 
- // ignoreEmpty will remove the key completly if empty, so creating this empty object will ensure we return the values with the keys but empty values
+ // By default the csvtojson library casts empty values as empty strings. This
+ // is causing issues on conversion. ignoreEmpty will remove the key completely
+ // if empty, so creating this empty object will ensure we return the values
+ // with the keys but empty values
 const result = await csv({ ignoreEmpty: false }).fromString(csvString)
 result.forEach((r, i) => {
 for (const [key] of Object.entries(r).filter(([, value]) => value === "")) {

From b4e758d62971b29c8d4de58273c73561e4e53ae3 Mon Sep 17 00:00:00 2001
From: Sam Rose
Date: Thu, 31 Oct 2024 17:23:51 +0000
Subject: [PATCH 2/5] No longer using the csv package.

---
 packages/server/package.json | 1 -
 1 file changed, 1 deletion(-)

diff --git a/packages/server/package.json b/packages/server/package.json
index da770a3ed9..76dd03b5a8 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -75,7 +75,6 @@
     "content-disposition": "^0.5.4",
     "cookies": "0.8.0",
     "csvtojson": "2.0.10",
-    "csv": "6.3.10",
     "curlconverter": "3.21.0",
     "dd-trace": "5.2.0",
     "dotenv": "8.2.0",

From c72ca658b9b949442ec0b0980a125e91ae63f486 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Fri, 1 Nov 2024 14:30:14 +0000
Subject: [PATCH 3/5] Fixing an issue with sample data being added to an app
 due to the changes to bulk import.
--- .../server/src/api/controllers/table/utils.ts | 7 +++---- .../src/api/routes/tests/application.spec.ts | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index 743cce410b..4ea11c7043 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -123,7 +123,7 @@ export async function importToRows( data: Row[], table: Table, userId?: string, - opts?: { keepCouchId: boolean } + opts?: { keepCouchId?: boolean; allowRelationships?: boolean } ) { const originalTable = table const finalData: Row[] = [] @@ -136,16 +136,15 @@ export async function importToRows( // We use a reference to table here and update it after input processing, // so that we can auto increment auto IDs in imported data properly - const processed = await inputProcessing(userId, table, row, { + row = await inputProcessing(userId, table, row, { noAutoRelationships: true, }) - row = processed // However here we must reference the original table, as we want to mutate // the real schema of the table passed in, not the clone used for // incrementing auto IDs for (const [fieldName, schema] of Object.entries(originalTable.schema)) { - if (schema.type === FieldType.LINK) { + if (schema.type === FieldType.LINK && data.find(row => row[fieldName])) { throw new HTTPError( `Can't bulk import relationship fields for internal databases, found value in field "${fieldName}"`, 400 diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 729f899379..6d85cdbda9 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -1,3 +1,5 @@ +import { DEFAULT_TABLES } from "../../../db/defaultData/datasource_bb_default" + jest.mock("../../../utilities/redis", () => ({ init: 
jest.fn(),
  getLocksById: () => {
@@ -447,4 +449,18 @@ describe("/applications", () => {
     expect(devLogs.data.length).toBe(0)
   })
 })
+
+  describe("POST /api/applications/:appId/sample", () => {
+    it("should be able to add sample data", async () => {
+      await config.api.application.addSampleData(config.getAppId())
+      for (let table of DEFAULT_TABLES) {
+        const res = await config.api.row.search(
+          table._id!,
+          { query: {} },
+          { status: 200 }
+        )
+        expect(res.rows.length).not.toEqual(0)
+      }
+    })
+  })
 })

From 23f2df938e2583770d1909a4affaa36139907a14 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Fri, 1 Nov 2024 14:31:33 +0000
Subject: [PATCH 4/5] Removing unnecessary statement.

---
 packages/server/src/api/controllers/table/utils.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts
index 4ea11c7043..04e77fbe62 100644
--- a/packages/server/src/api/controllers/table/utils.ts
+++ b/packages/server/src/api/controllers/table/utils.ts
@@ -123,7 +123,7 @@ export async function importToRows(
   data: Row[],
   table: Table,
   userId?: string,
-  opts?: { keepCouchId?: boolean; allowRelationships?: boolean }
+  opts?: { keepCouchId: boolean }
 ) {
   const originalTable = table
   const finalData: Row[] = []

From f9c98d8f908bbd05321456d06c82b402bfc63c61 Mon Sep 17 00:00:00 2001
From: Andrew Kingston
Date: Fri, 1 Nov 2024 14:53:08 +0000
Subject: [PATCH 5/5] Fix datasource renames being reverted when changing
 schema

---
 .../frontend-core/src/components/grid/lib/websocket.js | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/packages/frontend-core/src/components/grid/lib/websocket.js b/packages/frontend-core/src/components/grid/lib/websocket.js
index b0fd236989..e7b89ff58a 100644
--- a/packages/frontend-core/src/components/grid/lib/websocket.js
+++ b/packages/frontend-core/src/components/grid/lib/websocket.js
@@ -61,6 +61,15 @@ export const createGridWebsocket = context
=> {
      }
    }
  )
+  socket.on(
+    GridSocketEvent.DatasourceChange,
+    ({ datasource: newDatasource }) => {
+      // Listen for builder renames, as these aren't handled otherwise
+      if (newDatasource?.name !== get(definition).name) {
+        definition.set(newDatasource)
+      }
+    }
+  )

   // Change websocket connection when table changes
   datasource.subscribe(connectToDatasource)