From 1d695be77c1ff595018c6632255037ea4c17183c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:21:49 +0100 Subject: [PATCH 01/21] This PR includes a change to pagination which makes sure if the 5000 max row limit is hit that pagination still kicks in. This means that you can eventually return all rows, although for very large tables you may hit rate limits (if you have thousands of rows related to each row in your table). --- .../scripts/integrations/postgres/init.sql | 123 +++++------------- .../api/controllers/row/ExternalRequest.ts | 15 ++- .../src/api/controllers/row/external.ts | 2 +- .../src/sdk/app/rows/search/external.ts | 65 ++++----- .../server/src/sdk/app/rows/search/sqs.ts | 40 +++--- 5 files changed, 104 insertions(+), 141 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index 9624208deb..dce228dcfa 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -1,92 +1,35 @@ -SELECT 'CREATE DATABASE main' -WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec -CREATE SCHEMA "test-1"; -CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); -CREATE TABLE Persons ( - PersonID SERIAL PRIMARY KEY, - LastName varchar(255), - FirstName varchar(255), - Address varchar(255), - City varchar(255) DEFAULT 'Belfast', - Age INTEGER DEFAULT 20 NOT NULL, - Year INTEGER, - Type person_job +-- Create the first table +CREATE TABLE first_table ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + description TEXT ); -CREATE TABLE Tasks ( - TaskID SERIAL PRIMARY KEY, - ExecutorID INT, - QaID INT, - Completed BOOLEAN, - TaskName varchar(255), - CONSTRAINT fkexecutor - FOREIGN KEY(ExecutorID) - REFERENCES Persons(PersonID), - CONSTRAINT fkqa - FOREIGN KEY(QaID) - REFERENCES Persons(PersonID) -); -CREATE TABLE Products ( - ProductID SERIAL PRIMARY KEY, - ProductName varchar(255) -); -CREATE TABLE Products_Tasks ( - ProductID INT NOT NULL, - TaskID INT NOT NULL, - CONSTRAINT fkProducts - FOREIGN KEY(ProductID) - REFERENCES Products(ProductID), - CONSTRAINT fkTasks - FOREIGN KEY(TaskID) - REFERENCES Tasks(TaskID), - PRIMARY KEY (ProductID, TaskID) -); -CREATE TABLE "test-1".table1 ( - id SERIAL PRIMARY KEY, - Name varchar(255) -); -CREATE TABLE CompositeTable ( - KeyPartOne varchar(128), - KeyPartTwo varchar(128), - Name varchar(255), - PRIMARY KEY (KeyPartOne, KeyPartTwo) -); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa', 1999); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons 
(FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); --- insert a lot of tasks for testing -WITH RECURSIVE generate_series AS ( - SELECT 1 AS n - UNION ALL - SELECT n + 1 FROM generate_series WHERE n < 6000 -), -random_data AS ( - SELECT - n, - (random() * 9 + 1)::int AS ExecutorID, - (random() * 9 + 1)::int AS QaID, - 'assembling' AS TaskName, - (random() < 0.5) AS Completed - FROM generate_series -) -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) -SELECT ExecutorID, QaID, TaskName, Completed -FROM random_data; -INSERT INTO Products (ProductName) VALUES ('Computers'); -INSERT INTO Products (ProductName) VALUES ('Laptops'); -INSERT INTO Products (ProductName) VALUES ('Chairs'); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); -INSERT INTO "test-1".table1 (Name) VALUES ('Test'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); +-- Create the second table +CREATE TABLE second_table ( + id SERIAL PRIMARY KEY, + first_table_id INT REFERENCES first_table(id), + data TEXT NOT NULL +); + +-- Insert 50 rows into the first table +DO +$$ +BEGIN + FOR i IN 1..50 LOOP + INSERT INTO first_table (name, description) + VALUES ('Name ' || i, 'Description ' || i); + END LOOP; +END +$$; + +-- Insert 10,000 rows into the second table, all related to the first row in the first table +DO +$$ +BEGIN + FOR i IN 1..10000 LOOP + INSERT INTO second_table (first_table_id, data) + VALUES (1, 'Data ' || i); + END LOOP; +END +$$; diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 2ecdf9a4cb..6538e7347a 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -66,9 +66,14 @@ export interface RunConfig { includeSqlRelationships?: IncludeRelationship } +export type ExternalReadRequestReturnType = { + rows: Row[] + rawResponseSize: number +} + export type ExternalRequestReturnType = T extends Operation.READ - ? Row[] + ? ExternalReadRequestReturnType : T extends Operation.COUNT ? number : { row: Row; table: Table } @@ -741,9 +746,11 @@ export class ExternalRequest { ) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { - return ( - Array.isArray(output) ? output : [output] - ) as ExternalRequestReturnType + const rows = Array.isArray(output) ? 
output : [output] + return { + rows, + rawResponseSize: responseRows.length, + } as ExternalRequestReturnType } else { return { row: output[0], table } as ExternalRequestReturnType } diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 06013d230c..78fae7aad8 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { includeSqlRelationships: IncludeRelationship.INCLUDE, }) const table: Table = tables[tableName] - const row = response[0] + const row = response.rows[0] // this seems like a lot of work, but basically we need to dig deeper for the enrich // for a single row, there is probably a better way to do this with some smart multi-layer joins for (let [fieldName, field] of Object.entries(table.schema)) { diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index e7fd2888de..76f00a25e2 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -23,6 +23,7 @@ import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" import { isSearchingByRowID } from "./utils" +import { ExternalReadRequestReturnType } from "../../../../api/controllers/row/ExternalRequest" function getPaginationAndLimitParameters( filters: SearchFilters, @@ -47,7 +48,7 @@ function getPaginationAndLimitParameters( limit: limit + 1, } if (bookmark) { - paginateObj.offset = limit * bookmark + paginateObj.offset = bookmark } } else if (limit) { paginateObj = { @@ -105,37 +106,42 @@ export async function search( paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, } - const queries: Promise[] = [] - queries.push(handleRequest(Operation.READ, tableId, parameters)) + const queries: [ + Promise, + Promise | undefined + ] = [handleRequest(Operation.READ, tableId, parameters), undefined] if (countRows) { - queries.push(handleRequest(Operation.COUNT, tableId, parameters)) + queries[1] = handleRequest(Operation.COUNT, tableId, parameters) } const responses = await Promise.all(queries) - let rows = responses[0] as Row[] - const totalRows = - responses.length > 1 ? (responses[1] as number) : undefined + let rows = responses[0].rows + const rawResponseSize = responses[0].rawResponseSize + const totalRows = responses.length > 1 ? 
responses[1] : undefined - let hasNextPage = false - // remove the extra row if it's there - if (paginate && limit && rows.length > limit) { - rows.pop() - hasNextPage = true - } - - if (options.fields) { - const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] - rows = rows.map((r: any) => pick(r, fields)) - } - - rows = await outputProcessing(table, rows, { + let processed = await outputProcessing(table, rows, { preserveLinks: true, squash: true, }) + let hasNextPage = false + // if the raw rows is greater than the limit then we likely need to paginate + if (paginate && limit && rawResponseSize > limit) { + hasNextPage = true + // processed rows has merged relationships down, this might not be more than limit + if (processed.length > limit) { + processed.pop() + } + } + + if (options.fields) { + const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] + processed = processed.map((r: any) => pick(r, fields)) + } + // need wrapper object for bookmarks etc when paginating - const response: SearchResponse = { rows, hasNextPage } + const response: SearchResponse = { rows: processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = bookmark + 1 + response.bookmark = processed.length } if (totalRows != null) { response.totalRows = totalRows @@ -255,24 +261,21 @@ export async function exportRows( } export async function fetch(tableId: string): Promise { - const response = await handleRequest( - Operation.READ, - tableId, - { - includeSqlRelationships: IncludeRelationship.INCLUDE, - } - ) + const response = await handleRequest(Operation.READ, tableId, { + includeSqlRelationships: IncludeRelationship.INCLUDE, + }) const table = await sdk.tables.getTable(tableId) - return await outputProcessing(table, response, { + return await outputProcessing(table, response.rows, { preserveLinks: true, squash: true, }) } export async function fetchRaw(tableId: string): Promise { - return await handleRequest(Operation.READ, tableId, { + const response = await handleRequest(Operation.READ, tableId, { includeSqlRelationships: IncludeRelationship.INCLUDE, }) + return response.rows } export async function fetchView(viewName: string) { diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 5da2a7bcfb..2b3ed0c087 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -45,6 +45,7 @@ import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core" import { isSearchingByRowID } from "./utils" const builder = new sql.Sql(SqlClient.SQL_LITE) +const SQLITE_COLUMN_LIMIT = 2000 const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`) const MISSING_TABLE_REGX = new RegExp(`no such table: .+`) const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`) @@ -55,12 +56,14 @@ function buildInternalFieldList( opts?: { relationships?: RelationshipsJson[] } ) { let fieldList: string[] = [] - const addJunctionFields = (relatedTable: Table, fields: string[]) => { + const getJunctionFields = (relatedTable: Table, fields: string[]) => { + const junctionFields: string[] = [] fields.forEach(field => { - fieldList.push( + junctionFields.push( `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}` ) }) + return junctionFields } fieldList = fieldList.concat( PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`) @@ -70,18 +73,22 @@ function buildInternalFieldList( if (!opts?.relationships && isRelationship) { continue } - if 
(isRelationship) {
+    if (!isRelationship) {
+      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+    } else {
       const linkCol = col as RelationshipFieldMetadata
       const relatedTable = tables.find(table => table._id === linkCol.tableId)
-      // no relationships provided, don't go more than a layer deep
-      if (relatedTable) {
-        fieldList = fieldList.concat(
-          buildInternalFieldList(relatedTable, tables)
-        )
-        addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
+      if (!relatedTable) {
+        continue
       }
-    } else {
-      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+      const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
+        getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
+      )
+      // break out of the loop if we have reached the max number of columns
+      if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) {
+        break
+      }
+      fieldList = fieldList.concat(relatedFields)
     }
   }
   return [...new Set(fieldList)]
@@ -315,7 +322,7 @@ export async function search(
     paginate = true
     request.paginate = {
       limit: params.limit + 1,
-      offset: bookmark * params.limit,
+      offset: bookmark,
     }
   }

@@ -345,10 +352,13 @@ export async function search(
     )

     // check for pagination final row
-    let nextRow: Row | undefined
+    let nextRow: boolean = false
     if (paginate && params.limit && rows.length > params.limit) {
       // remove the extra row that confirmed if there is another row to move to
-      nextRow = processed.pop()
+      nextRow = true
+      if (processed.length > params.limit) {
+        processed.pop()
+      }
     }

     // get the rows
@@ -372,7 +382,7 @@ export async function search(
     // check for pagination
     if (paginate && nextRow) {
       response.hasNextPage = true
-      response.bookmark = bookmark + 1
+      response.bookmark = processed.length
     }
     if (paginate && !nextRow) {
       response.hasNextPage = false

From a2f11f17fd2947619ba488c6652f3d398be14776 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Wed, 31 Jul 2024 16:31:02 +0100
Subject: [PATCH 02/21] Type fix.

---
 packages/server/src/sdk/app/rows/external.ts | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/packages/server/src/sdk/app/rows/external.ts b/packages/server/src/sdk/app/rows/external.ts
index 9ab1362606..f81d67f621 100644
--- a/packages/server/src/sdk/app/rows/external.ts
+++ b/packages/server/src/sdk/app/rows/external.ts
@@ -21,7 +21,8 @@ export async function getRow(
       ? IncludeRelationship.INCLUDE
       : IncludeRelationship.EXCLUDE,
   })
-  return response ? response[0] : response
+  const rows = response?.rows || []
+  return rows[0]
 }

 export async function save(

From de22a078c066da363095fcdc148f67dc5b4b8365 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Wed, 31 Jul 2024 16:50:20 +0100
Subject: [PATCH 03/21] Adding bookmark to each subsequent page (thanks tests).
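
The bookmark is now an absolute row offset rather than a page number, so
each response returns the previous bookmark plus the number of rows in the
current page. A rough sketch of the paging loop a caller would run (helper
names and parameters are illustrative, not the exact API):

    let bookmark = 0
    const all: Row[] = []
    while (true) {
      const page = await search({ tableId, query: {}, paginate: true, limit: 3, bookmark })
      all.push(...page.rows)
      if (!page.hasNextPage || page.bookmark == null) {
        break
      }
      // page.bookmark === bookmark + page.rows.length, i.e. the next offset
      bookmark = page.bookmark
    }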
--- packages/server/src/sdk/app/rows/search/external.ts | 2 +- packages/server/src/sdk/app/rows/search/sqs.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 76f00a25e2..f47f6f7998 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -141,7 +141,7 @@ export async function search( // need wrapper object for bookmarks etc when paginating const response: SearchResponse = { rows: processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (totalRows != null) { response.totalRows = totalRows diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 2b3ed0c087..f3168a4462 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -382,7 +382,7 @@ export async function search( // check for pagination if (paginate && nextRow) { response.hasNextPage = true - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (paginate && !nextRow) { response.hasNextPage = false From b54157a6fb42ce38b4a7f858572abdfa8d092bab Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 17:22:17 +0100 Subject: [PATCH 04/21] Fix for enrich endpoint discovered by tests. --- packages/server/src/api/controllers/row/external.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 78fae7aad8..8b95d9c2f3 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) { }, includeSqlRelationships: IncludeRelationship.INCLUDE, }) - row[fieldName] = await outputProcessing(linkedTable, relatedRows, { - squash: true, - preserveLinks: true, - }) + row[fieldName] = await outputProcessing( + linkedTable, + relatedRows.rows, + { + squash: true, + preserveLinks: true, + } + ) } return row } From 8a64dd1e0a7826496af2a677374d3c89f94e5bd8 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 11:39:31 +0100 Subject: [PATCH 05/21] Reverting init.sql. 
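
The hand-rolled first_table / second_table seed data is no longer needed:
the next commit reproduces the row-limit scenario inside the test suite by
shrinking SQL_MAX_ROWS instead of seeding thousands of related rows,
roughly:

    await config.withCoreEnv({ SQL_MAX_ROWS: "6" }, async () => {
      // with a page limit of 3 and six related rows, the hard cap is hit
    })

so init.sql can return to the original shared fixtures.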
--- .../scripts/integrations/postgres/init.sql | 121 +++++++++++++----- 1 file changed, 89 insertions(+), 32 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index dce228dcfa..f5b8086e32 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -1,35 +1,92 @@ --- Create the first table -CREATE TABLE first_table ( - id SERIAL PRIMARY KEY, - name VARCHAR(255) NOT NULL, - description TEXT +SELECT 'CREATE DATABASE main' +WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec +CREATE SCHEMA "test-1"; +CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); +CREATE TABLE Persons ( + PersonID SERIAL PRIMARY KEY, + LastName varchar(255), + FirstName varchar(255), + Address varchar(255), + City varchar(255) DEFAULT 'Belfast', + Age INTEGER DEFAULT 20 NOT NULL, + Year INTEGER, + Type person_job ); - --- Create the second table -CREATE TABLE second_table ( - id SERIAL PRIMARY KEY, - first_table_id INT REFERENCES first_table(id), - data TEXT NOT NULL +CREATE TABLE Tasks ( + TaskID SERIAL PRIMARY KEY, + ExecutorID INT, + QaID INT, + Completed BOOLEAN, + TaskName varchar(255), + CONSTRAINT fkexecutor + FOREIGN KEY(ExecutorID) + REFERENCES Persons(PersonID), + CONSTRAINT fkqa + FOREIGN KEY(QaID) + REFERENCES Persons(PersonID) ); +CREATE TABLE Products ( + ProductID SERIAL PRIMARY KEY, + ProductName varchar(255) +); +CREATE TABLE Products_Tasks ( + ProductID INT NOT NULL, + TaskID INT NOT NULL, + CONSTRAINT fkProducts + FOREIGN KEY(ProductID) + REFERENCES Products(ProductID), + CONSTRAINT fkTasks + FOREIGN KEY(TaskID) + REFERENCES Tasks(TaskID), + PRIMARY KEY (ProductID, TaskID) +); +CREATE TABLE "test-1".table1 ( + id SERIAL PRIMARY KEY, + Name varchar(255) +); +CREATE TABLE CompositeTable ( + KeyPartOne varchar(128), + KeyPartTwo varchar(128), + Name varchar(255), + PRIMARY KEY (KeyPartOne, KeyPartTwo) +); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa', 1999); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); --- Insert 50 rows into the first table -DO -$$ -BEGIN - FOR i IN 1..50 LOOP - INSERT INTO 
first_table (name, description) - VALUES ('Name ' || i, 'Description ' || i); - END LOOP; -END -$$; - --- Insert 10,000 rows into the second table, all related to the first row in the first table -DO -$$ -BEGIN - FOR i IN 1..10000 LOOP - INSERT INTO second_table (first_table_id, data) - VALUES (1, 'Data ' || i); - END LOOP; -END -$$; +-- insert a lot of tasks for testing +WITH RECURSIVE generate_series AS ( + SELECT 1 AS n + UNION ALL + SELECT n + 1 FROM generate_series WHERE n < 6000 +), +random_data AS ( + SELECT + n, + (random() * 9 + 1)::int AS ExecutorID, + (random() * 9 + 1)::int AS QaID, + 'assembling' AS TaskName, + (random() < 0.5) AS Completed + FROM generate_series +) +INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) +SELECT ExecutorID, QaID, TaskName, Completed +FROM random_data; +INSERT INTO Products (ProductName) VALUES ('Computers'); +INSERT INTO Products (ProductName) VALUES ('Laptops'); +INSERT INTO Products (ProductName) VALUES ('Chairs'); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); +INSERT INTO "test-1".table1 (Name) VALUES ('Test'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); \ No newline at end of file From 99e8ef58dd9c9fa32b6f31228f0ddb513777577c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:03:58 +0100 Subject: [PATCH 06/21] Adding test case - had to rejig how internal limit is retrieved but works without requiring thousands of rows. --- packages/backend-core/src/sql/sql.ts | 14 +-- .../scripts/integrations/postgres/init.sql | 2 +- .../src/api/routes/tests/search.spec.ts | 90 ++++++++++++++++++- 3 files changed, 96 insertions(+), 10 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index a67da7bc10..76c86b2b62 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -37,10 +37,12 @@ import { helpers } from "@budibase/shared-core" type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any -const envLimit = environment.SQL_MAX_ROWS - ? parseInt(environment.SQL_MAX_ROWS) - : null -const BASE_LIMIT = envLimit || 5000 +function getBaseLimit() { + const envLimit = environment.SQL_MAX_ROWS + ? parseInt(environment.SQL_MAX_ROWS) + : null + return envLimit || 5000 +} // Takes a string like foo and returns a quoted string like [foo] for SQL Server // and "foo" for Postgres. 
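// Note: the old module-level BASE_LIMIT was evaluated once at import, so a
// SQL_MAX_ROWS override applied after module load (e.g. by a test) was
// ignored; getBaseLimit() re-reads the environment variable on every query
// build, which is what allows the limit to be lowered at runtime.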
@@ -838,7 +840,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { private readonly limit: number // pass through client to get flavour of SQL - constructor(client: SqlClient, limit: number = BASE_LIMIT) { + constructor(client: SqlClient, limit: number = getBaseLimit()) { super(client) this.limit = limit } @@ -882,7 +884,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { query = builder.read(client, json, { limits: { query: this.limit, - base: BASE_LIMIT, + base: getBaseLimit(), }, }) break diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index f5b8086e32..9624208deb 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -89,4 +89,4 @@ INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); INSERT INTO "test-1".table1 (Name) VALUES ('Test'); INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); \ No newline at end of file +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 2c5756efe4..bc3cdccf18 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -53,6 +53,7 @@ describe.each([ const isLucene = name === "lucene" const isInMemory = name === "in-memory" const isInternal = isSqs || isLucene || isInMemory + const isSql = !isInMemory && !isLucene const config = setup.getConfig() let envCleanup: (() => void) | undefined @@ -192,7 +193,8 @@ describe.each([ // different to the one passed in will cause the assertion to fail. Extra // rows returned by the query will also cause the assertion to fail. async toMatchExactly(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -202,13 +204,15 @@ describe.each([ expect.objectContaining(this.popRow(expectedRow, foundRows)) ) ) + return response } // Asserts that the query returns rows matching exactly the set of rows // passed in. The order of the rows is not important, but extra rows will // cause the assertion to fail. async toContainExactly(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -220,6 +224,7 @@ describe.each([ ) ) ) + return response } // Asserts that the query returns some property values - this cannot be used @@ -236,6 +241,7 @@ describe.each([ expect(response[key]).toEqual(properties[key]) } } + return response } // Asserts that the query doesn't return a property, e.g. pagination parameters. @@ -245,13 +251,15 @@ describe.each([ // eslint-disable-next-line jest/no-standalone-expect expect(response[property]).toBeUndefined() } + return response } // Asserts that the query returns rows matching the set of rows passed in. // The order of the rows is not important. 
Extra rows will not cause the // assertion to fail. async toContain(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect([...foundRows]).toEqual( @@ -261,6 +269,7 @@ describe.each([ ) ) ) + return response } async toFindNothing() { @@ -2608,4 +2617,79 @@ describe.each([ }).toContainExactly([row]) }) }) + + isSql && + describe("pagination edge case with relationships", () => { + let mainRows: Row[] = [] + + beforeAll(async () => { + const toRelateTable = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + table = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + rel: { + name: "rel", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: toRelateTable._id!, + fieldName: "rel", + }, + }) + const relatedRows = await Promise.all([ + config.api.row.save(toRelateTable._id!, { name: "tag 1" }), + config.api.row.save(toRelateTable._id!, { name: "tag 2" }), + config.api.row.save(toRelateTable._id!, { name: "tag 3" }), + config.api.row.save(toRelateTable._id!, { name: "tag 4" }), + config.api.row.save(toRelateTable._id!, { name: "tag 5" }), + config.api.row.save(toRelateTable._id!, { name: "tag 6" }), + ]) + mainRows = await Promise.all([ + config.api.row.save(table._id!, { + name: "product 1", + rel: relatedRows.map(row => row._id), + }), + config.api.row.save(table._id!, { + name: "product 2", + rel: [], + }), + config.api.row.save(table._id!, { + name: "product 3", + rel: [], + }), + ]) + }) + + it("can still page when the hard limit is hit", async () => { + await config.withCoreEnv( + { + SQL_MAX_ROWS: "6", + }, + async () => { + const params: Omit = { + query: {}, + paginate: true, + limit: 3, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + } + const page1 = await expectSearch(params).toContain([mainRows[0]]) + expect(page1.hasNextPage).toBe(true) + expect(page1.bookmark).toBeDefined() + const page2 = await expectSearch({ + ...params, + bookmark: page1.bookmark, + }).toContain([mainRows[1], mainRows[2]]) + expect(page2.hasNextPage).toBe(false) + } + ) + }) + }) }) From 224d2a195330eef4c6d089ab113dc91361851ddc Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:07:34 +0100 Subject: [PATCH 07/21] PR comments. --- .../src/sdk/app/rows/search/external.ts | 17 +++++--------- .../server/src/sdk/app/rows/search/sqs.ts | 22 +++++++------------ 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index f47f6f7998..9d9f4fde39 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -106,17 +106,12 @@ export async function search( paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, } - const queries: [ - Promise, - Promise | undefined - ] = [handleRequest(Operation.READ, tableId, parameters), undefined] - if (countRows) { - queries[1] = handleRequest(Operation.COUNT, tableId, parameters) - } - const responses = await Promise.all(queries) - let rows = responses[0].rows - const rawResponseSize = responses[0].rawResponseSize - const totalRows = responses.length > 1 ? 
responses[1] : undefined + const [{ rows, rawResponseSize }, totalRows] = await Promise.all([ + handleRequest(Operation.READ, tableId, parameters), + countRows + ? handleRequest(Operation.COUNT, tableId, parameters) + : Promise.resolve(undefined), + ]) let processed = await outputProcessing(table, rows, { preserveLinks: true, diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 9d6bd7e89b..f6154cf70c 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -332,20 +332,14 @@ export async function search( } try { - const queries: Promise[] = [] - queries.push(runSqlQuery(request, allTables, relationships)) - if (options.countRows) { - // get the total count of rows - queries.push( - runSqlQuery(request, allTables, relationships, { - countTotalRows: true, - }) - ) - } - const responses = await Promise.all(queries) - let rows = responses[0] as Row[] - const totalRows = - responses.length > 1 ? (responses[1] as number) : undefined + const [rows, totalRows] = await Promise.all([ + runSqlQuery(request, allTables, relationships), + options.countRows + ? runSqlQuery(request, allTables, relationships, { + countTotalRows: true, + }) + : Promise.resolve(undefined), + ]) // process from the format of tableId.column to expected format also // make sure JSON columns corrected From 6eb3b2793b9390d1f6f8a297bc04c77950094ea1 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:31:10 +0100 Subject: [PATCH 08/21] Linting. --- packages/server/src/sdk/app/rows/search/external.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 9d9f4fde39..3ce1013b85 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -23,7 +23,6 @@ import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" import { isSearchingByRowID } from "./utils" -import { ExternalReadRequestReturnType } from "../../../../api/controllers/row/ExternalRequest" function getPaginationAndLimitParameters( filters: SearchFilters, From 8afb1e6c420489a7f0d6f845c0dcca4c1b6186c3 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 10:52:48 +0200 Subject: [PATCH 09/21] Allow format on exportRows test utils --- packages/server/src/api/routes/tests/row.spec.ts | 2 ++ packages/server/src/tests/utilities/api/row.ts | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index edceb925d6..b5e1e63910 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -33,6 +33,7 @@ import { UpdatedRowEventEmitter, TableSchema, JsonFieldSubType, + RowExportFormat, } from "@budibase/types" import { generator, mocks } from "@budibase/backend-core/tests" import _, { merge } from "lodash" @@ -1811,6 +1812,7 @@ describe.each([ await config.api.row.exportRows( "1234567", { rows: [existing._id!] 
}, + RowExportFormat.JSON, { status: 404 } ) }) diff --git a/packages/server/src/tests/utilities/api/row.ts b/packages/server/src/tests/utilities/api/row.ts index 17d21f0996..c5614d69e7 100644 --- a/packages/server/src/tests/utilities/api/row.ts +++ b/packages/server/src/tests/utilities/api/row.ts @@ -11,6 +11,7 @@ import { DeleteRows, DeleteRow, PaginatedSearchRowResponse, + RowExportFormat, } from "@budibase/types" import { Expectations, TestAPI } from "./base" @@ -105,6 +106,7 @@ export class RowAPI extends TestAPI { exportRows = async ( tableId: string, body: ExportRowsRequest, + format: RowExportFormat = RowExportFormat.JSON, expectations?: Expectations ) => { const response = await this._requestRaw( @@ -112,7 +114,7 @@ export class RowAPI extends TestAPI { `/api/${tableId}/rows/exportRows`, { body, - query: { format: "json" }, + query: { format }, expectations, } ) From f3c18b87b140aa0c21a5a9c9719f7a2b8feff059 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 10:55:20 +0200 Subject: [PATCH 10/21] Expose csvToJson test utils --- packages/server/src/tests/utilities/api/table.ts | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts index 9d4a92250a..e0f59f8e82 100644 --- a/packages/server/src/tests/utilities/api/table.ts +++ b/packages/server/src/tests/utilities/api/table.ts @@ -1,6 +1,8 @@ import { BulkImportRequest, BulkImportResponse, + CsvToJsonRequest, + CsvToJsonResponse, MigrateRequest, MigrateResponse, Row, @@ -99,4 +101,14 @@ export class TableAPI extends TestAPI { } ) } + + csvToJson = async ( + body: CsvToJsonRequest, + expectations?: Expectations + ): Promise => { + return await this._post(`/api/convert/csvToJson`, { + body, + expectations, + }) + } } From b1f9325987a25af38a04aeba45aa2cefd2161f5f Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 11:00:32 +0200 Subject: [PATCH 11/21] Add tests --- .../server/src/api/routes/tests/row.spec.ts | 154 ++++++++++++++++++ 1 file changed, 154 insertions(+) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index b5e1e63910..098d51a60d 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1851,6 +1851,160 @@ describe.each([ const results = JSON.parse(res) expect(results.length).toEqual(3) }) + + describe("should allow exporting all column types", () => { + let tableId: string + let expectedRowData: Row + + beforeAll(async () => { + const fullSchema = setup.structures.fullSchemaWithoutLinks({ + allRequired: true, + }) + + const table = await config.api.table.save( + saveTableRequest({ + ...setup.structures.basicTable(), + schema: fullSchema, + primary: ["string"], + }) + ) + tableId = table._id! 
+ + const rowValues: Record = { + [FieldType.STRING]: generator.guid(), + [FieldType.LONGFORM]: generator.paragraph(), + [FieldType.OPTIONS]: "option 2", + [FieldType.ARRAY]: ["options 2", "options 4"], + [FieldType.NUMBER]: generator.natural(), + [FieldType.BOOLEAN]: generator.bool(), + [FieldType.DATETIME]: generator.date().toISOString(), + [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], + [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), + [FieldType.FORMULA]: undefined, // generated field + [FieldType.AUTO]: undefined, // generated field + [FieldType.JSON]: { name: generator.guid() }, + [FieldType.INTERNAL]: generator.guid(), + [FieldType.BARCODEQR]: generator.guid(), + [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), + [FieldType.BIGINT]: generator.integer(), + [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], + [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, + } + const row = await config.api.row.save(table._id!, rowValues) + expectedRowData = { + _id: row._id, + [FieldType.STRING]: rowValues[FieldType.STRING], + [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], + [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], + [FieldType.ARRAY]: rowValues[FieldType.ARRAY], + [FieldType.NUMBER]: rowValues[FieldType.NUMBER], + [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], + [FieldType.DATETIME]: rowValues[FieldType.DATETIME], + [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( + (a: any) => + expect.objectContaining({ + ...a, + url: expect.any(String), + }) + ), + [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.ATTACHMENT_SINGLE], + url: expect.any(String), + }), + [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, + [FieldType.AUTO]: expect.any(Number), + [FieldType.JSON]: rowValues[FieldType.JSON], + [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], + [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], + [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.SIGNATURE_SINGLE], + url: expect.any(String), + }), + [FieldType.BIGINT]: rowValues[FieldType.BIGINT], + [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( + expect.objectContaining + ), + [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( + rowValues[FieldType.BB_REFERENCE_SINGLE] + ), + } + }) + + it("as csv", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + const json = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + expect(json).toEqual([expectedRowData]) + }) + + it("as json", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual([expectedRowData]) + }) + + it("as json with schema", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON_WITH_SCHEMA + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual({ + schema: expect.any(Object), + rows: [expectedRowData], + }) + }) + + it("exported data can be re-imported", async () => { + // export all + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + // import all twice + const rows = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + // await 
config.api.row.bulkImport(tableId, { + // rows, + // }) + + const { rows: allRows } = await config.api.row.search(tableId) + + const expectedRow = { + ...expectedRowData, + _id: expect.any(String), + _rev: expect.any(String), + type: "row", + tableId: tableId, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + } + expect(allRows).toEqual([ + expectedRow, + expectedRow, + // expectedRow + ]) + }) + }) }) let o2mTable: Table From 02d6458ac8f9f81bd70f377da38cb113e8617eeb Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 11:50:21 +0200 Subject: [PATCH 12/21] Improve test --- packages/server/src/api/routes/tests/row.spec.ts | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 098d51a60d..6e05741c35 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1983,9 +1983,9 @@ describe.each([ await config.api.row.bulkImport(tableId, { rows, }) - // await config.api.row.bulkImport(tableId, { - // rows, - // }) + await config.api.row.bulkImport(tableId, { + rows, + }) const { rows: allRows } = await config.api.row.search(tableId) @@ -1998,11 +1998,7 @@ describe.each([ createdAt: new Date().toISOString(), updatedAt: new Date().toISOString(), } - expect(allRows).toEqual([ - expectedRow, - expectedRow, - // expectedRow - ]) + expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) }) }) }) From e1ace8524835f598150bf11581186b2ae9c0e3c9 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 11:51:19 +0200 Subject: [PATCH 13/21] Fix schema parser --- .../src/api/controllers/view/exporters.ts | 4 -- packages/server/src/utilities/schema.ts | 43 +++++++++++++++---- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts index 946a1b346a..3269133d4b 100644 --- a/packages/server/src/api/controllers/view/exporters.ts +++ b/packages/server/src/api/controllers/view/exporters.ts @@ -51,7 +51,3 @@ export function jsonWithSchema(schema: TableSchema, rows: Row[]) { export function isFormat(format: any): format is RowExportFormat { return Object.values(RowExportFormat).includes(format as RowExportFormat) } - -export function parseCsvExport(value: string) { - return JSON.parse(value) as T -} diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts index c6b26b55c8..353ca6f5b3 100644 --- a/packages/server/src/utilities/schema.ts +++ b/packages/server/src/utilities/schema.ts @@ -8,7 +8,6 @@ import { } from "@budibase/types" import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core" import { db } from "@budibase/backend-core" -import { parseCsvExport } from "../api/controllers/view/exporters" type Rows = Array @@ -159,7 +158,7 @@ export function parse(rows: Rows, table: Table): Rows { const columnSchema = schema[columnName] const { type: columnType } = columnSchema - if (columnType === FieldType.NUMBER) { + if ([FieldType.NUMBER, FieldType.BIGINT].includes(columnType)) { // If provided must be a valid number parsedRow[columnName] = columnData ? Number(columnData) : columnData } else if ( @@ -171,16 +170,23 @@ export function parse(rows: Rows, table: Table): Rows { parsedRow[columnName] = columnData ? 
new Date(columnData).toISOString() : columnData + } else if ( + columnType === FieldType.JSON && + typeof columnData === "string" + ) { + parsedRow[columnName] = parseJsonExport(columnData) } else if (columnType === FieldType.BB_REFERENCE) { let parsedValues: { _id: string }[] = columnData || [] - if (columnData) { - parsedValues = parseCsvExport<{ _id: string }[]>(columnData) + if (columnData && typeof columnData === "string") { + parsedValues = parseJsonExport<{ _id: string }[]>(columnData) } parsedRow[columnName] = parsedValues?.map(u => u._id) } else if (columnType === FieldType.BB_REFERENCE_SINGLE) { - const parsedValue = - columnData && parseCsvExport<{ _id: string }>(columnData) + let parsedValue = columnData + if (columnData && typeof columnData === "string") { + parsedValue = parseJsonExport<{ _id: string }>(columnData) + } parsedRow[columnName] = parsedValue?._id } else if ( (columnType === FieldType.ATTACHMENTS || @@ -188,7 +194,7 @@ export function parse(rows: Rows, table: Table): Rows { columnType === FieldType.SIGNATURE_SINGLE) && typeof columnData === "string" ) { - parsedRow[columnName] = parseCsvExport(columnData) + parsedRow[columnName] = parseJsonExport(columnData) } else { parsedRow[columnName] = columnData } @@ -212,14 +218,14 @@ function isValidBBReference( if (!data) { return !isRequired } - const user = parseCsvExport<{ _id: string }>(data) + const user = parseJsonExport<{ _id: string }>(data) return db.isGlobalUserID(user._id) } switch (subtype) { case BBReferenceFieldSubType.USER: case BBReferenceFieldSubType.USERS: { - const userArray = parseCsvExport<{ _id: string }[]>(data) + const userArray = parseJsonExport<{ _id: string }[]>(data) if (!Array.isArray(userArray)) { return false } @@ -233,3 +239,22 @@ function isValidBBReference( throw utils.unreachable(subtype) } } + +function parseJsonExport(value: string) { + try { + const parsed = JSON.parse(value) + + return parsed as T + } catch (e: any) { + if ( + e.message.startsWith("Expected property name or '}' in JSON at position ") + ) { + // This was probably converted as CSV and it has single quotes instead of double ones + const parsed = JSON.parse(value.replace(/'/g, '"')) + return parsed as T + } + + // It is no a valid JSON + throw e + } +} From 0dce3aa02c1e6c64bb62091dca76ea33c0992862 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 11:51:43 +0200 Subject: [PATCH 14/21] Add structures.fullSchemaWithoutLinks --- .../server/src/tests/utilities/structures.ts | 162 ++++++++++++++++++ 1 file changed, 162 insertions(+) diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts index 8f67ad1af9..698f6d8236 100644 --- a/packages/server/src/tests/utilities/structures.ts +++ b/packages/server/src/tests/utilities/structures.ts @@ -26,6 +26,10 @@ import { WebhookActionType, AutomationEventType, LoopStepType, + FieldSchema, + BBReferenceFieldSubType, + JsonFieldSubType, + AutoFieldSubType, } from "@budibase/types" import { LoopInput } from "../../definitions/automations" import { merge } from "lodash" @@ -573,3 +577,161 @@ export function basicEnvironmentVariable( development: dev || prod, } } + +export function fullSchemaWithoutLinks({ + allRequired, +}: { + allRequired?: boolean +}) { + const schema: { + [type in Exclude]: FieldSchema & { type: type } + } = { + [FieldType.STRING]: { + name: "string", + type: FieldType.STRING, + constraints: { + presence: allRequired, + }, + }, + [FieldType.LONGFORM]: { + name: "longform", + type: 
FieldType.LONGFORM, + constraints: { + presence: allRequired, + }, + }, + [FieldType.OPTIONS]: { + name: "options", + type: FieldType.OPTIONS, + constraints: { + presence: allRequired, + inclusion: ["option 1", "option 2", "option 3", "option 4"], + }, + }, + [FieldType.ARRAY]: { + name: "array", + type: FieldType.ARRAY, + constraints: { + presence: allRequired, + type: JsonFieldSubType.ARRAY, + inclusion: ["options 1", "options 2", "options 3", "options 4"], + }, + }, + [FieldType.NUMBER]: { + name: "number", + type: FieldType.NUMBER, + constraints: { + presence: allRequired, + }, + }, + [FieldType.BOOLEAN]: { + name: "boolean", + type: FieldType.BOOLEAN, + constraints: { + presence: allRequired, + }, + }, + [FieldType.DATETIME]: { + name: "datetime", + type: FieldType.DATETIME, + dateOnly: true, + timeOnly: false, + constraints: { + presence: allRequired, + }, + }, + [FieldType.FORMULA]: { + name: "formula", + type: FieldType.FORMULA, + formula: "any formula", + constraints: { + presence: allRequired, + }, + }, + [FieldType.BARCODEQR]: { + name: "barcodeqr", + type: FieldType.BARCODEQR, + constraints: { + presence: allRequired, + }, + }, + [FieldType.BIGINT]: { + name: "bigint", + type: FieldType.BIGINT, + constraints: { + presence: allRequired, + }, + }, + [FieldType.BB_REFERENCE]: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + presence: allRequired, + }, + }, + [FieldType.BB_REFERENCE_SINGLE]: { + name: "users", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + presence: allRequired, + }, + }, + [FieldType.ATTACHMENTS]: { + name: "attachments", + type: FieldType.ATTACHMENTS, + constraints: { + presence: allRequired, + }, + }, + [FieldType.ATTACHMENT_SINGLE]: { + name: "attachment_single", + type: FieldType.ATTACHMENT_SINGLE, + constraints: { + presence: allRequired, + }, + }, + [FieldType.AUTO]: { + name: "auto", + type: FieldType.AUTO, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + presence: allRequired, + }, + }, + [FieldType.JSON]: { + name: "json", + type: FieldType.JSON, + constraints: { + presence: allRequired, + }, + }, + [FieldType.INTERNAL]: { + name: "internal", + type: FieldType.INTERNAL, + constraints: { + presence: allRequired, + }, + }, + [FieldType.SIGNATURE_SINGLE]: { + name: "signature_single", + type: FieldType.SIGNATURE_SINGLE, + constraints: { + presence: allRequired, + }, + }, + } + + return schema +} +export function basicAttachment() { + return { + key: generator.guid(), + name: generator.word(), + extension: generator.word(), + size: generator.natural(), + url: `/${generator.guid()}`, + } +} From a7c8009e091ce028eb540c8917a3bd94b0ff724c Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 12:25:15 +0200 Subject: [PATCH 15/21] Fix csv checks --- .../server/src/api/routes/tests/row.spec.ts | 51 ++++++++++++++++++- 1 file changed, 49 insertions(+), 2 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 6e05741c35..2091f081e3 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1937,10 +1937,57 @@ describe.each([ RowExportFormat.CSV ) - const json = await config.api.table.csvToJson({ + const jsonResult = await config.api.table.csvToJson({ csvString: exportedValue, }) - expect(json).toEqual([expectedRowData]) + + const stringified = (value: string) => + 
JSON.stringify(value).replace(/"/g, "'") + + const matchingObject = (key: string, value: any, isArray: boolean) => { + const objectMatcher = `{'${key}':'${value[key]}'.*?}` + if (isArray) { + return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`)) + } + return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) + } + + expect(jsonResult).toEqual([ + { + ...expectedRowData, + auto: expect.any(String), + array: stringified(expectedRowData["array"]), + attachment: matchingObject( + "key", + expectedRowData["attachment"][0].sample, + true + ), + attachment_single: matchingObject( + "key", + expectedRowData["attachment_single"].sample, + false + ), + bigint: stringified(expectedRowData["bigint"]), + boolean: stringified(expectedRowData["boolean"]), + json: stringified(expectedRowData["json"]), + number: stringified(expectedRowData["number"]), + signature_single: matchingObject( + "key", + expectedRowData["signature_single"].sample, + false + ), + bb_reference: matchingObject( + "_id", + expectedRowData["bb_reference"][0].sample, + true + ), + bb_reference_single: matchingObject( + "_id", + expectedRowData["bb_reference_single"].sample, + false + ), + }, + ]) }) it("as json", async () => { From 616b1bf0129ba748862b05ca5d5db86fd233d504 Mon Sep 17 00:00:00 2001 From: Adria Navarro Date: Fri, 2 Aug 2024 13:00:42 +0200 Subject: [PATCH 16/21] Add and fix table tests --- .../server/src/api/routes/tests/table.spec.ts | 269 ++++++++++++++++-- .../server/src/tests/utilities/api/table.ts | 11 +- packages/server/src/utilities/schema.ts | 9 +- 3 files changed, 250 insertions(+), 39 deletions(-) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index f383fed927..077302f2b7 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -17,8 +17,10 @@ import { TableSchema, TableSourceType, User, + ValidateTableImportResponse, ViewCalculation, ViewV2Enriched, + RowExportFormat, } from "@budibase/types" import { checkBuilderEndpoint } from "./utilities/TestFunctions" import * as setup from "./utilities" @@ -1086,7 +1088,10 @@ describe.each([ }) }) - describe("import validation", () => { + describe.each([ + [RowExportFormat.CSV, (val: any) => JSON.stringify(val).replace(/"/g, "'")], + [RowExportFormat.JSON, (val: any) => val], + ])("import validation (%s)", (_, userParser) => { const basicSchema: TableSchema = { id: { type: FieldType.NUMBER, @@ -1098,9 +1103,41 @@ describe.each([ }, } - describe("validateNewTableImport", () => { - it("can validate basic imports", async () => { - const result = await config.api.table.validateNewTableImport( + const importCases: [ + string, + (rows: Row[], schema: TableSchema) => Promise + ][] = [ + [ + "validateNewTableImport", + async (rows: Row[], schema: TableSchema) => { + const result = await config.api.table.validateNewTableImport({ + rows, + schema, + }) + return result + }, + ], + [ + "validateExistingTableImport", + async (rows: Row[], schema: TableSchema) => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema, + }) + ) + const result = await config.api.table.validateExistingTableImport({ + tableId: table._id, + rows, + }) + return result + }, + ], + ] + + describe.each(importCases)("%s", (_, testDelegate) => { + it("validates basic imports", async () => { + const result = await testDelegate( [{ id: generator.natural(), name: generator.first() }], basicSchema ) @@ 
-1119,18 +1156,18 @@ describe.each([ it.each( isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS )("don't allow protected names in schema (%s)", async columnName => { - const result = await config.api.table.validateNewTableImport( - [ + const result = await config.api.table.validateNewTableImport({ + rows: [ { id: generator.natural(), name: generator.first(), [columnName]: generator.word(), }, ], - { + schema: { ...basicSchema, - } - ) + }, + }) expect(result).toEqual({ allValid: false, @@ -1146,25 +1183,53 @@ describe.each([ }) }) + it("does not allow imports without rows", async () => { + const result = await testDelegate([], basicSchema) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + schemaValidation: {}, + }) + }) + + it("validates imports with some empty rows", async () => { + const result = await testDelegate( + [{}, { id: generator.natural(), name: generator.first() }, {}], + basicSchema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + }, + }) + }) + isInternal && it.each( isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS )("don't allow protected names in the rows (%s)", async columnName => { - const result = await config.api.table.validateNewTableImport( - [ + const result = await config.api.table.validateNewTableImport({ + rows: [ { id: generator.natural(), name: generator.first(), }, ], - { + schema: { ...basicSchema, [columnName]: { name: columnName, type: FieldType.STRING, }, - } - ) + }, + }) expect(result).toEqual({ allValid: false, @@ -1179,20 +1244,24 @@ describe.each([ }, }) }) - }) - describe("validateExistingTableImport", () => { - it("can validate basic imports", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], - schema: basicSchema, - }) + it("validates required fields and valid rows", async () => { + const schema: TableSchema = { + ...basicSchema, + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, + }, + } + + const result = await testDelegate( + [ + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: generator.first() }, + ], + schema ) - const result = await config.api.table.validateExistingTableImport({ - tableId: table._id, - rows: [{ id: generator.natural(), name: generator.first() }], - }) expect(result).toEqual({ allValid: true, @@ -1205,6 +1274,154 @@ describe.each([ }) }) + it("validates required fields and non-valid rows", async () => { + const schema: TableSchema = { + ...basicSchema, + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, + }, + } + + const result = await testDelegate( + [ + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: "" }, + ], + schema + ) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: false, + }, + }) + }) + + describe("bb references", () => { + const getUserValues = () => ({ + _id: docIds.generateGlobalUserID(), + primaryDisplay: generator.first(), + email: generator.email({}), + }) + + it("can validate user column imports", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + name: "user", + }, + } + + const result = await testDelegate( + [ + { + id: generator.natural(), + name: 
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+
+        it("can validate user column imports with invalid data", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE_SINGLE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser(getUserValues()),
+              },
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: "no valid user data",
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: false,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: false,
+            },
+          })
+        })
+
+        it("can validate users column imports", async () => {
+          const schema: TableSchema = {
+            ...basicSchema,
+            user: {
+              type: FieldType.BB_REFERENCE,
+              subtype: BBReferenceFieldSubType.USER,
+              name: "user",
+              externalType: "array",
+            },
+          }
+
+          const result = await testDelegate(
+            [
+              {
+                id: generator.natural(),
+                name: generator.first(),
+                user: userParser([
+                  getUserValues(),
+                  getUserValues(),
+                  getUserValues(),
+                ]),
+              },
+            ],
+            schema
+          )
+
+          expect(result).toEqual({
+            allValid: true,
+            errors: {},
+            invalidColumns: [],
+            schemaValidation: {
+              id: true,
+              name: true,
+              user: true,
+            },
+          })
+        })
+      })
+    })
+
+    describe("validateExistingTableImport", () => {
      isInternal &&
        it("can reimport _id fields for internal tables", async () => {
          const table = await config.api.table.save(

diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts
index e0f59f8e82..baaf890b52 100644
--- a/packages/server/src/tests/utilities/api/table.ts
+++ b/packages/server/src/tests/utilities/api/table.ts
@@ -5,11 +5,10 @@ import {
   CsvToJsonResponse,
   MigrateRequest,
   MigrateResponse,
-  Row,
   SaveTableRequest,
   SaveTableResponse,
   Table,
-  TableSchema,
+  ValidateNewTableImportRequest,
   ValidateTableImportRequest,
   ValidateTableImportResponse,
 } from "@budibase/types"
@@ -73,17 +72,13 @@ export class TableAPI extends TestAPI {
   }

   validateNewTableImport = async (
-    rows: Row[],
-    schema: TableSchema,
+    body: ValidateNewTableImportRequest,
     expectations?: Expectations
   ): Promise<ValidateTableImportResponse> => {
     return await this._post<ValidateTableImportResponse>(
       `/api/tables/validateNewTableImport`,
       {
-        body: {
-          rows,
-          schema,
-        },
+        body,
         expectations,
       }
     )

diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 353ca6f5b3..16082dbcb7 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -210,10 +210,6 @@ function isValidBBReference(
   subtype: BBReferenceFieldSubType,
   isRequired: boolean
 ): boolean {
-  if (typeof data !== "string") {
-    return false
-  }
-
   if (type === FieldType.BB_REFERENCE_SINGLE) {
     if (!data) {
       return !isRequired
@@ -240,7 +236,10 @@
   }
 }

-function parseJsonExport<T>(value: string) {
+function parseJsonExport<T>(value: any) {
+  if (typeof value !== "string") {
+    return value
+  }
   try {
     const parsed = JSON.parse(value)
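The parseJsonExport change at the end of the patch above is what lets both import formats share one validation path; a minimal sketch of that behaviour, assuming only the pass-through rule shown in the diff (the name is shortened here):

// Sketch, not the full implementation.
function parseJsonExportSketch<T>(value: any): T {
  if (typeof value !== "string") {
    return value // JSON imports already carry real objects/arrays
  }
  return JSON.parse(value) as T // CSV imports carry serialised strings
}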
From cd1a7699b23f3471dae4a848b58ecd80611298d5 Mon Sep 17 00:00:00 2001
From: Adria Navarro
Date: Fri, 2 Aug 2024 13:12:47 +0200
Subject: [PATCH 17/21] Fix schema required-field checks

---
 packages/server/src/utilities/schema.ts | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 16082dbcb7..2613190b9c 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -84,7 +84,7 @@ export function validate(
       "Column names can't contain special characters"
   } else if (
     columnData == null &&
-    !schema[columnName].constraints?.presence
+    !helpers.schema.isRequired(constraints)
   ) {
     results.schemaValidation[columnName] = true
   } else if (
@@ -94,6 +94,11 @@
     isAutoColumn
   ) {
     return
+  } else if (
+    [FieldType.STRING].includes(columnType) &&
+    helpers.schema.isRequired(constraints)
+  ) {
+    results.schemaValidation[columnName] = false
   } else if (columnType === FieldType.NUMBER && isNaN(Number(columnData))) {
     // If provided must be a valid number
     results.schemaValidation[columnName] = false

From e8e4f064a56cff92482b5180784c1d28149c1a24 Mon Sep 17 00:00:00 2001
From: Adria Navarro
Date: Fri, 2 Aug 2024 13:14:33 +0200
Subject: [PATCH 18/21] Fix handling of invalid BB reference values

---
 packages/server/src/utilities/schema.ts | 47 ++++++++++++++-----------
 1 file changed, 26 insertions(+), 21 deletions(-)

diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 2613190b9c..32a5761e22 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -96,6 +96,7 @@ export function validate(
     return
   } else if (
     [FieldType.STRING].includes(columnType) &&
+    !columnData &&
     helpers.schema.isRequired(constraints)
   ) {
     results.schemaValidation[columnName] = false
@@ -215,29 +216,33 @@ function isValidBBReference(
   subtype: BBReferenceFieldSubType,
   isRequired: boolean
 ): boolean {
-  if (type === FieldType.BB_REFERENCE_SINGLE) {
-    if (!data) {
-      return !isRequired
-    }
-    const user = parseJsonExport<{ _id: string }>(data)
-    return db.isGlobalUserID(user._id)
-  }
-
-  switch (subtype) {
-    case BBReferenceFieldSubType.USER:
-    case BBReferenceFieldSubType.USERS: {
-      const userArray = parseJsonExport<{ _id: string }[]>(data)
-      if (!Array.isArray(userArray)) {
-        return false
+  try {
+    if (type === FieldType.BB_REFERENCE_SINGLE) {
+      if (!data) {
+        return !isRequired
       }
-
-      const constainsWrongId = userArray.find(
-        user => !db.isGlobalUserID(user._id)
-      )
-      return !constainsWrongId
+      const user = parseJsonExport<{ _id: string }>(data)
+      return db.isGlobalUserID(user._id)
     }
-    default:
-      throw utils.unreachable(subtype)
+
+    switch (subtype) {
+      case BBReferenceFieldSubType.USER:
+      case BBReferenceFieldSubType.USERS: {
+        const userArray = parseJsonExport<{ _id: string }[]>(data)
+        if (!Array.isArray(userArray)) {
+          return false
+        }
+
+        const containsWrongId = userArray.find(
+          user => !db.isGlobalUserID(user._id)
+        )
+        return !containsWrongId
+      }
+      default:
+        throw utils.unreachable(subtype)
+    }
+  } catch {
+    return false
   }
 }

From d8f55498add364c1cb29e5fb94cda6870cb09daf Mon Sep 17 00:00:00 2001
From: Adria Navarro
Date: Fri, 2 Aug 2024 13:44:20 +0200
Subject: [PATCH 19/21] Treat bigint as string

---
 packages/server/src/api/routes/tests/row.spec.ts | 3 +--
 packages/server/src/utilities/schema.ts          | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 2091f081e3..69a6b981bb 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -1886,7 +1886,7 @@ describe.each([
       [FieldType.INTERNAL]: generator.guid(),
      [FieldType.BARCODEQR]: generator.guid(),
       [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
-      [FieldType.BIGINT]: generator.integer(),
+      [FieldType.BIGINT]: generator.integer().toString(),
       [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
       [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
     }
@@ -1967,7 +1967,6 @@
           expectedRowData["attachment_single"].sample,
           false
         ),
-        bigint: stringified(expectedRowData["bigint"]),
         boolean: stringified(expectedRowData["boolean"]),
         json: stringified(expectedRowData["json"]),
         number: stringified(expectedRowData["number"]),

diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 32a5761e22..b398285710 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -164,7 +164,7 @@ export function parse(rows: Rows, table: Table): Rows {
     const columnSchema = schema[columnName]
     const { type: columnType } = columnSchema
-    if ([FieldType.NUMBER, FieldType.BIGINT].includes(columnType)) {
+    if ([FieldType.NUMBER].includes(columnType)) {
       // If provided must be a valid number
       parsedRow[columnName] = columnData ? Number(columnData) : columnData
     } else if (

From 661fc361a05e2c1864e6dc1030f9e0cb483cdc98 Mon Sep 17 00:00:00 2001
From: mike12345567
Date: Fri, 2 Aug 2024 15:01:33 +0100
Subject: [PATCH 20/21] Refactor the search SDK to make the different search
 methods obvious. Export was always using Lucene when exporting internal
 rows, which shouldn't be the case; it should go through the complete search
 SDK.

---
 packages/server/src/sdk/app/rows/search.ts    |  5 +-
 .../src/sdk/app/rows/search/internal/index.ts |  3 +
 .../rows/search/{ => internal}/internal.ts    | 93 +++----------------
 .../sdk/app/rows/search/internal/lucene.ts    | 66 +++++++++++++
 .../sdk/app/rows/search/{ => internal}/sqs.ts | 18 ++--
 5 files changed, 95 insertions(+), 90 deletions(-)
 create mode 100644 packages/server/src/sdk/app/rows/search/internal/index.ts
 rename packages/server/src/sdk/app/rows/search/{ => internal}/internal.ts (70%)
 create mode 100644 packages/server/src/sdk/app/rows/search/internal/lucene.ts
 rename packages/server/src/sdk/app/rows/search/{ => internal}/sqs.ts (96%)

diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts
index d074e8e2ac..c008d43548 100644
--- a/packages/server/src/sdk/app/rows/search.ts
+++ b/packages/server/src/sdk/app/rows/search.ts
@@ -8,7 +8,6 @@
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
 import * as external from "./search/external"
-import * as sqs from "./search/sqs"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
@@ -55,9 +54,9 @@ export async function search(
   if (isExternalTable) {
     return external.search(options, table)
   } else if (dbCore.isSqsEnabledForTenant()) {
-    return sqs.search(options, table)
+    return internal.sqs.search(options, table)
   } else {
-    return internal.search(options, table)
+    return internal.lucene.search(options, table)
   }
 }

diff --git a/packages/server/src/sdk/app/rows/search/internal/index.ts b/packages/server/src/sdk/app/rows/search/internal/index.ts
new file mode 100644
index 0000000000..f3db9169f4
--- /dev/null
+++ b/packages/server/src/sdk/app/rows/search/internal/index.ts
@@ -0,0 +1,3 @@
+export * as sqs from "./sqs"
+export * as lucene from "./lucene"
+export * from "./internal"
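With the new internal/index.ts barrel above, callers pick an internal engine through one namespace instead of importing sqs separately; a rough sketch of the resulting call sites (options and table are assumed to be in scope):

// Sketch mirroring the dispatch in search.ts above.
import * as internal from "./search/internal"

const viaSqs = await internal.sqs.search(options, table) // SQS-enabled tenants
const viaLucene = await internal.lucene.search(options, table) // everyone else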
diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal/internal.ts
similarity index 70%
rename from packages/server/src/sdk/app/rows/search/internal.ts
rename to packages/server/src/sdk/app/rows/search/internal/internal.ts
index f86b041597..acc7033ce1 100644
--- a/packages/server/src/sdk/app/rows/search/internal.ts
+++ b/packages/server/src/sdk/app/rows/search/internal/internal.ts
@@ -1,90 +1,30 @@
 import { context, HTTPError } from "@budibase/backend-core"
-import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
-import env from "../../../../environment"
-import { fullSearch, paginatedSearch } from "./utils"
-import { getRowParams, InternalTables } from "../../../../db/utils"
+import env from "../../../../../environment"
+import { getRowParams, InternalTables } from "../../../../../db/utils"
 import {
   Database,
   DocumentType,
   Row,
-  RowSearchParams,
-  SearchResponse,
-  SortType,
   Table,
   TableSchema,
-  User,
 } from "@budibase/types"
-import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
-import { outputProcessing } from "../../../../utilities/rowProcessor"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import {
   csv,
   Format,
   json,
   jsonWithSchema,
-} from "../../../../api/controllers/view/exporters"
-import * as inMemoryViews from "../../../../db/inMemoryView"
+} from "../../../../../api/controllers/view/exporters"
+import * as inMemoryViews from "../../../../../db/inMemoryView"
 import {
   getFromDesignDoc,
   getFromMemoryDoc,
   migrateToDesignView,
   migrateToInMemoryView,
-} from "../../../../api/controllers/view/utils"
-import sdk from "../../../../sdk"
-import { ExportRowsParams, ExportRowsResult } from "./types"
-import pick from "lodash/pick"
-import { breakRowIdField } from "../../../../integrations/utils"
-
-export async function search(
-  options: RowSearchParams,
-  table: Table
-): Promise<SearchResponse<Row>> {
-  const { tableId } = options
-
-  const { paginate, query } = options
-
-  const params: RowSearchParams = {
-    tableId: options.tableId,
-    sort: options.sort,
-    sortOrder: options.sortOrder,
-    sortType: options.sortType,
-    limit: options.limit,
-    bookmark: options.bookmark,
-    version: options.version,
-    disableEscaping: options.disableEscaping,
-    query: {},
-  }
-
-  if (params.sort && !params.sortType) {
-    const schema = table.schema
-    const sortField = schema[params.sort]
-    params.sortType =
-      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
-  }
-
-  let response
-  if (paginate) {
-    response = await paginatedSearch(query, params)
-  } else {
-    response = await fullSearch(query, params)
-  }
-
-  // Enrich search results with relationships
-  if (response.rows && response.rows.length) {
-    // enrich with global users if from users table
-    if (tableId === InternalTables.USER_METADATA) {
-      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
-    }
-
-    if (options.fields) {
-      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
-      response.rows = response.rows.map((r: any) => pick(r, fields))
-    }
-
-    response.rows = await outputProcessing(table, response.rows)
-  }
-
-  return response
-}
+} from "../../../../../api/controllers/view/utils"
+import sdk from "../../../../../sdk"
+import { ExportRowsParams, ExportRowsResult } from "../types"
+import { breakRowIdField } from "../../../../../integrations/utils"

 export async function exportRows(
   options: ExportRowsParams
@@ -123,15 +63,12 @@
     result = await outputProcessing(table, response)
   } else if (query) {
-    let searchResponse = await search(
-      {
-        tableId,
-        query,
-        sort,
-        sortOrder,
-      },
-      table
-    )
+    let searchResponse = await sdk.rows.search({
+      tableId,
+      query,
+      sort,
+      sortOrder,
+    })
     result = searchResponse.rows
   }
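The hunk above is the substance of this patch: internal row export no longer hardwires the old Lucene-only search. It calls the top-level search SDK, which applies the external/SQS/Lucene dispatch from search.ts earlier. Roughly:

// Sketch: exportRows lets the search SDK pick the engine.
// external tables -> external.search
// SQS tenants     -> internal.sqs.search
// otherwise       -> internal.lucene.search
const { rows } = await sdk.rows.search({ tableId, query, sort, sortOrder })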
diff --git a/packages/server/src/sdk/app/rows/search/internal/lucene.ts b/packages/server/src/sdk/app/rows/search/internal/lucene.ts
new file mode 100644
index 0000000000..a25803804b
--- /dev/null
+++ b/packages/server/src/sdk/app/rows/search/internal/lucene.ts
@@ -0,0 +1,66 @@
+import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
+import { fullSearch, paginatedSearch } from "../utils"
+import { InternalTables } from "../../../../../db/utils"
+import {
+  Row,
+  RowSearchParams,
+  SearchResponse,
+  SortType,
+  Table,
+  User,
+} from "@budibase/types"
+import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
+import pick from "lodash/pick"
+
+export async function search(
+  options: RowSearchParams,
+  table: Table
+): Promise<SearchResponse<Row>> {
+  const { tableId } = options
+
+  const { paginate, query } = options
+
+  const params: RowSearchParams = {
+    tableId: options.tableId,
+    sort: options.sort,
+    sortOrder: options.sortOrder,
+    sortType: options.sortType,
+    limit: options.limit,
+    bookmark: options.bookmark,
+    version: options.version,
+    disableEscaping: options.disableEscaping,
+    query: {},
+  }
+
+  if (params.sort && !params.sortType) {
+    const schema = table.schema
+    const sortField = schema[params.sort]
+    params.sortType =
+      sortField.type === "number" ? SortType.NUMBER : SortType.STRING
+  }
+
+  let response
+  if (paginate) {
+    response = await paginatedSearch(query, params)
+  } else {
+    response = await fullSearch(query, params)
+  }
+
+  // Enrich search results with relationships
+  if (response.rows && response.rows.length) {
+    // enrich with global users if from users table
+    if (tableId === InternalTables.USER_METADATA) {
+      response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
+    }
+
+    if (options.fields) {
+      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
+      response.rows = response.rows.map((r: any) => pick(r, fields))
+    }
+
+    response.rows = await outputProcessing(table, response.rows)
+  }
+
+  return response
+}

diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/internal/sqs.ts
similarity index 96%
rename from packages/server/src/sdk/app/rows/search/sqs.ts
rename to packages/server/src/sdk/app/rows/search/internal/sqs.ts
index f6154cf70c..bc46e8d4c1 100644
--- a/packages/server/src/sdk/app/rows/search/sqs.ts
+++ b/packages/server/src/sdk/app/rows/search/internal/sqs.ts
@@ -18,31 +18,31 @@ import {
   buildInternalRelationships,
   sqlOutputProcessing,
-} from "../../../../api/controllers/row/utils"
+} from "../../../../../api/controllers/row/utils"
 import {
   decodeNonAscii,
   mapToUserColumn,
   USER_COLUMN_PREFIX,
-} from "../../tables/internal/sqs"
-import sdk from "../../../index"
+} from "../../../tables/internal/sqs"
+import sdk from "../../../../index"
 import {
   context,
   sql,
   SQLITE_DESIGN_DOC_ID,
   SQS_DATASOURCE_INTERNAL,
 } from "@budibase/backend-core"
-import { generateJunctionTableID } from "../../../../db/utils"
-import AliasTables from "../sqlAlias"
-import { outputProcessing } from "../../../../utilities/rowProcessor"
+import { generateJunctionTableID } from "../../../../../db/utils"
+import AliasTables from "../../sqlAlias"
+import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
-import { processRowCountResponse } from "../utils"
+import { processRowCountResponse } from "../../utils"
 import {
   updateFilterKeys,
   getRelationshipColumns,
   getTableIDList,
-} from "./filters"
+} from "../filters"
 import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
-import { isSearchingByRowID } from "./utils"
+import { isSearchingByRowID } from "../utils"
 import tracer from "dd-trace"

 const builder = new sql.Sql(SqlClient.SQL_LITE)

From 374a1ba871be92fdc1f8106af4c4af4fdd711e96 Mon Sep 17 00:00:00 2001
From: Budibase Staging Release Bot <>
Date: Fri, 2 Aug 2024 14:38:15 +0000
Subject: [PATCH 21/21] Bump version to 2.29.28

---
 lerna.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lerna.json b/lerna.json
index e016e50225..d7d2bdce39 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.29.27",
+  "version": "2.29.28",
   "npmClient": "yarn",
   "packages": [
     "packages/*",