From 1d695be77c1ff595018c6632255037ea4c17183c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:21:49 +0100 Subject: [PATCH 1/8] This PR includes a change to pagination which makes sure if the 5000 max row limit is hit that pagination still kicks in. This means that you can eventually return all rows, although for very large tables you may hit rate limits (if you have thousands of rows related to each row in your table). --- .../scripts/integrations/postgres/init.sql | 123 +++++------------- .../api/controllers/row/ExternalRequest.ts | 15 ++- .../src/api/controllers/row/external.ts | 2 +- .../src/sdk/app/rows/search/external.ts | 65 ++++----- .../server/src/sdk/app/rows/search/sqs.ts | 40 +++--- 5 files changed, 104 insertions(+), 141 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index 9624208deb..dce228dcfa 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -1,92 +1,35 @@ -SELECT 'CREATE DATABASE main' -WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec -CREATE SCHEMA "test-1"; -CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); -CREATE TABLE Persons ( - PersonID SERIAL PRIMARY KEY, - LastName varchar(255), - FirstName varchar(255), - Address varchar(255), - City varchar(255) DEFAULT 'Belfast', - Age INTEGER DEFAULT 20 NOT NULL, - Year INTEGER, - Type person_job +-- Create the first table +CREATE TABLE first_table ( + id SERIAL PRIMARY KEY, + name VARCHAR(255) NOT NULL, + description TEXT ); -CREATE TABLE Tasks ( - TaskID SERIAL PRIMARY KEY, - ExecutorID INT, - QaID INT, - Completed BOOLEAN, - TaskName varchar(255), - CONSTRAINT fkexecutor - FOREIGN KEY(ExecutorID) - REFERENCES Persons(PersonID), - CONSTRAINT fkqa - FOREIGN KEY(QaID) - REFERENCES Persons(PersonID) -); -CREATE TABLE Products ( - ProductID SERIAL PRIMARY KEY, - ProductName varchar(255) -); -CREATE TABLE Products_Tasks ( - ProductID INT NOT NULL, - TaskID INT NOT NULL, - CONSTRAINT fkProducts - FOREIGN KEY(ProductID) - REFERENCES Products(ProductID), - CONSTRAINT fkTasks - FOREIGN KEY(TaskID) - REFERENCES Tasks(TaskID), - PRIMARY KEY (ProductID, TaskID) -); -CREATE TABLE "test-1".table1 ( - id SERIAL PRIMARY KEY, - Name varchar(255) -); -CREATE TABLE CompositeTable ( - KeyPartOne varchar(128), - KeyPartTwo varchar(128), - Name varchar(255), - PRIMARY KEY (KeyPartOne, KeyPartTwo) -); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa', 1999); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, 
LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); -INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); --- insert a lot of tasks for testing -WITH RECURSIVE generate_series AS ( - SELECT 1 AS n - UNION ALL - SELECT n + 1 FROM generate_series WHERE n < 6000 -), -random_data AS ( - SELECT - n, - (random() * 9 + 1)::int AS ExecutorID, - (random() * 9 + 1)::int AS QaID, - 'assembling' AS TaskName, - (random() < 0.5) AS Completed - FROM generate_series -) -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) -SELECT ExecutorID, QaID, TaskName, Completed -FROM random_data; -INSERT INTO Products (ProductName) VALUES ('Computers'); -INSERT INTO Products (ProductName) VALUES ('Laptops'); -INSERT INTO Products (ProductName) VALUES ('Chairs'); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1); -INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); -INSERT INTO "test-1".table1 (Name) VALUES ('Test'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); +-- Create the second table +CREATE TABLE second_table ( + id SERIAL PRIMARY KEY, + first_table_id INT REFERENCES first_table(id), + data TEXT NOT NULL +); + +-- Insert 50 rows into the first table +DO +$$ +BEGIN + FOR i IN 1..50 LOOP + INSERT INTO first_table (name, description) + VALUES ('Name ' || i, 'Description ' || i); + END LOOP; +END +$$; + +-- Insert 10,000 rows into the second table, all related to the first row in the first table +DO +$$ +BEGIN + FOR i IN 1..10000 LOOP + INSERT INTO second_table (first_table_id, data) + VALUES (1, 'Data ' || i); + END LOOP; +END +$$; diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index 2ecdf9a4cb..6538e7347a 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -66,9 +66,14 @@ export interface RunConfig { includeSqlRelationships?: IncludeRelationship } +export type ExternalReadRequestReturnType = { + rows: Row[] + rawResponseSize: number +} + export type ExternalRequestReturnType = T extends Operation.READ - ? Row[] + ? ExternalReadRequestReturnType : T extends Operation.COUNT ? number : { row: Row; table: Table } @@ -741,9 +746,11 @@ export class ExternalRequest { ) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { - return ( - Array.isArray(output) ? output : [output] - ) as ExternalRequestReturnType + const rows = Array.isArray(output) ? 
output : [output] + return { + rows, + rawResponseSize: responseRows.length, + } as ExternalRequestReturnType } else { return { row: output[0], table } as ExternalRequestReturnType } diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 06013d230c..78fae7aad8 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -136,7 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { includeSqlRelationships: IncludeRelationship.INCLUDE, }) const table: Table = tables[tableName] - const row = response[0] + const row = response.rows[0] // this seems like a lot of work, but basically we need to dig deeper for the enrich // for a single row, there is probably a better way to do this with some smart multi-layer joins for (let [fieldName, field] of Object.entries(table.schema)) { diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index e7fd2888de..76f00a25e2 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -23,6 +23,7 @@ import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" import { isSearchingByRowID } from "./utils" +import { ExternalReadRequestReturnType } from "../../../../api/controllers/row/ExternalRequest" function getPaginationAndLimitParameters( filters: SearchFilters, @@ -47,7 +48,7 @@ function getPaginationAndLimitParameters( limit: limit + 1, } if (bookmark) { - paginateObj.offset = limit * bookmark + paginateObj.offset = bookmark } } else if (limit) { paginateObj = { @@ -105,37 +106,42 @@ export async function search( paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, } - const queries: Promise[] = [] - queries.push(handleRequest(Operation.READ, tableId, parameters)) + const queries: [ + Promise, + Promise | undefined + ] = [handleRequest(Operation.READ, tableId, parameters), undefined] if (countRows) { - queries.push(handleRequest(Operation.COUNT, tableId, parameters)) + queries[1] = handleRequest(Operation.COUNT, tableId, parameters) } const responses = await Promise.all(queries) - let rows = responses[0] as Row[] - const totalRows = - responses.length > 1 ? (responses[1] as number) : undefined + let rows = responses[0].rows + const rawResponseSize = responses[0].rawResponseSize + const totalRows = responses.length > 1 ? 
responses[1] : undefined - let hasNextPage = false - // remove the extra row if it's there - if (paginate && limit && rows.length > limit) { - rows.pop() - hasNextPage = true - } - - if (options.fields) { - const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] - rows = rows.map((r: any) => pick(r, fields)) - } - - rows = await outputProcessing(table, rows, { + let processed = await outputProcessing(table, rows, { preserveLinks: true, squash: true, }) + let hasNextPage = false + // if the raw rows is greater than the limit then we likely need to paginate + if (paginate && limit && rawResponseSize > limit) { + hasNextPage = true + // processed rows has merged relationships down, this might not be more than limit + if (processed.length > limit) { + processed.pop() + } + } + + if (options.fields) { + const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] + processed = processed.map((r: any) => pick(r, fields)) + } + // need wrapper object for bookmarks etc when paginating - const response: SearchResponse = { rows, hasNextPage } + const response: SearchResponse = { rows: processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = bookmark + 1 + response.bookmark = processed.length } if (totalRows != null) { response.totalRows = totalRows @@ -255,24 +261,21 @@ export async function exportRows( } export async function fetch(tableId: string): Promise { - const response = await handleRequest( - Operation.READ, - tableId, - { - includeSqlRelationships: IncludeRelationship.INCLUDE, - } - ) + const response = await handleRequest(Operation.READ, tableId, { + includeSqlRelationships: IncludeRelationship.INCLUDE, + }) const table = await sdk.tables.getTable(tableId) - return await outputProcessing(table, response, { + return await outputProcessing(table, response.rows, { preserveLinks: true, squash: true, }) } export async function fetchRaw(tableId: string): Promise { - return await handleRequest(Operation.READ, tableId, { + const response = await handleRequest(Operation.READ, tableId, { includeSqlRelationships: IncludeRelationship.INCLUDE, }) + return response.rows } export async function fetchView(viewName: string) { diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 5da2a7bcfb..2b3ed0c087 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -45,6 +45,7 @@ import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core" import { isSearchingByRowID } from "./utils" const builder = new sql.Sql(SqlClient.SQL_LITE) +const SQLITE_COLUMN_LIMIT = 2000 const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`) const MISSING_TABLE_REGX = new RegExp(`no such table: .+`) const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`) @@ -55,12 +56,14 @@ function buildInternalFieldList( opts?: { relationships?: RelationshipsJson[] } ) { let fieldList: string[] = [] - const addJunctionFields = (relatedTable: Table, fields: string[]) => { + const getJunctionFields = (relatedTable: Table, fields: string[]) => { + const junctionFields: string[] = [] fields.forEach(field => { - fieldList.push( + junctionFields.push( `${generateJunctionTableID(table._id!, relatedTable._id!)}.${field}` ) }) + return junctionFields } fieldList = fieldList.concat( PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`) @@ -70,18 +73,22 @@ function buildInternalFieldList( if (!opts?.relationships && isRelationship) { continue } - if 
(isRelationship) { + if (!isRelationship) { + fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`) + } else { const linkCol = col as RelationshipFieldMetadata const relatedTable = tables.find(table => table._id === linkCol.tableId) - // no relationships provided, don't go more than a layer deep - if (relatedTable) { - fieldList = fieldList.concat( - buildInternalFieldList(relatedTable, tables) - ) - addJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) + if (!relatedTable) { + continue } - } else { - fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`) + const relatedFields = buildInternalFieldList(relatedTable, tables).concat( + getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) + ) + // break out of the loop if we have reached the max number of columns + if (relatedFields.length + fieldList.length > SQLITE_COLUMN_LIMIT) { + break + } + fieldList = fieldList.concat(relatedFields) } } return [...new Set(fieldList)] @@ -315,7 +322,7 @@ export async function search( paginate = true request.paginate = { limit: params.limit + 1, - offset: bookmark * params.limit, + offset: bookmark, } } @@ -345,10 +352,13 @@ export async function search( ) // check for pagination final row - let nextRow: Row | undefined + let nextRow: boolean = false if (paginate && params.limit && rows.length > params.limit) { // remove the extra row that confirmed if there is another row to move to - nextRow = processed.pop() + nextRow = true + if (processed.length > params.limit) { + processed.pop() + } } // get the rows @@ -372,7 +382,7 @@ export async function search( // check for pagination if (paginate && nextRow) { response.hasNextPage = true - response.bookmark = bookmark + 1 + response.bookmark = processed.length } if (paginate && !nextRow) { response.hasNextPage = false From a2f11f17fd2947619ba488c6652f3d398be14776 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:31:02 +0100 Subject: [PATCH 2/8] Type fix. --- packages/server/src/sdk/app/rows/external.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/server/src/sdk/app/rows/external.ts b/packages/server/src/sdk/app/rows/external.ts index 9ab1362606..f81d67f621 100644 --- a/packages/server/src/sdk/app/rows/external.ts +++ b/packages/server/src/sdk/app/rows/external.ts @@ -21,7 +21,8 @@ export async function getRow( ? IncludeRelationship.INCLUDE : IncludeRelationship.EXCLUDE, }) - return response ? response[0] : response + const rows = response?.rows || [] + return rows[0] } export async function save( From de22a078c066da363095fcdc148f67dc5b4b8365 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 16:50:20 +0100 Subject: [PATCH 3/8] Adding bookmark to each subsequent (thanks tests ). 
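
The bookmark is now a cumulative row offset rather than a page number: the
offset sent to the database is the bookmark itself, so each response must
return the incoming bookmark plus the number of rows in the page. Returning
only processed.length (as the previous commit did) reset the offset back to
the first page on every subsequent request. A worked trace of the intended
arithmetic, using illustrative numbers (limit 3, 8 rows total):

    request 1: bookmark 0 -> offset 0, 3 rows returned, next bookmark 0 + 3 = 3
    request 2: bookmark 3 -> offset 3, 3 rows returned, next bookmark 3 + 3 = 6
    request 3: bookmark 6 -> offset 6, 2 rows returned, hasNextPage false
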
--- packages/server/src/sdk/app/rows/search/external.ts | 2 +- packages/server/src/sdk/app/rows/search/sqs.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 76f00a25e2..f47f6f7998 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -141,7 +141,7 @@ export async function search( // need wrapper object for bookmarks etc when paginating const response: SearchResponse = { rows: processed, hasNextPage } if (hasNextPage && bookmark != null) { - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (totalRows != null) { response.totalRows = totalRows diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 2b3ed0c087..f3168a4462 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -382,7 +382,7 @@ export async function search( // check for pagination if (paginate && nextRow) { response.hasNextPage = true - response.bookmark = processed.length + response.bookmark = bookmark + processed.length } if (paginate && !nextRow) { response.hasNextPage = false From b54157a6fb42ce38b4a7f858572abdfa8d092bab Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Wed, 31 Jul 2024 17:22:17 +0100 Subject: [PATCH 4/8] Fix for enrich endpoint discovered by tests. --- packages/server/src/api/controllers/row/external.ts | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index 78fae7aad8..8b95d9c2f3 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -163,10 +163,14 @@ export async function fetchEnrichedRow(ctx: UserCtx) { }, includeSqlRelationships: IncludeRelationship.INCLUDE, }) - row[fieldName] = await outputProcessing(linkedTable, relatedRows, { - squash: true, - preserveLinks: true, - }) + row[fieldName] = await outputProcessing( + linkedTable, + relatedRows.rows, + { + squash: true, + preserveLinks: true, + } + ) } return row } From 8a64dd1e0a7826496af2a677374d3c89f94e5bd8 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 11:39:31 +0100 Subject: [PATCH 5/8] Reverting init.sql. 
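
The first_table/second_table fixture introduced at the start of this series
is no longer needed: the pagination edge case is covered by a dedicated test
in the next commit, which lowers SQL_MAX_ROWS at runtime instead of relying
on thousands of seeded rows.
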
--- .../scripts/integrations/postgres/init.sql | 121 +++++++++++++----- 1 file changed, 89 insertions(+), 32 deletions(-) diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index dce228dcfa..f5b8086e32 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -1,35 +1,92 @@ --- Create the first table -CREATE TABLE first_table ( - id SERIAL PRIMARY KEY, - name VARCHAR(255) NOT NULL, - description TEXT +SELECT 'CREATE DATABASE main' +WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec +CREATE SCHEMA "test-1"; +CREATE TYPE person_job AS ENUM ('qa', 'programmer', 'designer', 'support'); +CREATE TABLE Persons ( + PersonID SERIAL PRIMARY KEY, + LastName varchar(255), + FirstName varchar(255), + Address varchar(255), + City varchar(255) DEFAULT 'Belfast', + Age INTEGER DEFAULT 20 NOT NULL, + Year INTEGER, + Type person_job ); - --- Create the second table -CREATE TABLE second_table ( - id SERIAL PRIMARY KEY, - first_table_id INT REFERENCES first_table(id), - data TEXT NOT NULL +CREATE TABLE Tasks ( + TaskID SERIAL PRIMARY KEY, + ExecutorID INT, + QaID INT, + Completed BOOLEAN, + TaskName varchar(255), + CONSTRAINT fkexecutor + FOREIGN KEY(ExecutorID) + REFERENCES Persons(PersonID), + CONSTRAINT fkqa + FOREIGN KEY(QaID) + REFERENCES Persons(PersonID) ); +CREATE TABLE Products ( + ProductID SERIAL PRIMARY KEY, + ProductName varchar(255) +); +CREATE TABLE Products_Tasks ( + ProductID INT NOT NULL, + TaskID INT NOT NULL, + CONSTRAINT fkProducts + FOREIGN KEY(ProductID) + REFERENCES Products(ProductID), + CONSTRAINT fkTasks + FOREIGN KEY(TaskID) + REFERENCES Tasks(TaskID), + PRIMARY KEY (ProductID, TaskID) +); +CREATE TABLE "test-1".table1 ( + id SERIAL PRIMARY KEY, + Name varchar(255) +); +CREATE TABLE CompositeTable ( + KeyPartOne varchar(128), + KeyPartTwo varchar(128), + Name varchar(255), + PRIMARY KEY (KeyPartOne, KeyPartTwo) +); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mike', 'Hughes', '123 Fake Street', 'Belfast', 'qa', 1999); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); --- Insert 50 rows into the first table -DO -$$ -BEGIN - FOR i IN 1..50 LOOP - INSERT INTO 
first_table (name, description) - VALUES ('Name ' || i, 'Description ' || i); - END LOOP; -END -$$; - --- Insert 10,000 rows into the second table, all related to the first row in the first table -DO -$$ -BEGIN - FOR i IN 1..10000 LOOP - INSERT INTO second_table (first_table_id, data) - VALUES (1, 'Data ' || i); - END LOOP; -END -$$; +-- insert a lot of tasks for testing +WITH RECURSIVE generate_series AS ( + SELECT 1 AS n + UNION ALL + SELECT n + 1 FROM generate_series WHERE n < 6000 +), +random_data AS ( + SELECT + n, + (random() * 9 + 1)::int AS ExecutorID, + (random() * 9 + 1)::int AS QaID, + 'assembling' AS TaskName, + (random() < 0.5) AS Completed + FROM generate_series +) +INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) +SELECT ExecutorID, QaID, TaskName, Completed +FROM random_data; +INSERT INTO Products (ProductName) VALUES ('Computers'); +INSERT INTO Products (ProductName) VALUES ('Laptops'); +INSERT INTO Products (ProductName) VALUES ('Chairs'); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1); +INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); +INSERT INTO "test-1".table1 (Name) VALUES ('Test'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); \ No newline at end of file From 99e8ef58dd9c9fa32b6f31228f0ddb513777577c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:03:58 +0100 Subject: [PATCH 6/8] Adding test case - had to rejig how internal limit is retrieved but works without requiring thousands of rows. --- packages/backend-core/src/sql/sql.ts | 14 +-- .../scripts/integrations/postgres/init.sql | 2 +- .../src/api/routes/tests/search.spec.ts | 90 ++++++++++++++++++- 3 files changed, 96 insertions(+), 10 deletions(-) diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index a67da7bc10..76c86b2b62 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -37,10 +37,12 @@ import { helpers } from "@budibase/shared-core" type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any -const envLimit = environment.SQL_MAX_ROWS - ? parseInt(environment.SQL_MAX_ROWS) - : null -const BASE_LIMIT = envLimit || 5000 +function getBaseLimit() { + const envLimit = environment.SQL_MAX_ROWS + ? parseInt(environment.SQL_MAX_ROWS) + : null + return envLimit || 5000 +} // Takes a string like foo and returns a quoted string like [foo] for SQL Server // and "foo" for Postgres. 
@@ -838,7 +840,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { private readonly limit: number // pass through client to get flavour of SQL - constructor(client: SqlClient, limit: number = BASE_LIMIT) { + constructor(client: SqlClient, limit: number = getBaseLimit()) { super(client) this.limit = limit } @@ -882,7 +884,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { query = builder.read(client, json, { limits: { query: this.limit, - base: BASE_LIMIT, + base: getBaseLimit(), }, }) break diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index f5b8086e32..9624208deb 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -89,4 +89,4 @@ INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2); INSERT INTO "test-1".table1 (Name) VALUES ('Test'); INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('aaa', 'bbb', 'Michael'); INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('bbb', 'ccc', 'Andrew'); -INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); \ No newline at end of file +INSERT INTO CompositeTable (KeyPartOne, KeyPartTwo, Name) VALUES ('ddd', '', 'OneKey'); diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 2c5756efe4..bc3cdccf18 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -53,6 +53,7 @@ describe.each([ const isLucene = name === "lucene" const isInMemory = name === "in-memory" const isInternal = isSqs || isLucene || isInMemory + const isSql = !isInMemory && !isLucene const config = setup.getConfig() let envCleanup: (() => void) | undefined @@ -192,7 +193,8 @@ describe.each([ // different to the one passed in will cause the assertion to fail. Extra // rows returned by the query will also cause the assertion to fail. async toMatchExactly(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -202,13 +204,15 @@ describe.each([ expect.objectContaining(this.popRow(expectedRow, foundRows)) ) ) + return response } // Asserts that the query returns rows matching exactly the set of rows // passed in. The order of the rows is not important, but extra rows will // cause the assertion to fail. async toContainExactly(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -220,6 +224,7 @@ describe.each([ ) ) ) + return response } // Asserts that the query returns some property values - this cannot be used @@ -236,6 +241,7 @@ describe.each([ expect(response[key]).toEqual(properties[key]) } } + return response } // Asserts that the query doesn't return a property, e.g. pagination parameters. @@ -245,13 +251,15 @@ describe.each([ // eslint-disable-next-line jest/no-standalone-expect expect(response[property]).toBeUndefined() } + return response } // Asserts that the query returns rows matching the set of rows passed in. // The order of the rows is not important. 
Extra rows will not cause the // assertion to fail. async toContain(expectedRows: any[]) { - const { rows: foundRows } = await this.performSearch() + const response = await this.performSearch() + const foundRows = response.rows // eslint-disable-next-line jest/no-standalone-expect expect([...foundRows]).toEqual( @@ -261,6 +269,7 @@ describe.each([ ) ) ) + return response } async toFindNothing() { @@ -2608,4 +2617,79 @@ describe.each([ }).toContainExactly([row]) }) }) + + isSql && + describe("pagination edge case with relationships", () => { + let mainRows: Row[] = [] + + beforeAll(async () => { + const toRelateTable = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + table = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + rel: { + name: "rel", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: toRelateTable._id!, + fieldName: "rel", + }, + }) + const relatedRows = await Promise.all([ + config.api.row.save(toRelateTable._id!, { name: "tag 1" }), + config.api.row.save(toRelateTable._id!, { name: "tag 2" }), + config.api.row.save(toRelateTable._id!, { name: "tag 3" }), + config.api.row.save(toRelateTable._id!, { name: "tag 4" }), + config.api.row.save(toRelateTable._id!, { name: "tag 5" }), + config.api.row.save(toRelateTable._id!, { name: "tag 6" }), + ]) + mainRows = await Promise.all([ + config.api.row.save(table._id!, { + name: "product 1", + rel: relatedRows.map(row => row._id), + }), + config.api.row.save(table._id!, { + name: "product 2", + rel: [], + }), + config.api.row.save(table._id!, { + name: "product 3", + rel: [], + }), + ]) + }) + + it("can still page when the hard limit is hit", async () => { + await config.withCoreEnv( + { + SQL_MAX_ROWS: "6", + }, + async () => { + const params: Omit = { + query: {}, + paginate: true, + limit: 3, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + } + const page1 = await expectSearch(params).toContain([mainRows[0]]) + expect(page1.hasNextPage).toBe(true) + expect(page1.bookmark).toBeDefined() + const page2 = await expectSearch({ + ...params, + bookmark: page1.bookmark, + }).toContain([mainRows[1], mainRows[2]]) + expect(page2.hasNextPage).toBe(false) + } + ) + }) + }) }) From 224d2a195330eef4c6d089ab113dc91361851ddc Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:07:34 +0100 Subject: [PATCH 7/8] PR comments. --- .../src/sdk/app/rows/search/external.ts | 17 +++++--------- .../server/src/sdk/app/rows/search/sqs.ts | 22 +++++++------------ 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index f47f6f7998..9d9f4fde39 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -106,17 +106,12 @@ export async function search( paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, } - const queries: [ - Promise, - Promise | undefined - ] = [handleRequest(Operation.READ, tableId, parameters), undefined] - if (countRows) { - queries[1] = handleRequest(Operation.COUNT, tableId, parameters) - } - const responses = await Promise.all(queries) - let rows = responses[0].rows - const rawResponseSize = responses[0].rawResponseSize - const totalRows = responses.length > 1 ? 
responses[1] : undefined + const [{ rows, rawResponseSize }, totalRows] = await Promise.all([ + handleRequest(Operation.READ, tableId, parameters), + countRows + ? handleRequest(Operation.COUNT, tableId, parameters) + : Promise.resolve(undefined), + ]) let processed = await outputProcessing(table, rows, { preserveLinks: true, diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 9d6bd7e89b..f6154cf70c 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -332,20 +332,14 @@ export async function search( } try { - const queries: Promise[] = [] - queries.push(runSqlQuery(request, allTables, relationships)) - if (options.countRows) { - // get the total count of rows - queries.push( - runSqlQuery(request, allTables, relationships, { - countTotalRows: true, - }) - ) - } - const responses = await Promise.all(queries) - let rows = responses[0] as Row[] - const totalRows = - responses.length > 1 ? (responses[1] as number) : undefined + const [rows, totalRows] = await Promise.all([ + runSqlQuery(request, allTables, relationships), + options.countRows + ? runSqlQuery(request, allTables, relationships, { + countTotalRows: true, + }) + : Promise.resolve(undefined), + ]) // process from the format of tableId.column to expected format also // make sure JSON columns corrected From 6eb3b2793b9390d1f6f8a297bc04c77950094ea1 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 1 Aug 2024 13:31:10 +0100 Subject: [PATCH 8/8] Linting. --- packages/server/src/sdk/app/rows/search/external.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 9d9f4fde39..3ce1013b85 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -23,7 +23,6 @@ import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" import { isSearchingByRowID } from "./utils" -import { ExternalReadRequestReturnType } from "../../../../api/controllers/row/ExternalRequest" function getPaginationAndLimitParameters( filters: SearchFilters,
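
Taken together, the series changes the search pagination contract: the
bookmark is a cumulative row offset rather than a page number, hasNextPage is
derived from the raw response size before relationship squashing, and hitting
the SQL_MAX_ROWS cap no longer halts paging. A minimal consumer loop under
the new contract is sketched below. Note that searchRows is a hypothetical
wrapper around the row search endpoint, not an API defined in this series;
only the paginate/limit/bookmark inputs and the rows/hasNextPage/bookmark
outputs mirror the real request and response shapes.

    type SearchPage = {
      rows: Record<string, any>[]
      hasNextPage?: boolean
      bookmark?: number
    }

    async function fetchAllRows(
      searchRows: (params: {
        paginate: true
        limit: number
        bookmark: number
      }) => Promise<SearchPage>,
      limit = 100
    ): Promise<Record<string, any>[]> {
      const all: Record<string, any>[] = []
      let bookmark = 0
      let hasNextPage = true
      while (hasNextPage) {
        const page = await searchRows({ paginate: true, limit, bookmark })
        all.push(...page.rows)
        // the server returns bookmark = incoming bookmark + rows in this
        // page, i.e. the row offset at which the next page starts
        bookmark = page.bookmark ?? bookmark + page.rows.length
        hasNextPage = !!page.hasNextPage
      }
      return all
    }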