Merge pull request #13969 from Budibase/feature/count-rows-to-return

Counting total rows in query (Datasource+)
Michael Drury 2024-06-20 17:53:04 +01:00 committed by GitHub
commit 443289c66e
23 changed files with 1076 additions and 638 deletions

View File

@@ -1,10 +1,10 @@
 import { Knex, knex } from "knex"
 import * as dbCore from "../db"
 import {
-  isIsoDateString,
-  isValidFilter,
   getNativeSql,
   isExternalTable,
+  isIsoDateString,
+  isValidFilter,
 } from "./utils"
 import { SqlStatements } from "./sqlStatements"
 import SqlTableQueryBuilder from "./sqlTable"
@@ -12,21 +12,21 @@ import {
   BBReferenceFieldMetadata,
   FieldSchema,
   FieldType,
+  INTERNAL_TABLE_SOURCE_ID,
   JsonFieldMetadata,
+  JsonTypes,
   Operation,
+  prefixed,
   QueryJson,
-  SqlQuery,
+  QueryOptions,
   RelationshipsJson,
   SearchFilters,
+  SortOrder,
+  SqlClient,
+  SqlQuery,
   SqlQueryBinding,
   Table,
   TableSourceType,
-  INTERNAL_TABLE_SOURCE_ID,
-  SqlClient,
-  QueryOptions,
-  JsonTypes,
-  prefixed,
-  SortOrder,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -114,7 +114,7 @@ function generateSelectStatement(
 ): (string | Knex.Raw)[] | "*" {
   const { resource, meta } = json
-  if (!resource) {
+  if (!resource || !resource.fields || resource.fields.length === 0) {
     return "*"
   }
@@ -410,13 +410,32 @@ class InternalBuilder {
     return query
   }
 
+  addDistinctCount(
+    query: Knex.QueryBuilder,
+    json: QueryJson
+  ): Knex.QueryBuilder {
+    const table = json.meta.table
+    const primary = table.primary
+    const aliases = json.tableAliases
+    const aliased =
+      table.name && aliases?.[table.name] ? aliases[table.name] : table.name
+    if (!primary) {
+      throw new Error("SQL counting requires primary key to be supplied")
+    }
+    return query.countDistinct(`${aliased}.${primary[0]} as total`)
+  }
+
   addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
-    let { sort, paginate } = json
+    let { sort } = json
     const table = json.meta.table
+    const primaryKey = table.primary
     const tableName = getTableName(table)
     const aliases = json.tableAliases
     const aliased =
       tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
+    if (!Array.isArray(primaryKey)) {
+      throw new Error("Sorting requires primary key to be specified for table")
+    }
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
@@ -429,10 +448,9 @@ class InternalBuilder {
         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
       }
-    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
-      // @ts-ignore
-      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
     }
+    // always add sorting by the primary key - make sure result is deterministic
+    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
     return query
   }
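For context, the `as total` alias produced here is what the count-parsing code later reads back. A minimal standalone sketch of the same knex call, assuming a hypothetical "persons" table aliased as "a" with primary key "personid" (names are illustrative, not from the diff):

import { knex } from "knex"

const client = knex({ client: "pg" })

// count distinct primary keys so joined relationship rows are not
// double-counted, aliasing the aggregate as "total"
const query = client("persons as a").countDistinct("a.personid as total")

console.log(query.toSQL().sql)
// select count(distinct "a"."personid") as "total" from "persons" as "a"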
@@ -522,7 +540,7 @@ class InternalBuilder {
         })
       }
     }
-    return query.limit(BASE_LIMIT)
+    return query
   }
@@ -533,13 +551,12 @@ knexWithAlias(
     const tableName = endpoint.entityId
     const tableAlias = aliases?.[tableName]
 
-    const query = knex(
+    return knex(
       this.tableNameWithSchema(tableName, {
         alias: tableAlias,
         schema: endpoint.schema,
       })
     )
-    return query
   }
 
   create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
@@ -596,25 +613,23 @@ class InternalBuilder {
     return query.upsert(parsedBody)
   }
 
-  read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
-    let { endpoint, resource, filters, paginate, relationships, tableAliases } =
-      json
+  read(
+    knex: Knex,
+    json: QueryJson,
+    opts: {
+      limits?: { base: number; query: number }
+    } = {}
+  ): Knex.QueryBuilder {
+    let { endpoint, filters, paginate, relationships, tableAliases } = json
+    const { limits } = opts
+    const counting = endpoint.operation === Operation.COUNT
 
     const tableName = endpoint.entityId
-    // select all if not specified
-    if (!resource) {
-      resource = { fields: [] }
-    }
-    let selectStatement: string | (string | Knex.Raw)[] = "*"
-    // handle select
-    if (resource.fields && resource.fields.length > 0) {
-      // select the resources as the format "table.columnName" - this is what is provided
-      // by the resource builder further up
-      selectStatement = generateSelectStatement(json, knex)
-    }
-    let foundLimit = limit || BASE_LIMIT
+    // start building the query
+    let query = this.knexWithAlias(knex, endpoint, tableAliases)
     // handle pagination
     let foundOffset: number | null = null
+    let foundLimit = limits?.query || limits?.base
     if (paginate && paginate.page && paginate.limit) {
       // @ts-ignore
       const page = paginate.page <= 1 ? 0 : paginate.page - 1
@@ -627,24 +642,39 @@ class InternalBuilder {
     } else if (paginate && paginate.limit) {
       foundLimit = paginate.limit
     }
-    // start building the query
-    let query = this.knexWithAlias(knex, endpoint, tableAliases)
-    query = query.limit(foundLimit)
-    if (foundOffset) {
-      query = query.offset(foundOffset)
+    // counting should not sort, limit or offset
+    if (!counting) {
+      // add the found limit if supplied
+      if (foundLimit != null) {
+        query = query.limit(foundLimit)
+      }
+      // add overall pagination
+      if (foundOffset != null) {
+        query = query.offset(foundOffset)
+      }
+      // add sorting to pre-query
+      // no point in sorting when counting
+      query = this.addSorting(query, json)
     }
+    // add filters to the query (where)
     query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
-    // add sorting to pre-query
-    query = this.addSorting(query, json)
     const alias = tableAliases?.[tableName] || tableName
-    let preQuery = knex({
-      [alias]: query,
-    } as any).select(selectStatement) as any
+    let preQuery: Knex.QueryBuilder = knex({
+      // the typescript definition for the knex constructor doesn't support this
+      // syntax, but it is the only way to alias a pre-query result as part of
+      // a query - there is an alias dictionary type, but it assumes it can only
+      // be a table name, not a pre-query
+      [alias]: query as any,
+    })
+    // if counting, use distinct count, else select
+    preQuery = !counting
+      ? preQuery.select(generateSelectStatement(json, knex))
+      : this.addDistinctCount(preQuery, json)
     // have to add after as well (this breaks MS-SQL)
-    if (this.client !== SqlClient.MS_SQL) {
+    if (this.client !== SqlClient.MS_SQL && !counting) {
       preQuery = this.addSorting(preQuery, json)
     }
     // handle joins
@@ -655,6 +685,13 @@ class InternalBuilder {
       endpoint.schema,
       tableAliases
     )
+
+    // add a base limit over the whole query
+    // if counting we can't set this limit
+    if (limits?.base) {
+      query = query.limit(limits.base)
+    }
+
     return this.addFilters(query, filters, json.meta.table, {
       relationship: true,
       aliases: tableAliases,
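The object-keyed knex() call above is the mechanism that lets the limited, sorted inner query be treated as a derived table and then either selected from or count-distincted. A reduced sketch of that shape, mirroring the diff's usage with hypothetical table/column names (the cast is needed because, as the diff's comment notes, the TypeScript types don't cover this):

import { knex } from "knex"

const client = knex({ client: "pg" })
const sub = client("tasks").select("*").limit(500)

// alias the pre-query as "a" and select from it
const outer = client({ a: sub } as any).select("a.taskname")

console.log(outer.toSQL().sql)
// select "a"."taskname" from (select * from "tasks" limit ?) as "a"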
@@ -699,6 +736,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     this.limit = limit
   }
 
+  private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) {
+    const sqlClient = this.getSqlClient()
+    if (opts?.disableBindings) {
+      return { sql: query.toString() }
+    } else {
+      let native = getNativeSql(query)
+      if (sqlClient === SqlClient.SQL_LITE) {
+        native = convertBooleans(native)
+      }
+      return native
+    }
+  }
+
   /**
    * @param json The JSON query DSL which is to be converted to SQL.
    * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@@ -722,7 +772,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         query = builder.create(client, json, opts)
         break
       case Operation.READ:
-        query = builder.read(client, json, this.limit)
+        query = builder.read(client, json, {
+          limits: {
+            query: this.limit,
+            base: BASE_LIMIT,
+          },
+        })
+        break
+      case Operation.COUNT:
+        // read without any limits to count
+        query = builder.read(client, json)
         break
       case Operation.UPDATE:
         query = builder.update(client, json, opts)
@@ -744,15 +803,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         throw `Operation type is not supported by SQL query builder`
     }
 
-    if (opts?.disableBindings) {
-      return { sql: query.toString() }
-    } else {
-      let native = getNativeSql(query)
-      if (sqlClient === SqlClient.SQL_LITE) {
-        native = convertBooleans(native)
-      }
-      return native
-    }
+    return this.convertToNative(query, opts)
   }
@@ -828,6 +879,9 @@ async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
         await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
       )
     }
+    if (operation === Operation.COUNT) {
+      return results
+    }
     if (operation !== Operation.READ) {
       return row
     }

View File

@@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support');
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993);
+
+-- insert a lot of tasks for testing
+WITH RECURSIVE generate_series AS (
+    SELECT 1 AS n
+    UNION ALL
+    SELECT n + 1 FROM generate_series WHERE n < 6000
+),
+random_data AS (
+    SELECT
+        n,
+        (random() * 9 + 1)::int AS ExecutorID,
+        (random() * 9 + 1)::int AS QaID,
+        'assembling' AS TaskName,
+        (random() < 0.5) AS Completed
+    FROM generate_series
+)
+INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed)
+SELECT ExecutorID, QaID, TaskName, Completed
+FROM random_data;
 
 INSERT INTO Products (ProductName) VALUES ('Computers');
 INSERT INTO Products (ProductName) VALUES ('Laptops');
 INSERT INTO Products (ProductName) VALUES ('Chairs');

View File

@@ -7,6 +7,7 @@ import {
   FieldType,
   FilterType,
   IncludeRelationship,
+  isManyToOne,
   OneToManyRelationshipFieldMetadata,
   Operation,
   PaginationJson,
@@ -16,29 +17,33 @@ import {
   SortJson,
   SortType,
   Table,
-  isManyToOne,
 } from "@budibase/types"
 import {
   breakExternalTableId,
   breakRowIdField,
   convertRowId,
+  generateRowIdField,
   isRowId,
   isSQL,
-  generateRowIdField,
 } from "../../../integrations/utils"
 import {
   buildExternalRelationships,
   buildSqlFieldList,
   generateIdForRow,
-  sqlOutputProcessing,
+  isKnexEmptyReadResponse,
   isManyToMany,
+  sqlOutputProcessing,
 } from "./utils"
-import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
+import {
+  getDatasourceAndQuery,
+  processRowCountResponse,
+} from "../../../sdk/app/rows/utils"
 import { processObjectSync } from "@budibase/string-templates"
 import { cloneDeep } from "lodash/fp"
 import { db as dbCore } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import env from "../../../environment"
+import { makeExternalQuery } from "../../../integrations/base/query"
 
 export interface ManyRelationship {
   tableId?: string
@@ -60,6 +65,13 @@ export interface RunConfig {
   includeSqlRelationships?: IncludeRelationship
 }
 
+export type ExternalRequestReturnType<T extends Operation> =
+  T extends Operation.READ
+    ? Row[]
+    : T extends Operation.COUNT
+    ? number
+    : { row: Row; table: Table }
+
 function buildFilters(
   id: string | undefined | string[],
   filters: SearchFilters,
@@ -223,9 +235,6 @@ function isEditableColumn(column: FieldSchema) {
   return !(isExternalAutoColumn || isFormula)
 }
 
-export type ExternalRequestReturnType<T extends Operation> =
-  T extends Operation.READ ? Row[] : { row: Row; table: Table }
-
 export class ExternalRequest<T extends Operation> {
   private readonly operation: T
   private readonly tableId: string
@@ -428,7 +437,9 @@ export class ExternalRequest<T extends Operation> {
       })
       // this is the response from knex if no rows found
       const rows: Row[] =
-        !Array.isArray(response) || response?.[0].read ? [] : response
+        !Array.isArray(response) || isKnexEmptyReadResponse(response)
+          ? []
+          : response
       const storeTo = isManyToMany(field)
         ? field.throughFrom || linkPrimaryKey
         : fieldName
@@ -517,7 +528,7 @@ export class ExternalRequest<T extends Operation> {
     // finally cleanup anything that needs to be removed
     for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) {
       const table: Table | undefined = this.getTable(tableId)
-      // if its not the foreign key skip it, nothing to do
+      // if it's not the foreign key skip it, nothing to do
       if (
         !table ||
         (!isMany && table.primary && table.primary.indexOf(colName) !== -1)
@@ -662,12 +673,14 @@ export class ExternalRequest<T extends Operation> {
     }
 
     // aliasing can be disabled fully if desired
-    let response
-    if (env.SQL_ALIASING_DISABLE) {
-      response = await getDatasourceAndQuery(json)
-    } else {
-      const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
-      response = await aliasing.queryWithAliasing(json)
+    const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
+    let response = env.SQL_ALIASING_DISABLE
+      ? await getDatasourceAndQuery(json)
+      : await aliasing.queryWithAliasing(json, makeExternalQuery)
+
+    // if it's a counting operation there will be no more processing, just return the number
+    if (this.operation === Operation.COUNT) {
+      return processRowCountResponse(response) as ExternalRequestReturnType<T>
     }
 
     const responseRows = Array.isArray(response) ? response : []

View File

@@ -1,4 +1,6 @@
 import {
+  DatasourcePlusQueryResponse,
+  DSPlusOperation,
   FieldType,
   ManyToManyRelationshipFieldMetadata,
   RelationshipFieldMetadata,
@@ -192,3 +194,11 @@ export function buildSqlFieldList(
   }
   return fields
 }
+
+export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
+  return (
+    !Array.isArray(resp) ||
+    resp.length === 0 ||
+    (DSPlusOperation.READ in resp[0] && resp[0].read === true)
+  )
+}
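The shapes this helper guards against, reduced to a self-contained sketch (simplified response type, not the real DatasourcePlusQueryResponse union):

type QueryResponse = Record<string, any>[] | void

function isEmptyRead(resp: QueryResponse): boolean {
  // a void/non-array response, an empty array, or knex's { read: true }
  // marker row all mean "no rows were read"
  return !Array.isArray(resp) || resp.length === 0 || resp[0].read === true
}

console.log(isEmptyRead([{ id: 1 }]))      // false - real rows came back
console.log(isEmptyRead([]))               // true
console.log(isEmptyRead([{ read: true }])) // true - knex's empty-read marker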

View File

@@ -14,7 +14,7 @@ import {
   processDates,
   processFormulas,
 } from "../../../../utilities/rowProcessor"
-import { updateRelationshipColumns } from "./sqlUtils"
+import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
 import {
   basicProcessing,
   generateIdForRow,
@@ -137,7 +137,7 @@ export async function sqlOutputProcessing(
   relationships: RelationshipsJson[],
   opts?: { sqs?: boolean }
 ): Promise<Row[]> {
-  if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
+  if (isKnexEmptyReadResponse(rows)) {
     return []
   }
   let finalRows: { [key: string]: Row } = {}

View File

@@ -69,6 +69,7 @@ export async function searchView(
     limit: body.limit,
     bookmark: body.bookmark,
     paginate: body.paginate,
+    countRows: body.countRows,
   }
 
   const result = await sdk.rows.search(searchOptions)

View File

@@ -86,6 +86,7 @@ router
 router.post(
   "/api/v2/views/:viewId/search",
+  internalSearchValidator(),
   authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
   rowController.views.searchView
 )
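With the validator now accepting the flag, a hedged sketch of what a client request against this route might look like (illustrative usage, not taken from the diff; `viewId` is assumed to be in scope):

const res = await fetch(`/api/v2/views/${viewId}/search`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    paginate: true,
    limit: 4,
    query: {},
    countRows: true, // the new flag - ask for the total row count
  }),
})
// the response now carries totalRows alongside rows/hasNextPage/bookmark
const { rows, hasNextPage, totalRows } = await res.json()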

View File

@@ -734,6 +734,7 @@ describe.each(
       name: entityId,
       schema: {},
       type: "table",
+      primary: ["id"],
       sourceId: datasource._id!,
       sourceType: TableSourceType.EXTERNAL,
     },

File diff suppressed because it is too large

View File

@@ -7,6 +7,7 @@ import {
   INTERNAL_TABLE_SOURCE_ID,
   PermissionLevel,
   QuotaUsageType,
+  Row,
   SaveTableRequest,
   SearchFilterOperator,
   SortOrder,
@@ -17,6 +18,7 @@ import {
   UpdateViewRequest,
   ViewUIFieldMetadata,
   ViewV2,
+  SearchResponse,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -25,17 +27,21 @@ import { quotas } from "@budibase/pro"
import { db, roles } from "@budibase/backend-core" import { db, roles } from "@budibase/backend-core"
describe.each([ describe.each([
["internal", undefined], ["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => { ])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isInternal = !dsProvider const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isSqs || isLucene
let table: Table let table: Table
let datasource: Datasource let datasource: Datasource
let envCleanup: (() => void) | undefined
function saveTableRequest( function saveTableRequest(
...overrides: Partial<Omit<SaveTableRequest, "name">>[] ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@@ -82,6 +88,9 @@ describe.each([
   }
 
   beforeAll(async () => {
+    if (isSqs) {
+      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+    }
     await config.init()
 
     if (dsProvider) {
@@ -94,6 +103,9 @@ describe.each([
 
   afterAll(async () => {
     setup.afterAll()
+    if (envCleanup) {
+      envCleanup()
+    }
   })
 
   beforeEach(() => {
@@ -1252,12 +1264,13 @@ describe.each([
         paginate: true,
         limit: 4,
         query: {},
+        countRows: true,
       })
       expect(page1).toEqual({
         rows: expect.arrayContaining(rows.slice(0, 4)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: true,
         bookmark: expect.anything(),
+        totalRows: 10,
       })
 
       const page2 = await config.api.viewV2.search(view.id, {
@@ -1265,12 +1278,13 @@ describe.each([
         limit: 4,
         bookmark: page1.bookmark,
         query: {},
+        countRows: true,
       })
       expect(page2).toEqual({
         rows: expect.arrayContaining(rows.slice(4, 8)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: true,
         bookmark: expect.anything(),
+        totalRows: 10,
       })
 
       const page3 = await config.api.viewV2.search(view.id, {
@@ -1278,13 +1292,17 @@ describe.each([
         limit: 4,
         bookmark: page2.bookmark,
         query: {},
+        countRows: true,
       })
-      expect(page3).toEqual({
+      const expectation: SearchResponse<Row> = {
         rows: expect.arrayContaining(rows.slice(8)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: false,
-        bookmark: expect.anything(),
-      })
+        totalRows: 10,
+      }
+      if (isLucene) {
+        expectation.bookmark = expect.anything()
+      }
+      expect(page3).toEqual(expectation)
     })
 
     const sortTestOptions: [

View File

@@ -109,6 +109,7 @@ export function internalSearchValidator() {
     sortOrder: OPTIONAL_STRING,
     sortType: OPTIONAL_STRING,
     paginate: Joi.boolean(),
+    countRows: Joi.boolean(),
     bookmark: Joi.alternatives()
       .try(OPTIONAL_STRING, OPTIONAL_NUMBER)
       .optional(),

View File

@@ -22,6 +22,9 @@ export async function makeExternalQuery(
   ) {
     throw new Error("Entity ID and table metadata do not align")
   }
+  if (!datasource) {
+    throw new Error("No datasource provided for external query")
+  }
   datasource = await sdk.datasources.enrich(datasource)
   const Integration = await getIntegration(datasource.source)
   // query is the opinionated function

View File

@ -142,7 +142,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" })) const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({ expect(query).toEqual({
bindings: [500, 5000], bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
}) })
}) })
@@ -150,7 +150,7 @@ describe("SQL query builder", () => {
     const query = sql._query(generateRelationshipJson())
     expect(query).toEqual({
      bindings: [500, 5000],
-      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
+      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
     })
   })
@@ -160,7 +160,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [500, 5000],
-      sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`,
+      sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`,
     })
   })
@ -175,8 +175,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["john%", limit], bindings: ["john%", limit, 5000],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -190,8 +190,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit], bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`, sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -204,8 +204,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: [`%jo%`, limit], bindings: [`%jo%`, limit, 5000],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
}) })
}) })

View File

@ -57,15 +57,14 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({ expect(query).toEqual({
bindings: [relationshipLimit, limit], bindings: [relationshipLimit, limit],
sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", sql: expect.stringContaining(
multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
"a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
"a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
"b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid" "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`)
from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a" ),
left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
order by "a"."firstname" asc nulls first limit $2`),
}) })
}) })
@ -74,13 +73,10 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({ expect(query).toEqual({
bindings: [relationshipLimit, "assembling", limit], bindings: [relationshipLimit, "assembling", limit],
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", sql: expect.stringContaining(
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", multiline(`where COALESCE("b"."taskname" = $2, FALSE)
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid" order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`)
from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" ),
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE)
order by "a"."productname" asc nulls first limit $3`),
}) })
}) })
@@ -89,13 +85,10 @@ describe("Captures of real examples", () => {
     let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
     expect(query).toEqual({
       bindings: [relationshipLimit, limit],
-      sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
-        "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
-        "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
-        left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
-        left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
-        order by "a"."productname" asc nulls first limit $2`),
+      sql: expect.stringContaining(
+        multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
+          left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `)
+      ),
     })
   })
@@ -106,11 +99,11 @@ describe("Captures of real examples", () => {
     expect(query).toEqual({
       bindings: [...filters, limit, limit],
       sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname",
        "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
        "b"."productname" as "b.productname", "b"."productid" as "b.productid"
-       from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a"
+       from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a"
        left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
-       left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`),
+       left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`),
     })
   })
@ -132,19 +125,11 @@ describe("Captures of real examples", () => {
equalValue, equalValue,
limit, limit,
], ],
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", sql: expect.stringContaining(
"a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", multiline(
"b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", `where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)`
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", )
"c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", ),
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
"c"."city" as "c.city", "c"."lastname" as "c.lastname"
from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE)
order by "a"."taskname" asc nulls first limit $2) as "a"
left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
left join "products" as "b" on "b"."productid" = "d"."productid"
left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`),
}) })
}) })
}) })
@ -200,8 +185,9 @@ describe("Captures of real examples", () => {
returningQuery = input returningQuery = input
}, queryJson) }, queryJson)
expect(returningQuery).toEqual({ expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]", sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2
bindings: [1, "Test", 22], THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`),
bindings: [5000, 1, "Test", 22],
}) })
}) })
}) })

View File

@@ -1,14 +1,14 @@
 import {
-  SortJson,
+  IncludeRelationship,
   Operation,
   PaginationJson,
-  IncludeRelationship,
   Row,
-  SearchFilters,
   RowSearchParams,
+  SearchFilters,
   SearchResponse,
-  Table,
+  SortJson,
   SortOrder,
+  Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -18,7 +18,7 @@ import {
 } from "../../../../integrations/utils"
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
-import { HTTPError, db } from "@budibase/backend-core"
+import { db, HTTPError } from "@budibase/backend-core"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"
@@ -28,20 +28,26 @@ export async function search(
   table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
-  const { paginate, query, ...params } = options
+  const { countRows, paginate, query, ...params } = options
   const { limit } = params
   let bookmark =
     (params.bookmark && parseInt(params.bookmark as string)) || undefined
   if (paginate && !bookmark) {
-    bookmark = 1
+    bookmark = 0
   }
-  let paginateObj = {}
+  let paginateObj: PaginationJson | undefined
 
-  if (paginate) {
+  if (paginate && !limit) {
+    throw new Error("Cannot paginate query without a limit")
+  }
+
+  if (paginate && limit) {
     paginateObj = {
       // add one so we can track if there is another page
-      limit: limit,
-      page: bookmark,
+      limit: limit + 1,
+    }
+    if (bookmark) {
+      paginateObj.offset = limit * bookmark
     }
   } else if (params && limit) {
     paginateObj = {
@@ -69,24 +75,27 @@ export async function search(
   }
 
   try {
-    let rows = await handleRequest(Operation.READ, tableId, {
+    const parameters = {
       filters: query,
       sort,
       paginate: paginateObj as PaginationJson,
       includeSqlRelationships: IncludeRelationship.INCLUDE,
-    })
+    }
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(handleRequest(Operation.READ, tableId, parameters))
+    if (countRows) {
+      queries.push(handleRequest(Operation.COUNT, tableId, parameters))
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined
+
     let hasNextPage = false
-    if (paginate && rows.length === limit) {
-      const nextRows = await handleRequest(Operation.READ, tableId, {
-        filters: query,
-        sort,
-        paginate: {
-          limit: 1,
-          page: bookmark! * limit + 1,
-        },
-        includeSqlRelationships: IncludeRelationship.INCLUDE,
-      })
-      hasNextPage = nextRows.length > 0
+    // remove the extra row if it's there
+    if (paginate && limit && rows.length > limit) {
+      rows.pop()
+      hasNextPage = true
     }
 
     if (options.fields) {
@@ -100,7 +109,17 @@ export async function search(
     })
 
     // need wrapper object for bookmarks etc when paginating
-    return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }
+    const response: SearchResponse<Row> = { rows, hasNextPage }
+    if (hasNextPage && bookmark != null) {
+      response.bookmark = bookmark + 1
+    }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    if (paginate && !hasNextPage) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     if (err.message && err.message.includes("does not exist")) {
       throw new Error(
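The heart of this change is that the count is issued in parallel with the read rather than as a second sequential round-trip. The same pattern reduced to a self-contained sketch (hypothetical function names, not the real handleRequest signature):

async function searchWithCount<T>(
  read: () => Promise<T[]>,
  count: () => Promise<number>,
  countRows: boolean
): Promise<{ rows: T[]; totalRows?: number }> {
  // fire both queries at once; the count resolves to undefined when not asked for
  const [rows, totalRows] = await Promise.all([
    read(),
    countRows ? count() : Promise.resolve(undefined),
  ])
  return { rows, totalRows }
}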

View File

@@ -12,6 +12,7 @@ import {
   SortType,
   SqlClient,
   Table,
+  Datasource,
 } from "@budibase/types"
 import {
   buildInternalRelationships,
@@ -28,6 +29,7 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
 import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
+import { processRowCountResponse } from "../utils"
 
 const builder = new sql.Sql(SqlClient.SQL_LITE)
@@ -95,14 +97,29 @@ function buildTableMap(tables: Table[]) {
     // update the table name, should never query by name for SQLite
     table.originalName = table.name
     table.name = table._id!
+    // need a primary for sorting, lookups etc
+    table.primary = ["_id"]
     tableMap[table._id!] = table
   }
   return tableMap
 }
 
-async function runSqlQuery(json: QueryJson, tables: Table[]) {
+function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
+function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts: { countTotalRows: true }
+): Promise<number>
+async function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts?: { countTotalRows?: boolean }
+) {
   const alias = new AliasTables(tables.map(table => table.name))
-  return await alias.queryWithAliasing(json, async json => {
+  if (opts?.countTotalRows) {
+    json.endpoint.operation = Operation.COUNT
+  }
+  const processSQLQuery = async (_: Datasource, json: QueryJson) => {
     const query = builder._query(json, {
       disableReturning: true,
     })
@@ -124,17 +141,27 @@ async function runSqlQuery(
     const db = context.getAppDB()
     return await db.sql<Row>(sql, bindings)
-  })
+  }
+  const response = await alias.queryWithAliasing(json, processSQLQuery)
+  if (opts?.countTotalRows) {
+    return processRowCountResponse(response)
+  } else {
+    return response
+  }
 }
 
 export async function search(
   options: RowSearchParams,
   table: Table
 ): Promise<SearchResponse<Row>> {
-  const { paginate, query, ...params } = options
+  let { paginate, query, ...params } = options
+
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
+  // make sure we have the mapped/latest table
+  if (table?._id) {
+    table = allTablesMap[table?._id]
+  }
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -169,7 +196,7 @@ export async function search(
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
     request.sort = {
       [sortField.name]: {
-        direction: params.sortOrder || SortOrder.DESCENDING,
+        direction: params.sortOrder || SortOrder.ASCENDING,
        type: sortType as SortType,
       },
     }
@@ -180,7 +207,8 @@ export async function search(
   }
 
   const bookmark: number = (params.bookmark as number) || 0
-  if (paginate && params.limit) {
+  if (params.limit) {
+    paginate = true
     request.paginate = {
       limit: params.limit + 1,
       offset: bookmark * params.limit,
@@ -188,7 +216,20 @@ export async function search(
   }
 
   try {
-    const rows = await runSqlQuery(request, allTables)
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(runSqlQuery(request, allTables))
+    if (options.countRows) {
+      // get the total count of rows
+      queries.push(
+        runSqlQuery(request, allTables, {
+          countTotalRows: true,
+        })
+      )
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined
 
     // process from the format of tableId.column to expected format also
     // make sure JSON columns corrected
@@ -201,7 +242,8 @@ export async function search(
 
     // check for pagination final row
     let nextRow: Row | undefined
-    if (paginate && params.limit && processed.length > params.limit) {
+    if (paginate && params.limit && rows.length > params.limit) {
+      // remove the extra row that confirmed if there is another row to move to
       nextRow = processed.pop()
     }
@@ -217,21 +259,21 @@ export async function search(
       finalRows = finalRows.map((r: any) => pick(r, fields))
     }
 
-    // check for pagination
-    if (paginate) {
-      const response: SearchResponse<Row> = {
-        rows: finalRows,
-      }
-      if (nextRow) {
-        response.hasNextPage = true
-        response.bookmark = bookmark + 1
-      }
-      return response
-    } else {
-      return {
-        rows: finalRows,
-      }
+    const response: SearchResponse<Row> = {
+      rows: finalRows,
+    }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    // check for pagination
+    if (paginate && nextRow) {
+      response.hasNextPage = true
+      response.bookmark = bookmark + 1
+    }
+    if (paginate && !nextRow) {
+      response.hasNextPage = false
     }
+    return response
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
     if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {
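runSqlQuery's overload signatures are what let a single implementation return Row[] for a plain read and number when countTotalRows is set. The TypeScript pattern in isolation, with hypothetical names:

// overload signatures: callers get a precise return type per call shape
function run(json: object): Promise<string[]>
function run(json: object, opts: { count: true }): Promise<number>
// one implementation sits behind both signatures
async function run(json: object, opts?: { count?: boolean }) {
  return opts?.count ? 0 : []
}

// const rows = await run({})                  // typed as Promise<string[]>
// const total = await run({}, { count: true }) // typed as Promise<number>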

View File

@@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
 import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
 import datasources from "../datasources"
-import { makeExternalQuery } from "../../../integrations/base/query"
+import { BudibaseInternalDB } from "../../../db/utils"
+
+type PerformQueryFunction = (
+  datasource: Datasource,
+  json: QueryJson
+) => Promise<DatasourcePlusQueryResponse>
 
 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -65,7 +70,7 @@ export default class AliasTables {
     this.charSeq = new CharSequence()
   }
 
-  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+  isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
     const operation = json.endpoint.operation
     const fieldLength = json.resource?.fields?.length
     if (
@@ -75,6 +80,10 @@ export default class AliasTables {
     ) {
       return false
     }
+    // SQS - doesn't have a datasource
+    if (!datasource) {
+      return true
+    }
     try {
       const sqlClient = getSQLClient(datasource)
       const isWrite = WRITE_OPERATIONS.includes(operation)
@@ -167,13 +176,14 @@ export default class AliasTables {
   async queryWithAliasing(
     json: QueryJson,
-    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
+    queryFn: PerformQueryFunction
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
     const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
-    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    let aliasingEnabled: boolean, datasource: Datasource
     if (isSqs) {
-      aliasingEnabled = true
+      aliasingEnabled = this.isAliasingEnabled(json)
+      datasource = BudibaseInternalDB
     } else {
       datasource = await datasources.get(datasourceId)
       aliasingEnabled = this.isAliasingEnabled(json, datasource)
@@ -225,14 +235,7 @@ export default class AliasTables {
       json.tableAliases = invertedTableAliases
     }
 
-    let response: DatasourcePlusQueryResponse
-    if (datasource && !isSqs) {
-      response = await makeExternalQuery(datasource, json)
-    } else if (queryFn) {
-      response = await queryFn(json)
-    } else {
-      throw new Error("No supplied method to perform aliased query")
-    }
+    let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
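queryFn is no longer optional, so every caller now supplies the function that actually executes the query - as seen earlier with makeExternalQuery (external) and processSQLQuery (SQS). A sketch of the resulting contract, with generic placeholder types rather than the real ones:

type QueryFn<D, J, R> = (datasource: D, json: J) => Promise<R>

// the aliasing layer rewrites the query JSON, delegates execution to the
// injected function, then un-aliases the result
async function withAliasing<D, J, R>(
  datasource: D,
  json: J,
  queryFn: QueryFn<D, J, R>
): Promise<R> {
  // ...alias table names inside json here...
  return queryFn(datasource, json)
}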

View File

@@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
   throw new Error("Unable to determine client for SQL datasource")
 }
 
+export function processRowCountResponse(
+  response: DatasourcePlusQueryResponse
+): number {
+  if (response && response.length === 1 && "total" in response[0]) {
+    const total = response[0].total
+    return typeof total === "number" ? total : parseInt(total)
+  } else {
+    throw new Error("Unable to count rows in query - no count response")
+  }
+}
+
 export async function getDatasourceAndQuery(
   json: QueryJson
 ): Promise<DatasourcePlusQueryResponse> {
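A COUNT response is expected to be a single row with a total column - the `as total` alias set by addDistinctCount. A self-contained sketch of the parsing with a simplified response type:

type CountResponse = { total: number | string }[]

function countFromResponse(response: CountResponse): number {
  if (response && response.length === 1 && "total" in response[0]) {
    const total = response[0].total
    // some drivers return aggregate results as strings, hence the parseInt
    return typeof total === "number" ? total : parseInt(total)
  }
  throw new Error("Unable to count rows in query - no count response")
}

console.log(countFromResponse([{ total: 42 }]))   // 42
console.log(countFromResponse([{ total: "42" }])) // 42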

View File

@@ -12,6 +12,7 @@ import {
   SortOrder,
   RowSearchParams,
   EmptyFilterOption,
+  SearchResponse,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@ -262,15 +263,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
return query return query
} }
export const search = (docs: Record<string, any>[], query: RowSearchParams) => { export const search = (
docs: Record<string, any>[],
query: RowSearchParams
): SearchResponse<Record<string, any>> => {
let result = runQuery(docs, query.query) let result = runQuery(docs, query.query)
if (query.sort) { if (query.sort) {
result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING) result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
} }
let totalRows = result.length
if (query.limit) { if (query.limit) {
result = limit(result, query.limit.toString()) result = limit(result, query.limit.toString())
} }
return result const response: SearchResponse<Record<string, any>> = { rows: result }
if (query.countRows) {
response.totalRows = totalRows
}
return response
} }
/** /**
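Capturing totalRows before the limit is applied is what makes the in-memory count reflect the full match set. A usage sketch with hypothetical docs, assuming only the RowSearchParams fields used above:

const docs = [{ name: "a" }, { name: "b" }, { name: "c" }]
const result = search(docs, {
  query: {},
  limit: 2,
  countRows: true,
} as RowSearchParams)
// result.rows.length === 2
// result.totalRows === 3 - counted before the limit trimmed the rows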

View File

@@ -25,6 +25,7 @@ export interface SearchViewRowRequest
     | "bookmark"
     | "paginate"
     | "query"
+    | "countRows"
   > {}
 
 export interface SearchRowResponse {

View File

@@ -8,6 +8,7 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  COUNT = "COUNT",
   BULK_CREATE = "BULK_CREATE",
   BULK_UPSERT = "BULK_UPSERT",
   CREATE_TABLE = "CREATE_TABLE",
@@ -188,7 +189,7 @@ export interface Schema {
 }
 
 // return these when an operation occurred but we got no response
-enum DSPlusOperation {
+export enum DSPlusOperation {
   CREATE = "create",
   READ = "read",
   UPDATE = "update",
@@ -198,6 +199,7 @@ enum DSPlusOperation {
 export type DatasourcePlusQueryResponse =
   | Row[]
   | Record<DSPlusOperation, boolean>[]
+  | { total: number }[]
   | void
 
 export interface DatasourcePlus extends IntegrationBase {
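With { total: number }[] added to the union, consumers can narrow a response before treating it as a count result. A hedged sketch of such a type guard (illustrative, not part of the diff):

function isCountResponse(
  resp: DatasourcePlusQueryResponse
): resp is { total: number }[] {
  // a count comes back as exactly one row carrying a "total" column
  return Array.isArray(resp) && resp.length === 1 && "total" in resp[0]
}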

View File

@@ -17,6 +17,7 @@ export interface SearchParams {
   fields?: string[]
   indexer?: () => Promise<any>
   rows?: Row[]
+  countRows?: boolean
 }
 
 // when searching for rows we want a more extensive search type that requires certain properties

View File

@@ -10296,7 +10296,7 @@ engine.io-parser@~5.0.3:
   resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45"
   integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==
 
-engine.io@~6.4.1:
+engine.io@~6.4.2:
   version "6.4.2"
   resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
   integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
@@ -20160,17 +20160,25 @@ socket.io-parser@~4.2.1:
     "@socket.io/component-emitter" "~3.1.0"
     debug "~4.3.1"
 
-socket.io@4.6.1:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70"
-  integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==
+socket.io-parser@~4.2.4:
+  version "4.2.4"
+  resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83"
+  integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==
+  dependencies:
+    "@socket.io/component-emitter" "~3.1.0"
+    debug "~4.3.1"
+
+socket.io@4.6.2:
+  version "4.6.2"
+  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786"
+  integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==
   dependencies:
     accepts "~1.3.4"
     base64id "~2.0.0"
     debug "~4.3.2"
-    engine.io "~6.4.1"
+    engine.io "~6.4.2"
     socket.io-adapter "~2.5.2"
-    socket.io-parser "~4.2.1"
+    socket.io-parser "~4.2.4"
 
 socks-proxy-agent@^7.0.0:
   version "7.0.0"
@@ -21102,18 +21110,6 @@ tar@6.1.11:
     mkdirp "^1.0.3"
     yallist "^4.0.0"
 
-tar@6.1.15:
-  version "6.1.15"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
-  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
-  dependencies:
-    chownr "^2.0.0"
-    fs-minipass "^2.0.0"
-    minipass "^5.0.0"
-    minizlib "^2.1.1"
-    mkdirp "^1.0.3"
-    yallist "^4.0.0"
-
 tar@6.2.1, tar@^6.1.11, tar@^6.1.2:
   version "6.2.1"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"