Merge pull request #13969 from Budibase/feature/count-rows-to-return
Counting total rows in query (Datasource+)
This commit is contained in: commit 443289c66e
@@ -1,10 +1,10 @@
 import { Knex, knex } from "knex"
 import * as dbCore from "../db"
 import {
-  isIsoDateString,
-  isValidFilter,
+  getNativeSql,
+  isExternalTable,
+  isIsoDateString,
+  isValidFilter,
 } from "./utils"
 import { SqlStatements } from "./sqlStatements"
 import SqlTableQueryBuilder from "./sqlTable"
@@ -12,21 +12,21 @@ import {
   BBReferenceFieldMetadata,
   FieldSchema,
   FieldType,
+  INTERNAL_TABLE_SOURCE_ID,
   JsonFieldMetadata,
+  JsonTypes,
   Operation,
+  prefixed,
   QueryJson,
-  SqlQuery,
+  QueryOptions,
   RelationshipsJson,
   SearchFilters,
+  SortOrder,
+  SqlClient,
+  SqlQuery,
   SqlQueryBinding,
   Table,
   TableSourceType,
-  INTERNAL_TABLE_SOURCE_ID,
-  SqlClient,
-  QueryOptions,
-  JsonTypes,
-  prefixed,
-  SortOrder,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -114,7 +114,7 @@ function generateSelectStatement(
 ): (string | Knex.Raw)[] | "*" {
   const { resource, meta } = json

-  if (!resource) {
+  if (!resource || !resource.fields || resource.fields.length === 0) {
     return "*"
   }
@@ -410,13 +410,32 @@ class InternalBuilder {
     return query
   }

-  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
-    let { sort, paginate } = json
+  addDistinctCount(
+    query: Knex.QueryBuilder,
+    json: QueryJson
+  ): Knex.QueryBuilder {
+    const table = json.meta.table
+    const primary = table.primary
+    const aliases = json.tableAliases
+    const aliased =
+      table.name && aliases?.[table.name] ? aliases[table.name] : table.name
+    if (!primary) {
+      throw new Error("SQL counting requires primary key to be supplied")
+    }
+    return query.countDistinct(`${aliased}.${primary[0]} as total`)
+  }
+
+  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
+    let { sort } = json
     const table = json.meta.table
+    const primaryKey = table.primary
     const tableName = getTableName(table)
     const aliases = json.tableAliases
     const aliased =
       tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
+    if (!Array.isArray(primaryKey)) {
+      throw new Error("Sorting requires primary key to be specified for table")
+    }
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
@@ -429,10 +448,9 @@ class InternalBuilder {

         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
       }
-    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
-      // @ts-ignore
-      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
     }
+    // always add sorting by the primary key - make sure result is deterministic
+    query = query.orderBy(`${aliased}.${primaryKey[0]}`)
     return query
   }
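
A note on addDistinctCount above: it wraps the filtered pre-query and counts
distinct primary keys, so rows duplicated by relationship joins don't inflate
the total. A minimal Knex sketch of the same call outside Budibase (table and
column names are illustrative; the object-alias constructor needs an "as any"
cast, as the later read() hunk notes):

    import { knex } from "knex"

    const client = knex({ client: "pg" })
    // the filtered, un-limited pre-query
    const pre = client("persons").where("city", "Bartown")
    // alias the pre-query and count distinct primary keys as "total"
    const countQuery = client({ a: pre } as any).countDistinct("a.personid as total")
    // roughly: select count(distinct "a"."personid") as "total" from (select ...) as "a"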
@@ -522,7 +540,7 @@ class InternalBuilder {
         })
       }
     }
-    return query.limit(BASE_LIMIT)
+    return query
   }

   knexWithAlias(
@@ -533,13 +551,12 @@ class InternalBuilder {
     const tableName = endpoint.entityId
     const tableAlias = aliases?.[tableName]

-    const query = knex(
+    return knex(
       this.tableNameWithSchema(tableName, {
         alias: tableAlias,
         schema: endpoint.schema,
       })
     )
-    return query
   }

   create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
@@ -596,25 +613,23 @@ class InternalBuilder {
     return query.upsert(parsedBody)
   }

-  read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
-    let { endpoint, resource, filters, paginate, relationships, tableAliases } =
-      json
+  read(
+    knex: Knex,
+    json: QueryJson,
+    opts: {
+      limits?: { base: number; query: number }
+    } = {}
+  ): Knex.QueryBuilder {
+    let { endpoint, filters, paginate, relationships, tableAliases } = json
+    const { limits } = opts
+    const counting = endpoint.operation === Operation.COUNT

     const tableName = endpoint.entityId
-    // select all if not specified
-    if (!resource) {
-      resource = { fields: [] }
-    }
-    let selectStatement: string | (string | Knex.Raw)[] = "*"
-    // handle select
-    if (resource.fields && resource.fields.length > 0) {
-      // select the resources as the format "table.columnName" - this is what is provided
-      // by the resource builder further up
-      selectStatement = generateSelectStatement(json, knex)
-    }
-    let foundLimit = limit || BASE_LIMIT
+    // start building the query
+    let query = this.knexWithAlias(knex, endpoint, tableAliases)
     // handle pagination
     let foundOffset: number | null = null
+    let foundLimit = limits?.query || limits?.base
     if (paginate && paginate.page && paginate.limit) {
       // @ts-ignore
       const page = paginate.page <= 1 ? 0 : paginate.page - 1
@@ -627,24 +642,39 @@ class InternalBuilder {
     } else if (paginate && paginate.limit) {
       foundLimit = paginate.limit
     }
-    // start building the query
-    let query = this.knexWithAlias(knex, endpoint, tableAliases)
-    query = query.limit(foundLimit)
-    if (foundOffset) {
-      query = query.offset(foundOffset)
+    // counting should not sort, limit or offset
+    if (!counting) {
+      // add the found limit if supplied
+      if (foundLimit != null) {
+        query = query.limit(foundLimit)
+      }
+      // add overall pagination
+      if (foundOffset != null) {
+        query = query.offset(foundOffset)
+      }
+      // add sorting to pre-query
+      // no point in sorting when counting
+      query = this.addSorting(query, json)
     }
     // add filters to the query (where)
     query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })

-    // add sorting to pre-query
-    query = this.addSorting(query, json)
     const alias = tableAliases?.[tableName] || tableName
-    let preQuery = knex({
-      [alias]: query,
-    } as any).select(selectStatement) as any
+    let preQuery: Knex.QueryBuilder = knex({
+      // the typescript definition for the knex constructor doesn't support this
+      // syntax, but it is the only way to alias a pre-query result as part of
+      // a query - there is an alias dictionary type, but it assumes it can only
+      // be a table name, not a pre-query
+      [alias]: query as any,
+    })
+    // if counting, use distinct count, else select
+    preQuery = !counting
+      ? preQuery.select(generateSelectStatement(json, knex))
+      : this.addDistinctCount(preQuery, json)
     // have to add after as well (this breaks MS-SQL)
-    if (this.client !== SqlClient.MS_SQL) {
+    if (this.client !== SqlClient.MS_SQL && !counting) {
       preQuery = this.addSorting(preQuery, json)
     }
     // handle joins
@@ -655,6 +685,13 @@ class InternalBuilder {
       endpoint.schema,
       tableAliases
     )
+
+    // add a base limit over the whole query
+    // if counting we can't set this limit
+    if (limits?.base) {
+      query = query.limit(limits.base)
+    }
+
     return this.addFilters(query, filters, json.meta.table, {
       relationship: true,
       aliases: tableAliases,
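
A note on the base limit added above: "query" is the caller's page size,
applied to the inner (pre-join) query, while "base" is a hard ceiling applied
over the whole joined result; a COUNT query sets neither. A rough
Knex-flavoured sketch of the resulting shape (client and table names are
assumptions, not Budibase code):

    const limits = { query: 100, base: 5000 }
    const inner = client("tasks").limit(limits.query) // page-sized pre-query
    const outer = client({ a: inner } as any)
      .select("*")
      .limit(limits.base) // overall safety ceiling over the joined result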
@@ -699,6 +736,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     this.limit = limit
   }

+  private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) {
+    const sqlClient = this.getSqlClient()
+    if (opts?.disableBindings) {
+      return { sql: query.toString() }
+    } else {
+      let native = getNativeSql(query)
+      if (sqlClient === SqlClient.SQL_LITE) {
+        native = convertBooleans(native)
+      }
+      return native
+    }
+  }
+
   /**
    * @param json The JSON query DSL which is to be converted to SQL.
    * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@@ -722,7 +772,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       query = builder.create(client, json, opts)
       break
     case Operation.READ:
-      query = builder.read(client, json, this.limit)
+      query = builder.read(client, json, {
+        limits: {
+          query: this.limit,
+          base: BASE_LIMIT,
+        },
+      })
+      break
+    case Operation.COUNT:
+      // read without any limits to count
+      query = builder.read(client, json)
       break
     case Operation.UPDATE:
       query = builder.update(client, json, opts)
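
The COUNT case above deliberately calls read() with no limits argument - the
distinct count has to see every matching row, while READ gets both the query
limit and the BASE_LIMIT ceiling. Side by side:

    // READ: page-limited
    query = builder.read(client, json, {
      limits: { query: this.limit, base: BASE_LIMIT },
    })
    // COUNT: read without any limits to count
    query = builder.read(client, json)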
@@ -744,15 +803,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       throw `Operation type is not supported by SQL query builder`
   }

-    if (opts?.disableBindings) {
-      return { sql: query.toString() }
-    } else {
-      let native = getNativeSql(query)
-      if (sqlClient === SqlClient.SQL_LITE) {
-        native = convertBooleans(native)
-      }
-      return native
-    }
+    return this.convertToNative(query, opts)
   }

   async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
@@ -828,6 +879,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
       )
     }
+    if (operation === Operation.COUNT) {
+      return results
+    }
     if (operation !== Operation.READ) {
       return row
     }
@@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support');
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993);
+
+-- insert a lot of tasks for testing
+WITH RECURSIVE generate_series AS (
+    SELECT 1 AS n
+    UNION ALL
+    SELECT n + 1 FROM generate_series WHERE n < 6000
+),
+random_data AS (
+    SELECT
+        n,
+        (random() * 9 + 1)::int AS ExecutorID,
+        (random() * 9 + 1)::int AS QaID,
+        'assembling' AS TaskName,
+        (random() < 0.5) AS Completed
+    FROM generate_series
+)
+INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed)
+SELECT ExecutorID, QaID, TaskName, Completed
+FROM random_data;
 INSERT INTO Products (ProductName) VALUES ('Computers');
 INSERT INTO Products (ProductName) VALUES ('Laptops');
 INSERT INTO Products (ProductName) VALUES ('Chairs');
@@ -7,6 +7,7 @@ import {
   FieldType,
   FilterType,
   IncludeRelationship,
+  isManyToOne,
   OneToManyRelationshipFieldMetadata,
   Operation,
   PaginationJson,
@@ -16,29 +17,33 @@ import {
   SortJson,
   SortType,
   Table,
-  isManyToOne,
 } from "@budibase/types"
 import {
   breakExternalTableId,
   breakRowIdField,
   convertRowId,
+  generateRowIdField,
   isRowId,
   isSQL,
-  generateRowIdField,
 } from "../../../integrations/utils"
 import {
   buildExternalRelationships,
   buildSqlFieldList,
   generateIdForRow,
-  sqlOutputProcessing,
+  isKnexEmptyReadResponse,
   isManyToMany,
+  sqlOutputProcessing,
 } from "./utils"
-import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
+import {
+  getDatasourceAndQuery,
+  processRowCountResponse,
+} from "../../../sdk/app/rows/utils"
 import { processObjectSync } from "@budibase/string-templates"
 import { cloneDeep } from "lodash/fp"
 import { db as dbCore } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import env from "../../../environment"
+import { makeExternalQuery } from "../../../integrations/base/query"

 export interface ManyRelationship {
   tableId?: string
@@ -60,6 +65,13 @@ export interface RunConfig {
   includeSqlRelationships?: IncludeRelationship
 }

+export type ExternalRequestReturnType<T extends Operation> =
+  T extends Operation.READ
+    ? Row[]
+    : T extends Operation.COUNT
+      ? number
+      : { row: Row; table: Table }
+
 function buildFilters(
   id: string | undefined | string[],
   filters: SearchFilters,
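
The conditional return type added above is what lets a single request path
return number for COUNT and Row[] for READ. A self-contained sketch of the
same TypeScript pattern (Op, Res and run are illustrative names, not Budibase
code; the awaits assume an async context):

    enum Op { READ = "READ", COUNT = "COUNT", UPDATE = "UPDATE" }
    type Res<T extends Op> = T extends Op.READ
      ? Row[]
      : T extends Op.COUNT
        ? number
        : { row: Row }
    declare function run<T extends Op>(op: T): Promise<Res<T>>

    const rows = await run(Op.READ)   // inferred as Row[]
    const total = await run(Op.COUNT) // inferred as number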
@@ -223,9 +235,6 @@ function isEditableColumn(column: FieldSchema) {
   return !(isExternalAutoColumn || isFormula)
 }

-export type ExternalRequestReturnType<T extends Operation> =
-  T extends Operation.READ ? Row[] : { row: Row; table: Table }
-
 export class ExternalRequest<T extends Operation> {
   private readonly operation: T
   private readonly tableId: string
@@ -428,7 +437,9 @@ export class ExternalRequest<T extends Operation> {
       })
       // this is the response from knex if no rows found
       const rows: Row[] =
-        !Array.isArray(response) || response?.[0].read ? [] : response
+        !Array.isArray(response) || isKnexEmptyReadResponse(response)
+          ? []
+          : response
       const storeTo = isManyToMany(field)
         ? field.throughFrom || linkPrimaryKey
         : fieldName
@@ -517,7 +528,7 @@ export class ExternalRequest<T extends Operation> {
     // finally cleanup anything that needs to be removed
     for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) {
       const table: Table | undefined = this.getTable(tableId)
-      // if its not the foreign key skip it, nothing to do
+      // if it's not the foreign key skip it, nothing to do
       if (
         !table ||
         (!isMany && table.primary && table.primary.indexOf(colName) !== -1)
@@ -662,12 +673,14 @@ export class ExternalRequest<T extends Operation> {
     }

     // aliasing can be disabled fully if desired
-    let response
-    if (env.SQL_ALIASING_DISABLE) {
-      response = await getDatasourceAndQuery(json)
-    } else {
-      const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
-      response = await aliasing.queryWithAliasing(json)
+    const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
+    let response = env.SQL_ALIASING_DISABLE
+      ? await getDatasourceAndQuery(json)
+      : await aliasing.queryWithAliasing(json, makeExternalQuery)
+
+    // if it's a counting operation there will be no more processing, just return the number
+    if (this.operation === Operation.COUNT) {
+      return processRowCountResponse(response) as ExternalRequestReturnType<T>
     }

     const responseRows = Array.isArray(response) ? response : []
@@ -1,4 +1,6 @@
 import {
+  DatasourcePlusQueryResponse,
+  DSPlusOperation,
   FieldType,
   ManyToManyRelationshipFieldMetadata,
   RelationshipFieldMetadata,
@@ -192,3 +194,11 @@ export function buildSqlFieldList(
   }
   return fields
 }
+
+export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
+  return (
+    !Array.isArray(resp) ||
+    resp.length === 0 ||
+    (DSPlusOperation.READ in resp[0] && resp[0].read === true)
+  )
+}
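
isKnexEmptyReadResponse exists because knex signals an empty read with
[{ read: true }] rather than an empty array. A typical call site (the response
value here is illustrative):

    const rows: Row[] = isKnexEmptyReadResponse(response) ? [] : (response as Row[])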
@@ -14,7 +14,7 @@ import {
   processDates,
   processFormulas,
 } from "../../../../utilities/rowProcessor"
-import { updateRelationshipColumns } from "./sqlUtils"
+import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
 import {
   basicProcessing,
   generateIdForRow,
@@ -137,7 +137,7 @@ export async function sqlOutputProcessing(
   relationships: RelationshipsJson[],
   opts?: { sqs?: boolean }
 ): Promise<Row[]> {
-  if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
+  if (isKnexEmptyReadResponse(rows)) {
     return []
   }
   let finalRows: { [key: string]: Row } = {}
@@ -69,6 +69,7 @@ export async function searchView(
     limit: body.limit,
     bookmark: body.bookmark,
     paginate: body.paginate,
+    countRows: body.countRows,
   }

   const result = await sdk.rows.search(searchOptions)
@@ -86,6 +86,7 @@ router

 router.post(
   "/api/v2/views/:viewId/search",
+  internalSearchValidator(),
   authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
   rowController.views.searchView
 )
@@ -734,6 +734,7 @@ describe.each(
         name: entityId,
         schema: {},
         type: "table",
+        primary: ["id"],
         sourceId: datasource._id!,
         sourceType: TableSourceType.EXTERNAL,
       },
File diff suppressed because it is too large
@@ -7,6 +7,7 @@ import {
   INTERNAL_TABLE_SOURCE_ID,
   PermissionLevel,
   QuotaUsageType,
+  Row,
   SaveTableRequest,
   SearchFilterOperator,
   SortOrder,
@@ -17,6 +18,7 @@ import {
   UpdateViewRequest,
   ViewUIFieldMetadata,
   ViewV2,
+  SearchResponse,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@@ -25,17 +27,21 @@ import { quotas } from "@budibase/pro"
 import { db, roles } from "@budibase/backend-core"

 describe.each([
-  ["internal", undefined],
+  ["lucene", undefined],
+  ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/v2/views (%s)", (_, dsProvider) => {
+])("/v2/views (%s)", (name, dsProvider) => {
   const config = setup.getConfig()
-  const isInternal = !dsProvider
+  const isSqs = name === "sqs"
+  const isLucene = name === "lucene"
+  const isInternal = isSqs || isLucene

   let table: Table
   let datasource: Datasource
+  let envCleanup: (() => void) | undefined

   function saveTableRequest(
     ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@@ -82,6 +88,9 @@ describe.each([
   }

   beforeAll(async () => {
+    if (isSqs) {
+      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+    }
     await config.init()

     if (dsProvider) {
@@ -94,6 +103,9 @@ describe.each([

   afterAll(async () => {
     setup.afterAll()
+    if (envCleanup) {
+      envCleanup()
+    }
   })

   beforeEach(() => {
@@ -1252,12 +1264,13 @@ describe.each([
       paginate: true,
       limit: 4,
       query: {},
+      countRows: true,
     })
     expect(page1).toEqual({
       rows: expect.arrayContaining(rows.slice(0, 4)),
-      totalRows: isInternal ? 10 : undefined,
       hasNextPage: true,
       bookmark: expect.anything(),
+      totalRows: 10,
     })

     const page2 = await config.api.viewV2.search(view.id, {
@@ -1265,12 +1278,13 @@ describe.each([
       limit: 4,
       bookmark: page1.bookmark,
       query: {},
+      countRows: true,
     })
     expect(page2).toEqual({
       rows: expect.arrayContaining(rows.slice(4, 8)),
-      totalRows: isInternal ? 10 : undefined,
       hasNextPage: true,
       bookmark: expect.anything(),
+      totalRows: 10,
     })

     const page3 = await config.api.viewV2.search(view.id, {
@@ -1278,13 +1292,17 @@ describe.each([
       limit: 4,
       bookmark: page2.bookmark,
       query: {},
+      countRows: true,
     })
-    expect(page3).toEqual({
+    const expectation: SearchResponse<Row> = {
       rows: expect.arrayContaining(rows.slice(8)),
-      totalRows: isInternal ? 10 : undefined,
       hasNextPage: false,
-      bookmark: expect.anything(),
-    })
+      totalRows: 10,
+    }
+    if (isLucene) {
+      expectation.bookmark = expect.anything()
+    }
+    expect(page3).toEqual(expectation)
   })

   const sortTestOptions: [
@@ -109,6 +109,7 @@ export function internalSearchValidator() {
     sortOrder: OPTIONAL_STRING,
     sortType: OPTIONAL_STRING,
     paginate: Joi.boolean(),
+    countRows: Joi.boolean(),
     bookmark: Joi.alternatives()
       .try(OPTIONAL_STRING, OPTIONAL_NUMBER)
       .optional(),
@@ -22,6 +22,9 @@ export async function makeExternalQuery(
 ) {
     throw new Error("Entity ID and table metadata do not align")
   }
+  if (!datasource) {
+    throw new Error("No datasource provided for external query")
+  }
   datasource = await sdk.datasources.enrich(datasource)
   const Integration = await getIntegration(datasource.source)
   // query is the opinionated function
@@ -142,7 +142,7 @@ describe("SQL query builder", () => {
     const query = sql._query(generateRelationshipJson({ schema: "production" }))
     expect(query).toEqual({
       bindings: [500, 5000],
-      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
+      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
     })
   })
@@ -150,7 +150,7 @@ describe("SQL query builder", () => {
     const query = sql._query(generateRelationshipJson())
     expect(query).toEqual({
       bindings: [500, 5000],
-      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
+      sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
     })
   })
@@ -160,7 +160,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [500, 5000],
-      sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`,
+      sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`,
     })
   })
@@ -175,8 +175,8 @@ describe("SQL query builder", () => {
       })
     )
     expect(query).toEqual({
-      bindings: ["john%", limit],
-      sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`,
+      bindings: ["john%", limit, 5000],
+      sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
     })

     query = new Sql(SqlClient.ORACLE, limit)._query(
@@ -190,8 +190,8 @@ describe("SQL query builder", () => {
       })
     )
     expect(query).toEqual({
-      bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit],
-      sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`,
+      bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
+      sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
    })

     query = new Sql(SqlClient.ORACLE, limit)._query(
@@ -204,8 +204,8 @@ describe("SQL query builder", () => {
       })
     )
     expect(query).toEqual({
-      bindings: [`%jo%`, limit],
-      sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`,
+      bindings: [`%jo%`, limit, 5000],
+      sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
     })
   })
 })
@@ -57,15 +57,14 @@ describe("Captures of real examples", () => {
     let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
     expect(query).toEqual({
       bindings: [relationshipLimit, limit],
-      sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
+      sql: expect.stringContaining(
+        multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
         "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
         "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
         "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
         "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
-        "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a"
-        left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
-        order by "a"."firstname" asc nulls first limit $2`),
+        "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`)
+      ),
     })
   })
@@ -74,13 +73,10 @@ describe("Captures of real examples", () => {
     let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
     expect(query).toEqual({
       bindings: [relationshipLimit, "assembling", limit],
-      sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
-        "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
-        "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
-        left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
-        left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE)
-        order by "a"."productname" asc nulls first limit $3`),
+      sql: expect.stringContaining(
+        multiline(`where COALESCE("b"."taskname" = $2, FALSE)
+        order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`)
+      ),
     })
   })
@@ -89,13 +85,10 @@ describe("Captures of real examples", () => {
     let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
     expect(query).toEqual({
       bindings: [relationshipLimit, limit],
-      sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
-        "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
-        "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
-        from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
-        left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
-        left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
-        order by "a"."productname" asc nulls first limit $2`),
+      sql: expect.stringContaining(
+        multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
+        left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `)
+      ),
     })
   })
@@ -106,11 +99,11 @@ describe("Captures of real examples", () => {
     expect(query).toEqual({
       bindings: [...filters, limit, limit],
       sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname",
-        "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
-        "b"."productname" as "b.productname", "b"."productid" as "b.productid"
-        from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a"
-        left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
-        left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`),
+        "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
+        "b"."productname" as "b.productname", "b"."productid" as "b.productid"
+        from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a"
+        left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
+        left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`),
     })
   })
@@ -132,19 +125,11 @@ describe("Captures of real examples", () => {
         equalValue,
         limit,
       ],
-      sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid",
-        "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname",
-        "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
-        "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
-        "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
-        "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
-        "c"."city" as "c.city", "c"."lastname" as "c.lastname"
-        from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE)
-        order by "a"."taskname" asc nulls first limit $2) as "a"
-        left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
-        left join "products" as "b" on "b"."productid" = "d"."productid"
-        left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
-        where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`),
+      sql: expect.stringContaining(
+        multiline(
+          `where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)`
+        )
+      ),
     })
   })
 })
@@ -200,8 +185,9 @@ describe("Captures of real examples", () => {
       returningQuery = input
     }, queryJson)
     expect(returningQuery).toEqual({
-      sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]",
-      bindings: [1, "Test", 22],
+      sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2
+        THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`),
+      bindings: [5000, 1, "Test", 22],
     })
   })
 })
@@ -1,14 +1,14 @@
 import {
-  SortJson,
+  IncludeRelationship,
   Operation,
   PaginationJson,
-  IncludeRelationship,
   Row,
-  SearchFilters,
   RowSearchParams,
+  SearchFilters,
   SearchResponse,
-  Table,
+  SortJson,
   SortOrder,
+  Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -18,7 +18,7 @@ import {
 } from "../../../../integrations/utils"
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
-import { HTTPError, db } from "@budibase/backend-core"
+import { db, HTTPError } from "@budibase/backend-core"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"
@@ -28,20 +28,26 @@ export async function search(
   table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
-  const { paginate, query, ...params } = options
+  const { countRows, paginate, query, ...params } = options
+  const { limit } = params
   let bookmark =
     (params.bookmark && parseInt(params.bookmark as string)) || undefined
   if (paginate && !bookmark) {
-    bookmark = 1
+    bookmark = 0
   }
-  let paginateObj = {}
+  let paginateObj: PaginationJson | undefined

-  if (paginate) {
+  if (paginate && !limit) {
+    throw new Error("Cannot paginate query without a limit")
+  }
+
+  if (paginate && limit) {
     paginateObj = {
       // add one so we can track if there is another page
-      limit: limit,
-      page: bookmark,
+      limit: limit + 1,
     }
+    if (bookmark) {
+      paginateObj.offset = limit * bookmark
+    }
   } else if (params && limit) {
     paginateObj = {
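
The limit + 1 above is the usual "peek" pagination trick: fetch one row beyond
the page size, and the presence of that extra row proves a next page exists
without issuing a second query. Sketch with assumed values (fetchPage is
hypothetical):

    const limit = 4
    const bookmark = 0
    const page = await fetchPage({ limit: limit + 1, offset: bookmark * limit })
    const hasNextPage = page.length > limit
    if (hasNextPage) {
      page.pop() // drop the peeked row before returning
    }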
@@ -69,24 +75,27 @@ export async function search(
   }

   try {
-    let rows = await handleRequest(Operation.READ, tableId, {
+    const parameters = {
       filters: query,
       sort,
       paginate: paginateObj as PaginationJson,
       includeSqlRelationships: IncludeRelationship.INCLUDE,
-    })
+    }
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(handleRequest(Operation.READ, tableId, parameters))
+    if (countRows) {
+      queries.push(handleRequest(Operation.COUNT, tableId, parameters))
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined

     let hasNextPage = false
-    if (paginate && rows.length === limit) {
-      const nextRows = await handleRequest(Operation.READ, tableId, {
-        filters: query,
-        sort,
-        paginate: {
-          limit: 1,
-          page: bookmark! * limit + 1,
-        },
-        includeSqlRelationships: IncludeRelationship.INCLUDE,
-      })
-      hasNextPage = nextRows.length > 0
+    // remove the extra row if it's there
+    if (paginate && limit && rows.length > limit) {
+      rows.pop()
+      hasNextPage = true
     }

     if (options.fields) {
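
Because the page read and the count are independent queries, they are pushed
into an array and awaited together, so enabling countRows adds no extra
round-trip latency. The same shape written with a destructured Promise.all
(a sketch, not the exact code above):

    const [rows, totalRows] = await Promise.all([
      handleRequest(Operation.READ, tableId, parameters) as Promise<Row[]>,
      countRows
        ? (handleRequest(Operation.COUNT, tableId, parameters) as Promise<number>)
        : Promise.resolve(undefined),
    ])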
@@ -100,7 +109,17 @@ export async function search(
     })

     // need wrapper object for bookmarks etc when paginating
-    return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }
+    const response: SearchResponse<Row> = { rows, hasNextPage }
+    if (hasNextPage && bookmark != null) {
+      response.bookmark = bookmark + 1
+    }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    if (paginate && !hasNextPage) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     if (err.message && err.message.includes("does not exist")) {
       throw new Error(
@@ -12,6 +12,7 @@ import {
   SortType,
   SqlClient,
   Table,
+  Datasource,
 } from "@budibase/types"
 import {
   buildInternalRelationships,
@@ -28,6 +29,7 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
 import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
+import { processRowCountResponse } from "../utils"

 const builder = new sql.Sql(SqlClient.SQL_LITE)
@@ -95,14 +97,29 @@ function buildTableMap(tables: Table[]) {
     // update the table name, should never query by name for SQLite
     table.originalName = table.name
     table.name = table._id!
+    // need a primary for sorting, lookups etc
+    table.primary = ["_id"]
     tableMap[table._id!] = table
   }
   return tableMap
 }

-async function runSqlQuery(json: QueryJson, tables: Table[]) {
+function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
+function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts: { countTotalRows: true }
+): Promise<number>
+async function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts?: { countTotalRows?: boolean }
+) {
   const alias = new AliasTables(tables.map(table => table.name))
-  return await alias.queryWithAliasing(json, async json => {
+  if (opts?.countTotalRows) {
+    json.endpoint.operation = Operation.COUNT
+  }
+  const processSQLQuery = async (_: Datasource, json: QueryJson) => {
     const query = builder._query(json, {
       disableReturning: true,
     })
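
The overload signatures above give runSqlQuery a precise return type at each
call site instead of a Row[] | number union:

    const rows = await runSqlQuery(json, tables)                             // Promise<Row[]>
    const total = await runSqlQuery(json, tables, { countTotalRows: true })  // Promise<number>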
@@ -124,17 +141,27 @@ async function runSqlQuery(

     const db = context.getAppDB()
     return await db.sql<Row>(sql, bindings)
-  })
+  }
+  const response = await alias.queryWithAliasing(json, processSQLQuery)
+  if (opts?.countTotalRows) {
+    return processRowCountResponse(response)
+  } else {
+    return response
+  }
 }

 export async function search(
   options: RowSearchParams,
   table: Table
 ): Promise<SearchResponse<Row>> {
-  const { paginate, query, ...params } = options
+  let { paginate, query, ...params } = options

   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
   // make sure we have the mapped/latest table
   if (table?._id) {
     table = allTablesMap[table?._id]
   }
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -169,7 +196,7 @@ export async function search(
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
     request.sort = {
       [sortField.name]: {
-        direction: params.sortOrder || SortOrder.DESCENDING,
+        direction: params.sortOrder || SortOrder.ASCENDING,
         type: sortType as SortType,
       },
     }
@@ -180,7 +207,8 @@ export async function search(
   }

   const bookmark: number = (params.bookmark as number) || 0
-  if (paginate && params.limit) {
+  if (params.limit) {
+    paginate = true
     request.paginate = {
       limit: params.limit + 1,
       offset: bookmark * params.limit,
@@ -188,7 +216,20 @@ export async function search(
   }

   try {
-    const rows = await runSqlQuery(request, allTables)
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(runSqlQuery(request, allTables))
+    if (options.countRows) {
+      // get the total count of rows
+      queries.push(
+        runSqlQuery(request, allTables, {
+          countTotalRows: true,
+        })
+      )
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined

     // process from the format of tableId.column to expected format also
     // make sure JSON columns corrected
@@ -201,7 +242,8 @@ export async function search(

     // check for pagination final row
     let nextRow: Row | undefined
-    if (paginate && params.limit && processed.length > params.limit) {
+    if (paginate && params.limit && rows.length > params.limit) {
       // remove the extra row that confirmed if there is another row to move to
       nextRow = processed.pop()
     }
@@ -217,21 +259,21 @@ export async function search(
       finalRows = finalRows.map((r: any) => pick(r, fields))
     }

-    // check for pagination
-    if (paginate) {
-      const response: SearchResponse<Row> = {
-        rows: finalRows,
-      }
-      if (nextRow) {
-        response.hasNextPage = true
-        response.bookmark = bookmark + 1
-      }
-      return response
-    } else {
-      return {
-        rows: finalRows,
-      }
-    }
+    const response: SearchResponse<Row> = {
+      rows: finalRows,
+    }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    // check for pagination
+    if (paginate && nextRow) {
+      response.hasNextPage = true
+      response.bookmark = bookmark + 1
+    }
+    if (paginate && !nextRow) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
     if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {
@@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
 import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
 import datasources from "../datasources"
-import { makeExternalQuery } from "../../../integrations/base/query"
+import { BudibaseInternalDB } from "../../../db/utils"
+
+type PerformQueryFunction = (
+  datasource: Datasource,
+  json: QueryJson
+) => Promise<DatasourcePlusQueryResponse>

 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -65,7 +70,7 @@ export default class AliasTables {
     this.charSeq = new CharSequence()
   }

-  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+  isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
     const operation = json.endpoint.operation
     const fieldLength = json.resource?.fields?.length
     if (
@@ -75,6 +80,10 @@ export default class AliasTables {
     ) {
       return false
     }
+    // SQS - doesn't have a datasource
+    if (!datasource) {
+      return true
+    }
     try {
       const sqlClient = getSQLClient(datasource)
       const isWrite = WRITE_OPERATIONS.includes(operation)
@@ -167,13 +176,14 @@ export default class AliasTables {

   async queryWithAliasing(
     json: QueryJson,
-    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
+    queryFn: PerformQueryFunction
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
     const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
-    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    let aliasingEnabled: boolean, datasource: Datasource
     if (isSqs) {
-      aliasingEnabled = true
+      aliasingEnabled = this.isAliasingEnabled(json)
+      datasource = BudibaseInternalDB
     } else {
       datasource = await datasources.get(datasourceId)
       aliasingEnabled = this.isAliasingEnabled(json, datasource)
@@ -225,14 +235,7 @@ export default class AliasTables {
       json.tableAliases = invertedTableAliases
     }

-    let response: DatasourcePlusQueryResponse
-    if (datasource && !isSqs) {
-      response = await makeExternalQuery(datasource, json)
-    } else if (queryFn) {
-      response = await queryFn(json)
-    } else {
-      throw new Error("No supplied method to perform aliased query")
-    }
+    let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
@@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
   throw new Error("Unable to determine client for SQL datasource")
 }

+export function processRowCountResponse(
+  response: DatasourcePlusQueryResponse
+): number {
+  if (response && response.length === 1 && "total" in response[0]) {
+    const total = response[0].total
+    return typeof total === "number" ? total : parseInt(total)
+  } else {
+    throw new Error("Unable to count rows in query - no count response")
+  }
+}
+
 export async function getDatasourceAndQuery(
   json: QueryJson
 ): Promise<DatasourcePlusQueryResponse> {
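
Count responses come back as a single-element array of { total }, and some
drivers (e.g. Postgres bigint counts) return the value as a string - hence the
parseInt. Expected behaviour of the helper above:

    processRowCountResponse([{ total: 42 }])   // 42
    processRowCountResponse([{ total: "42" }]) // 42
    processRowCountResponse([])                // throws "Unable to count rows in query..."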
@@ -12,6 +12,7 @@ import {
   SortOrder,
   RowSearchParams,
   EmptyFilterOption,
+  SearchResponse,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@@ -262,15 +263,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

-export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
+export const search = (
+  docs: Record<string, any>[],
+  query: RowSearchParams
+): SearchResponse<Record<string, any>> => {
   let result = runQuery(docs, query.query)
   if (query.sort) {
     result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
   }
+  let totalRows = result.length
   if (query.limit) {
     result = limit(result, query.limit.toString())
   }
-  return result
+  const response: SearchResponse<Record<string, any>> = { rows: result }
+  if (query.countRows) {
+    response.totalRows = totalRows
+  }
+  return response
 }

 /**
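
The in-memory search now mirrors the SQL path: totalRows is measured after
filtering and sorting but before the limit is applied. A quick sketch of the
resulting behaviour (docs and options are illustrative):

    const docs = [{ n: 1 }, { n: 2 }, { n: 3 }]
    const res = search(docs, { query: {}, limit: 2, countRows: true } as RowSearchParams)
    // res.rows.length === 2 and res.totalRows === 3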
@@ -25,6 +25,7 @@ export interface SearchViewRowRequest
     | "bookmark"
     | "paginate"
     | "query"
+    | "countRows"
   > {}

 export interface SearchRowResponse {
@@ -8,6 +8,7 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  COUNT = "COUNT",
   BULK_CREATE = "BULK_CREATE",
   BULK_UPSERT = "BULK_UPSERT",
   CREATE_TABLE = "CREATE_TABLE",
@@ -188,7 +189,7 @@ export interface Schema {
 }

 // return these when an operation occurred but we got no response
-enum DSPlusOperation {
+export enum DSPlusOperation {
   CREATE = "create",
   READ = "read",
   UPDATE = "update",
@@ -198,6 +199,7 @@ enum DSPlusOperation {
 export type DatasourcePlusQueryResponse =
   | Row[]
   | Record<DSPlusOperation, boolean>[]
+  | { total: number }[]
   | void

 export interface DatasourcePlus extends IntegrationBase {
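
With { total: number }[] added to the union above, consumers can discriminate
a response purely by its shape. A sketch of that narrowing (classify is an
illustrative helper, not part of the codebase):

    function classify(resp: DatasourcePlusQueryResponse) {
      if (!Array.isArray(resp) || resp.length === 0) return "empty"
      if ("total" in resp[0]) return "count"      // counting operation
      if ("read" in resp[0]) return "empty read"  // knex empty-read marker
      return "rows"
    }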
@@ -17,6 +17,7 @@ export interface SearchParams {
   fields?: string[]
   indexer?: () => Promise<any>
   rows?: Row[]
+  countRows?: boolean
 }

 // when searching for rows we want a more extensive search type that requires certain properties
yarn.lock (34 changed lines)
@@ -10296,7 +10296,7 @@ engine.io-parser@~5.0.3:
   resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45"
   integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==

-engine.io@~6.4.1:
+engine.io@~6.4.2:
   version "6.4.2"
   resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
   integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
@@ -20160,17 +20160,25 @@ socket.io-parser@~4.2.1:
     "@socket.io/component-emitter" "~3.1.0"
     debug "~4.3.1"

-socket.io@4.6.1:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70"
-  integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==
+socket.io-parser@~4.2.4:
+  version "4.2.4"
+  resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83"
+  integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==
+  dependencies:
+    "@socket.io/component-emitter" "~3.1.0"
+    debug "~4.3.1"
+
+socket.io@4.6.2:
+  version "4.6.2"
+  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786"
+  integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==
   dependencies:
     accepts "~1.3.4"
     base64id "~2.0.0"
     debug "~4.3.2"
-    engine.io "~6.4.1"
+    engine.io "~6.4.2"
     socket.io-adapter "~2.5.2"
-    socket.io-parser "~4.2.1"
+    socket.io-parser "~4.2.4"

 socks-proxy-agent@^7.0.0:
   version "7.0.0"
@@ -21102,18 +21110,6 @@ tar@6.1.11:
     mkdirp "^1.0.3"
     yallist "^4.0.0"

-tar@6.1.15:
-  version "6.1.15"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
-  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
-  dependencies:
-    chownr "^2.0.0"
-    fs-minipass "^2.0.0"
-    minipass "^5.0.0"
-    minizlib "^2.1.1"
-    mkdirp "^1.0.3"
-    yallist "^4.0.0"
-
 tar@6.2.1, tar@^6.1.11, tar@^6.1.2:
   version "6.2.1"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"