budibase/packages/backend-core/src/sql/sql.ts


import { Knex, knex } from "knex"
import * as dbCore from "../db"
import {
getNativeSql,
isExternalTable,
isValidISODateString,
isValidFilter,
sqlLog,
isInvalidISODateString,
} from "./utils"
import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable"
import {
AnySearchFilter,
BBReferenceFieldMetadata,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalSearchFilterOperator,
JsonFieldMetadata,
JsonTypes,
Operation,
prefixed,
QueryJson,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortOrder,
SqlClient,
SqlQuery,
SqlQueryBinding,
Table,
TableSourceType,
} from "@budibase/types"
import environment from "../environment"
import { helpers } from "@budibase/shared-core"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS)
: null
const BASE_LIMIT = envLimit || 5000
// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
function quote(client: SqlClient, str: string) {
switch (client) {
case SqlClient.SQL_LITE:
case SqlClient.ORACLE:
case SqlClient.POSTGRES:
return `"${str}"`
case SqlClient.MS_SQL:
return `[${str}]`
default:
return `\`${str}\``
}
}
// Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c]
// for SQL Server and `a`.`b`.`c` for MySQL.
function quotedIdentifier(client: SqlClient, key: string): string {
return key
.split(".")
.map(part => quote(client, part))
.join(".")
}
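// Normalises a single binding value: arrays are stringified to JSON, null/undefined
// collapse to null, invalid ISO date strings become null, valid ISO date strings are
// converted to Date objects, and everything else is passed through untouched.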
function parse(input: any) {
if (Array.isArray(input)) {
return JSON.stringify(input)
}
if (input == undefined) {
return null
}
if (typeof input !== "string") {
return input
}
if (isInvalidISODateString(input)) {
return null
}
if (isValidISODateString(input)) {
return new Date(input.trim())
}
return input
}
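// Runs parse() over every value in a row body so the bindings are in a form the driver accepts.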
function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
body[key] = parse(value)
}
return body
}
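// Recursively applies parse() to every value in a SearchFilters structure.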
function parseFilters(filters: SearchFilters | undefined): SearchFilters {
if (!filters) {
return {}
}
for (let [key, value] of Object.entries(filters)) {
let parsed
if (typeof value === "object") {
parsed = parseFilters(value)
} else {
parsed = parse(value)
}
// @ts-ignore
filters[key] = parsed
}
return filters
}
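// Builds the select column list for a query, returning "*" when no specific fields are
// requested. Applies client-specific handling: Postgres money columns are cast to numeric
// and MS SQL time-only datetime columns are converted to HH:mm strings.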
function generateSelectStatement(
json: QueryJson,
knex: Knex
): (string | Knex.Raw)[] | "*" {
const { resource, meta } = json
const client = knex.client.config.client as SqlClient
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const schema = meta.table.schema
return resource.fields.map(field => {
const [table, column, ..._rest] = field.split(/\./g)
if (
client === SqlClient.POSTGRES &&
schema[column].externalType?.includes("money")
) {
return knex.raw(`"${table}"."${column}"::money::numeric as "${field}"`)
}
if (
client === SqlClient.MS_SQL &&
schema[column]?.type === FieldType.DATETIME &&
schema[column].timeOnly
) {
// Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format
return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
}
return `${field} as ${field}`
// return knex.raw(
// `${quote(client, table)}.${quote(client, column)} as ${quote(
// client,
// field
// )}`
// )
})
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
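// Rewrites boolean bindings to 1/0, used for SQLite which has no native boolean type.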
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
if (Array.isArray(query)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
} else {
if (query.bindings) {
query.bindings = query.bindings.map(binding => {
if (typeof binding === "boolean") {
return binding ? 1 : 0
}
return binding
})
}
}
return query
}
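// Translates the Budibase QueryJson DSL into knex query builders for a specific SQL client.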
class InternalBuilder {
private readonly client: SqlClient
constructor(client: SqlClient) {
this.client = client
}
// right now we only do filters on the specific table being queried
addFilters(
query: Knex.QueryBuilder,
filters: SearchFilters | undefined,
table: Table,
opts: {
aliases?: Record<string, string>
relationship?: boolean
columnPrefix?: string
}
): Knex.QueryBuilder {
if (!filters) {
return query
}
filters = parseFilters(filters)
// if allOr is specified in the filters, every condition is joined with OR
const allOr = filters.allOr
const sqlStatements = new SqlStatements(this.client, table, {
allOr,
columnPrefix: opts.columnPrefix,
})
const tableName =
this.client === SqlClient.SQL_LITE ? table._id! : table.name
function getTableAlias(name: string) {
const alias = opts.aliases?.[name]
return alias || name
}
function iterate(
structure: AnySearchFilter,
fn: (key: string, value: any) => void,
complexKeyFn?: (key: string[], value: any) => void
) {
for (const key in structure) {
const value = structure[key]
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
let castedTypeValue
if (
key === InternalSearchFilterOperator.COMPLEX_ID_OPERATOR &&
(castedTypeValue = structure[key]) &&
complexKeyFn
) {
const alias = getTableAlias(tableName)
complexKeyFn(
castedTypeValue.id.map((x: string) =>
alias ? `${alias}.${x}` : x
),
castedTypeValue.values
)
} else if (!opts.relationship && !isRelationshipField) {
const alias = getTableAlias(tableName)
fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
} else if (opts.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".")
const alias = getTableAlias(filterTableName)
fn(alias ? `${alias}.${property}` : property, value)
}
}
}
const like = (key: string, value: any) => {
const fuzzyOr = filters?.fuzzyOr
const fnc = fuzzyOr || allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](
`LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`,
[`%${value.toLowerCase()}%`]
)
}
}
const contains = (mode: AnySearchFilter, any: boolean = false) => {
const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
const not = mode === filters?.notContains ? "NOT " : ""
function stringifyArray(value: Array<any>, quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `${quoteStyle}${value[i]}${quoteStyle}`
}
}
return `[${value.join(",")}]`
}
if (this.client === SqlClient.POSTGRES) {
iterate(mode, (key, value) => {
const wrap = any ? "" : "'"
const op = any ? "\\?| array" : "@>"
const fieldNames = key.split(/\./g)
const table = fieldNames[0]
const col = fieldNames[1]
query = query[rawFnc](
`${not}COALESCE("${table}"."${col}"::jsonb ${op} ${wrap}${stringifyArray(
value,
any ? "'" : '"'
)}${wrap}, FALSE)`
)
})
} else if (this.client === SqlClient.MY_SQL) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, (key, value) => {
query = query[rawFnc](
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
value
)}'), FALSE)`
)
})
} else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, (key, value) => {
let statement = ""
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `%"${value[i].toLowerCase()}"%`
} else {
value[i] = `%${value[i]}%`
}
statement +=
(statement ? andOr : "") +
`COALESCE(LOWER(${quotedIdentifier(
this.client,
key
)}), '') LIKE ?`
}
if (statement === "") {
return
}
// @ts-ignore
query = query[rawFnc](`${not}(${statement})`, value)
})
}
}
if (filters.oneOf) {
const fnc = allOr ? "orWhereIn" : "whereIn"
iterate(
filters.oneOf,
(key: string, array) => {
query = query[fnc](key, Array.isArray(array) ? array : [array])
},
(key: string[], array) => {
query = query[fnc](key, Array.isArray(array) ? array : [array])
}
)
}
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](
`LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`,
[`${value.toLowerCase()}%`]
)
}
})
}
if (filters.fuzzy) {
iterate(filters.fuzzy, like)
}
if (filters.range) {
iterate(filters.range, (key, value) => {
const isEmptyObject = (val: any) => {
return (
val &&
Object.keys(val).length === 0 &&
Object.getPrototypeOf(val) === Object.prototype
)
}
if (isEmptyObject(value.low)) {
value.low = ""
}
if (isEmptyObject(value.high)) {
value.high = ""
}
const lowValid = isValidFilter(value.low),
highValid = isValidFilter(value.high)
if (lowValid && highValid) {
query = sqlStatements.between(query, key, value.low, value.high)
} else if (lowValid) {
query = sqlStatements.lte(query, key, value.low)
} else if (highValid) {
query = sqlStatements.gte(query, key, value.high)
}
})
}
if (filters.equal) {
iterate(filters.equal, (key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) {
query = query[fnc](
`CASE WHEN ${quotedIdentifier(
this.client,
key
)} = ? THEN 1 ELSE 0 END = 1`,
[value]
)
} else {
query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} = ?, FALSE)`,
[value]
)
}
})
}
if (filters.notEqual) {
iterate(filters.notEqual, (key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) {
query = query[fnc](
`CASE WHEN ${quotedIdentifier(
this.client,
key
)} = ? THEN 1 ELSE 0 END = 0`,
[value]
)
} else {
query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} != ?, TRUE)`,
[value]
)
}
})
}
if (filters.empty) {
iterate(filters.empty, key => {
const fnc = allOr ? "orWhereNull" : "whereNull"
query = query[fnc](key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, key => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
query = query[fnc](key)
})
}
if (filters.contains) {
contains(filters.contains)
}
if (filters.notContains) {
contains(filters.notContains)
}
if (filters.containsAny) {
contains(filters.containsAny, true)
}
const tableRef = opts?.aliases?.[table._id!] || table._id
// when searching internal tables, make sure we are only looking for rows
// of the requested document type
if (filters.documentType && !isExternalTable(table) && tableRef) {
// has to be its own option, must always be ANDed onto the search
query.andWhereLike(
`${tableRef}._id`,
`${prefixed(filters.documentType)}%`
)
}
return query
}
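// Adds a COUNT(DISTINCT <primary key>) aggregation aliased as "total"; requires the
// table to define a primary key.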
addDistinctCount(
query: Knex.QueryBuilder,
json: QueryJson
): Knex.QueryBuilder {
const table = json.meta.table
const primary = table.primary
const aliases = json.tableAliases
const aliased =
table.name && aliases?.[table.name] ? aliases[table.name] : table.name
if (!primary) {
throw new Error("SQL counting requires primary key to be supplied")
}
return query.countDistinct(`${aliased}.${primary[0]} as total`)
}
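// Applies the requested sort order and, if the primary key is not already part of
// the sort, orders by it as well so paginated results are deterministic.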
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort } = json
const table = json.meta.table
const primaryKey = table.primary
const tableName = getTableName(table)
const aliases = json.tableAliases
const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table")
}
if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) {
const direction =
value.direction === SortOrder.ASCENDING ? "asc" : "desc"
let nulls
if (this.client === SqlClient.POSTGRES) {
// All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues
nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
}
query = query.orderBy(`${aliased}.${key}`, direction, nulls)
}
}
// add sorting by the primary key if the result isn't already sorted by it,
// to make sure result is deterministic
if (!sort || sort[primaryKey[0]] === undefined) {
query = query.orderBy(`${aliased}.${primaryKey[0]}`)
}
return query
}
tableNameWithSchema(
tableName: string,
opts?: { alias?: string; schema?: string }
) {
let withSchema = opts?.schema ? `${opts.schema}.${tableName}` : tableName
if (opts?.alias) {
withSchema += ` as ${opts.alias}`
}
return withSchema
}
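// Joins related tables onto the query: relationships are grouped by their target/through
// table and emitted as LEFT JOINs - one join for direct relationships, two joins (through
// table plus target table) for many-to-many relationships.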
addRelationships(
query: Knex.QueryBuilder,
fromTable: string,
relationships: RelationshipsJson[] | undefined,
schema: string | undefined,
aliases?: Record<string, string>
): Knex.QueryBuilder {
if (!relationships) {
return query
}
const tableSets: Record<string, [RelationshipsJson]> = {}
// aggregate into table sets (relationships that share the same "to" table)
for (let relationship of relationships) {
const keyObj: { toTable: string; throughTable: string | undefined } = {
toTable: relationship.tableName,
throughTable: undefined,
}
if (relationship.through) {
keyObj.throughTable = relationship.through
}
const key = JSON.stringify(keyObj)
if (tableSets[key]) {
tableSets[key].push(relationship)
} else {
tableSets[key] = [relationship]
}
}
for (let [key, relationships] of Object.entries(tableSets)) {
const { toTable, throughTable } = JSON.parse(key)
const toAlias = aliases?.[toTable] || toTable,
throughAlias = aliases?.[throughTable] || throughTable,
fromAlias = aliases?.[fromTable] || fromTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema,
})
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema,
})
if (!throughTable) {
// @ts-ignore
query = query.leftJoin(toTableWithSchema, function () {
for (let relationship of relationships) {
const from = relationship.from,
to = relationship.to
// @ts-ignore
this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`)
}
})
} else {
query = query
// @ts-ignore
.leftJoin(throughTableWithSchema, function () {
for (let relationship of relationships) {
const fromPrimary = relationship.fromPrimary
const from = relationship.from
// @ts-ignore
this.orOn(
`${fromAlias}.${fromPrimary}`,
"=",
`${throughAlias}.${from}`
)
}
})
.leftJoin(toTableWithSchema, function () {
for (let relationship of relationships) {
const toPrimary = relationship.toPrimary
const to = relationship.to
// @ts-ignore
this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`)
}
})
}
}
return query
}
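// Creates the base knex builder for the endpoint's table, applying any table alias and schema.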
knexWithAlias(
knex: Knex,
endpoint: QueryJson["endpoint"],
aliases?: QueryJson["tableAliases"]
): Knex.QueryBuilder {
const tableName = endpoint.entityId
const tableAlias = aliases?.[tableName]
return knex(
this.tableNameWithSchema(tableName, {
alias: tableAlias,
schema: endpoint.schema,
})
)
}
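// Builds a single-row INSERT, stripping null values from the body and adding
// RETURNING * unless the client cannot support it.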
create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body } = json
let query = this.knexWithAlias(knex, endpoint)
const parsedBody = parseBody(body)
// make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) {
if (value == null) {
delete parsedBody[key]
}
}
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(parsedBody)
} else {
return query.insert(parsedBody).returning("*")
}
}
bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder {
const { endpoint, body } = json
let query = this.knexWithAlias(knex, endpoint)
if (!Array.isArray(body)) {
return query
}
const parsedBody = body.map(row => parseBody(row))
return query.insert(parsedBody)
}
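// Builds a bulk upsert: INSERT ... ON CONFLICT (primary key) ... MERGE for Postgres,
// SQLite and MySQL; MS SQL falls back to a plain insert because knex has no
// onConflict support for it yet.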
bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder {
const { endpoint, body } = json
let query = this.knexWithAlias(knex, endpoint)
if (!Array.isArray(body)) {
return query
}
const parsedBody = body.map(row => parseBody(row))
if (
this.client === SqlClient.POSTGRES ||
this.client === SqlClient.SQL_LITE ||
this.client === SqlClient.MY_SQL
) {
const primary = json.meta.table.primary
if (!primary) {
throw new Error("Primary key is required for upsert")
}
const ret = query.insert(parsedBody).onConflict(primary).merge()
return ret
} else if (this.client === SqlClient.MS_SQL) {
// No upsert or onConflict support in MSSQL yet, see:
// https://github.com/knex/knex/pull/6050
return query.insert(parsedBody)
}
return query.upsert(parsedBody)
}
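// Builds a SELECT (or COUNT) query. Pagination, sorting and table-level filters are
// applied to an inner query, which is then aliased, joined against any relationships
// and filtered again for relationship fields, with an overall base limit on the result.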
read(
knex: Knex,
json: QueryJson,
opts: {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships, tableAliases } = json
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
const tableName = endpoint.entityId
// start building the query
let query = this.knexWithAlias(knex, endpoint, tableAliases)
// handle pagination
let foundOffset: number | null = null
let foundLimit = limits?.query || limits?.base
if (paginate && paginate.page && paginate.limit) {
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
foundOffset = offset
} else if (paginate && paginate.offset && paginate.limit) {
foundLimit = paginate.limit
foundOffset = paginate.offset
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
}
// counting should not sort, limit or offset
if (!counting) {
// add the found limit if supplied
if (foundLimit != null) {
query = query.limit(foundLimit)
}
// add overall pagination
if (foundOffset != null) {
query = query.offset(foundOffset)
}
// add sorting to pre-query
// no point in sorting when counting
query = this.addSorting(query, json)
}
// add filters to the query (where)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
const alias = tableAliases?.[tableName] || tableName
let preQuery: Knex.QueryBuilder = knex({
// the typescript definition for the knex constructor doesn't support this
// syntax, but it is the only way to alias a pre-query result as part of
// a query - there is an alias dictionary type, but it assumes it can only
// be a table name, not a pre-query
[alias]: query as any,
})
// if counting, use distinct count, else select
preQuery = !counting
? preQuery.select(generateSelectStatement(json, knex))
: this.addDistinctCount(preQuery, json)
// sorting has to be added to the outer query as well (but this breaks MS SQL, so skip it there)
if (this.client !== SqlClient.MS_SQL && !counting) {
preQuery = this.addSorting(preQuery, json)
}
// handle joins
query = this.addRelationships(
preQuery,
tableName,
relationships,
endpoint.schema,
tableAliases
)
// add a base limit over the whole query
// if counting we can't set this limit
if (limits?.base) {
query = query.limit(limits.base)
}
return this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
relationship: true,
aliases: tableAliases,
})
}
update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases)
const parsedBody = parseBody(body)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning
if (opts.disableReturning) {
return query.update(parsedBody)
} else {
return query.update(parsedBody).returning("*")
}
}
delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning
if (opts.disableReturning) {
return query.delete()
} else {
return query.delete().returning(generateSelectStatement(json, knex))
}
}
}
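// Public query builder: converts QueryJson operations into native SQL for the
// configured client and emulates RETURNING behaviour for clients that lack it.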
class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number
// pass through client to get flavour of SQL
constructor(client: SqlClient, limit: number = BASE_LIMIT) {
super(client)
this.limit = limit
}
private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) {
const sqlClient = this.getSqlClient()
if (opts?.disableBindings) {
return { sql: query.toString() }
} else {
let native = getNativeSql(query)
if (sqlClient === SqlClient.SQL_LITE) {
native = convertBooleans(native)
}
return native
}
}
/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
* @return the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
const sqlClient = this.getSqlClient()
const config: Knex.Config = {
client: sqlClient,
}
if (sqlClient === SqlClient.SQL_LITE) {
config.useNullAsDefault = true
}
const client = knex(config)
let query: Knex.QueryBuilder
const builder = new InternalBuilder(sqlClient)
switch (this._operation(json)) {
case Operation.CREATE:
query = builder.create(client, json, opts)
break
case Operation.READ:
query = builder.read(client, json, {
limits: {
query: this.limit,
base: BASE_LIMIT,
},
})
break
case Operation.COUNT:
// read without any limits to count
query = builder.read(client, json)
break
case Operation.UPDATE:
query = builder.update(client, json, opts)
break
case Operation.DELETE:
query = builder.delete(client, json, opts)
break
case Operation.BULK_CREATE:
query = builder.bulkCreate(client, json)
break
case Operation.BULK_UPSERT:
query = builder.bulkUpsert(client, json)
break
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw new Error("Operation type is not supported by SQL query builder")
}
return this.convertToNative(query, opts)
}
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
endpoint: {
...json.endpoint,
operation: Operation.READ,
},
resource: {
fields: [],
},
filters: json.extra?.idFilter,
paginate: {
limit: 1,
},
meta: json.meta,
})
return queryFn(input, Operation.READ)
}
// when creating, if an ID has been inserted we need to make sure the
// ID filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) {
if (!id || !json.meta.table || !json.meta.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
json.extra = {
idFilter: {
equal: {
[primaryKey]: id,
},
},
}
return json
}
// this function recreates the returning functionality of postgres
async queryWithReturning(
json: QueryJson,
queryFn: QueryFunction,
processFn: Function = (result: any) => result
) {
const sqlClient = this.getSqlClient()
const operation = this._operation(json)
const input = this._query(json, { disableReturning: true })
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await queryFn(query, operation))
}
return responses
}
let row
// need to manage returning manually, a feature MySQL doesn't support
if (operation === Operation.DELETE) {
row = processFn(await this.getReturningRow(queryFn, json))
}
const response = await queryFn(input, operation)
const results = processFn(response)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
let id
if (sqlClient === SqlClient.MS_SQL) {
id = results?.[0].id
} else if (sqlClient === SqlClient.MY_SQL) {
id = results?.insertId
}
row = processFn(
await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
)
}
if (operation === Operation.COUNT) {
return results
}
if (operation !== Operation.READ) {
return row
}
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}
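// Some clients return JSON/BB-reference columns as strings - parse them back into
// objects on the result rows, handling both plain and aliased column names.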
convertJsonStringColumns<T extends Record<string, any>>(
table: Table,
results: T[],
aliases?: Record<string, string>
): T[] {
const tableName = getTableName(table)
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
const fullName = `${aliasedTableName}.${name}`
for (let row of results) {
if (typeof row[fullName as keyof T] === "string") {
row[fullName as keyof T] = JSON.parse(row[fullName])
}
if (typeof row[name as keyof T] === "string") {
row[name as keyof T] = JSON.parse(row[name])
}
}
}
return results
}
_isJsonColumn(
field: FieldSchema
): field is JsonFieldMetadata | BBReferenceFieldMetadata {
return (
JsonTypes.includes(field.type) &&
!helpers.schema.isDeprecatedSingleUserColumn(field)
)
}
log(query: string, values?: SqlQueryBinding) {
sqlLog(this.getSqlClient(), query, values)
}
}
export default SqlQueryBuilder