2021-06-24 19:16:48 +02:00
|
|
|
import { Knex, knex } from "knex"
|
2021-06-24 19:17:26 +02:00
|
|
|
import {
|
2021-10-28 20:39:42 +02:00
|
|
|
Operation,
|
2021-06-24 19:17:26 +02:00
|
|
|
QueryJson,
|
|
|
|
QueryOptions,
|
2021-06-25 19:13:11 +02:00
|
|
|
RelationshipsJson,
|
2021-10-28 20:39:42 +02:00
|
|
|
SearchFilters,
|
|
|
|
SortDirection,
|
2021-06-27 00:09:46 +02:00
|
|
|
} from "../../definitions/datasource"
|
2021-11-08 19:12:40 +01:00
|
|
|
import { isIsoDateString, SqlClients } from "../utils"
|
2021-10-28 20:39:42 +02:00
|
|
|
import SqlTableQueryBuilder from "./sqlTable"
|
|
|
|
|
|
|
|
// hard cap on rows a single query may return/join, so a query can never
// fetch unbounded data
const BASE_LIMIT = 5000

// a query in progress: either a builder chain or the root knex instance
type KnexQuery = Knex.QueryBuilder | Knex

// these are invalid dates sent by the client, need to convert them to a real max date
const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
|
|
|
function parse(input: any) {
|
|
|
|
if (Array.isArray(input)) {
|
|
|
|
return JSON.stringify(input)
|
|
|
|
}
|
|
|
|
if (typeof input !== "string") {
|
|
|
|
return input
|
|
|
|
}
|
|
|
|
if (input === MAX_ISO_DATE) {
|
|
|
|
return new Date(8640000000000000)
|
|
|
|
}
|
|
|
|
if (input === MIN_ISO_DATE) {
|
|
|
|
return new Date(-8640000000000000)
|
|
|
|
}
|
|
|
|
if (isIsoDateString(input)) {
|
|
|
|
return new Date(input)
|
|
|
|
}
|
2021-11-11 17:20:30 +01:00
|
|
|
return input
|
2021-11-11 16:36:21 +01:00
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
|
2021-07-12 11:51:30 +02:00
|
|
|
function parseBody(body: any) {
|
|
|
|
for (let [key, value] of Object.entries(body)) {
|
2021-11-11 16:36:21 +01:00
|
|
|
body[key] = parse(value)
|
2021-07-12 11:51:30 +02:00
|
|
|
}
|
|
|
|
return body
|
|
|
|
}
|
|
|
|
|
2021-11-11 16:36:21 +01:00
|
|
|
function parseFilters(filters: SearchFilters): SearchFilters {
|
|
|
|
for (let [key, value] of Object.entries(filters)) {
|
|
|
|
let parsed
|
|
|
|
if (typeof value === "object") {
|
|
|
|
parsed = parseFilters(value)
|
|
|
|
} else {
|
|
|
|
parsed = parse(value)
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
2021-11-11 16:36:21 +01:00
|
|
|
// @ts-ignore
|
|
|
|
filters[key] = parsed
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
2021-11-11 16:36:21 +01:00
|
|
|
return filters
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
class InternalBuilder {
|
|
|
|
private readonly client: string
|
|
|
|
|
|
|
|
constructor(client: string) {
|
|
|
|
this.client = client
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
// right now we only do filters on the specific table being queried
|
|
|
|
addFilters(
|
|
|
|
tableName: string,
|
|
|
|
query: KnexQuery,
|
|
|
|
filters: SearchFilters | undefined
|
|
|
|
): KnexQuery {
|
|
|
|
function iterate(
|
|
|
|
structure: { [key: string]: any },
|
|
|
|
fn: (key: string, value: any) => void
|
|
|
|
) {
|
|
|
|
for (let [key, value] of Object.entries(structure)) {
|
|
|
|
fn(`${tableName}.${key}`, value)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (!filters) {
|
|
|
|
return query
|
|
|
|
}
|
2021-11-11 16:36:21 +01:00
|
|
|
filters = parseFilters(filters)
|
2021-11-05 14:48:13 +01:00
|
|
|
// if all or specified in filters, then everything is an or
|
|
|
|
const allOr = filters.allOr
|
|
|
|
if (filters.oneOf) {
|
|
|
|
iterate(filters.oneOf, (key, array) => {
|
|
|
|
const fnc = allOr ? "orWhereIn" : "whereIn"
|
|
|
|
query = query[fnc](key, array)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.string) {
|
|
|
|
iterate(filters.string, (key, value) => {
|
|
|
|
const fnc = allOr ? "orWhere" : "where"
|
|
|
|
// postgres supports ilike, nothing else does
|
2021-11-08 19:12:40 +01:00
|
|
|
if (this.client === SqlClients.POSTGRES) {
|
2021-11-05 14:48:13 +01:00
|
|
|
query = query[fnc](key, "ilike", `${value}%`)
|
|
|
|
} else {
|
|
|
|
const rawFnc = `${fnc}Raw`
|
|
|
|
// @ts-ignore
|
|
|
|
query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`${value}%`])
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.fuzzy) {
|
|
|
|
iterate(filters.fuzzy, (key, value) => {
|
|
|
|
const fnc = allOr ? "orWhere" : "where"
|
|
|
|
// postgres supports ilike, nothing else does
|
2021-11-08 19:12:40 +01:00
|
|
|
if (this.client === SqlClients.POSTGRES) {
|
2021-11-05 14:48:13 +01:00
|
|
|
query = query[fnc](key, "ilike", `%${value}%`)
|
|
|
|
} else {
|
|
|
|
const rawFnc = `${fnc}Raw`
|
|
|
|
// @ts-ignore
|
|
|
|
query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`%${value}%`])
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.range) {
|
|
|
|
iterate(filters.range, (key, value) => {
|
|
|
|
if (!value.high || !value.low) {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
const fnc = allOr ? "orWhereBetween" : "whereBetween"
|
|
|
|
query = query[fnc](key, [value.low, value.high])
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.equal) {
|
|
|
|
iterate(filters.equal, (key, value) => {
|
|
|
|
const fnc = allOr ? "orWhere" : "where"
|
|
|
|
query = query[fnc]({ [key]: value })
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.notEqual) {
|
|
|
|
iterate(filters.notEqual, (key, value) => {
|
|
|
|
const fnc = allOr ? "orWhereNot" : "whereNot"
|
|
|
|
query = query[fnc]({ [key]: value })
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.empty) {
|
|
|
|
iterate(filters.empty, key => {
|
|
|
|
const fnc = allOr ? "orWhereNull" : "whereNull"
|
|
|
|
query = query[fnc](key)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
if (filters.notEmpty) {
|
|
|
|
iterate(filters.notEmpty, key => {
|
|
|
|
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
|
|
|
|
query = query[fnc](key)
|
|
|
|
})
|
|
|
|
}
|
2021-06-23 20:05:32 +02:00
|
|
|
return query
|
|
|
|
}
|
2021-11-05 14:48:13 +01:00
|
|
|
|
|
|
|
addRelationships(
|
|
|
|
knex: Knex,
|
|
|
|
query: KnexQuery,
|
|
|
|
fields: string | string[],
|
|
|
|
fromTable: string,
|
|
|
|
relationships: RelationshipsJson[] | undefined
|
|
|
|
): KnexQuery {
|
|
|
|
if (!relationships) {
|
|
|
|
return query
|
|
|
|
}
|
|
|
|
for (let relationship of relationships) {
|
|
|
|
const from = relationship.from,
|
|
|
|
to = relationship.to,
|
|
|
|
toTable = relationship.tableName
|
|
|
|
if (!relationship.through) {
|
2021-06-25 19:13:11 +02:00
|
|
|
// @ts-ignore
|
2021-11-05 14:48:13 +01:00
|
|
|
query = query.leftJoin(
|
|
|
|
toTable,
|
|
|
|
`${fromTable}.${from}`,
|
|
|
|
`${toTable}.${to}`
|
2021-06-29 19:38:27 +02:00
|
|
|
)
|
2021-11-05 14:48:13 +01:00
|
|
|
} else {
|
|
|
|
const throughTable = relationship.through
|
|
|
|
const fromPrimary = relationship.fromPrimary
|
|
|
|
const toPrimary = relationship.toPrimary
|
|
|
|
query = query
|
|
|
|
// @ts-ignore
|
|
|
|
.leftJoin(
|
|
|
|
throughTable,
|
|
|
|
`${fromTable}.${fromPrimary}`,
|
|
|
|
`${throughTable}.${from}`
|
|
|
|
)
|
|
|
|
.leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
|
|
|
|
}
|
2021-06-23 20:05:32 +02:00
|
|
|
}
|
2021-11-05 14:48:13 +01:00
|
|
|
return query.limit(BASE_LIMIT)
|
2021-06-23 20:05:32 +02:00
|
|
|
}
|
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
|
|
|
const { endpoint, body } = json
|
|
|
|
let query: KnexQuery = knex(endpoint.entityId)
|
|
|
|
const parsedBody = parseBody(body)
|
|
|
|
// make sure no null values in body for creation
|
|
|
|
for (let [key, value] of Object.entries(parsedBody)) {
|
|
|
|
if (value == null) {
|
|
|
|
delete parsedBody[key]
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// mysql can't use returning
|
|
|
|
if (opts.disableReturning) {
|
|
|
|
return query.insert(parsedBody)
|
|
|
|
} else {
|
|
|
|
return query.insert(parsedBody).returning("*")
|
2021-10-06 18:55:03 +02:00
|
|
|
}
|
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
|
2021-11-12 20:24:56 +01:00
|
|
|
bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
|
|
|
|
const { endpoint, body } = json
|
|
|
|
let query: KnexQuery = knex(endpoint.entityId)
|
|
|
|
if (!Array.isArray(body)) {
|
|
|
|
return query
|
|
|
|
}
|
|
|
|
const parsedBody = body.map(row => parseBody(row))
|
2021-07-12 11:51:30 +02:00
|
|
|
return query.insert(parsedBody)
|
2021-06-18 14:14:45 +02:00
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
|
|
|
|
let { endpoint, resource, filters, sort, paginate, relationships } = json
|
|
|
|
const tableName = endpoint.entityId
|
|
|
|
// select all if not specified
|
|
|
|
if (!resource) {
|
|
|
|
resource = { fields: [] }
|
2021-09-23 17:17:23 +02:00
|
|
|
}
|
2021-11-05 14:48:13 +01:00
|
|
|
let selectStatement: string | string[] = "*"
|
|
|
|
// handle select
|
|
|
|
if (resource.fields && resource.fields.length > 0) {
|
|
|
|
// select the resources as the format "table.columnName" - this is what is provided
|
|
|
|
// by the resource builder further up
|
|
|
|
selectStatement = resource.fields.map(field => `${field} as ${field}`)
|
|
|
|
}
|
|
|
|
let foundLimit = limit || BASE_LIMIT
|
|
|
|
// handle pagination
|
|
|
|
let foundOffset: number | null = null
|
|
|
|
if (paginate && paginate.page && paginate.limit) {
|
|
|
|
// @ts-ignore
|
|
|
|
const page = paginate.page <= 1 ? 0 : paginate.page - 1
|
|
|
|
const offset = page * paginate.limit
|
|
|
|
foundLimit = paginate.limit
|
|
|
|
foundOffset = offset
|
|
|
|
} else if (paginate && paginate.limit) {
|
|
|
|
foundLimit = paginate.limit
|
|
|
|
}
|
|
|
|
// start building the query
|
|
|
|
let query: KnexQuery = knex(tableName).limit(foundLimit)
|
|
|
|
if (foundOffset) {
|
|
|
|
query = query.offset(foundOffset)
|
|
|
|
}
|
|
|
|
if (sort) {
|
|
|
|
for (let [key, value] of Object.entries(sort)) {
|
|
|
|
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
|
|
|
|
query = query.orderBy(key, direction)
|
|
|
|
}
|
|
|
|
}
|
2021-11-08 19:12:40 +01:00
|
|
|
if (this.client === SqlClients.MS_SQL && !sort && paginate?.limit) {
|
2021-11-05 19:55:36 +01:00
|
|
|
// @ts-ignore
|
|
|
|
query = query.orderBy(json.meta?.table?.primary[0])
|
|
|
|
}
|
2021-11-05 14:48:13 +01:00
|
|
|
query = this.addFilters(tableName, query, filters)
|
2021-09-23 17:17:23 +02:00
|
|
|
// @ts-ignore
|
2021-11-05 14:48:13 +01:00
|
|
|
let preQuery: KnexQuery = knex({
|
|
|
|
// @ts-ignore
|
|
|
|
[tableName]: query,
|
|
|
|
}).select(selectStatement)
|
|
|
|
// handle joins
|
|
|
|
return this.addRelationships(
|
|
|
|
knex,
|
|
|
|
preQuery,
|
|
|
|
selectStatement,
|
|
|
|
tableName,
|
|
|
|
relationships
|
|
|
|
)
|
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
|
|
|
const { endpoint, body, filters } = json
|
|
|
|
let query: KnexQuery = knex(endpoint.entityId)
|
|
|
|
const parsedBody = parseBody(body)
|
|
|
|
query = this.addFilters(endpoint.entityId, query, filters)
|
|
|
|
// mysql can't use returning
|
|
|
|
if (opts.disableReturning) {
|
|
|
|
return query.update(parsedBody)
|
|
|
|
} else {
|
|
|
|
return query.update(parsedBody).returning("*")
|
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
|
2021-11-05 14:48:13 +01:00
|
|
|
delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
|
|
|
|
const { endpoint, filters } = json
|
|
|
|
let query: KnexQuery = knex(endpoint.entityId)
|
|
|
|
query = this.addFilters(endpoint.entityId, query, filters)
|
|
|
|
// mysql can't use returning
|
|
|
|
if (opts.disableReturning) {
|
|
|
|
return query.delete()
|
|
|
|
} else {
|
|
|
|
return query.delete().returning("*")
|
|
|
|
}
|
2021-06-18 14:14:45 +02:00
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
|
|
|
|
2021-10-28 20:39:42 +02:00
|
|
|
class SqlQueryBuilder extends SqlTableQueryBuilder {
|
2021-06-24 19:16:48 +02:00
|
|
|
private readonly limit: number
|
2021-06-03 17:31:24 +02:00
|
|
|
// pass through client to get flavour of SQL
|
2021-06-25 14:46:02 +02:00
|
|
|
constructor(client: string, limit: number = BASE_LIMIT) {
|
2021-10-28 20:39:42 +02:00
|
|
|
super(client)
|
2021-06-24 19:16:48 +02:00
|
|
|
this.limit = limit
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
|
|
|
|
2021-06-18 14:14:45 +02:00
|
|
|
/**
|
|
|
|
* @param json The JSON query DSL which is to be converted to SQL.
|
|
|
|
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
|
|
|
|
* which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
|
|
|
|
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
|
|
|
|
*/
|
2021-06-24 19:16:48 +02:00
|
|
|
_query(json: QueryJson, opts: QueryOptions = {}) {
|
2021-10-28 20:39:42 +02:00
|
|
|
const sqlClient = this.getSqlClient()
|
|
|
|
const client = knex({ client: sqlClient })
|
2021-06-03 18:45:19 +02:00
|
|
|
let query
|
2021-11-05 14:48:13 +01:00
|
|
|
const builder = new InternalBuilder(sqlClient)
|
2021-06-03 19:48:04 +02:00
|
|
|
switch (this._operation(json)) {
|
2021-06-24 19:16:48 +02:00
|
|
|
case Operation.CREATE:
|
2021-11-05 14:48:13 +01:00
|
|
|
query = builder.create(client, json, opts)
|
2021-06-03 18:45:19 +02:00
|
|
|
break
|
2021-06-24 19:16:48 +02:00
|
|
|
case Operation.READ:
|
2021-11-05 14:48:13 +01:00
|
|
|
query = builder.read(client, json, this.limit)
|
2021-06-03 18:45:19 +02:00
|
|
|
break
|
2021-06-24 19:16:48 +02:00
|
|
|
case Operation.UPDATE:
|
2021-11-05 14:48:13 +01:00
|
|
|
query = builder.update(client, json, opts)
|
2021-06-03 18:45:19 +02:00
|
|
|
break
|
2021-06-24 19:16:48 +02:00
|
|
|
case Operation.DELETE:
|
2021-11-05 14:48:13 +01:00
|
|
|
query = builder.delete(client, json, opts)
|
2021-06-03 18:45:19 +02:00
|
|
|
break
|
2021-11-12 20:24:56 +01:00
|
|
|
case Operation.BULK_CREATE:
|
|
|
|
query = builder.bulkCreate(client, json)
|
2021-06-03 18:45:19 +02:00
|
|
|
break
|
2021-11-05 13:33:48 +01:00
|
|
|
case Operation.CREATE_TABLE:
|
|
|
|
case Operation.UPDATE_TABLE:
|
|
|
|
case Operation.DELETE_TABLE:
|
2021-10-28 20:39:42 +02:00
|
|
|
return this._tableQuery(json)
|
2021-06-03 17:31:24 +02:00
|
|
|
default:
|
2021-06-04 15:53:49 +02:00
|
|
|
throw `Operation type is not supported by SQL query builder`
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
2021-06-25 19:13:11 +02:00
|
|
|
|
|
|
|
// @ts-ignore
|
2021-06-03 18:45:19 +02:00
|
|
|
return query.toSQL().toNative()
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
2021-11-05 19:55:36 +01:00
|
|
|
|
|
|
|
async getReturningRow(queryFn: Function, json: QueryJson) {
|
|
|
|
if (!json.extra || !json.extra.idFilter) {
|
|
|
|
return {}
|
|
|
|
}
|
|
|
|
const input = this._query({
|
|
|
|
endpoint: {
|
|
|
|
...json.endpoint,
|
|
|
|
operation: Operation.READ,
|
|
|
|
},
|
|
|
|
resource: {
|
|
|
|
fields: [],
|
|
|
|
},
|
|
|
|
filters: json.extra.idFilter,
|
|
|
|
paginate: {
|
|
|
|
limit: 1,
|
|
|
|
},
|
|
|
|
meta: json.meta,
|
|
|
|
})
|
|
|
|
return queryFn(input, Operation.READ)
|
|
|
|
}
|
|
|
|
|
|
|
|
// when creating if an ID has been inserted need to make sure
|
|
|
|
// the id filter is enriched with it before trying to retrieve the row
|
|
|
|
checkLookupKeys(id: any, json: QueryJson) {
|
|
|
|
if (!id || !json.meta?.table || !json.meta.table.primary) {
|
|
|
|
return json
|
|
|
|
}
|
|
|
|
const primaryKey = json.meta.table.primary?.[0]
|
|
|
|
json.extra = {
|
|
|
|
idFilter: {
|
|
|
|
equal: {
|
|
|
|
[primaryKey]: id,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
return json
|
|
|
|
}
|
|
|
|
|
|
|
|
// this function recreates the returning functionality of postgres
|
|
|
|
async queryWithReturning(
|
|
|
|
json: QueryJson,
|
|
|
|
queryFn: Function,
|
|
|
|
processFn: Function = (result: any) => result
|
|
|
|
) {
|
|
|
|
const sqlClient = this.getSqlClient()
|
|
|
|
const operation = this._operation(json)
|
|
|
|
const input = this._query(json, { disableReturning: true })
|
|
|
|
if (Array.isArray(input)) {
|
|
|
|
const responses = []
|
|
|
|
for (let query of input) {
|
|
|
|
responses.push(await queryFn(query, operation))
|
|
|
|
}
|
|
|
|
return responses
|
|
|
|
}
|
|
|
|
let row
|
|
|
|
// need to manage returning, a feature mySQL can't do
|
|
|
|
if (operation === Operation.DELETE) {
|
|
|
|
row = processFn(await this.getReturningRow(queryFn, json))
|
|
|
|
}
|
|
|
|
const response = await queryFn(input, operation)
|
|
|
|
const results = processFn(response)
|
|
|
|
// same as delete, manage returning
|
|
|
|
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
|
|
|
|
let id
|
2021-11-08 19:12:40 +01:00
|
|
|
if (sqlClient === SqlClients.MS_SQL) {
|
2021-11-05 19:55:36 +01:00
|
|
|
id = results?.[0].id
|
2021-11-08 19:12:40 +01:00
|
|
|
} else if (sqlClient === SqlClients.MY_SQL) {
|
2021-11-05 19:55:36 +01:00
|
|
|
id = results?.insertId
|
|
|
|
}
|
|
|
|
row = processFn(
|
|
|
|
await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
|
|
|
|
)
|
|
|
|
}
|
|
|
|
if (operation !== Operation.READ) {
|
|
|
|
return row
|
|
|
|
}
|
|
|
|
return results.length ? results : [{ [operation.toLowerCase()]: true }]
|
|
|
|
}
|
2021-06-03 17:31:24 +02:00
|
|
|
}
|
|
|
|
|
2021-11-05 14:56:54 +01:00
|
|
|
export default SqlQueryBuilder
// also attached to module.exports so CommonJS require() callers receive
// the class directly rather than a { default: ... } wrapper
module.exports = SqlQueryBuilder
|