Updating the underlying SQL so that ILIKE is only used with the Postgres client.
parent 5ec0d803af
commit 3474f3ae8e
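The change below hinges on a dialect difference: Postgres has a native case-insensitive ILIKE operator, while MySQL, MSSQL and SQLite do not, so the filter builder now branches on the Knex client name and falls back to LOWER(column) LIKE with a bound pattern everywhere else. A minimal standalone sketch of that branching follows, assuming a knex version with named exports; the helper name and the column strings are made up for illustration and are not part of this commit.

import { knex, Knex } from "knex"

// Case-insensitive "starts with" filter that works on any dialect.
// `client` is the same string passed to knex({ client }), e.g. "pg" or "mysql".
function startsWith(
  query: Knex.QueryBuilder,
  client: string,
  column: string,
  value: string
): Knex.QueryBuilder {
  if (client === "pg") {
    // postgres understands ilike directly
    return query.where(column, "ilike", `${value}%`)
  }
  // other dialects: lower-case the column and bind the pattern as a parameter
  return query.whereRaw(`LOWER(${column}) LIKE ?`, [`${value}%`])
}

In the diff itself the same branch lives inside InternalBuilder.addFilters, dispatched per filter via the `${fnc}Raw` lookup.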
@@ -342,7 +342,7 @@ module External {
     table: Table,
     relationships: RelationshipsJson[]
   ) {
-    if (rows[0].read === true) {
+    if (!rows || rows.length === 0 || rows[0].read === true) {
       return []
     }
     let finalRows: { [key: string]: Row } = {}

@@ -29,222 +29,232 @@ function parseBody(body: any) {
   return body
 }
 
-// right now we only do filters on the specific table being queried
-function addFilters(
-  tableName: string,
-  query: KnexQuery,
-  filters: SearchFilters | undefined
-): KnexQuery {
-  function iterate(
-    structure: { [key: string]: any },
-    fn: (key: string, value: any) => void
-  ) {
-    for (let [key, value] of Object.entries(structure)) {
-      fn(`${tableName}.${key}`, value)
-    }
-  }
-  if (!filters) {
-    return query
-  }
-  // if all or specified in filters, then everything is an or
-  const allOr = filters.allOr
-  if (filters.oneOf) {
-    iterate(filters.oneOf, (key, array) => {
-      const fnc = allOr ? "orWhereIn" : "whereIn"
-      query = query[fnc](key, array)
-    })
-  }
-  if (filters.string) {
-    iterate(filters.string, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc](key, "ilike", `${value}%`)
-    })
-  }
-  if (filters.fuzzy) {
-    iterate(filters.fuzzy, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc](key, "ilike", `%${value}%`)
-    })
-  }
-  if (filters.range) {
-    iterate(filters.range, (key, value) => {
-      if (!value.high || !value.low) {
-        return
-      }
-      const fnc = allOr ? "orWhereBetween" : "whereBetween"
-      query = query[fnc](key, [value.low, value.high])
-    })
-  }
-  if (filters.equal) {
-    iterate(filters.equal, (key, value) => {
-      const fnc = allOr ? "orWhere" : "where"
-      query = query[fnc]({ [key]: value })
-    })
-  }
-  if (filters.notEqual) {
-    iterate(filters.notEqual, (key, value) => {
-      const fnc = allOr ? "orWhereNot" : "whereNot"
-      query = query[fnc]({ [key]: value })
-    })
-  }
-  if (filters.empty) {
-    iterate(filters.empty, key => {
-      const fnc = allOr ? "orWhereNull" : "whereNull"
-      query = query[fnc](key)
-    })
-  }
-  if (filters.notEmpty) {
-    iterate(filters.notEmpty, key => {
-      const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
-      query = query[fnc](key)
-    })
-  }
-  return query
-}
-
-function addRelationships(
-  knex: Knex,
-  query: KnexQuery,
-  fields: string | string[],
-  fromTable: string,
-  relationships: RelationshipsJson[] | undefined
-): KnexQuery {
-  if (!relationships) {
-    return query
-  }
-  for (let relationship of relationships) {
-    const from = relationship.from,
-      to = relationship.to,
-      toTable = relationship.tableName
-    if (!relationship.through) {
-      // @ts-ignore
-      query = query.leftJoin(
-        toTable,
-        `${fromTable}.${from}`,
-        `${toTable}.${to}`
-      )
-    } else {
-      const throughTable = relationship.through
-      const fromPrimary = relationship.fromPrimary
-      const toPrimary = relationship.toPrimary
-      query = query
-        // @ts-ignore
-        .leftJoin(
-          throughTable,
-          `${fromTable}.${fromPrimary}`,
-          `${throughTable}.${from}`
-        )
-        .leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
-    }
-  }
-  return query.limit(BASE_LIMIT)
-}
-
-function buildCreate(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, body } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  const parsedBody = parseBody(body)
-  // make sure no null values in body for creation
-  for (let [key, value] of Object.entries(parsedBody)) {
-    if (value == null) {
-      delete parsedBody[key]
-    }
-  }
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.insert(parsedBody)
-  } else {
-    return query.insert(parsedBody).returning("*")
-  }
-}
-
-function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
-  let { endpoint, resource, filters, sort, paginate, relationships } = json
-  const tableName = endpoint.entityId
-  // select all if not specified
-  if (!resource) {
-    resource = { fields: [] }
-  }
-  let selectStatement: string | string[] = "*"
-  // handle select
-  if (resource.fields && resource.fields.length > 0) {
-    // select the resources as the format "table.columnName" - this is what is provided
-    // by the resource builder further up
-    selectStatement = resource.fields.map(field => `${field} as ${field}`)
-  }
-  let foundLimit = limit || BASE_LIMIT
-  // handle pagination
-  let foundOffset: number | null = null
-  if (paginate && paginate.page && paginate.limit) {
-    // @ts-ignore
-    const page = paginate.page <= 1 ? 0 : paginate.page - 1
-    const offset = page * paginate.limit
-    foundLimit = paginate.limit
-    foundOffset = offset
-  } else if (paginate && paginate.limit) {
-    foundLimit = paginate.limit
-  }
-  // start building the query
-  let query: KnexQuery = knex(tableName).limit(foundLimit)
-  if (foundOffset) {
-    query = query.offset(foundOffset)
-  }
-  if (sort) {
-    for (let [key, value] of Object.entries(sort)) {
-      const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
-      query = query.orderBy(key, direction)
-    }
-  }
-  query = addFilters(tableName, query, filters)
-  // @ts-ignore
-  let preQuery: KnexQuery = knex({
-    // @ts-ignore
-    [tableName]: query,
-  }).select(selectStatement)
-  // handle joins
-  return addRelationships(
-    knex,
-    preQuery,
-    selectStatement,
-    tableName,
-    relationships
-  )
-}
-
-function buildUpdate(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, body, filters } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  const parsedBody = parseBody(body)
-  query = addFilters(endpoint.entityId, query, filters)
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.update(parsedBody)
-  } else {
-    return query.update(parsedBody).returning("*")
-  }
-}
-
-function buildDelete(
-  knex: Knex,
-  json: QueryJson,
-  opts: QueryOptions
-): KnexQuery {
-  const { endpoint, filters } = json
-  let query: KnexQuery = knex(endpoint.entityId)
-  query = addFilters(endpoint.entityId, query, filters)
-  // mysql can't use returning
-  if (opts.disableReturning) {
-    return query.delete()
-  } else {
-    return query.delete().returning("*")
-  }
-}
+class InternalBuilder {
+  private readonly client: string
+
+  constructor(client: string) {
+    this.client = client
+  }
+
+  // right now we only do filters on the specific table being queried
+  addFilters(
+    tableName: string,
+    query: KnexQuery,
+    filters: SearchFilters | undefined
+  ): KnexQuery {
+    function iterate(
+      structure: { [key: string]: any },
+      fn: (key: string, value: any) => void
+    ) {
+      for (let [key, value] of Object.entries(structure)) {
+        fn(`${tableName}.${key}`, value)
+      }
+    }
+    if (!filters) {
+      return query
+    }
+    // if all or specified in filters, then everything is an or
+    const allOr = filters.allOr
+    if (filters.oneOf) {
+      iterate(filters.oneOf, (key, array) => {
+        const fnc = allOr ? "orWhereIn" : "whereIn"
+        query = query[fnc](key, array)
+      })
+    }
+    if (filters.string) {
+      iterate(filters.string, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        // postgres supports ilike, nothing else does
+        if (this.client === "pg") {
+          query = query[fnc](key, "ilike", `${value}%`)
+        } else {
+          const rawFnc = `${fnc}Raw`
+          // @ts-ignore
+          query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`${value}%`])
+        }
+      })
+    }
+    if (filters.fuzzy) {
+      iterate(filters.fuzzy, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        // postgres supports ilike, nothing else does
+        if (this.client === "pg") {
+          query = query[fnc](key, "ilike", `%${value}%`)
+        } else {
+          const rawFnc = `${fnc}Raw`
+          // @ts-ignore
+          query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`%${value}%`])
+        }
+      })
+    }
+    if (filters.range) {
+      iterate(filters.range, (key, value) => {
+        if (!value.high || !value.low) {
+          return
+        }
+        const fnc = allOr ? "orWhereBetween" : "whereBetween"
+        query = query[fnc](key, [value.low, value.high])
+      })
+    }
+    if (filters.equal) {
+      iterate(filters.equal, (key, value) => {
+        const fnc = allOr ? "orWhere" : "where"
+        query = query[fnc]({ [key]: value })
+      })
+    }
+    if (filters.notEqual) {
+      iterate(filters.notEqual, (key, value) => {
+        const fnc = allOr ? "orWhereNot" : "whereNot"
+        query = query[fnc]({ [key]: value })
+      })
+    }
+    if (filters.empty) {
+      iterate(filters.empty, key => {
+        const fnc = allOr ? "orWhereNull" : "whereNull"
+        query = query[fnc](key)
+      })
+    }
+    if (filters.notEmpty) {
+      iterate(filters.notEmpty, key => {
+        const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
+        query = query[fnc](key)
+      })
+    }
+    return query
+  }
+
+  addRelationships(
+    knex: Knex,
+    query: KnexQuery,
+    fields: string | string[],
+    fromTable: string,
+    relationships: RelationshipsJson[] | undefined
+  ): KnexQuery {
+    if (!relationships) {
+      return query
+    }
+    for (let relationship of relationships) {
+      const from = relationship.from,
+        to = relationship.to,
+        toTable = relationship.tableName
+      if (!relationship.through) {
+        // @ts-ignore
+        query = query.leftJoin(
+          toTable,
+          `${fromTable}.${from}`,
+          `${toTable}.${to}`
+        )
+      } else {
+        const throughTable = relationship.through
+        const fromPrimary = relationship.fromPrimary
+        const toPrimary = relationship.toPrimary
+        query = query
+          // @ts-ignore
+          .leftJoin(
+            throughTable,
+            `${fromTable}.${fromPrimary}`,
+            `${throughTable}.${from}`
+          )
+          .leftJoin(toTable, `${toTable}.${toPrimary}`, `${throughTable}.${to}`)
+      }
+    }
+    return query.limit(BASE_LIMIT)
+  }
+
+  create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, body } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    const parsedBody = parseBody(body)
+    // make sure no null values in body for creation
+    for (let [key, value] of Object.entries(parsedBody)) {
+      if (value == null) {
+        delete parsedBody[key]
+      }
+    }
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.insert(parsedBody)
+    } else {
+      return query.insert(parsedBody).returning("*")
+    }
+  }
+
+  read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
+    let { endpoint, resource, filters, sort, paginate, relationships } = json
+    const tableName = endpoint.entityId
+    // select all if not specified
+    if (!resource) {
+      resource = { fields: [] }
+    }
+    let selectStatement: string | string[] = "*"
+    // handle select
+    if (resource.fields && resource.fields.length > 0) {
+      // select the resources as the format "table.columnName" - this is what is provided
+      // by the resource builder further up
+      selectStatement = resource.fields.map(field => `${field} as ${field}`)
+    }
+    let foundLimit = limit || BASE_LIMIT
+    // handle pagination
+    let foundOffset: number | null = null
+    if (paginate && paginate.page && paginate.limit) {
+      // @ts-ignore
+      const page = paginate.page <= 1 ? 0 : paginate.page - 1
+      const offset = page * paginate.limit
+      foundLimit = paginate.limit
+      foundOffset = offset
+    } else if (paginate && paginate.limit) {
+      foundLimit = paginate.limit
+    }
+    // start building the query
+    let query: KnexQuery = knex(tableName).limit(foundLimit)
+    if (foundOffset) {
+      query = query.offset(foundOffset)
+    }
+    if (sort) {
+      for (let [key, value] of Object.entries(sort)) {
+        const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
+        query = query.orderBy(key, direction)
+      }
+    }
+    query = this.addFilters(tableName, query, filters)
+    // @ts-ignore
+    let preQuery: KnexQuery = knex({
+      // @ts-ignore
+      [tableName]: query,
+    }).select(selectStatement)
+    // handle joins
+    return this.addRelationships(
+      knex,
+      preQuery,
+      selectStatement,
+      tableName,
+      relationships
+    )
+  }
+
+  update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, body, filters } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    const parsedBody = parseBody(body)
+    query = this.addFilters(endpoint.entityId, query, filters)
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.update(parsedBody)
+    } else {
+      return query.update(parsedBody).returning("*")
+    }
+  }
+
+  delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+    const { endpoint, filters } = json
+    let query: KnexQuery = knex(endpoint.entityId)
+    query = this.addFilters(endpoint.entityId, query, filters)
+    // mysql can't use returning
+    if (opts.disableReturning) {
+      return query.delete()
+    } else {
+      return query.delete().returning("*")
+    }
+  }
+}
 
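For a sense of what the two branches in addFilters emit, knex can be used as a pure query builder and the result printed with toString(); the table and column names below are throwaway examples, not values from the commit, and the generated SQL comments are approximate.

import { knex } from "knex"

const pg = knex({ client: "pg" })
const mysql = knex({ client: "mysql" })

// postgres client: the ilike operator goes straight through
pg("users").where("users.name", "ilike", "jo%").toString()
// -> select * from "users" where "users"."name" ilike 'jo%'

// any other client: the raw fallback lower-cases the column and binds the pattern
mysql("users").whereRaw("LOWER(users.name) LIKE ?", ["jo%"]).toString()
// -> select * from `users` where LOWER(users.name) LIKE 'jo%'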
@@ -266,18 +276,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     const sqlClient = this.getSqlClient()
     const client = knex({ client: sqlClient })
     let query
+    const builder = new InternalBuilder(sqlClient)
     switch (this._operation(json)) {
       case Operation.CREATE:
-        query = buildCreate(client, json, opts)
+        query = builder.create(client, json, opts)
         break
       case Operation.READ:
-        query = buildRead(client, json, this.limit)
+        query = builder.read(client, json, this.limit)
         break
       case Operation.UPDATE:
-        query = buildUpdate(client, json, opts)
+        query = builder.update(client, json, opts)
         break
       case Operation.DELETE:
-        query = buildDelete(client, json, opts)
+        query = builder.delete(client, json, opts)
         break
       case Operation.CREATE_TABLE:
       case Operation.UPDATE_TABLE:

@@ -245,7 +245,9 @@ module MSSQLModule {
         schema,
       }
     }
-    this.tables = tables
+    const final = finaliseExternalTables(tables)
+    this.tables = final.tables
+    this.schemaErrors = final.errors
   }
 
   async read(query: SqlQuery | string) {
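The MSSQL hunk only shows the call site: finaliseExternalTables(tables) now returns both the usable tables and a map of schema errors. The helper itself is not part of this diff; the sketch below is a hypothetical reconstruction of the shape implied by final.tables / final.errors, assuming a table is rejected when no primary key was detected.

// Hypothetical reconstruction - only the { tables, errors } return shape is visible above.
interface ExternalTable {
  name: string
  primary?: string[]
  schema: { [key: string]: any }
}

function finaliseExternalTables(tables: { [key: string]: ExternalTable }): {
  tables: { [key: string]: ExternalTable }
  errors: { [key: string]: string }
} {
  const finalTables: { [key: string]: ExternalTable } = {}
  const errors: { [key: string]: string } = {}
  for (let [name, table] of Object.entries(tables)) {
    // assumption: tables without a primary key can't be managed, so they are reported instead
    if (!table.primary || table.primary.length === 0) {
      errors[name] = "Table must have a primary key"
      continue
    }
    finalTables[name] = table
  }
  return { tables: finalTables, errors }
}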