Merge pull request #14532 from Budibase/fix/sql-many-relationships

SQL - utilise JSON aggregations for relationships
Michael Drury 2024-09-11 10:33:51 +01:00 committed by GitHub
commit 31f8691708
17 changed files with 783 additions and 501 deletions
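In short: instead of LEFT JOINing related tables onto the main query (which multiplies rows and requires a wrapping pre-query), each relationship is now fetched as a single JSON-aggregated column via a correlated sub-select. A minimal Knex sketch of the Postgres shape this produces, using the brands/products names from the query builder tests - the connection config, limits and field list are illustrative only:

  import knex from "knex"

  const client = knex({ client: "pg" })

  // correlated sub-select: related products for the current brand row,
  // ordered and capped so the related set stays deterministic and bounded
  const related = client("products")
    .select("products.*")
    .where("products.brand_id", "=", client.raw('"brands"."brand_id"'))
    .orderBy("products.product_id")
    .limit(500) // getRelationshipLimit()
    .as("products")

  // main query: one JSON-array column per relationship instead of a LEFT JOIN
  const query = client("brands")
    .select("brands.*")
    .select({
      products: client
        .select(
          client.raw(
            `json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name"))`
          )
        )
        .from(related),
    })
    .limit(5000) // getBaseLimit()

  console.log(query.toString())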

View File

@ -171,6 +171,7 @@ const environment = {
// Couch/search
SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
SQL_MAX_RELATED_ROWS: process.env.SQL_MAX_RELATED_ROWS,
// smtp
SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
SMTP_USER: process.env.SMTP_USER,

View File

@ -7,6 +7,7 @@ import {
isValidFilter,
isValidISODateString,
sqlLog,
validateManyToMany,
} from "./utils"
import SqlTableQueryBuilder from "./sqlTable"
import {
@ -39,6 +40,7 @@ import { dataFilters, helpers } from "@budibase/shared-core"
import { cloneDeep } from "lodash"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
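// assumption: SQLite caps the number of arguments a single function call such as
// json_object() can accept, and every selected relationship field contributes a key
// and a value argument, so SQS relationship selects are truncated to this many fields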
const MAX_SQS_RELATIONSHIP_FIELDS = 63
function getBaseLimit() {
const envLimit = environment.SQL_MAX_ROWS
@ -47,6 +49,13 @@ function getBaseLimit() {
return envLimit || 5000
}
function getRelationshipLimit() {
const envLimit = environment.SQL_MAX_RELATED_ROWS
? parseInt(environment.SQL_MAX_RELATED_ROWS)
: null
return envLimit || 500
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
@ -92,6 +101,23 @@ class InternalBuilder {
})
}
// states the various situations in which we need a full mapped select statement
private readonly SPECIAL_SELECT_CASES = {
POSTGRES_MONEY: (field: FieldSchema | undefined) => {
return (
this.client === SqlClient.POSTGRES &&
field?.externalType?.includes("money")
)
},
MSSQL_DATES: (field: FieldSchema | undefined) => {
return (
this.client === SqlClient.MS_SQL &&
field?.type === FieldType.DATETIME &&
field.timeOnly
)
},
}
get table(): Table {
return this.query.meta.table
}
@ -125,46 +151,52 @@ class InternalBuilder {
.join(".")
}
private isFullSelectStatementRequired(): boolean {
const { meta } = this.query
for (let column of Object.values(meta.table.schema)) {
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(column)) {
return true
} else if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(column)) {
return true
}
}
return false
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { resource, meta } = this.query
const { meta, endpoint, resource, tableAliases } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = tableAliases?.[endpoint.entityId]
? tableAliases?.[endpoint.entityId]
: endpoint.entityId
const schema = meta.table.schema
return resource.fields.map(field => {
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)]
}
// get just the fields for this table
return resource.fields
.map(field => {
const parts = field.split(/\./g)
let table: string | undefined = undefined
let column: string | undefined = undefined
let column = parts[0]
// Just a column name, e.g.: "column"
if (parts.length === 1) {
column = parts[0]
}
// A table name and a column name, e.g.: "table.column"
if (parts.length === 2) {
table = parts[0]
column = parts[1]
}
// A link doc, e.g.: "table.doc1.fieldName"
if (parts.length > 2) {
if (parts.length > 1) {
table = parts[0]
column = parts.slice(1).join(".")
}
if (!column) {
throw new Error(`Invalid field name: ${field}`)
}
return { table, column, field }
})
.filter(({ table }) => !table || table === alias)
.map(({ table, column, field }) => {
const columnSchema = schema[column]
if (
this.client === SqlClient.POSTGRES &&
columnSchema?.externalType?.includes("money")
) {
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
return this.knex.raw(
`${this.quotedIdentifier(
[table, column].join(".")
@ -172,31 +204,16 @@ class InternalBuilder {
)
}
if (
this.client === SqlClient.MS_SQL &&
columnSchema?.type === FieldType.DATETIME &&
columnSchema.timeOnly
) {
if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
// Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format
return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
}
// There's at least two edge cases being handled in the expression below.
// 1. The column name could start/end with a space, and in that case we
// want to preserve that space.
// 2. Almost all column names are specified in the form table.column, except
// in the case of relationships, where it's table.doc1.column. In that
// case, we want to split it into `table`.`doc1.column` for reasons that
// aren't actually clear to me, but `table`.`doc1` breaks things with the
// sample data tests.
if (table) {
return this.knex.raw(
`${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}`
)
} else {
return this.knex.raw(`${this.quote(field)} as ${this.quote(field)}`)
}
const quoted = table
? `${this.quote(table)}.${this.quote(column)}`
: this.quote(field)
return this.knex.raw(quoted)
})
}
@ -328,6 +345,85 @@ class InternalBuilder {
return filters
}
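// SQS stores many-to-many links as junction documents that record which link column
// they belong to (fieldName) - this check restricts junction rows to the relationship
// column currently being queried (only applied for the internal SQLite/SQS datasource)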
addJoinFieldCheck(query: Knex.QueryBuilder, relationship: RelationshipsJson) {
const document = relationship.from?.split(".")[0] || ""
return query.andWhere(`${document}.fieldName`, "=", relationship.column)
}
addRelationshipForFilter(
query: Knex.QueryBuilder,
filterKey: string,
whereCb: (query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const mainKnex = this.knex
const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName
const matches = (possibleTable: string) =>
filterKey.startsWith(`${possibleTable}`)
if (!relationships) {
return query
}
for (const relationship of relationships) {
const relatedTableName = relationship.tableName
const toAlias = aliases?.[relatedTableName] || relatedTableName
// this is the relationship which is being filtered
if (
(matches(relatedTableName) || matches(toAlias)) &&
relationship.to &&
relationship.tableName
) {
let subQuery = mainKnex
.select(mainKnex.raw(1))
.from({ [toAlias]: relatedTableName })
const manyToMany = validateManyToMany(relationship)
if (manyToMany) {
const throughAlias =
aliases?.[manyToMany.through] || relationship.through
let throughTable = this.tableNameWithSchema(manyToMany.through, {
alias: throughAlias,
schema: endpoint.schema,
})
subQuery = subQuery
// add a join through the junction table
.innerJoin(throughTable, function () {
// @ts-ignore
this.on(
`${toAlias}.${manyToMany.toPrimary}`,
"=",
`${throughAlias}.${manyToMany.to}`
)
})
// check the document in the junction table points to the main table
.where(
`${throughAlias}.${manyToMany.from}`,
"=",
mainKnex.raw(
this.quotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
)
// in SQS the same junction table is used for different many-to-many relationships between the
// same two tables - this check is needed to avoid rows ending up in every relationship column
if (this.client === SqlClient.SQL_LITE) {
subQuery = this.addJoinFieldCheck(subQuery, manyToMany)
}
} else {
// "join" to the main table, making sure the ID matches that of the main table
subQuery = subQuery.where(
`${toAlias}.${relationship.to}`,
"=",
mainKnex.raw(
this.quotedIdentifier(`${fromAlias}.${relationship.from}`)
)
)
}
query = query.whereExists(whereCb(subQuery))
break
}
}
return query
}
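Relationship filters are now applied with a correlated EXISTS sub-query rather than by filtering the joined result set. A rough standalone Knex sketch of the many-to-many case, reusing the products/tasks/products_tasks names from the filterByRelationship test capture below - the filter value is illustrative:

  import knex from "knex"

  const client = knex({ client: "pg" })

  // find products that have at least one related task named "assembling",
  // reached through the products_tasks junction table
  const query = client("products as a")
    .select("a.*")
    .whereExists(
      client
        .select(client.raw("1"))
        .from("tasks as b")
        .innerJoin("products_tasks as c", "b.taskid", "c.taskid")
        .where("c.productid", "=", client.raw('"a"."productid"'))
        .andWhere("b.taskname", "=", "assembling")
    )

  console.log(query.toString())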
// right now we only do filters on the specific table being queried
addFilters(
query: Knex.QueryBuilder,
@ -339,12 +435,13 @@ class InternalBuilder {
if (!filters) {
return query
}
const builder = this
filters = this.parseFilters({ ...filters })
const aliases = this.query.tableAliases
// if allOr is specified in filters, then everything is an OR
const allOr = filters.allOr
const tableName =
this.client === SqlClient.SQL_LITE ? this.table._id! : this.table.name
const isSqlite = this.client === SqlClient.SQL_LITE
const tableName = isSqlite ? this.table._id! : this.table.name
function getTableAlias(name: string) {
const alias = aliases?.[name]
@ -352,13 +449,33 @@ class InternalBuilder {
}
function iterate(
structure: AnySearchFilter,
fn: (key: string, value: any) => void,
complexKeyFn?: (key: string[], value: any) => void
fn: (
query: Knex.QueryBuilder,
key: string,
value: any
) => Knex.QueryBuilder,
complexKeyFn?: (
query: Knex.QueryBuilder,
key: string[],
value: any
) => Knex.QueryBuilder
) {
const handleRelationship = (
q: Knex.QueryBuilder,
key: string,
value: any
) => {
const [filterTableName, ...otherProperties] = key.split(".")
const property = otherProperties.join(".")
const alias = getTableAlias(filterTableName)
return fn(q, alias ? `${alias}.${property}` : property, value)
}
for (const key in structure) {
const value = structure[key]
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
const shouldProcessRelationship =
opts?.relationship && isRelationshipField
let castedTypeValue
if (
@ -367,7 +484,8 @@ class InternalBuilder {
complexKeyFn
) {
const alias = getTableAlias(tableName)
complexKeyFn(
query = complexKeyFn(
query,
castedTypeValue.id.map((x: string) =>
alias ? `${alias}.${x}` : x
),
@ -375,26 +493,29 @@ class InternalBuilder {
)
} else if (!isRelationshipField) {
const alias = getTableAlias(tableName)
fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
}
if (opts?.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".")
const alias = getTableAlias(filterTableName)
fn(alias ? `${alias}.${property}` : property, value)
query = fn(
query,
alias ? `${alias}.${updatedKey}` : updatedKey,
value
)
} else if (shouldProcessRelationship) {
query = builder.addRelationshipForFilter(query, updatedKey, q => {
return handleRelationship(q, updatedKey, value)
})
}
}
}
const like = (key: string, value: any) => {
const like = (q: Knex.QueryBuilder, key: string, value: any) => {
const fuzzyOr = filters?.fuzzyOr
const fnc = fuzzyOr || allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `%${value}%`)
return q[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
`%${value.toLowerCase()}%`,
])
}
@ -412,13 +533,13 @@ class InternalBuilder {
return `[${value.join(",")}]`
}
if (this.client === SqlClient.POSTGRES) {
iterate(mode, (key, value) => {
iterate(mode, (q, key, value) => {
const wrap = any ? "" : "'"
const op = any ? "\\?| array" : "@>"
const fieldNames = key.split(/\./g)
const table = fieldNames[0]
const col = fieldNames[1]
query = query[rawFnc](
return q[rawFnc](
`${not}COALESCE("${table}"."${col}"::jsonb ${op} ${wrap}${stringifyArray(
value,
any ? "'" : '"'
@ -427,8 +548,8 @@ class InternalBuilder {
})
} else if (this.client === SqlClient.MY_SQL) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, (key, value) => {
query = query[rawFnc](
iterate(mode, (q, key, value) => {
return q[rawFnc](
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
value
)}'), FALSE)`
@ -436,7 +557,7 @@ class InternalBuilder {
})
} else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, (key, value) => {
iterate(mode, (q, key, value) => {
let statement = ""
const identifier = this.quotedIdentifier(key)
for (let i in value) {
@ -451,16 +572,16 @@ class InternalBuilder {
}
if (statement === "") {
return
return q
}
if (not) {
query = query[rawFnc](
return q[rawFnc](
`(NOT (${statement}) OR ${identifier} IS NULL)`,
value
)
} else {
query = query[rawFnc](statement, value)
return q[rawFnc](statement, value)
}
})
}
@ -490,39 +611,39 @@ class InternalBuilder {
const fnc = allOr ? "orWhereIn" : "whereIn"
iterate(
filters.oneOf,
(key: string, array) => {
(q, key: string, array) => {
if (this.client === SqlClient.ORACLE) {
key = this.convertClobs(key)
array = Array.isArray(array) ? array : [array]
const binding = new Array(array.length).fill("?").join(",")
query = query.whereRaw(`${key} IN (${binding})`, array)
return q.whereRaw(`${key} IN (${binding})`, array)
} else {
query = query[fnc](key, Array.isArray(array) ? array : [array])
return q[fnc](key, Array.isArray(array) ? array : [array])
}
},
(key: string[], array) => {
(q, key: string[], array) => {
if (this.client === SqlClient.ORACLE) {
const keyStr = `(${key.map(k => this.convertClobs(k)).join(",")})`
const binding = `(${array
.map((a: any) => `(${new Array(a.length).fill("?").join(",")})`)
.join(",")})`
query = query.whereRaw(`${keyStr} IN ${binding}`, array.flat())
return q.whereRaw(`${keyStr} IN ${binding}`, array.flat())
} else {
query = query[fnc](key, Array.isArray(array) ? array : [array])
return q[fnc](key, Array.isArray(array) ? array : [array])
}
}
)
}
if (filters.string) {
iterate(filters.string, (key, value) => {
iterate(filters.string, (q, key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
query = query[fnc](key, "ilike", `${value}%`)
return q[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
`${value.toLowerCase()}%`,
])
}
@ -532,7 +653,7 @@ class InternalBuilder {
iterate(filters.fuzzy, like)
}
if (filters.range) {
iterate(filters.range, (key, value) => {
iterate(filters.range, (q, key, value) => {
const isEmptyObject = (val: any) => {
return (
val &&
@ -561,97 +682,93 @@ class InternalBuilder {
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
return q.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[value.low, value.high]
)
} else {
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
return q[fnc](key, [value.low, value.high])
}
} else if (lowValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`,
[value.low]
)
return q.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
value.low,
])
} else {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", value.low)
return q[fnc](key, ">=", value.low)
}
} else if (highValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`,
[value.high]
)
return q.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
value.high,
])
} else {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", value.high)
return q[fnc](key, "<=", value.high)
}
}
return q
})
}
if (filters.equal) {
iterate(filters.equal, (key, value) => {
iterate(filters.equal, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) {
query = query[fnc](
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`,
[value]
)
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
query = query[fnc](
`(${identifier} IS NOT NULL AND ${identifier} = ?)`,
[value]
)
return q[fnc](`(${identifier} IS NOT NULL AND ${identifier} = ?)`, [
value,
])
} else {
query = query[fnc](
`COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`,
[value]
)
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`, [
value,
])
}
})
}
if (filters.notEqual) {
iterate(filters.notEqual, (key, value) => {
iterate(filters.notEqual, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) {
query = query[fnc](
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`,
[value]
)
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
query = query[fnc](
return q[fnc](
`(${identifier} IS NOT NULL AND ${identifier} != ?) OR ${identifier} IS NULL`,
[value]
)
} else {
query = query[fnc](
`COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`,
[value]
)
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`, [
value,
])
}
})
}
if (filters.empty) {
iterate(filters.empty, key => {
iterate(filters.empty, (q, key) => {
const fnc = allOr ? "orWhereNull" : "whereNull"
query = query[fnc](key)
return q[fnc](key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, key => {
iterate(filters.notEmpty, (q, key) => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
query = query[fnc](key)
return q[fnc](key)
})
}
if (filters.contains) {
@ -745,16 +862,222 @@ class InternalBuilder {
return withSchema
}
addJsonRelationships(
query: Knex.QueryBuilder,
fromTable: string,
relationships: RelationshipsJson[]
): Knex.QueryBuilder {
const sqlClient = this.client
const knex = this.knex
const { resource, tableAliases: aliases, endpoint } = this.query
const fields = resource?.fields || []
const jsonField = (field: string) => {
const parts = field.split(".")
let tableField: string, unaliased: string
if (parts.length > 1) {
const alias = parts.shift()!
unaliased = parts.join(".")
tableField = `${this.quote(alias)}.${this.quote(unaliased)}`
} else {
unaliased = parts.join(".")
tableField = this.quote(unaliased)
}
let separator = ","
switch (sqlClient) {
case SqlClient.ORACLE:
separator = " VALUE "
break
case SqlClient.MS_SQL:
separator = ":"
}
return `'${unaliased}'${separator}${tableField}`
}
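// e.g. the field "products.product_name" becomes a key/value pair for the JSON object
// builder - roughly 'product_name',"products"."product_name" on Postgres, with " VALUE "
// as the separator on Oracle and ":" on MS SQL (identifier quoting also varies by client)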
for (let relationship of relationships) {
const {
tableName: toTable,
through: throughTable,
to: toKey,
from: fromKey,
fromPrimary,
toPrimary,
} = relationship
// skip invalid relationships
if (!toTable || !fromTable) {
continue
}
const toAlias = aliases?.[toTable] || toTable,
fromAlias = aliases?.[fromTable] || fromTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
})
let relationshipFields = fields.filter(
field => field.split(".")[0] === toAlias
)
if (this.client === SqlClient.SQL_LITE) {
relationshipFields = relationshipFields.slice(
0,
MAX_SQS_RELATIONSHIP_FIELDS
)
}
const fieldList: string = relationshipFields
.map(field => jsonField(field))
.join(",")
// SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
// it reduces the result set rather than limiting how much data it filters over
const primaryKey = `${toAlias}.${toPrimary || toKey}`
let subQuery: Knex.QueryBuilder = knex
.from(toTableWithSchema)
.limit(getRelationshipLimit())
// add sorting to get consistent order
.orderBy(primaryKey)
// many-to-many relationship with junction table
if (throughTable && toPrimary && fromPrimary) {
const throughAlias = aliases?.[throughTable] || throughTable
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
})
subQuery = subQuery
.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
})
.where(
`${throughAlias}.${fromKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromPrimary}`))
)
}
// one-to-many relationship with foreign key
else {
subQuery = subQuery.where(
`${toAlias}.${toKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromKey}`))
)
}
const standardWrap = (select: string): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`)
// @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({
[toAlias]: subQuery,
})
}
let wrapperQuery: Knex.QueryBuilder | Knex.Raw
switch (sqlClient) {
case SqlClient.SQL_LITE:
// need to check the junction table document points to the right column, this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap(
`json_group_array(json_object(${fieldList}))`
)
break
case SqlClient.POSTGRES:
wrapperQuery = standardWrap(
`json_agg(json_build_object(${fieldList}))`
)
break
case SqlClient.MY_SQL:
wrapperQuery = subQuery.select(
knex.raw(`json_arrayagg(json_object(${fieldList}))`)
)
break
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))`
)
break
case SqlClient.MS_SQL:
wrapperQuery = knex.raw(
`(SELECT ${this.quote(toAlias)} = (${knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery.select(`${toAlias}.*`),
})} FOR JSON PATH))`
)
break
default:
throw new Error(`JSON relationships not implemented for ${sqlClient}`)
}
query = query.select({ [relationship.column]: wrapperQuery })
}
return query
}
addJoin(
query: Knex.QueryBuilder,
tables: { from: string; to: string; through?: string },
columns: {
from?: string
to?: string
fromPrimary?: string
toPrimary?: string
}[]
): Knex.QueryBuilder {
const { tableAliases: aliases, endpoint } = this.query
const schema = endpoint.schema
const toTable = tables.to,
fromTable = tables.from,
throughTable = tables.through
const toAlias = aliases?.[toTable] || toTable,
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable,
fromAlias = aliases?.[fromTable] || fromTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema,
})
let throughTableWithSchema = throughTable
? this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema,
})
: undefined
if (!throughTable) {
// @ts-ignore
query = query.leftJoin(toTableWithSchema, function () {
for (let relationship of columns) {
const from = relationship.from,
to = relationship.to
// @ts-ignore
this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`)
}
})
} else {
query = query
// @ts-ignore
.leftJoin(throughTableWithSchema, function () {
for (let relationship of columns) {
const fromPrimary = relationship.fromPrimary
const from = relationship.from
// @ts-ignore
this.orOn(
`${fromAlias}.${fromPrimary}`,
"=",
`${throughAlias}.${from}`
)
}
})
.leftJoin(toTableWithSchema, function () {
for (let relationship of columns) {
const toPrimary = relationship.toPrimary
const to = relationship.to
// @ts-ignore
this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`)
}
})
}
return query
}
addRelationships(
query: Knex.QueryBuilder,
fromTable: string,
relationships: RelationshipsJson[] | undefined,
schema: string | undefined,
aliases?: Record<string, string>
relationships: RelationshipsJson[]
): Knex.QueryBuilder {
if (!relationships) {
return query
}
const tableSets: Record<string, [RelationshipsJson]> = {}
// aggregate into table sets (grouping relationships that share the same "to"/through tables)
for (let relationship of relationships) {
@ -774,52 +1097,16 @@ class InternalBuilder {
}
for (let [key, relationships] of Object.entries(tableSets)) {
const { toTable, throughTable } = JSON.parse(key)
const toAlias = aliases?.[toTable] || toTable,
throughAlias = aliases?.[throughTable] || throughTable,
fromAlias = aliases?.[fromTable] || fromTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema,
})
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema,
})
if (!throughTable) {
// @ts-ignore
query = query.leftJoin(toTableWithSchema, function () {
for (let relationship of relationships) {
const from = relationship.from,
to = relationship.to
// @ts-ignore
this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`)
}
})
} else {
query = query
// @ts-ignore
.leftJoin(throughTableWithSchema, function () {
for (let relationship of relationships) {
const fromPrimary = relationship.fromPrimary
const from = relationship.from
// @ts-ignore
this.orOn(
`${fromAlias}.${fromPrimary}`,
"=",
`${throughAlias}.${from}`
query = this.addJoin(
query,
{
from: fromTable,
to: toTable,
through: throughTable,
},
relationships
)
}
})
.leftJoin(toTableWithSchema, function () {
for (let relationship of relationships) {
const toPrimary = relationship.toPrimary
const to = relationship.to
// @ts-ignore
this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`)
}
})
}
}
return query
}
@ -924,8 +1211,7 @@ class InternalBuilder {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships, tableAliases } =
this.query
let { endpoint, filters, paginate, relationships } = this.query
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
@ -957,42 +1243,19 @@ class InternalBuilder {
if (foundOffset != null) {
query = query.offset(foundOffset)
}
// add sorting to pre-query
// no point in sorting when counting
}
// if counting, use distinct count, else select
query = !counting
? query.select(this.generateSelectStatement())
: this.addDistinctCount(query)
// have to add after as well (this breaks MS-SQL)
if (!counting) {
query = this.addSorting(query)
}
// add filters to the query (where)
query = this.addFilters(query, filters)
const alias = tableAliases?.[tableName] || tableName
let preQuery: Knex.QueryBuilder = this.knex({
// the typescript definition for the knex constructor doesn't support this
// syntax, but it is the only way to alias a pre-query result as part of
// a query - there is an alias dictionary type, but it assumes it can only
// be a table name, not a pre-query
[alias]: query as any,
})
// if counting, use distinct count, else select
preQuery = !counting
? preQuery.select(this.generateSelectStatement())
: this.addDistinctCount(preQuery)
// have to add after as well (this breaks MS-SQL)
if (this.client !== SqlClient.MS_SQL && !counting) {
preQuery = this.addSorting(preQuery)
}
// handle joins
query = this.addRelationships(
preQuery,
tableName,
relationships,
endpoint.schema,
tableAliases
)
// add a base limit over the whole query
// if counting we can't set this limit
if (limits?.base) {
query = query.limit(limits.base)
if (relationships) {
query = this.addJsonRelationships(query, tableName, relationships)
}
return this.addFilters(query, filters, { relationship: true })

View File

@ -1,4 +1,11 @@
import { DocumentType, SqlQuery, Table, TableSourceType } from "@budibase/types"
import {
DocumentType,
ManyToManyRelationshipJson,
RelationshipsJson,
SqlQuery,
Table,
TableSourceType,
} from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
import { Knex } from "knex"
import { SEPARATOR } from "../db"
@ -163,3 +170,24 @@ export function sqlLog(client: string, query: string, values?: any[]) {
}
console.log(string)
}
function isValidManyToManyRelationship(
relationship: RelationshipsJson
): relationship is ManyToManyRelationshipJson {
return (
!!relationship.through &&
!!relationship.fromPrimary &&
!!relationship.from &&
!!relationship.toPrimary &&
!!relationship.to
)
}
export function validateManyToMany(
relationship: RelationshipsJson
): ManyToManyRelationshipJson | undefined {
if (isValidManyToManyRelationship(relationship)) {
return relationship
}
return undefined
}

View File

@ -1,4 +1,4 @@
MSSQL_SHA=sha256:c4369c38385eba011c10906dc8892425831275bb035d5ce69656da8e29de50d8
MSSQL_SHA=sha256:3b913841850a4d57fcfcb798be06acc88ea0f2acc5418bc0c140a43e91c4a545
MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d

View File

@ -1,6 +1,10 @@
// need to handle table name + field or just field, depending on whether relationships are used
import { FieldType, Row, Table } from "@budibase/types"
import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils"
function extractFieldValue({
@ -58,14 +62,32 @@ export function generateIdForRow(
return generateRowIdField(idParts)
}
function fixJsonTypes(row: Row, table: Table) {
for (let [fieldName, schema] of Object.entries(table.schema)) {
if (JsonTypes.includes(schema.type) && typeof row[fieldName] === "string") {
try {
row[fieldName] = JSON.parse(row[fieldName])
} catch (err) {
if (!helpers.schema.isDeprecatedSingleUserColumn(schema)) {
// couldn't convert back to array, ignore
delete row[fieldName]
}
}
}
}
return row
}
export function basicProcessing({
row,
table,
tables,
isLinked,
sqs,
}: {
row: Row
table: Table
tables: Table[]
isLinked: boolean
sqs?: boolean
}): Row {
@ -82,16 +104,18 @@ export function basicProcessing({
value = value.toString()
}
// all responses include "select col as table.col" so that overlaps are handled
if (value != null) {
else if (value != null) {
thisRow[fieldName] = value
}
}
let columns: string[] = Object.keys(table.schema)
if (!sqs) {
thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id
thisRow._rev = "rev"
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
} else {
const columns = Object.keys(table.schema)
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({
row,
@ -101,24 +125,56 @@ export function basicProcessing({
})
}
}
return thisRow
for (let col of columns) {
const schema: FieldSchema | undefined = table.schema[col]
if (schema?.type !== FieldType.LINK) {
continue
}
export function fixArrayTypes(row: Row, table: Table) {
for (let [fieldName, schema] of Object.entries(table.schema)) {
if (
[FieldType.ARRAY, FieldType.BB_REFERENCE].includes(schema.type) &&
typeof row[fieldName] === "string"
) {
try {
row[fieldName] = JSON.parse(row[fieldName])
} catch (err) {
if (!helpers.schema.isDeprecatedSingleUserColumn(schema)) {
// couldn't convert back to array, ignore
delete row[fieldName]
const relatedTable = tables.find(tbl => tbl._id === schema.tableId)
if (!relatedTable) {
continue
}
const value = extractFieldValue({
row,
tableName: table._id!,
fieldName: col,
isLinked,
})
const array: Row[] = Array.isArray(value)
? value
: typeof value === "string"
? JSON.parse(value)
: undefined
if (array) {
thisRow[col] = array
// make sure all of them have an _id
if (Array.isArray(thisRow[col])) {
const sortField =
relatedTable.primaryDisplay || relatedTable.primary![0]!
thisRow[col] = (thisRow[col] as Row[])
.map(relatedRow =>
basicProcessing({
row: relatedRow,
table: relatedTable,
tables,
isLinked: false,
sqs,
})
)
.sort((a, b) => {
const aField = a?.[sortField],
bField = b?.[sortField]
if (!aField) {
return 1
} else if (!bField) {
return -1
}
return aField.localeCompare
? aField.localeCompare(bField)
: aField - bField
})
}
}
}
}
return row
return fixJsonTypes(thisRow, table)
}
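With relationships arriving as JSON-aggregated columns, basicProcessing now parses the link column back into an array, runs each related row through basicProcessing itself, and sorts it by the related table's primary display column. An illustrative before/after shape, assuming a brands table with a "products" link column whose primaryDisplay is "product_name" - all IDs and values are made up:

  // raw SQL response row: the link column arrives as a JSON string built by the query builder
  const rawRow = {
    brand_id: 1,
    brand_name: "Acme",
    products:
      '[{"product_id":2,"product_name":"Widget"},{"product_id":3,"product_name":"Bolt"}]',
  }

  // after basicProcessing: bookkeeping fields (_id, tableId, _rev) are added and the link
  // column is parsed back into rows, sorted by the related table's primary display column
  const processedRow = {
    _id: "%5B1%5D", // generated row ID - the exact format here is illustrative
    brand_id: 1,
    brand_name: "Acme",
    products: [
      { _id: "%5B3%5D", product_id: 3, product_name: "Bolt" },
      { _id: "%5B2%5D", product_id: 2, product_name: "Widget" },
    ],
  }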

View File

@ -7,11 +7,9 @@ import {
ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
RelationshipsJson,
Row,
Table,
} from "@budibase/types"
import { breakExternalTableId } from "../../../../integrations/utils"
import { basicProcessing } from "./basic"
import { generateJunctionTableID } from "../../../../db/utils"
type TableMap = Record<string, Table>
@ -22,87 +20,6 @@ export function isManyToMany(
return !!(field as ManyToManyRelationshipFieldMetadata).through
}
function isCorrectRelationship(
relationship: RelationshipsJson,
table1: Table,
table2: Table,
row: Row
): boolean {
const junctionTableId = generateJunctionTableID(table1._id!, table2._id!)
const possibleColumns = [
`${junctionTableId}.doc1.fieldName`,
`${junctionTableId}.doc2.fieldName`,
]
return !!possibleColumns.find(col => row[col] === relationship.column)
}
/**
* This iterates through the returned rows and works out what elements of the rows
* actually match up to another row (based on primary keys) - this is pretty specific
* to SQL and the way that SQL relationships are returned based on joins.
* This is complicated, but the idea is that when a SQL query returns all the relations
* will be separate rows, with all of the data in each row. We have to decipher what comes
* from where (which tables) and how to convert that into budibase columns.
*/
export async function updateRelationshipColumns(
table: Table,
tables: TableMap,
row: Row,
rows: { [key: string]: Row },
relationships: RelationshipsJson[],
opts?: { sqs?: boolean }
) {
const columns: { [key: string]: any } = {}
for (let relationship of relationships) {
const linkedTable = tables[relationship.tableName]
if (!linkedTable) {
continue
}
const fromColumn = `${table.name}.${relationship.from}`
const toColumn = `${linkedTable.name}.${relationship.to}`
// this is important when working with multiple relationships
// between the same tables, don't want to overlap/multiply the relations
if (
!relationship.through &&
row[fromColumn]?.toString() !== row[toColumn]?.toString()
) {
continue
}
let linked = basicProcessing({
row,
table: linkedTable,
isLinked: true,
sqs: opts?.sqs,
})
if (!linked._id) {
continue
}
if (
!opts?.sqs ||
isCorrectRelationship(relationship, table, linkedTable, row)
) {
columns[relationship.column] = linked
}
}
for (let [column, related] of Object.entries(columns)) {
if (!row._id) {
continue
}
const rowId: string = row._id
if (!Array.isArray(rows[rowId][column])) {
rows[rowId][column] = []
}
// make sure relationship hasn't been found already
if (
!rows[rowId][column].find((relation: Row) => relation._id === related._id)
) {
rows[rowId][column].push(related)
}
}
return rows
}
/**
* Gets the list of relationship JSON structures based on the columns in the table,
* this will be used by the underlying library to build whatever relationship mechanism

View File

@ -13,13 +13,8 @@ import {
processDates,
processFormulas,
} from "../../../../utilities/rowProcessor"
import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
import {
basicProcessing,
generateIdForRow,
fixArrayTypes,
getInternalRowId,
} from "./basic"
import { isKnexEmptyReadResponse } from "./sqlUtils"
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
import sdk from "../../../../sdk"
import { processStringSync } from "@budibase/string-templates"
import validateJs from "validate.js"
@ -149,42 +144,18 @@ export async function sqlOutputProcessing(
rowId = generateIdForRow(row, table)
row._id = rowId
}
// this is a relationship of some sort
if (finalRows[rowId]) {
finalRows = await updateRelationshipColumns(
table,
tables,
row,
finalRows,
relationships,
opts
)
continue
}
const thisRow = fixArrayTypes(
basicProcessing({
const thisRow = basicProcessing({
row,
table,
tables: Object.values(tables),
isLinked: false,
sqs: opts?.sqs,
}),
table
)
})
if (thisRow._id == null) {
throw new Error("Unable to generate row ID for SQL rows")
}
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
// do this at the end, once it's been added to the final rows
finalRows = await updateRelationshipColumns(
table,
tables,
row,
finalRows,
relationships,
opts
)
}
// make sure all related rows are correct

View File

@ -832,10 +832,12 @@ describe.each(
},
})
expect(res).toHaveLength(1)
expect(res[0]).toEqual({
expect(res[0]).toEqual(
expect.objectContaining({
id: 2,
name: "two",
})
)
})
// this parameter really only impacts SQL queries

View File

@ -9,10 +9,10 @@ import {
db as dbCore,
MAX_VALID_DATE,
MIN_VALID_DATE,
setEnv as setCoreEnv,
SQLITE_DESIGN_DOC_ID,
utils,
withEnv as withCoreEnv,
setEnv as setCoreEnv,
} from "@budibase/backend-core"
import * as setup from "./utilities"
@ -1937,6 +1937,67 @@ describe.each([
})
})
isSql &&
describe("related formulas", () => {
beforeAll(async () => {
const arrayTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
array: {
name: "array",
type: FieldType.ARRAY,
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: ["option 1", "option 2"],
},
},
},
"array"
)
table = await createTable(
{
relationship: {
type: FieldType.LINK,
relationshipType: RelationshipType.MANY_TO_ONE,
name: "relationship",
fieldName: "relate",
tableId: arrayTable._id!,
constraints: {
type: "array",
},
},
formula: {
type: FieldType.FORMULA,
name: "formula",
formula: encodeJSBinding(
`let array = [];$("relationship").forEach(rel => array = array.concat(rel.array));return array.sort().join(",")`
),
},
},
"main"
)
const arrayRows = await Promise.all([
config.api.row.save(arrayTable._id!, {
name: "foo",
array: ["option 1"],
}),
config.api.row.save(arrayTable._id!, {
name: "bar",
array: ["option 2"],
}),
])
await Promise.all([
config.api.row.save(table._id!, {
relationship: [arrayRows[0]._id, arrayRows[1]._id],
}),
])
})
it("formula is correct with relationship arrays", async () => {
await expectQuery({}).toContain([{ formula: "option 1,option 2" }])
})
})
describe("user", () => {
let user1: User
let user2: User
@ -2690,81 +2751,6 @@ describe.each([
})
})
isSql &&
describe("pagination edge case with relationships", () => {
let mainRows: Row[] = []
beforeAll(async () => {
const toRelateTable = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
})
table = await createTable({
name: {
name: "name",
type: FieldType.STRING,
},
rel: {
name: "rel",
type: FieldType.LINK,
relationshipType: RelationshipType.MANY_TO_ONE,
tableId: toRelateTable._id!,
fieldName: "rel",
},
})
const relatedRows = await Promise.all([
config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
])
mainRows = await Promise.all([
config.api.row.save(table._id!, {
name: "product 1",
rel: relatedRows.map(row => row._id),
}),
config.api.row.save(table._id!, {
name: "product 2",
rel: [],
}),
config.api.row.save(table._id!, {
name: "product 3",
rel: [],
}),
])
})
it("can still page when the hard limit is hit", async () => {
await withCoreEnv(
{
SQL_MAX_ROWS: "6",
},
async () => {
const params: Omit<RowSearchParams, "tableId"> = {
query: {},
paginate: true,
limit: 3,
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}
const page1 = await expectSearch(params).toContain([mainRows[0]])
expect(page1.hasNextPage).toBe(true)
expect(page1.bookmark).toBeDefined()
const page2 = await expectSearch({
...params,
bookmark: page1.bookmark,
}).toContain([mainRows[1], mainRows[2]])
expect(page2.hasNextPage).toBe(false)
}
)
})
})
isSql &&
describe("primaryDisplay", () => {
beforeAll(async () => {

View File

@ -112,6 +112,7 @@ const environment = {
parseIntSafe(process.env.JS_RUNNER_MEMORY_LIMIT) ||
DEFAULTS.JS_RUNNER_MEMORY_LIMIT,
LOG_JS_ERRORS: process.env.LOG_JS_ERRORS,
DISABLE_USER_SYNC: process.env.DISABLE_USER_SYNC,
// old
CLIENT_ID: process.env.CLIENT_ID,
_set(key: string, value: any) {

View File

@ -343,9 +343,9 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
err.number
)
if (readableMessage) {
throw new Error(readableMessage)
throw new Error(readableMessage, { cause: err })
} else {
throw new Error(err.message as string)
throw new Error(err.message as string, { cause: err })
}
}
}

View File

@ -149,6 +149,7 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) {
}
describe("SQL query builder", () => {
const relationshipLimit = 500
const limit = 500
const client = SqlClient.POSTGRES
let sql: any
@ -160,16 +161,16 @@ describe("SQL query builder", () => {
it("should add the schema to the LEFT JOIN", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
bindings: [relationshipLimit, limit],
sql: `select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $1) as "products") as "products" from "production"."brands" order by "test"."id" asc limit $2`,
})
})
it("should handle if the schema is not present when doing a LEFT JOIN", () => {
const query = sql._query(generateRelationshipJson())
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
bindings: [relationshipLimit, limit],
sql: `select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $1) as "products") as "products" from "brands" order by "test"."id" asc limit $2`,
})
})
@ -178,8 +179,8 @@ describe("SQL query builder", () => {
generateManyRelationshipJson({ schema: "production" })
)
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`,
bindings: [relationshipLimit, limit],
sql: `select "stores".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "production"."products" as "products" inner join "production"."stocks" as "stocks" on "products"."product_id" = "stocks"."product_id" where "stocks"."store_id" = "stores"."store_id" order by "products"."product_id" asc limit $1) as "products") as "products" from "production"."stores" order by "test"."id" asc limit $2`,
})
})
@ -194,8 +195,8 @@ describe("SQL query builder", () => {
})
)
expect(query).toEqual({
bindings: ["john%", limit, "john%", 5000],
sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" where LOWER("test"."name") LIKE :3 order by "test"."id" asc) where rownum <= :4`,
bindings: ["john%", limit],
sql: `select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
@ -210,8 +211,8 @@ describe("SQL query builder", () => {
)
const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`]
expect(query).toEqual({
bindings: [...filterSet, limit, ...filterSet, 5000],
sql: `select * from (select * from (select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5) "test" where COALESCE(LOWER("test"."age"), '') LIKE :6 AND COALESCE(LOWER("test"."age"), '') LIKE :7 and COALESCE(LOWER("test"."name"), '') LIKE :8 AND COALESCE(LOWER("test"."name"), '') LIKE :9 order by "test"."id" asc) where rownum <= :10`,
bindings: [...filterSet, limit],
sql: `select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
@ -224,8 +225,8 @@ describe("SQL query builder", () => {
})
)
expect(query).toEqual({
bindings: [`%jo%`, limit, `%jo%`, 5000],
sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" where LOWER("test"."name") LIKE :3 order by "test"."id" asc) where rownum <= :4`,
bindings: [`%jo%`, limit],
sql: `select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2`,
})
})
@ -242,8 +243,8 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: ["John", limit, "John", 5000],
sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2) "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :3) order by "test"."id" asc) where rownum <= :4`,
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
})
})
@ -260,8 +261,8 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: ["John", limit, "John", 5000],
sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2) "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :3) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :4`,
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2`,
})
})
})

View File

@ -32,8 +32,8 @@ function multiline(sql: string) {
}
describe("Captures of real examples", () => {
const limit = 5000
const relationshipLimit = 100
const relationshipLimit = 500
const primaryLimit = 100
function getJson(name: string): QueryJson {
return require(join(__dirname, "sqlQueryJson", name)) as QueryJson
@ -42,7 +42,9 @@ describe("Captures of real examples", () => {
describe("create", () => {
it("should create a row with relationships", () => {
const queryJson = getJson("createWithRelationships.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: ["A Street", 34, "London", "A", "B", "designer", 1990],
sql: multiline(`insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year")
@ -54,40 +56,48 @@ describe("Captures of real examples", () => {
describe("read", () => {
it("should handle basic retrieval with relationships", () => {
const queryJson = getJson("basicFetchWithRelationships.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [relationshipLimit, limit],
bindings: [relationshipLimit, relationshipLimit, primaryLimit],
sql: expect.stringContaining(
multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
"a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
"a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
"b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`)
multiline(
`select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid",'executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid")`
)
),
})
})
it("should handle filtering by relationship", () => {
const queryJson = getJson("filterByRelationship.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [relationshipLimit, "assembling", limit],
bindings: [relationshipLimit, "assembling", primaryLimit],
sql: expect.stringContaining(
multiline(`where COALESCE("b"."taskname" = $2, FALSE)
order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`)
multiline(
`where exists (select 1 from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid"
where "c"."productid" = "a"."productid" and COALESCE("b"."taskname" = $2, FALSE)`
)
),
})
})
it("should handle fetching many to many relationships", () => {
const queryJson = getJson("fetchManyToMany.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [relationshipLimit, limit],
bindings: [relationshipLimit, primaryLimit],
sql: expect.stringContaining(
multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `)
multiline(
`select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid"))
from (select "b".* from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid"
where "c"."productid" = "a"."productid" order by "b"."taskid" asc limit $1`
)
),
})
})
@ -95,22 +105,25 @@ describe("Captures of real examples", () => {
it("should handle enrichment of rows", () => {
const queryJson = getJson("enrichRelationship.json")
const filters = queryJson.filters?.oneOf?.taskid as number[]
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [...filters, limit, ...filters, limit],
bindings: [relationshipLimit, ...filters, relationshipLimit],
sql: multiline(
`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid",
"a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", "b"."productid" as "b.productid"
from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a"
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" left join "products" as "b" on "b"."productid" = "c"."productid"
where "a"."taskid" in ($4, $5) order by "a"."taskid" asc limit $6`
`select "a".*, (select json_agg(json_build_object('productname',"b"."productname",'productid',"b"."productid"))
from (select "b".* from "products" as "b" inner join "products_tasks" as "c" on "b"."productid" = "c"."productid"
where "c"."taskid" = "a"."taskid" order by "b"."productid" asc limit $1) as "b") as "products"
from "tasks" as "a" where "a"."taskid" in ($2, $3) order by "a"."taskid" asc limit $4`
),
})
})
it("should manage query with many relationship filters", () => {
const queryJson = getJson("manyRelationshipFilters.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
const filters = queryJson.filters
const notEqualsValue = Object.values(filters?.notEqual!)[0]
const rangeValue: { high?: string | number; low?: string | number } =
@ -119,17 +132,18 @@ describe("Captures of real examples", () => {
expect(query).toEqual({
bindings: [
notEqualsValue,
relationshipLimit,
relationshipLimit,
relationshipLimit,
rangeValue.low,
rangeValue.high,
equalValue,
true,
limit,
notEqualsValue,
primaryLimit,
],
sql: expect.stringContaining(
multiline(
`where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)`
`where exists (select 1 from "persons" as "c" where "c"."personid" = "a"."executorid" and "c"."year" between $4 and $5)`
)
),
})
@ -139,17 +153,23 @@ describe("Captures of real examples", () => {
describe("update", () => {
it("should handle performing a simple update", () => {
const queryJson = getJson("updateSimple.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5],
sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4,
"type" = $5, "city" = $6, "lastname" = $7 where COALESCE("a"."personid" = $8, FALSE) returning *`),
sql: multiline(
`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4,
"type" = $5, "city" = $6, "lastname" = $7 where COALESCE("a"."personid" = $8, FALSE) returning *`
),
})
})
it("should handle performing an update of relationships", () => {
const queryJson = getJson("updateRelationship.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5],
sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4,
@ -161,12 +181,14 @@ describe("Captures of real examples", () => {
describe("delete", () => {
it("should handle deleting with relationships", () => {
const queryJson = getJson("deleteSimple.json")
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
let query = new Sql(SqlClient.POSTGRES, relationshipLimit)._query(
queryJson
)
expect(query).toEqual({
bindings: ["ddd", ""],
sql: multiline(`delete from "compositetable" as "a"
where COALESCE("a"."keypartone" = $1, FALSE) and COALESCE("a"."keyparttwo" = $2, FALSE)
returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`),
returning "a".*`),
})
})
})
@ -174,7 +196,7 @@ describe("Captures of real examples", () => {
describe("returning (everything bar Postgres)", () => {
it("should be able to handle row returning", () => {
const queryJson = getJson("createSimple.json")
const SQL = new Sql(SqlClient.MS_SQL, limit)
const SQL = new Sql(SqlClient.MS_SQL, relationshipLimit)
let query = SQL._query(queryJson, { disableReturning: true })
expect(query).toEqual({
sql: "insert into [people] ([age], [name]) values (@p0, @p1)",
@ -187,10 +209,11 @@ describe("Captures of real examples", () => {
returningQuery = input
}, queryJson)
expect(returningQuery).toEqual({
sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2
THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]
where CASE WHEN [people].[name] = @p4 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p5 THEN 1 ELSE 0 END = 1`),
bindings: [5000, 1, "Test", 22, "Test", 22],
sql: multiline(
`select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1
and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc`
),
bindings: [1, "Test", 22],
})
})
})

View File

@ -8,6 +8,10 @@ import { generateUserMetadataID, InternalTables } from "../../../db/utils"
type DeletedUser = { _id: string; deleted: boolean }
function userSyncEnabled() {
return !env.DISABLE_USER_SYNC
}
async function syncUsersToApp(
appId: string,
users: (User | DeletedUser)[],
@ -56,7 +60,7 @@ async function syncUsersToApp(
// the user doesn't exist, or doesn't have a role anymore
// get rid of their metadata
if (deletedUser || !roleId) {
if (userSyncEnabled() && (deletedUser || !roleId)) {
await db.remove(metadata)
continue
}
@ -149,7 +153,9 @@ export async function syncApp(
}
// sync the users - kept for safe keeping
if (userSyncEnabled()) {
await sdk.users.syncGlobalUsers()
}
if (error) {
throw error

View File

@ -37,9 +37,9 @@ import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import { processRowCountResponse } from "../../utils"
import {
updateFilterKeys,
getRelationshipColumns,
getTableIDList,
updateFilterKeys,
} from "../filters"
import {
dataFilters,
@ -182,11 +182,20 @@ function buildTableMap(tables: Table[]) {
return tableMap
}
function reverseUserColumnMapping(rows: Row[]) {
// table is only needed to handle relationships
function reverseUserColumnMapping(rows: Row[], table?: Table) {
const prefixLength = USER_COLUMN_PREFIX.length
return rows.map(row => {
const finalRow: Row = {}
for (let key of Object.keys(row)) {
// handle relationships
if (
table?.schema[key]?.type === FieldType.LINK &&
typeof row[key] === "string"
) {
// no table required, relationship rows don't contain relationships
row[key] = reverseUserColumnMapping(JSON.parse(row[key]))
}
// it should be the first prefix
const index = key.indexOf(USER_COLUMN_PREFIX)
if (index !== -1) {
@ -261,7 +270,7 @@ async function runSqlQuery(
if (opts?.countTotalRows) {
return processRowCountResponse(response)
} else if (Array.isArray(response)) {
return reverseUserColumnMapping(response)
return reverseUserColumnMapping(response, json.meta.table)
}
return response
}
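Because related rows are now embedded as JSON strings, reverseUserColumnMapping recurses into LINK columns before stripping the user-column prefix. Illustrative shapes only - the real prefix is the USER_COLUMN_PREFIX constant used in this file, assumed here to be "data_" purely for readability:

  // raw SQS response row: user columns carry the prefix, the link column holds a JSON string
  const raw = {
    data_name: "product 1",
    rel: '[{"data_name":"tag 1"},{"data_name":"tag 2"}]',
  }

  // after reverseUserColumnMapping([raw], table), where "rel" is a LINK column in the schema
  const mapped = {
    name: "product 1",
    rel: [{ name: "tag 1" }, { name: "tag 2" }],
  }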

View File

@ -337,6 +337,13 @@ export async function outputProcessing<T extends Row[] | Row>(
row[property] = `${hours}:${minutes}:${seconds}`
}
}
} else if (column.type === FieldType.LINK) {
for (let row of enriched) {
// if the relationship is empty, remove the array - this has been part of the API for some time
if (Array.isArray(row[property]) && row[property].length === 0) {
delete row[property]
}
}
}
}

View File

@ -134,6 +134,17 @@ export interface RelationshipsJson {
column: string
}
// TODO - this can be combined with the above type
export interface ManyToManyRelationshipJson {
through: string
from: string
to: string
fromPrimary: string
toPrimary: string
tableName: string
column: string
}
export interface QueryJson {
endpoint: {
datasourceId: string