Work to support all SQL DBs across the board using the aggregation method.

mike12345567 2024-09-03 18:24:50 +01:00
parent ac7838f80d
commit b29a4e2b37
7 changed files with 375 additions and 409 deletions
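
For orientation, the core of the change is that every SQL dialect now fetches related rows the same way: each relationship becomes a single JSON-aggregated column produced by a correlated subquery, rather than being reconstructed from joined result rows afterwards. A rough TypeScript/knex sketch of the idea, using illustrative knex client names and a made-up products/tags schema (this is not code from the commit itself):

import { knex } from "knex"

// Hypothetical mapping of dialect to JSON aggregation expression - the real
// mapping lives in addJsonRelationships() in the diff below.
const fieldList = `'tags.name',"b"."name",'tags.id',"b"."id"` // illustrative columns
const aggregationByClient: Record<string, string> = {
  sqlite3: `json_group_array(json_object(${fieldList}))`,
  pg: `json_agg(json_build_object(${fieldList}))`,
  mysql2: `json_arrayagg(json_object(${fieldList}))`,
  oracledb: `json_arrayagg(json_object(${fieldList}))`,
  mssql: `json_array(json_object(${fieldList}))`,
}

// One aggregated column per relationship, correlated back to the outer row "a".
const db = knex({ client: "pg" })
const relatedTags = db
  .select(db.raw(aggregationByClient.pg))
  .from("tags as b")
  .innerJoin("products_tags as jt", "b.id", "jt.tag_id")
  .where("jt.product_id", "=", db.raw(`"a"."id"`))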


@@ -7,6 +7,7 @@ import {
   isValidFilter,
   isValidISODateString,
   sqlLog,
+  validateManyToMany,
 } from "./utils"
 import SqlTableQueryBuilder from "./sqlTable"
 import {
@@ -133,80 +134,78 @@ class InternalBuilder {
       return "*"
     }
-    // no relationships - select everything in SQLite
-    if (this.client === SqlClient.SQL_LITE) {
-      const alias = tableAliases?.[endpoint.entityId]
-        ? tableAliases?.[endpoint.entityId]
-        : endpoint.entityId
-      return [this.knex.raw(`${this.quote(alias)}.*`)]
-    }
-
-    const schema = meta.table.schema
-    return resource.fields.map(field => {
-      const parts = field.split(/\./g)
-      let table: string | undefined = undefined
-      let column: string | undefined = undefined
-
-      // Just a column name, e.g.: "column"
-      if (parts.length === 1) {
-        column = parts[0]
-      }
-
-      // A table name and a column name, e.g.: "table.column"
-      if (parts.length === 2) {
-        table = parts[0]
-        column = parts[1]
-      }
-
-      // A link doc, e.g.: "table.doc1.fieldName"
-      if (parts.length > 2) {
-        table = parts[0]
-        column = parts.slice(1).join(".")
-      }
-
-      if (!column) {
-        throw new Error(`Invalid field name: ${field}`)
-      }
-
-      const columnSchema = schema[column]
-
-      if (
-        this.client === SqlClient.POSTGRES &&
-        columnSchema?.externalType?.includes("money")
-      ) {
-        return this.knex.raw(
-          `${this.quotedIdentifier(
-            [table, column].join(".")
-          )}::money::numeric as ${this.quote(field)}`
-        )
-      }
-
-      if (
-        this.client === SqlClient.MS_SQL &&
-        columnSchema?.type === FieldType.DATETIME &&
-        columnSchema.timeOnly
-      ) {
-        // Time gets returned as timestamp from mssql, not matching the expected
-        // HH:mm format
-        return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
-      }
-
-      // There's at least two edge cases being handled in the expression below.
-      // 1. The column name could start/end with a space, and in that case we
-      //    want to preseve that space.
-      // 2. Almost all column names are specified in the form table.column, except
-      //    in the case of relationships, where it's table.doc1.column. In that
-      //    case, we want to split it into `table`.`doc1.column` for reasons that
-      //    aren't actually clear to me, but `table`.`doc1` breaks things with the
-      //    sample data tests.
-      if (table) {
-        return this.knex.raw(
-          `${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}`
-        )
-      } else {
-        return this.knex.raw(`${this.quote(field)} as ${this.quote(field)}`)
-      }
-    })
+    const alias = tableAliases?.[endpoint.entityId]
+      ? tableAliases?.[endpoint.entityId]
+      : endpoint.entityId
+    return [this.knex.raw(`${this.quote(alias)}.*`)]
+    //
+    //
+    // const schema = meta.table.schema
+    // return resource.fields.map(field => {
+    //   const parts = field.split(/\./g)
+    //   let table: string | undefined = undefined
+    //   let column: string | undefined = undefined
+    //
+    //   // Just a column name, e.g.: "column"
+    //   if (parts.length === 1) {
+    //     column = parts[0]
+    //   }
+    //
+    //   // A table name and a column name, e.g.: "table.column"
+    //   if (parts.length === 2) {
+    //     table = parts[0]
+    //     column = parts[1]
+    //   }
+    //
+    //   // A link doc, e.g.: "table.doc1.fieldName"
+    //   if (parts.length > 2) {
+    //     table = parts[0]
+    //     column = parts.slice(1).join(".")
+    //   }
+    //
+    //   if (!column) {
+    //     throw new Error(`Invalid field name: ${field}`)
+    //   }
+    //
+    //   const columnSchema = schema[column]
+    //
+    //   if (
+    //     this.client === SqlClient.POSTGRES &&
+    //     columnSchema?.externalType?.includes("money")
+    //   ) {
+    //     return this.knex.raw(
+    //       `${this.quotedIdentifier(
+    //         [table, column].join(".")
+    //       )}::money::numeric as ${this.quote(field)}`
+    //     )
+    //   }
+    //
+    //   if (
+    //     this.client === SqlClient.MS_SQL &&
+    //     columnSchema?.type === FieldType.DATETIME &&
+    //     columnSchema.timeOnly
+    //   ) {
+    //     // Time gets returned as timestamp from mssql, not matching the expected
+    //     // HH:mm format
+    //     return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
+    //   }
+    //
+    //   // There's at least two edge cases being handled in the expression below.
+    //   // 1. The column name could start/end with a space, and in that case we
+    //   //    want to preseve that space.
+    //   // 2. Almost all column names are specified in the form table.column, except
+    //   //    in the case of relationships, where it's table.doc1.column. In that
+    //   //    case, we want to split it into `table`.`doc1.column` for reasons that
+    //   //    aren't actually clear to me, but `table`.`doc1` breaks things with the
+    //   //    sample data tests.
+    //   if (table) {
+    //     return this.knex.raw(
+    //       `${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}`
+    //     )
+    //   } else {
+    //     return this.knex.raw(`${this.quote(field)} as ${this.quote(field)}`)
+    //   }
+    // })
   }
   // OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
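
In other words, the per-field select mapping above is disabled for now and the builder simply selects every column of the aliased main table; related data is attached later as JSON columns. A minimal sketch of the equivalent expression, with an assumed alias name (not code from the commit):

import { knex } from "knex"

const db = knex({ client: "pg" })
const alias = "a" // hypothetical alias for the main table
// Equivalent of the builder's return [this.knex.raw(`${this.quote(alias)}.*`)]
const selectAll = db.raw(`"${alias}".*`) // the quoting character varies by dialect
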
@@ -368,35 +367,47 @@ class InternalBuilder {
       let subQuery = mainKnex
         .select(mainKnex.raw(1))
         .from({ [toAlias]: relatedTableName })
-      let mainTableRelatesTo = toAlias
-      if (relationship.through) {
-        const throughAlias =
-          aliases?.[relationship.through] || relationship.through
-        let throughTable = this.tableNameWithSchema(relationship.through, {
-          alias: throughAlias,
-          schema: endpoint.schema,
-        })
-        subQuery = subQuery.innerJoin(throughTable, function () {
-          // @ts-ignore
-          this.on(
-            `${toAlias}.${relationship.toPrimary}`,
-            "=",
-            `${throughAlias}.${relationship.to}`
-          )
-        })
-        if (this.client === SqlClient.SQL_LITE) {
-          subQuery = this.addJoinFieldCheck(subQuery, relationship)
-        }
-        mainTableRelatesTo = throughAlias
-      }
-      // "join" to the main table, making sure the ID matches that of the main
-      subQuery = subQuery.where(
-        `${mainTableRelatesTo}.${relationship.from}`,
-        "=",
-        mainKnex.raw(
-          this.quotedIdentifier(`${fromAlias}.${relationship.fromPrimary}`)
-        )
-      )
+      const manyToMany = validateManyToMany(relationship)
+      if (manyToMany) {
+        const throughAlias =
+          aliases?.[manyToMany.through] || relationship.through
+        let throughTable = this.tableNameWithSchema(manyToMany.through, {
+          alias: throughAlias,
+          schema: endpoint.schema,
+        })
+        subQuery = subQuery
+          // add a join through the junction table
+          .innerJoin(throughTable, function () {
+            // @ts-ignore
+            this.on(
+              `${toAlias}.${manyToMany.toPrimary}`,
+              "=",
+              `${throughAlias}.${manyToMany.to}`
+            )
+          })
+          // check the document in the junction table points to the main table
+          .where(
+            `${throughAlias}.${manyToMany.from}`,
+            "=",
+            mainKnex.raw(
+              this.quotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
+            )
+          )
+        // in SQS the same junction table is used for different many-to-many relationships between the
+        // two same tables, this is needed to avoid rows ending up in all columns
+        if (this.client === SqlClient.SQL_LITE) {
+          subQuery = this.addJoinFieldCheck(subQuery, manyToMany)
+        }
+      } else {
+        // "join" to the main table, making sure the ID matches that of the main
+        subQuery = subQuery.where(
+          `${toAlias}.${relationship.to}`,
+          "=",
+          mainKnex.raw(
+            this.quotedIdentifier(`${fromAlias}.${relationship.from}`)
+          )
+        )
+      }
       query = query.whereExists(whereCb(subQuery))
       break
     }
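
To make the hunk above concrete: relationship filters are applied through a correlated EXISTS subquery. For a many-to-many link the subquery joins the junction table and correlates the junction's "from" column back to the outer table's primary key; a plain foreign-key link correlates the related table directly. A hedged sketch with an invented products/products_tags/tags schema:

import { knex } from "knex"

const db = knex({ client: "pg" })

// Hypothetical schema: products(id) <-> products_tags(product_id, tag_id) <-> tags(id)
const filtered = db("products as a").whereExists(function () {
  this.select(db.raw(1))
    .from("tags as b")
    // join through the junction table
    .innerJoin("products_tags as jt", "b.id", "jt.tag_id")
    // correlate the junction row back to the outer (main) row
    .where("jt.product_id", "=", db.raw(`"a"."id"`))
    // the actual filter on the related table
    .where("b.name", "=", "foo")
})
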
@@ -478,12 +489,10 @@ class InternalBuilder {
           alias ? `${alias}.${updatedKey}` : updatedKey,
           value
         )
-      } else if (isSqlite && shouldProcessRelationship) {
+      } else if (shouldProcessRelationship) {
         query = builder.addRelationshipForFilter(query, updatedKey, q => {
           return handleRelationship(q, updatedKey, value)
         })
-      } else if (shouldProcessRelationship) {
-        query = handleRelationship(query, updatedKey, value)
       }
     }
   }
@@ -849,6 +858,7 @@ class InternalBuilder {
     fromTable: string,
     relationships: RelationshipsJson[]
   ): Knex.QueryBuilder {
+    const sqlClient = this.client
     const { resource, tableAliases: aliases, endpoint } = this.query
     const fields = resource?.fields || []
     const jsonField = (field: string) => {
@@ -862,7 +872,15 @@
         unaliased = parts.join(".")
         tableField = this.quote(unaliased)
       }
-      return `'${unaliased}',${tableField}`
+      let separator = ","
+      switch (sqlClient) {
+        case SqlClient.ORACLE:
+          separator = " VALUE "
+          break
+        case SqlClient.MS_SQL:
+          separator = ":"
+      }
+      return `'${unaliased}'${separator}${tableField}`
     }
     for (let relationship of relationships) {
       const {
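
The separator handled above is the token between the JSON key and its value inside each dialect's JSON object constructor: most dialects accept `'key',value`, Oracle's JSON_OBJECT expects `'key' VALUE value`, and the SQL Server syntax used here takes `'key':value`. A small sketch of the fragments jsonField() would emit for an assumed column (names are illustrative, not taken from the commit):

// Assumed field "tags.name"; identifier quoting shown Postgres-style for readability.
const fragmentByClient = {
  sqlite: `'tags.name',"tags"."name"`,
  postgres: `'tags.name',"tags"."name"`,
  mysql: `'tags.name',"tags"."name"`,
  oracle: `'tags.name' VALUE "tags"."name"`,
  mssql: `'tags.name':"tags"."name"`,
}
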
@@ -874,23 +892,15 @@
       toPrimary,
     } = relationship
     // skip invalid relationships
-    if (!toTable || !fromTable || !fromPrimary || !toPrimary) {
+    if (!toTable || !fromTable) {
       continue
     }
-    if (!throughTable) {
-      throw new Error("Only many-to-many implemented for JSON relationships")
-    }
     const toAlias = aliases?.[toTable] || toTable,
-      throughAlias = aliases?.[throughTable] || throughTable,
       fromAlias = aliases?.[fromTable] || fromTable
     let toTableWithSchema = this.tableNameWithSchema(toTable, {
       alias: toAlias,
       schema: endpoint.schema,
     })
-    let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
-      alias: throughAlias,
-      schema: endpoint.schema,
-    })
     let relationshipFields = fields.filter(
       field => field.split(".")[0] === toAlias
     )
@@ -903,32 +913,75 @@ class InternalBuilder {
       const fieldList: string = relationshipFields
         .map(field => jsonField(field))
         .join(",")
-      let rawJsonArray: Knex.Raw
-      switch (this.client) {
+      let rawJsonArray: Knex.Raw, limit: number
+      switch (sqlClient) {
         case SqlClient.SQL_LITE:
           rawJsonArray = this.knex.raw(
             `json_group_array(json_object(${fieldList}))`
           )
+          limit = getBaseLimit()
+          break
+        case SqlClient.POSTGRES:
+          rawJsonArray = this.knex.raw(
+            `json_agg(json_build_object(${fieldList}))`
+          )
+          limit = 1
+          break
+        case SqlClient.MY_SQL:
+        case SqlClient.ORACLE:
+          rawJsonArray = this.knex.raw(
+            `json_arrayagg(json_object(${fieldList}))`
+          )
+          limit = getBaseLimit()
+          break
+        case SqlClient.MS_SQL:
+          rawJsonArray = this.knex.raw(`json_array(json_object(${fieldList}))`)
+          limit = 1
           break
         default:
           throw new Error(`JSON relationships not implement for ${this.client}`)
       }
+      // SQL Server uses TOP - which performs a little differently to the normal LIMIT syntax
+      // it reduces the result set rather than limiting how much data it filters over
+      const primaryKey = `${toAlias}.${toPrimary || toKey}`
       let subQuery = this.knex
         .select(rawJsonArray)
         .from(toTableWithSchema)
-        .join(throughTableWithSchema, function () {
-          this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
-        })
-        .where(
-          `${throughAlias}.${fromKey}`,
-          "=",
-          this.knex.raw(this.quotedIdentifier(`${fromAlias}.${fromPrimary}`))
-        )
-        // relationships should never have more than the base limit
-        .limit(getBaseLimit())
+        .limit(limit)
         // add sorting to get consistent order
-        .orderBy(`${toAlias}.${toPrimary}`)
-      // need to check the junction table document is to the right column
+        .orderBy(primaryKey)
+      if (sqlClient === SqlClient.POSTGRES) {
+        subQuery = subQuery.groupBy(primaryKey)
+      }
+      // many-to-many relationship with junction table
+      if (throughTable && toPrimary && fromPrimary) {
+        const throughAlias = aliases?.[throughTable] || throughTable
+        let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
+          alias: throughAlias,
+          schema: endpoint.schema,
+        })
+        subQuery = subQuery
+          .join(throughTableWithSchema, function () {
+            this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
+          })
+          .where(
+            `${throughAlias}.${fromKey}`,
+            "=",
+            this.knex.raw(this.quotedIdentifier(`${fromAlias}.${fromPrimary}`))
+          )
+      }
+      // one-to-many relationship with foreign key
+      else {
+        subQuery = subQuery.where(
+          `${toAlias}.${toKey}`,
+          "=",
+          this.knex.raw(this.quotedIdentifier(`${fromAlias}.${fromKey}`))
+        )
+      }
+      // need to check the junction table document is to the right column, this is just for SQS
       if (this.client === SqlClient.SQL_LITE) {
        subQuery = this.addJoinFieldCheck(subQuery, relationship)
       }
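
Putting the hunk above together, each relationship ends up as one aggregated column built by a correlated, ordered and limited subquery. Roughly the SQL this produces for a many-to-many relationship on Postgres, written out with invented table and column names; in reality knex assembles it, this string is only an approximation for reading purposes:

// Approximation only - not hand-written SQL from the commit.
const aggregatedColumnSql = `
  SELECT json_agg(json_build_object('tags.name', "b"."name", 'tags.id', "b"."id"))
  FROM "tags" AS "b"
  INNER JOIN "products_tags" AS "jt" ON "b"."id" = "jt"."tag_id"
  WHERE "jt"."product_id" = "a"."id"
  GROUP BY "b"."id"
  ORDER BY "b"."id" ASC
  LIMIT 1
`
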
@@ -1179,14 +1232,12 @@ class InternalBuilder {
       ? query.select(this.generateSelectStatement())
       : this.addDistinctCount(query)
     // have to add after as well (this breaks MS-SQL)
-    if (this.client !== SqlClient.MS_SQL && !counting) {
+    if (!counting) {
       query = this.addSorting(query)
     }
     // handle joins
-    if (relationships && this.client === SqlClient.SQL_LITE) {
+    if (relationships) {
       query = this.addJsonRelationships(query, tableName, relationships)
-    } else if (relationships) {
-      query = this.addRelationships(query, tableName, relationships)
     }
     return this.addFilters(query, filters, { relationship: true })


@@ -1,4 +1,11 @@
-import { DocumentType, SqlQuery, Table, TableSourceType } from "@budibase/types"
+import {
+  DocumentType,
+  ManyToManyRelationshipJson,
+  RelationshipsJson,
+  SqlQuery,
+  Table,
+  TableSourceType,
+} from "@budibase/types"
 import { DEFAULT_BB_DATASOURCE_ID } from "../constants"
 import { Knex } from "knex"
 import { SEPARATOR } from "../db"
@@ -163,3 +170,24 @@ export function sqlLog(client: string, query: string, values?: any[]) {
   }
   console.log(string)
 }
+
+function isValidManyToManyRelationship(
+  relationship: RelationshipsJson
+): relationship is ManyToManyRelationshipJson {
+  return (
+    !!relationship.through &&
+    !!relationship.fromPrimary &&
+    !!relationship.from &&
+    !!relationship.toPrimary &&
+    !!relationship.to
+  )
+}
+
+export function validateManyToMany(
+  relationship: RelationshipsJson
+): ManyToManyRelationshipJson | undefined {
+  if (isValidManyToManyRelationship(relationship)) {
+    return relationship
+  }
+  return undefined
+}
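
The helper added above acts as a type-guard wrapper: it returns the same object, but narrowed to ManyToManyRelationshipJson when all junction fields are present, so callers can branch without repeating null checks. A hypothetical call site, sketched under the assumption that it sits next to the utils module shown here:

import { RelationshipsJson } from "@budibase/types"
import { validateManyToMany } from "./utils"

function describeRelationship(relationship: RelationshipsJson): string {
  const manyToMany = validateManyToMany(relationship)
  if (manyToMany) {
    // through/from/to/fromPrimary/toPrimary are all non-optional on this branch
    return `junction table ${manyToMany.through}`
  }
  return `foreign key ${relationship.from} -> ${relationship.to}`
}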


@@ -1,6 +1,10 @@
 // need to handle table name + field or just field, depending on if relationships used
 import { FieldSchema, FieldType, Row, Table } from "@budibase/types"
-import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
+import {
+  helpers,
+  PROTECTED_EXTERNAL_COLUMNS,
+  PROTECTED_INTERNAL_COLUMNS,
+} from "@budibase/shared-core"
 import { generateRowIdField } from "../../../../integrations/utils"

 function extractFieldValue({
@@ -61,11 +65,13 @@ export function generateIdForRow(
 export function basicProcessing({
   row,
   table,
+  tables,
   isLinked,
   sqs,
 }: {
   row: Row
   table: Table
+  tables: Table[]
   isLinked: boolean
   sqs?: boolean
 }): Row {
@@ -86,24 +92,65 @@ export function basicProcessing({
       thisRow[fieldName] = value
     }
   }
+  let columns: string[] = Object.keys(table.schema)
   if (!sqs) {
     thisRow._id = generateIdForRow(row, table, isLinked)
     thisRow.tableId = table._id
     thisRow._rev = "rev"
+    columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
   } else {
-    const columns = Object.keys(table.schema)
+    columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
     for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
-      const schema: FieldSchema | undefined = table.schema[internalColumn]
-      let value = extractFieldValue({
+      thisRow[internalColumn] = extractFieldValue({
        row,
        tableName: table._id!,
        fieldName: internalColumn,
        isLinked,
      })
-      if (sqs && schema?.type === FieldType.LINK && typeof value === "string") {
-        value = JSON.parse(value)
-      }
-      thisRow[internalColumn] = value
+    }
+  }
+  for (let col of columns) {
+    const schema: FieldSchema | undefined = table.schema[col]
+    if (schema?.type !== FieldType.LINK) {
+      continue
+    }
+    const relatedTable = tables.find(tbl => tbl._id === schema.tableId)
+    if (!relatedTable) {
+      continue
+    }
+    const value = extractFieldValue({
+      row,
+      tableName: table._id!,
+      fieldName: col,
+      isLinked,
+    })
+    const array: Row[] = Array.isArray(value)
+      ? value
+      : typeof value === "string"
+      ? JSON.parse(value)
+      : undefined
+    if (array) {
+      thisRow[col] = array
+      // make sure all of them have an _id
+      if (Array.isArray(thisRow[col])) {
+        const sortField =
+          relatedTable.primaryDisplay || relatedTable.primary![0]!
+        thisRow[col] = (thisRow[col] as Row[])
+          .map(relatedRow => {
+            relatedRow._id = relatedRow._id
+              ? relatedRow._id
+              : generateIdForRow(relatedRow, relatedTable)
+            return relatedRow
+          })
+          .sort((a, b) => {
+            if (!a?.[sortField]) {
+              return 1
+            } else if (!b?.[sortField]) {
+              return -1
+            }
+            return a[sortField].localeCompare(b[sortField])
+          })
+      }
     }
   }
   return thisRow
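
With relationships now arriving as a JSON-aggregated value per link column, the block above parses that value into Row[], back-fills _id on related rows, and sorts them by the related table's primary display column. A standalone sketch of the same parse-then-sort step, with trimmed types and assumed names (not the real helper):

type RelatedRow = { _id?: string; [key: string]: any }

function parseLinkColumn(value: unknown, sortField: string): RelatedRow[] {
  // accept either an already-parsed array or the raw JSON string from the DB
  const rows: RelatedRow[] = Array.isArray(value)
    ? value
    : typeof value === "string"
    ? JSON.parse(value)
    : []
  return rows.sort((a, b) => {
    if (!a?.[sortField]) return 1
    if (!b?.[sortField]) return -1
    return String(a[sortField]).localeCompare(String(b[sortField]))
  })
}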


@@ -36,73 +36,6 @@ function isCorrectRelationship(
   return !!possibleColumns.find(col => row[col] === relationship.column)
 }

-/**
- * This iterates through the returned rows and works out what elements of the rows
- * actually match up to another row (based on primary keys) - this is pretty specific
- * to SQL and the way that SQL relationships are returned based on joins.
- * This is complicated, but the idea is that when a SQL query returns all the relations
- * will be separate rows, with all of the data in each row. We have to decipher what comes
- * from where (which tables) and how to convert that into budibase columns.
- */
-export async function updateRelationshipColumns(
-  table: Table,
-  tables: TableMap,
-  row: Row,
-  rows: { [key: string]: Row },
-  relationships: RelationshipsJson[],
-  opts?: { sqs?: boolean }
-) {
-  const columns: { [key: string]: any } = {}
-  for (let relationship of relationships) {
-    const linkedTable = tables[relationship.tableName]
-    if (!linkedTable) {
-      continue
-    }
-    const fromColumn = `${table.name}.${relationship.from}`
-    const toColumn = `${linkedTable.name}.${relationship.to}`
-    // this is important when working with multiple relationships
-    // between the same tables, don't want to overlap/multiply the relations
-    if (
-      !relationship.through &&
-      row[fromColumn]?.toString() !== row[toColumn]?.toString()
-    ) {
-      continue
-    }
-    let linked = basicProcessing({
-      row,
-      table: linkedTable,
-      isLinked: true,
-      sqs: opts?.sqs,
-    })
-    if (!linked._id) {
-      continue
-    }
-    if (
-      !opts?.sqs ||
-      isCorrectRelationship(relationship, table, linkedTable, row)
-    ) {
-      columns[relationship.column] = linked
-    }
-  }
-  for (let [column, related] of Object.entries(columns)) {
-    if (!row._id) {
-      continue
-    }
-    const rowId: string = row._id
-    if (!Array.isArray(rows[rowId][column])) {
-      rows[rowId][column] = []
-    }
-    // make sure relationship hasn't been found already
-    if (
-      !rows[rowId][column].find((relation: Row) => relation._id === related._id)
-    ) {
-      rows[rowId][column].push(related)
-    }
-  }
-  return rows
-}
-
 /**
  * Gets the list of relationship JSON structures based on the columns in the table,
  * this will be used by the underlying library to build whatever relationship mechanism


@@ -13,7 +13,7 @@ import {
   processDates,
   processFormulas,
 } from "../../../../utilities/rowProcessor"
-import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
+import { isKnexEmptyReadResponse } from "./sqlUtils"
 import {
   basicProcessing,
   generateIdForRow,
@@ -149,22 +149,11 @@ export async function sqlOutputProcessing(
       rowId = generateIdForRow(row, table)
       row._id = rowId
     }
-    // this is a relationship of some sort
-    if (!opts?.sqs && finalRows[rowId]) {
-      finalRows = await updateRelationshipColumns(
-        table,
-        tables,
-        row,
-        finalRows,
-        relationships,
-        opts
-      )
-      continue
-    }
     const thisRow = fixArrayTypes(
       basicProcessing({
         row,
         table,
+        tables: Object.values(tables),
         isLinked: false,
         sqs: opts?.sqs,
       }),
@@ -175,18 +164,6 @@
     }
     finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
-
-    // do this at end once its been added to the final rows
-    if (!opts?.sqs) {
-      finalRows = await updateRelationshipColumns(
-        table,
-        tables,
-        row,
-        finalRows,
-        relationships,
-        opts
-      )
-    }
   }

   // make sure all related rows are correct


@@ -2126,81 +2126,76 @@ describe.each([
       })
     })

-  // This will never work for Lucene.
-  !isLucene &&
-    // It also can't work for in-memory searching because the related table name
-    // isn't available.
-    !isInMemory &&
   describe("relations", () => {
     let productCategoryTable: Table, productCatRows: Row[]

     beforeAll(async () => {
       productCategoryTable = await createTable(
         {
           name: { name: "name", type: FieldType.STRING },
         },
         "productCategory"
       )
       table = await createTable(
         {
           name: { name: "name", type: FieldType.STRING },
           productCat: {
             type: FieldType.LINK,
             relationshipType: RelationshipType.ONE_TO_MANY,
             name: "productCat",
             fieldName: "product",
             tableId: productCategoryTable._id!,
             constraints: {
               type: "array",
             },
           },
         },
         "product"
       )

       productCatRows = await Promise.all([
         config.api.row.save(productCategoryTable._id!, { name: "foo" }),
         config.api.row.save(productCategoryTable._id!, { name: "bar" }),
       ])

       await Promise.all([
         config.api.row.save(table._id!, {
           name: "foo",
           productCat: [productCatRows[0]._id],
         }),
         config.api.row.save(table._id!, {
           name: "bar",
           productCat: [productCatRows[1]._id],
         }),
         config.api.row.save(table._id!, {
           name: "baz",
           productCat: [],
         }),
       ])
     })

     it("should be able to filter by relationship using column name", async () => {
       await expectQuery({
         equal: { ["productCat.name"]: "foo" },
       }).toContainExactly([
         { name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
       ])
     })

     it("should be able to filter by relationship using table name", async () => {
       await expectQuery({
         equal: { ["productCategory.name"]: "foo" },
       }).toContainExactly([
         { name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
       ])
     })

     it("shouldn't return any relationship for last row", async () => {
       await expectQuery({
         equal: { ["name"]: "baz" },
       }).toContainExactly([{ name: "baz", productCat: undefined }])
     })
   })

   ;(isSqs || isLucene) &&
     describe("relations to same table", () => {
       let relatedTable: Table, relatedRows: Row[]
@@ -2610,50 +2605,50 @@ describe.each([
       })
     })

-  !isInMemory &&
+  // !isInMemory &&
     describe("search by _id", () => {
       let row: Row

       beforeAll(async () => {
         const toRelateTable = await createTable({
           name: {
             name: "name",
             type: FieldType.STRING,
           },
         })
         table = await createTable({
           name: {
             name: "name",
             type: FieldType.STRING,
           },
           rel: {
             name: "rel",
             type: FieldType.LINK,
             relationshipType: RelationshipType.MANY_TO_MANY,
             tableId: toRelateTable._id!,
             fieldName: "rel",
           },
         })
         const [row1, row2] = await Promise.all([
           config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
           config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
         ])
         row = await config.api.row.save(table._id!, {
           name: "product 1",
           rel: [row1._id, row2._id],
         })
       })

       it("can filter by the row ID with limit 1", async () => {
         await expectSearch({
           query: {
             equal: { _id: row._id },
           },
           limit: 1,
         }).toContainExactly([row])
       })
     })

   !isInternal &&
     describe("search by composite key", () => {
       beforeAll(async () => {
@@ -2690,82 +2685,6 @@ describe.each([
       })
     })

-  // TODO: when all SQL databases use the same mechanism - remove this test, new relationship system doesn't have this problem
-  !isInternal &&
-    describe("pagination edge case with relationships", () => {
-      let mainRows: Row[] = []
-
-      beforeAll(async () => {
-        const toRelateTable = await createTable({
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-          },
-        })
-        table = await createTable({
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-          },
-          rel: {
-            name: "rel",
-            type: FieldType.LINK,
-            relationshipType: RelationshipType.MANY_TO_ONE,
-            tableId: toRelateTable._id!,
-            fieldName: "rel",
-          },
-        })
-        const relatedRows = await Promise.all([
-          config.api.row.save(toRelateTable._id!, { name: "tag 1" }),
-          config.api.row.save(toRelateTable._id!, { name: "tag 2" }),
-          config.api.row.save(toRelateTable._id!, { name: "tag 3" }),
-          config.api.row.save(toRelateTable._id!, { name: "tag 4" }),
-          config.api.row.save(toRelateTable._id!, { name: "tag 5" }),
-          config.api.row.save(toRelateTable._id!, { name: "tag 6" }),
-        ])
-        mainRows = await Promise.all([
-          config.api.row.save(table._id!, {
-            name: "product 1",
-            rel: relatedRows.map(row => row._id),
-          }),
-          config.api.row.save(table._id!, {
-            name: "product 2",
-            rel: [],
-          }),
-          config.api.row.save(table._id!, {
-            name: "product 3",
-            rel: [],
-          }),
-        ])
-      })
-
-      it("can still page when the hard limit is hit", async () => {
-        await withCoreEnv(
-          {
-            SQL_MAX_ROWS: "6",
-          },
-          async () => {
-            const params: Omit<RowSearchParams, "tableId"> = {
-              query: {},
-              paginate: true,
-              limit: 3,
-              sort: "name",
-              sortType: SortType.STRING,
-              sortOrder: SortOrder.ASCENDING,
-            }
-            const page1 = await expectSearch(params).toContain([mainRows[0]])
-            expect(page1.hasNextPage).toBe(true)
-            expect(page1.bookmark).toBeDefined()
-            const page2 = await expectSearch({
-              ...params,
-              bookmark: page1.bookmark,
-            }).toContain([mainRows[1], mainRows[2]])
-            expect(page2.hasNextPage).toBe(false)
-          }
-        )
-      })
-    })
-
   isSql &&
     describe("primaryDisplay", () => {
       beforeAll(async () => {


@@ -134,6 +134,17 @@ export interface RelationshipsJson {
   column: string
 }

+// TODO - this can be combined with the above type
+export interface ManyToManyRelationshipJson {
+  through: string
+  from: string
+  to: string
+  fromPrimary: string
+  toPrimary: string
+  tableName: string
+  column: string
+}
+
 export interface QueryJson {
   endpoint: {
     datasourceId: string