Merge branch 'fix/multiple-relationships-same-table' into chore/negated-join-filters
commit 960fa33559
@@ -425,23 +425,27 @@ class InternalBuilder {
     query: Knex.QueryBuilder,
     allowEmptyRelationships: boolean,
+    filterKey: string,
-    whereCb: (query: Knex.QueryBuilder) => Knex.QueryBuilder
+    whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
   ): Knex.QueryBuilder {
     const mainKnex = this.knex
     const { relationships, endpoint, tableAliases: aliases } = this.query
     const tableName = endpoint.entityId
     const fromAlias = aliases?.[tableName] || tableName
-    const matches = (possibleTable: string) =>
-      filterKey.startsWith(`${possibleTable}`)
+    const matches = (value: string) =>
+      filterKey.match(new RegExp(`^${value}\\.`))
     if (!relationships) {
       return query
     }
     for (const relationship of relationships) {
       const relatedTableName = relationship.tableName
       const toAlias = aliases?.[relatedTableName] || relatedTableName

+      const matchesTableName = matches(relatedTableName) || matches(toAlias)
+      const matchesRelationName = matches(relationship.column)
+
       // this is the relationship which is being filtered
       if (
-        (matches(relatedTableName) || matches(toAlias)) &&
+        (matchesTableName || matchesRelationName) &&
         relationship.to &&
         relationship.tableName
       ) {
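
Note on the `matches` change above: anchoring the key against `^${value}\.` avoids the false positives that the old `startsWith` check allowed whenever one table or alias name was a prefix of another. A minimal standalone sketch of the difference, using made-up key and table names:

// Sketch only: the filter keys and table names here are hypothetical.
const oldMatches = (filterKey: string, possibleTable: string) =>
  filterKey.startsWith(`${possibleTable}`)

const newMatches = (filterKey: string, value: string) =>
  filterKey.match(new RegExp(`^${value}\\.`)) !== null

const filterKey = "task_results.name"

console.log(oldMatches(filterKey, "task")) // true  - prefix false positive
console.log(newMatches(filterKey, "task")) // false - requires the full segment plus a dot
console.log(newMatches(filterKey, "task_results")) // true
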
@@ -450,6 +454,17 @@ class InternalBuilder {
           .from({ [toAlias]: relatedTableName })
         let subQuery = joinTable.clone()
         const manyToMany = validateManyToMany(relationship)
+        let updatedKey
+
+        if (!matchesTableName) {
+          updatedKey = filterKey.replace(
+            new RegExp(`^${relationship.column}.`),
+            `${aliases![relationship.tableName]}.`
+          )
+        } else {
+          updatedKey = filterKey
+        }
+
         if (manyToMany) {
           const throughAlias =
             aliases?.[manyToMany.through] || relationship.through
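
The `updatedKey` block introduced above rewrites a filter key that addresses the relationship by its column name so that it addresses the related table's alias instead, which is the form the join subquery is built against. A rough standalone sketch of that substitution, with hypothetical alias and relationship values (the real code reads these from `this.query`):

// Hypothetical data for illustration only.
const aliases: Record<string, string> = { tasks: "a1" }
const relationship = { column: "assignedTasks", tableName: "tasks" }

const filterKey = "assignedTasks.name"
// false here means the key used the relationship column rather than the table name
const matchesTableName = false

const updatedKey = !matchesTableName
  ? filterKey.replace(
      new RegExp(`^${relationship.column}.`),
      `${aliases[relationship.tableName]}.`
    )
  : filterKey

console.log(updatedKey) // "a1.name"
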
@@ -481,7 +496,7 @@ class InternalBuilder {
           }

           query = query.where(q => {
-            q.whereExists(whereCb(subQuery))
+            q.whereExists(whereCb(updatedKey, subQuery))
             if (allowEmptyRelationships) {
               q.orWhereNotExists(
                 joinTable.clone().innerJoin(throughTable, function () {
@@ -503,7 +518,7 @@ class InternalBuilder {
             mainKnex.raw(this.quotedIdentifier(foreignKey))
           )

-          query = query.whereExists(whereCb(subQuery))
+          query = query.whereExists(whereCb(updatedKey, subQuery))
          if (allowEmptyRelationships) {
            query = query.orWhereNull(foreignKey)
          }
@@ -599,7 +614,7 @@ class InternalBuilder {
       query,
       allowEmptyRelationships[operation],
+      updatedKey,
-      q => {
+      (updatedKey, q) => {
         return handleRelationship(q, updatedKey, value)
       }
     )
@@ -204,18 +204,6 @@ export class ExternalRequest<T extends Operation> {
     filters: SearchFilters,
     table: Table
   ): SearchFilters {
-    // replace any relationship columns initially, table names and relationship column names are acceptable
-    const relationshipColumns = sdk.rows.filters.getRelationshipColumns(table)
-    filters = sdk.rows.filters.updateFilterKeys(
-      filters,
-      relationshipColumns.map(({ name, definition }) => {
-        const { tableName } = breakExternalTableId(definition.tableId)
-        return {
-          original: name,
-          updated: tableName,
-        }
-      })
-    )
     const primary = table.primary
     // if passed in array need to copy for shifting etc
     let idCopy: undefined | string | any[] = cloneDeep(id)
@@ -15,13 +15,16 @@ import {
   ExportRowsResponse,
   FieldType,
   GetRowResponse,
+  isRelationshipField,
   PatchRowRequest,
   PatchRowResponse,
   Row,
   RowAttachment,
   RowSearchParams,
+  SearchFilters,
   SearchRowRequest,
   SearchRowResponse,
+  Table,
   UserCtx,
   ValidateResponse,
 } from "@budibase/types"
@@ -33,6 +36,7 @@ import sdk from "../../../sdk"
 import * as exporters from "../view/exporters"
 import { Format } from "../view/exporters"
 import { apiFileReturn } from "../../../utilities/fileSystem"
+import { dataFilters } from "@budibase/shared-core"

 export * as views from "./views"
@@ -211,12 +215,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {

   await context.ensureSnippetContext(true)

-  const enrichedQuery = await utils.enrichSearchContext(
-    { ...ctx.request.body.query },
-    {
-      user: sdk.users.getUserContextBindings(ctx.user),
-    }
-  )
+  let { query } = ctx.request.body
+  if (query) {
+    const allTables = await sdk.tables.getAllTables()
+    query = replaceTableNamesInFilters(tableId, query, allTables)
+  }
+
+  let enrichedQuery: SearchFilters = await utils.enrichSearchContext(query, {
+    user: sdk.users.getUserContextBindings(ctx.user),
+  })

   const searchParams: RowSearchParams = {
     ...ctx.request.body,
@@ -229,6 +236,47 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
   ctx.body = await sdk.rows.search(searchParams)
 }

+function replaceTableNamesInFilters(
+  tableId: string,
+  filters: SearchFilters,
+  allTables: Table[]
+): SearchFilters {
+  for (const filter of Object.values(filters)) {
+    for (const key of Object.keys(filter)) {
+      const matches = key.match(`^(?<relation>.+)\\.(?<field>.+)`)
+
+      const relation = matches?.groups?.["relation"]
+      const field = matches?.groups?.["field"]
+
+      if (!relation || !field) {
+        continue
+      }
+
+      const table = allTables.find(r => r._id === tableId)!
+      if (Object.values(table.schema).some(f => f.name === relation)) {
+        continue
+      }
+
+      const matchedTable = allTables.find(t => t.name === relation)
+      const relationship = Object.values(table.schema).find(
+        f => isRelationshipField(f) && f.tableId === matchedTable?._id
+      )
+      if (!relationship) {
+        continue
+      }
+
+      const updatedField = `${relationship.name}.${field}`
+      if (updatedField && updatedField !== key) {
+        filter[updatedField] = filter[key]
+        delete filter[key]
+      }
+    }
+  }
+  return dataFilters.recurseLogicalOperators(filters, (f: SearchFilters) => {
+    return replaceTableNamesInFilters(tableId, f, allTables)
+  })
+}
+
 export async function validate(ctx: Ctx<Row, ValidateResponse>) {
   const source = await utils.getSource(ctx)
   const table = await utils.getTableFromSource(source)
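
The new `replaceTableNamesInFilters` above lets callers keep writing filter keys against the related table's name while the search layer rewrites them to the relationship column defined on the source table, which is the form the rest of the pipeline expects; keys that already name a column on the table are left alone. A before/after sketch with hypothetical table and column names:

// Hypothetical setup: the source table has a relationship column "assignedTasks"
// that points at a table named "tasks". A caller filters by the table name...
const incoming = {
  equal: { "tasks.name": "fix the login page" },
}

// ...and after the rewrite the key refers to the relationship column instead:
const rewritten = {
  equal: { "assignedTasks.name": "fix the login page" },
}

console.log(incoming, rewritten)
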
@@ -2583,7 +2583,8 @@ describe.each([
       expect(response.rows[0].productCat).toBeArrayOfSize(11)
     })
   })
-  ;(isSqs || isLucene) &&
+
+  isSql &&
     describe("relations to same table", () => {
       let relatedTable: string, relatedRows: Row[]

@@ -2625,6 +2626,11 @@ describe.each([
           related1: [relatedRows[2]._id!],
           related2: [relatedRows[3]._id!],
         }),
+        config.api.row.save(tableOrViewId, {
+          name: "test3",
+          related1: [relatedRows[1]._id],
+          related2: [relatedRows[2]._id!],
+        }),
       ])
     })

@@ -2642,42 +2648,59 @@ describe.each([
             related1: [{ _id: relatedRows[2]._id }],
             related2: [{ _id: relatedRows[3]._id }],
           },
+          {
+            name: "test3",
+            related1: [{ _id: relatedRows[1]._id }],
+            related2: [{ _id: relatedRows[2]._id }],
+          },
         ])
       })

-      isSqs &&
-        it("should be able to filter down to second row with equal", async () => {
-          await expectSearch({
-            query: {
-              equal: {
-                ["related1.name"]: "baz",
-              },
-            },
-          }).toContainExactly([
-            {
-              name: "test2",
-              related1: [{ _id: relatedRows[2]._id }],
-            },
-          ])
-        })
+      it("should be able to filter via the first relation field with equal", async () => {
+        await expectSearch({
+          query: {
+            equal: {
+              ["related1.name"]: "baz",
+            },
+          },
+        }).toContainExactly([
+          {
+            name: "test2",
+            related1: [{ _id: relatedRows[2]._id }],
+          },
+        ])
+      })

-      isSqs &&
-        it("should be able to filter down to first row with not equal", async () => {
-          await expectSearch({
-            query: {
-              notEqual: {
-                ["1:related2.name"]: "bar",
-                ["2:related2.name"]: "baz",
-                ["3:related2.name"]: "boo",
-              },
-            },
-          }).toContainExactly([
-            {
-              name: "test",
-              related1: [{ _id: relatedRows[0]._id }],
-            },
-          ])
-        })
+      it("should be able to filter via the second relation field with not equal", async () => {
+        await expectSearch({
+          query: {
+            notEqual: {
+              ["1:related2.name"]: "foo",
+              ["2:related2.name"]: "baz",
+              ["3:related2.name"]: "boo",
+            },
+          },
+        }).toContainExactly([
+          {
+            name: "test",
+          },
+        ])
+      })
+
+      it("should be able to filter on both fields", async () => {
+        await expectSearch({
+          query: {
+            notEqual: {
+              ["related1.name"]: "foo",
+              ["related2.name"]: "baz",
+            },
+          },
+        }).toContainExactly([
+          {
+            name: "test2",
+          },
+        ])
+      })
     })

   isInternal &&
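
The `notEqual` filters in the tests above use numbered key prefixes ("1:", "2:", "3:"). Judging by the key regex in the deleted `updateFilterKeys` helper further down (it also accepts a `:` before the table name), the prefix appears to exist so a single filter object can hold several conditions on the same field without its keys colliding. A tiny illustrative sketch of stripping the prefix back off, with a hypothetical helper:

// Hypothetical helper for illustration only.
const notEqual: Record<string, string> = {
  "1:related2.name": "foo",
  "2:related2.name": "baz",
  "3:related2.name": "boo",
}

const stripPrefix = (key: string) => key.replace(/^\d+:/, "")

for (const [key, value] of Object.entries(notEqual)) {
  console.log(`${stripPrefix(key)} != ${value}`)
}
// related2.name != foo
// related2.name != baz
// related2.name != boo
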
@@ -3,14 +3,12 @@ import * as rows from "./rows"
 import * as search from "./search"
 import * as utils from "./utils"
 import * as external from "./external"
-import * as filters from "./search/filters"
 import AliasTables from "./sqlAlias"

 export default {
   ...attachments,
   ...rows,
   ...search,
-  filters,
   utils,
   external,
   AliasTables,
@@ -1,65 +0,0 @@
-import {
-  FieldType,
-  RelationshipFieldMetadata,
-  SearchFilters,
-  Table,
-} from "@budibase/types"
-import { isPlainObject } from "lodash"
-import { dataFilters } from "@budibase/shared-core"
-
-export function getRelationshipColumns(table: Table): {
-  name: string
-  definition: RelationshipFieldMetadata
-}[] {
-  // performing this with a for loop rather than an array filter improves
-  // type guarding, as no casts are required
-  const linkEntries: [string, RelationshipFieldMetadata][] = []
-  for (let entry of Object.entries(table.schema)) {
-    if (entry[1].type === FieldType.LINK) {
-      const linkColumn: RelationshipFieldMetadata = entry[1]
-      linkEntries.push([entry[0], linkColumn])
-    }
-  }
-  return linkEntries.map(entry => ({
-    name: entry[0],
-    definition: entry[1],
-  }))
-}
-
-export function getTableIDList(
-  tables: Table[]
-): { name: string; id: string }[] {
-  return tables
-    .filter(table => table.originalName && table._id)
-    .map(table => ({ id: table._id!, name: table.originalName! }))
-}
-
-export function updateFilterKeys(
-  filters: SearchFilters,
-  updates: { original: string; updated: string }[]
-): SearchFilters {
-  const makeFilterKeyRegex = (str: string) =>
-    new RegExp(`^${str}\\.|:${str}\\.`)
-  for (let filter of Object.values(filters)) {
-    if (!isPlainObject(filter)) {
-      continue
-    }
-    for (let [key, keyFilter] of Object.entries(filter)) {
-      if (keyFilter === "") {
-        delete filter[key]
-      }
-      const possibleKey = updates.find(({ original }) =>
-        key.match(makeFilterKeyRegex(original))
-      )
-      if (possibleKey && possibleKey.original !== possibleKey.updated) {
-        // only replace the first, not replaceAll
-        filter[key.replace(possibleKey.original, possibleKey.updated)] =
-          filter[key]
-        delete filter[key]
-      }
-    }
-  }
-  return dataFilters.recurseLogicalOperators(filters, (f: SearchFilters) => {
-    return updateFilterKeys(f, updates)
-  })
-}
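
For reference, the key regex in the deleted `updateFilterKeys` above (`^${str}\.|:${str}\.`) matched a filter key either at the very start or immediately after a `:`, which is what let it handle both plain and prefixed keys without matching columns that merely end in the same name. A standalone sketch of that matching, with made-up keys:

// Recreates the deleted helper's regex for illustration; the keys are made up.
const makeFilterKeyRegex = (str: string) => new RegExp(`^${str}\\.|:${str}\\.`)

const regex = makeFilterKeyRegex("tasks")

console.log(regex.test("tasks.name")) // true  - plain key
console.log(regex.test("1:tasks.name")) // true  - prefixed key
console.log(regex.test("subtasks.name")) // false - different column
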
@@ -39,11 +39,6 @@ import AliasTables from "../../sqlAlias"
 import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
 import { processRowCountResponse } from "../../utils"
-import {
-  getRelationshipColumns,
-  getTableIDList,
-  updateFilterKeys,
-} from "../filters"
 import {
   dataFilters,
   helpers,
@@ -133,31 +128,7 @@ async function buildInternalFieldList(
   return [...new Set(fieldList)]
 }

-function cleanupFilters(
-  filters: SearchFilters,
-  table: Table,
-  allTables: Table[]
-) {
-  // get a list of all relationship columns in the table for updating
-  const relationshipColumns = getRelationshipColumns(table)
-  // get table names to ID map for relationships
-  const tableNameToID = getTableIDList(allTables)
-  // all should be applied at once
-  filters = updateFilterKeys(
-    filters,
-    relationshipColumns
-      .map(({ name, definition }) => ({
-        original: name,
-        updated: definition.tableId,
-      }))
-      .concat(
-        tableNameToID.map(({ name, id }) => ({
-          original: name,
-          updated: id,
-        }))
-      )
-  )
-
+function cleanupFilters(filters: SearchFilters, allTables: Table[]) {
   // generate a map of all possible column names (these can be duplicated across tables
   // the map of them will always be the same
   const userColumnMap: Record<string, string> = {}
@@ -356,7 +327,7 @@ export async function search(
   const relationships = buildInternalRelationships(table, allTables)

   const searchFilters: SearchFilters = {
-    ...cleanupFilters(query, table, allTables),
+    ...cleanupFilters(query, allTables),
     documentType: DocumentType.ROW,
   }
