Handle the scenario where columns can overlap in SQL relationships, which most JSON-based libraries cannot manage: rather than trying to resolve the overlap, simply don't return the overlapping columns that are not of interest.
parent 13100a69ba
commit fd4dc4b45e
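
To illustrate the problem the commit message describes, here is a hedged sketch with hypothetical table and column names (not code from this repository): when a query selects "*" across a join and the driver flattens each row into one JSON object, any column name shared between the joined tables collides, and a NULL from the unrelated side can clobber the real value.

// Hypothetical data for illustration only - not Budibase code.
const product = { id: 1, name: "Laptop" }
const unmatchedTask = { id: null, taskname: null } // LEFT JOIN row with no match

// A flat "SELECT *" row behaves like an object spread: later keys overwrite earlier ones.
const flattened = { ...product, ...unmatchedTask }
console.log(flattened.id) // null - the product's real id has been lost

// Selecting an explicit, table-qualified field list leaves nothing to collide.
const fields = ["products.id", "products.name", "tasks.taskname"]
console.log(fields) // what the query selects instead of "*"
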
@@ -1,8 +1,5 @@
 const { makeExternalQuery } = require("./utils")
-const {
-  DataSourceOperation,
-  SortDirection,
-} = require("../../../constants")
+const { DataSourceOperation, SortDirection } = require("../../../constants")
 const { getAllExternalTables } = require("../table/utils")
 const {
   breakExternalTableId,
@@ -14,6 +11,7 @@ const {
   inputProcessing,
   outputProcessing,
   generateIdForRow,
+  buildFields,
 } = require("./externalUtils")
 const { processObjectSync } = require("@budibase/string-templates")
 
@@ -23,7 +21,7 @@ async function handleRequest(
   tableId,
   { id, row, filters, sort, paginate } = {}
 ) {
-  let {datasourceId, tableName} = breakExternalTableId(tableId)
+  let { datasourceId, tableName } = breakExternalTableId(tableId)
   const tables = await getAllExternalTables(appId, datasourceId)
   const table = tables[tableName]
   if (!table) {
@@ -32,7 +30,7 @@ async function handleRequest(
   // clean up row on ingress using schema
   filters = buildFilters(id, filters, table)
   const relationships = buildRelationships(table, tables)
-  const processed = inputProcessing(row, table)
+  const processed = inputProcessing(row, table, tables)
   row = processed.row
   if (
     operation === DataSourceOperation.DELETE &&
@@ -47,8 +45,8 @@ async function handleRequest(
       operation,
     },
     resource: {
-      // not specifying any fields means "*"
-      fields: [],
+      // have to specify the fields to avoid column overlap
+      fields: buildFields(table, tables),
     },
     filters,
     sort,
@@ -66,15 +64,17 @@ async function handleRequest(
   if (processed.manyRelationships) {
     const promises = []
     for (let toInsert of processed.manyRelationships) {
-      const {tableName} = breakExternalTableId(toInsert.tableId)
+      const { tableName } = breakExternalTableId(toInsert.tableId)
       delete toInsert.tableId
-      promises.push(makeExternalQuery(appId, {
-        endpoint: {
-          ...json.endpoint,
-          entityId: tableName,
-        },
-        body: toInsert,
-      }))
+      promises.push(
+        makeExternalQuery(appId, {
+          endpoint: {
+            ...json.endpoint,
+            entityId: processObjectSync(tableName, row),
+          },
+          body: toInsert,
+        })
+      )
     }
     await Promise.all(promises)
   }

@@ -10,7 +10,8 @@ exports.inputProcessing = (row, table, allTables) => {
   if (!row) {
     return { row, manyRelationships: [] }
   }
-  let newRow = {}, manyRelationships = []
+  let newRow = {},
+    manyRelationships = []
   for (let [key, field] of Object.entries(table.schema)) {
     // currently excludes empty strings
     if (!row[key]) {
@@ -21,18 +22,19 @@ exports.inputProcessing = (row, table, allTables) => {
       // we don't really support composite keys for relationships, this is why [0] is used
       newRow[key] = breakRowIdField(row[key][0])[0]
     } else if (isLink && field.through) {
-      const linkTable = allTables.find(table => table._id === field.tableId)
+      const { tableName: linkTableName } = breakExternalTableId(field.tableId)
       // table has to exist for many to many
-      if (!linkTable) {
+      if (!allTables[linkTableName]) {
         continue
       }
+      const linkTable = allTables[linkTableName]
       row[key].map(relationship => {
         // we don't really support composite keys for relationships, this is why [0] is used
         manyRelationships.push({
           tableId: field.through,
           [linkTable.primary]: breakRowIdField(relationship)[0],
           // leave the ID for enrichment later
-          [table.primary]: `{{ id }}`,
+          [table.primary]: `{{ _id }}`,
         })
       })
     } else {
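
The `{{ _id }}` placeholder left in each many-to-many record above is, per the comment, filled in later; a rough, hedged sketch of that enrichment step using processObjectSync (which this diff already requires from @budibase/string-templates) with made-up column names:

const { processObjectSync } = require("@budibase/string-templates")

// Hypothetical shape of one record produced by the loop above.
const manyRelationship = {
  tableId: "products_tasks",   // the through table (field.through)
  tasksid: 2,                  // [linkTable.primary]: breakRowIdField(relationship)[0]
  productsid: "{{ _id }}",     // [table.primary]: left as a template for enrichment
}

// Once the main row's id is known, the template is resolved (templated values come back as strings).
const enriched = processObjectSync(manyRelationship, { _id: 7 })
console.log(enriched) // roughly { tableId: "products_tasks", tasksid: 2, productsid: "7" }
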
@@ -120,6 +122,42 @@ exports.outputProcessing = (rows, table, relationships, allTables) => {
   return Object.values(finalRows)
 }
 
+/**
+ * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
+ * you have column overlap in relationships, e.g. we join a few different tables and they all have the
+ * concept of an ID, but for some of them it will be null (if they say don't have a relationship).
+ * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
+ * is more performant and has the added benefit of protecting against this scenario.
+ * @param {Object} table The table we are retrieving fields for.
+ * @param {Object[]} allTables All of the tables that exist in the external data source, this is
+ * needed to work out what is needed from other tables based on relationships.
+ * @return {string[]} A list of fields like ["products.productid"] which can be used for an SQL select.
+ */
+exports.buildFields = (table, allTables) => {
+  function extractNonLinkFieldNames(table, existing = []) {
+    return Object.entries(table.schema)
+      .filter(
+        column =>
+          column[1].type !== FieldTypes.LINK &&
+          !existing.find(field => field.includes(column[0]))
+      )
+      .map(column => `${table.name}.${column[0]}`)
+  }
+  let fields = extractNonLinkFieldNames(table)
+  for (let field of Object.values(table.schema)) {
+    if (field.type !== FieldTypes.LINK) {
+      continue
+    }
+    const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+    const linkTable = allTables[linkTableName]
+    if (linkTable) {
+      const linkedFields = extractNonLinkFieldNames(linkTable, fields)
+      fields = fields.concat(linkedFields)
+    }
+  }
+  return fields
+}
+
 exports.buildFilters = (id, filters, table) => {
   const primary = table.primary
   // if passed in array need to copy for shifting etc
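
A standalone usage sketch of what buildFields is meant to produce, using a simplified reimplementation and hypothetical table definitions (the real helper resolves linked tables via breakExternalTableId and FieldTypes.LINK; the literal "link" type string here is only for illustration):

const LINK = "link" // stand-in for FieldTypes.LINK

const tables = {
  products: {
    name: "products",
    schema: {
      productid: { type: "number" },
      productname: { type: "string" },
      tasks: { type: LINK, tableName: "tasks" },
    },
  },
  tasks: {
    name: "tasks",
    schema: {
      taskid: { type: "number" },
      taskname: { type: "string" },
    },
  },
}

// Same idea as buildFields: qualified non-link columns from the main table, then from each
// linked table, skipping any column name that already appears in the list.
function sketchBuildFields(table, allTables) {
  const nonLink = (tbl, existing = []) =>
    Object.entries(tbl.schema)
      .filter(([name, col]) => col.type !== LINK && !existing.find(f => f.includes(name)))
      .map(([name]) => `${tbl.name}.${name}`)
  let fields = nonLink(table)
  for (let field of Object.values(table.schema)) {
    if (field.type === LINK && allTables[field.tableName]) {
      fields = fields.concat(nonLink(allTables[field.tableName], fields))
    }
  }
  return fields
}

console.log(sketchBuildFields(tables.products, tables))
// [ 'products.productid', 'products.productname', 'tasks.taskid', 'tasks.taskname' ]
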
@@ -177,7 +215,9 @@ exports.buildRelationships = (table, allTables) => {
       column: fieldName,
     }
     if (field.through) {
-      const { tableName: throughTableName } = breakExternalTableId(field.through)
+      const { tableName: throughTableName } = breakExternalTableId(
+        field.through
+      )
       definition.through = throughTableName
       // don't support composite keys for relationships
       definition.from = table.primary[0]

@@ -84,7 +84,7 @@ function addRelationships(
       toTable = relationship.tableName
     if (!relationship.through) {
       // @ts-ignore
-      query = query.innerJoin(
+      query = query.leftJoin(
        toTable,
        `${fromTable}.${from}`,
        `${relationship.tableName}.${to}`
@@ -93,8 +93,12 @@ function addRelationships(
       const throughTable = relationship.through
       query = query
         // @ts-ignore
-        .innerJoin(throughTable, `${fromTable}.${from}`, `${throughTable}.${from}`)
-        .innerJoin(toTable, `${toTable}.${to}`, `${throughTable}.${to}`)
+        .leftJoin(
+          throughTable,
+          `${fromTable}.${from}`,
+          `${throughTable}.${from}`
+        )
+        .leftJoin(toTable, `${toTable}.${to}`, `${throughTable}.${to}`)
     }
   }
   return query
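
The switch from innerJoin to leftJoin in the two hunks above matters for the same scenario: an inner join drops any row that has no related record, while a left join keeps it and simply leaves the joined columns NULL. A hedged sketch of the difference, assuming a knex-style builder like the one these hunks appear to use, with hypothetical table and column names:

// knex is used purely as a query builder here (no database connection is made).
const knex = require("knex")({ client: "pg" })

const inner = knex("products")
  .select(["products.productid", "tasks.taskname"])
  .innerJoin("tasks", "products.productid", "tasks.productid")

const left = knex("products")
  .select(["products.productid", "tasks.taskname"])
  .leftJoin("tasks", "products.productid", "tasks.productid")

console.log(inner.toString()) // ...inner join "tasks"... - products with no tasks vanish
console.log(left.toString())  // ...left join "tasks"...  - they stay, tasks columns are null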