Removing the PG hack and handling the updating of relationships from the one side, e.g. one person is linked to many tasks: updating the person now updates the foreign key in tasks with that person's ID.

mike12345567 2021-07-01 18:23:15 +01:00
parent e82e175b9c
commit 5e819faa58
4 changed files with 114 additions and 117 deletions
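As a rough sketch (not part of the diff) of what the new one-side update produces, assume the persons/tasks schema that the removed Postgres hack used to hard-code, with "personid" as both the persons primary key and the foreign key column on tasks; all values below are hypothetical:

// One entry like this is pushed into manyRelationships per linked task when a
// person row is saved.
const manyRelationship = {
  tableId: "<datasource>|tasks",  // field.tableId - no "through" junction table here
  isUpdate: true,                 // !field.through, so the linked task row is updated
  id: 7,                          // thisKey "id": the task being linked (hypothetical)
  personid: "{{ personid }}",     // otherKey (the foreignKey), left as a template and
                                  // enriched with the saved person's ID afterwards
}
// handleManyRelationships then issues, per task, roughly:
//   UPDATE tasks SET personid = <saved person id> WHERE <tasks primary key> = 7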

View File

@@ -19,24 +19,55 @@ const {
 } = require("./externalUtils")
 const { processObjectSync } = require("@budibase/string-templates")
-async function handleRequest(
-  appId,
-  operation,
-  tableId,
-  { id, row, filters, sort, paginate, tables } = {}
-) {
-  let { datasourceId, tableName } = breakExternalTableId(tableId)
-  if (!tables) {
-    tables = await getAllExternalTables(appId, datasourceId)
-  }
-  const table = tables[tableName]
+class ExternalRequest {
+  constructor(appId, operation, tableId, tables) {
+    this.appId = appId
+    this.operation = operation
+    this.tableId = tableId
+    this.tables = tables
+  }
+
+  async handleManyRelationships(row, relationships) {
+    const { appId, tables } = this
+    const promises = []
+    for (let relationship of relationships) {
+      const { tableId, isUpdate, id, ...rest } = relationship
+      const { datasourceId, tableName } = breakExternalTableId(tableId)
+      const linkedTable = tables[tableName]
+      if (!linkedTable) {
+        continue
+      }
+      const endpoint = {
+        datasourceId,
+        entityId: tableName,
+        operation: isUpdate ? DataSourceOperation.UPDATE : DataSourceOperation.CREATE,
+      }
+      promises.push(
+        makeExternalQuery(appId, {
+          endpoint,
+          // if we're doing many relationships then we're writing, only one response
+          body: processObjectSync(rest, row),
+          filters: buildFilters(id, {}, linkedTable),
+        })
+      )
+    }
+    await Promise.all(promises)
+  }
+
+  async run({ id, row, filters, sort, paginate }) {
+    const { appId, operation, tableId } = this
+    let { datasourceId, tableName } = breakExternalTableId(tableId)
+    if (!this.tables) {
+      this.tables = await getAllExternalTables(appId, datasourceId)
+    }
+    const table = this.tables[tableName]
   if (!table) {
     throw `Unable to process query, table "${tableName}" not defined.`
   }
   // clean up row on ingress using schema
   filters = buildFilters(id, filters, table)
-  const relationships = buildRelationships(table, tables)
-  const processed = inputProcessing(row, table, tables)
+  const relationships = buildRelationships(table, this.tables)
+  const processed = inputProcessing(row, table, this.tables)
   row = processed.row
   if (
     operation === DataSourceOperation.DELETE &&
@@ -52,7 +83,7 @@ async function handleRequest(
       },
       resource: {
         // have to specify the fields to avoid column overlap
-        fields: buildFields(table, tables),
+        fields: buildFields(table, this.tables),
       },
       filters,
       sort,
@@ -68,28 +99,23 @@ async function handleRequest(
     const response = await makeExternalQuery(appId, json)
     // handle many to many relationships now if we know the ID (could be auto increment)
     if (processed.manyRelationships) {
-      const promises = []
-      for (let toInsert of processed.manyRelationships) {
-        const { tableName } = breakExternalTableId(toInsert.tableId)
-        delete toInsert.tableId
-        promises.push(
-          makeExternalQuery(appId, {
-            endpoint: {
-              ...json.endpoint,
-              entityId: tableName,
-            },
-            // if we're doing many relationships then we're writing, only one response
-            body: processObjectSync(toInsert, response[0]),
-          })
-        )
-      }
-      await Promise.all(promises)
+      await this.handleManyRelationships(response[0], processed.manyRelationships)
     }
-    const output = outputProcessing(response, table, relationships, tables)
+    const output = outputProcessing(response, table, relationships, this.tables)
     // if reading it'll just be an array of rows, return whole thing
     return operation === DataSourceOperation.READ && Array.isArray(response)
       ? output
       : { row: output[0], table }
+  }
 }
+
+async function handleRequest(
+  appId,
+  operation,
+  tableId,
+  opts = {}
+) {
+  return new ExternalRequest(appId, operation, tableId, opts.tables).run(opts)
+}

 exports.patch = async ctx => {
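Roughly how the controller can now drive the wrapper (inside an async handler; appId, tableId, rowId and ctx are hypothetical values from the request, and DataSourceOperation comes from the existing imports):

// read: returns the array of rows straight from the datasource
const rows = await handleRequest(appId, DataSourceOperation.READ, tableId, {
  filters: {},
  sort: {},
  paginate: false,
})

// update: returns { row, table }; relationship writes now happen inside
// ExternalRequest.handleManyRelationships
const { row } = await handleRequest(appId, DataSourceOperation.UPDATE, tableId, {
  id: rowId,               // hypothetical row _id from the request
  row: ctx.request.body,   // hypothetical request body
})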

View File

@@ -18,6 +18,10 @@ function basicProcessing(row, table) {
   return thisRow
 }

+function isMany(field) {
+  return field.relationshipType.split("-")[0] === "many"
+}
+
 exports.inputProcessing = (row, table, allTables) => {
   if (!row) {
     return { row, manyRelationships: [] }
@@ -40,19 +44,24 @@ exports.inputProcessing = (row, table, allTables) => {
       continue
     }
     const linkTable = allTables[linkTableName]
-    if (!field.through) {
+    if (!isMany(field)) {
       // we don't really support composite keys for relationships, this is why [0] is used
       newRow[field.foreignKey || linkTable.primary] = breakRowIdField(
         row[key][0]
       )[0]
     } else {
+      // we're not inserting a doc, will be a bunch of update calls
+      const isUpdate = !field.through
+      const thisKey = isUpdate ? "id" : linkTable.primary
+      const otherKey = isUpdate ? field.foreignKey : table.primary
       row[key].map(relationship => {
         // we don't really support composite keys for relationships, this is why [0] is used
         manyRelationships.push({
-          tableId: field.through,
-          [linkTable.primary]: breakRowIdField(relationship)[0],
+          tableId: field.through || field.tableId,
+          isUpdate,
+          [thisKey]: breakRowIdField(relationship)[0],
           // leave the ID for enrichment later
-          [table.primary]: `{{ ${table.primary} }}`,
+          [otherKey]: `{{ ${table.primary} }}`,
         })
       })
     }
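For clarity, a small sketch of how the three relationship shapes are now routed; the field configs are assumptions mirroring the sample schema removed from the Postgres module further down:

// copied from the helper added above, so this sketch runs on its own
function isMany(field) {
  return field.relationshipType.split("-")[0] === "many"
}

const oneToMany = {
  relationshipType: "one-to-many",   // stored on the "many" side (e.g. tasks.people)
  foreignKey: "personid",
}
const manyToOne = {
  relationshipType: "many-to-one",   // stored on the "one" side (e.g. persons.tasks)
  foreignKey: "personid",
}
const manyToMany = {
  relationshipType: "many-to-many",
  through: "<datasource>|products_tasks",
}

isMany(oneToMany)   // false -> the FK column is written on the row being saved
isMany(manyToOne)   // true, no "through" -> isUpdate = true, linked rows get their FK updated
isMany(manyToMany)  // true, has "through" -> isUpdate = false, junction rows are created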
@@ -65,14 +74,19 @@ exports.inputProcessing = (row, table, allTables) => {
 exports.generateIdForRow = (row, table) => {
   if (!row) {
-    return
+    return null
   }
   const primary = table.primary
   // build id array
   let idParts = []
   for (let field of primary) {
+    if (row[field]) {
       idParts.push(row[field])
+    }
   }
+  if (idParts.length === 0) {
+    return null
+  }
   return generateRowIdField(idParts)
 }
@@ -84,6 +98,9 @@ exports.updateRelationshipColumns = (row, rows, relationships, allTables) => {
       continue
     }
     let linked = basicProcessing(row, linkedTable)
+    if (!linked._id) {
+      continue
+    }
     // if not returning full docs then get the minimal links out
     const display = linkedTable.primaryDisplay
     linked = {
@@ -193,7 +210,7 @@ exports.buildFilters = (id, filters, table) => {
     return filters
   }
   // if used as URL parameter it will have been joined
-  if (typeof idCopy === "string") {
+  if (!Array.isArray(idCopy)) {
     idCopy = breakRowIdField(idCopy)
   }
   const equal = {}

View File

@@ -174,54 +174,6 @@ module PostgresModule {
           name: columnName,
           type,
         }
-        // TODO: hack for testing
-        // if (tableName === "persons") {
-        //   tables[tableName].primaryDisplay = "firstname"
-        // }
-        // if (tableName === "products") {
-        //   tables[tableName].primaryDisplay = "productname"
-        // }
-        // if (tableName === "tasks") {
-        //   tables[tableName].primaryDisplay = "taskname"
-        // }
-        // if (tableName === "products") {
-        //   tables[tableName].schema["tasks"] = {
-        //     name: "tasks",
-        //     type: "link",
-        //     tableId: buildExternalTableId(datasourceId, "tasks"),
-        //     relationshipType: "many-to-many",
-        //     through: buildExternalTableId(datasourceId, "products_tasks"),
-        //     fieldName: "taskid",
-        //   }
-        // }
-        // if (tableName === "persons") {
-        //   tables[tableName].schema["tasks"] = {
-        //     name: "tasks",
-        //     type: "link",
-        //     tableId: buildExternalTableId(datasourceId, "tasks"),
-        //     relationshipType: "many-to-one",
-        //     fieldName: "personid",
-        //   }
-        // }
-        // if (tableName === "tasks") {
-        //   tables[tableName].schema["products"] = {
-        //     name: "products",
-        //     type: "link",
-        //     tableId: buildExternalTableId(datasourceId, "products"),
-        //     relationshipType: "many-to-many",
-        //     through: buildExternalTableId(datasourceId, "products_tasks"),
-        //     fieldName: "productid",
-        //   }
-        //   tables[tableName].schema["people"] = {
-        //     name: "people",
-        //     type: "link",
-        //     tableId: buildExternalTableId(datasourceId, "persons"),
-        //     relationshipType: "one-to-many",
-        //     fieldName: "personid",
-        //     foreignKey: "personid",
-        //   }
-        // }
       }
       this.tables = tables
     }

View File

@@ -33,7 +33,9 @@ export function breakRowIdField(_id: string) {
   if (!_id) {
     return null
   }
-  return JSON.parse(decodeURIComponent(_id))
+  const decoded = decodeURIComponent(_id)
+  const parsed = JSON.parse(decoded)
+  return Array.isArray(parsed) ? parsed : [parsed]
 }

 export function convertType(type: string, map: { [key: string]: any }) {
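A quick illustration of the normalised return value (the inputs are hypothetical):

// previously JSON.parse could yield a bare value, so call sites such as
// breakRowIdField(row[key][0])[0] in inputProcessing would read undefined
breakRowIdField(encodeURIComponent(JSON.stringify([5])))  // -> [5]
breakRowIdField("5")                                       // -> [5], now wrapped in an array
breakRowIdField("")                                        // -> null, unchanged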