Removing the PG hack and handling the updating of relationships from the "one" side, e.g. when one person is linked to many tasks, allow updating the person and having the FK in tasks be updated with that person's ID.

This commit is contained in:
mike12345567 2021-07-01 18:23:15 +01:00
parent e82e175b9c
commit 5e819faa58
4 changed files with 114 additions and 117 deletions

View File

@ -19,77 +19,103 @@ const {
} = require("./externalUtils") } = require("./externalUtils")
const { processObjectSync } = require("@budibase/string-templates") const { processObjectSync } = require("@budibase/string-templates")
async function handleRequest( class ExternalRequest {
appId, constructor(appId, operation, tableId, tables) {
operation, this.appId = appId
tableId, this.operation = operation
{ id, row, filters, sort, paginate, tables } = {} this.tableId = tableId
) { this.tables = tables
let { datasourceId, tableName } = breakExternalTableId(tableId)
if (!tables) {
tables = await getAllExternalTables(appId, datasourceId)
} }
const table = tables[tableName]
if (!table) { async handleManyRelationships(row, relationships) {
throw `Unable to process query, table "${tableName}" not defined.` const { appId, tables } = this
}
// clean up row on ingress using schema
filters = buildFilters(id, filters, table)
const relationships = buildRelationships(table, tables)
const processed = inputProcessing(row, table, tables)
row = processed.row
if (
operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let json = {
endpoint: {
datasourceId,
entityId: tableName,
operation,
},
resource: {
// have to specify the fields to avoid column overlap
fields: buildFields(table, tables),
},
filters,
sort,
paginate,
relationships,
body: row,
// pass an id filter into extra, purely for mysql/returning
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (processed.manyRelationships) {
const promises = [] const promises = []
for (let toInsert of processed.manyRelationships) { for (let relationship of relationships) {
const { tableName } = breakExternalTableId(toInsert.tableId) const { tableId, isUpdate, id, ...rest } = relationship
delete toInsert.tableId const { datasourceId, tableName } = breakExternalTableId(tableId)
const linkedTable = tables[tableName]
if (!linkedTable) {
continue
}
const endpoint = {
datasourceId,
entityId: tableName,
operation: isUpdate ? DataSourceOperation.UPDATE : DataSourceOperation.CREATE,
}
promises.push( promises.push(
makeExternalQuery(appId, { makeExternalQuery(appId, {
endpoint: { endpoint,
...json.endpoint,
entityId: tableName,
},
// if we're doing many relationships then we're writing, only one response // if we're doing many relationships then we're writing, only one response
body: processObjectSync(toInsert, response[0]), body: processObjectSync(rest, row),
filters: buildFilters(id, {}, linkedTable)
}) })
) )
} }
await Promise.all(promises) await Promise.all(promises)
} }
const output = outputProcessing(response, table, relationships, tables)
// if reading it'll just be an array of rows, return whole thing async run({ id, row, filters, sort, paginate }) {
return operation === DataSourceOperation.READ && Array.isArray(response) const { appId, operation, tableId } = this
? output let { datasourceId, tableName } = breakExternalTableId(tableId)
: { row: output[0], table } if (!this.tables) {
this.tables = await getAllExternalTables(appId, datasourceId)
}
const table = this.tables[tableName]
if (!table) {
throw `Unable to process query, table "${tableName}" not defined.`
}
// clean up row on ingress using schema
filters = buildFilters(id, filters, table)
const relationships = buildRelationships(table, this.tables)
const processed = inputProcessing(row, table, this.tables)
row = processed.row
if (
operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let json = {
endpoint: {
datasourceId,
entityId: tableName,
operation,
},
resource: {
// have to specify the fields to avoid column overlap
fields: buildFields(table, this.tables),
},
filters,
sort,
paginate,
relationships,
body: row,
// pass an id filter into extra, purely for mysql/returning
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (processed.manyRelationships) {
await this.handleManyRelationships(response[0], processed.manyRelationships)
}
const output = outputProcessing(response, table, relationships, this.tables)
// if reading it'll just be an array of rows, return whole thing
return operation === DataSourceOperation.READ && Array.isArray(response)
? output
: { row: output[0], table }
}
}
async function handleRequest(
appId,
operation,
tableId,
opts = {}
) {
return new ExternalRequest(appId, operation, tableId, opts.tables).run(opts)
} }
exports.patch = async ctx => { exports.patch = async ctx => {

View File

@ -18,6 +18,10 @@ function basicProcessing(row, table) {
return thisRow return thisRow
} }
function isMany(field) {
return field.relationshipType.split("-")[0] === "many"
}
exports.inputProcessing = (row, table, allTables) => { exports.inputProcessing = (row, table, allTables) => {
if (!row) { if (!row) {
return { row, manyRelationships: [] } return { row, manyRelationships: [] }
@ -40,19 +44,24 @@ exports.inputProcessing = (row, table, allTables) => {
continue continue
} }
const linkTable = allTables[linkTableName] const linkTable = allTables[linkTableName]
if (!field.through) { if (!isMany(field)) {
// we don't really support composite keys for relationships, this is why [0] is used // we don't really support composite keys for relationships, this is why [0] is used
newRow[field.foreignKey || linkTable.primary] = breakRowIdField( newRow[field.foreignKey || linkTable.primary] = breakRowIdField(
row[key][0] row[key][0]
)[0] )[0]
} else { } else {
// we're not inserting a doc, will be a bunch of update calls
const isUpdate = !field.through
const thisKey = isUpdate ? "id" : linkTable.primary
const otherKey = isUpdate ? field.foreignKey : table.primary
row[key].map(relationship => { row[key].map(relationship => {
// we don't really support composite keys for relationships, this is why [0] is used // we don't really support composite keys for relationships, this is why [0] is used
manyRelationships.push({ manyRelationships.push({
tableId: field.through, tableId: field.through || field.tableId,
[linkTable.primary]: breakRowIdField(relationship)[0], isUpdate,
[thisKey]: breakRowIdField(relationship)[0],
// leave the ID for enrichment later // leave the ID for enrichment later
[table.primary]: `{{ ${table.primary} }}`, [otherKey]: `{{ ${table.primary} }}`,
}) })
}) })
} }
@ -65,13 +74,18 @@ exports.inputProcessing = (row, table, allTables) => {
exports.generateIdForRow = (row, table) => { exports.generateIdForRow = (row, table) => {
if (!row) { if (!row) {
return return null
} }
const primary = table.primary const primary = table.primary
// build id array // build id array
let idParts = [] let idParts = []
for (let field of primary) { for (let field of primary) {
idParts.push(row[field]) if (row[field]) {
idParts.push(row[field])
}
}
if (idParts.length === 0) {
return null
} }
return generateRowIdField(idParts) return generateRowIdField(idParts)
} }
@ -84,6 +98,9 @@ exports.updateRelationshipColumns = (row, rows, relationships, allTables) => {
continue continue
} }
let linked = basicProcessing(row, linkedTable) let linked = basicProcessing(row, linkedTable)
if (!linked._id) {
continue
}
// if not returning full docs then get the minimal links out // if not returning full docs then get the minimal links out
const display = linkedTable.primaryDisplay const display = linkedTable.primaryDisplay
linked = { linked = {
@ -193,7 +210,7 @@ exports.buildFilters = (id, filters, table) => {
return filters return filters
} }
// if used as URL parameter it will have been joined // if used as URL parameter it will have been joined
if (typeof idCopy === "string") { if (!Array.isArray(idCopy)) {
idCopy = breakRowIdField(idCopy) idCopy = breakRowIdField(idCopy)
} }
const equal = {} const equal = {}

View File

@ -174,54 +174,6 @@ module PostgresModule {
name: columnName, name: columnName,
type, type,
} }
// TODO: hack for testing
// if (tableName === "persons") {
// tables[tableName].primaryDisplay = "firstname"
// }
// if (tableName === "products") {
// tables[tableName].primaryDisplay = "productname"
// }
// if (tableName === "tasks") {
// tables[tableName].primaryDisplay = "taskname"
// }
// if (tableName === "products") {
// tables[tableName].schema["tasks"] = {
// name: "tasks",
// type: "link",
// tableId: buildExternalTableId(datasourceId, "tasks"),
// relationshipType: "many-to-many",
// through: buildExternalTableId(datasourceId, "products_tasks"),
// fieldName: "taskid",
// }
// }
// if (tableName === "persons") {
// tables[tableName].schema["tasks"] = {
// name: "tasks",
// type: "link",
// tableId: buildExternalTableId(datasourceId, "tasks"),
// relationshipType: "many-to-one",
// fieldName: "personid",
// }
// }
// if (tableName === "tasks") {
// tables[tableName].schema["products"] = {
// name: "products",
// type: "link",
// tableId: buildExternalTableId(datasourceId, "products"),
// relationshipType: "many-to-many",
// through: buildExternalTableId(datasourceId, "products_tasks"),
// fieldName: "productid",
// }
// tables[tableName].schema["people"] = {
// name: "people",
// type: "link",
// tableId: buildExternalTableId(datasourceId, "persons"),
// relationshipType: "one-to-many",
// fieldName: "personid",
// foreignKey: "personid",
// }
// }
} }
this.tables = tables this.tables = tables
} }

View File

@ -33,7 +33,9 @@ export function breakRowIdField(_id: string) {
if (!_id) { if (!_id) {
return null return null
} }
return JSON.parse(decodeURIComponent(_id)) const decoded = decodeURIComponent(_id)
const parsed = JSON.parse(decoded)
return Array.isArray(parsed) ? parsed : [parsed]
} }
export function convertType(type: string, map: { [key: string]: any }) { export function convertType(type: string, map: { [key: string]: any }) {