Most of the work towards writing related rows back.

This commit is contained in:
mike12345567 2021-06-28 18:19:34 +01:00
parent 5dd53f67ac
commit 8e0ba1eb45
1 changed file with 47 additions and 6 deletions

View File

@ -12,18 +12,40 @@ const {
} = require("../../../integrations/utils") } = require("../../../integrations/utils")
const { cloneDeep } = require("lodash/fp") const { cloneDeep } = require("lodash/fp")
function inputProcessing(row, table) { function inputProcessing(row, table, allTables) {
if (!row) { if (!row) {
return row return row
} }
let newRow = {} let newRow = {}, manyRelationships = []
for (let key of Object.keys(table.schema)) { for (let [key, field] of Object.entries(table.schema)) {
// currently excludes empty strings // currently excludes empty strings
if (row[key]) { if (!row[key]) {
continue
}
const isLink = field.type === FieldTypes.LINK
if (isLink && !field.through) {
// we don't really support composite keys for relationships, this is why [0] is used
newRow[key] = breakRowIdField(row[key][0])[0]
} else if (isLink && field.through) {
const linkTable = allTables.find(table => table._id === field.tableId)
// table has to exist for many to many
if (!linkTable) {
continue
}
row[key].map(relationship => {
// we don't really support composite keys for relationships, this is why [0] is used
manyRelationships.push({
tableId: field.through,
[linkTable.primary]: breakRowIdField(relationship)[0],
// leave the ID for enrichment later
[table.primary]: `{{ id }}`,
})
})
} else {
newRow[key] = row[key] newRow[key] = row[key]
} }
} }
return newRow return { row: newRow, manyRelationships }
} }
function generateIdForRow(row, table) { function generateIdForRow(row, table) {
@ -63,6 +85,22 @@ function updateRelationshipColumns(rows, row, relationships, allTables) {
return rows return rows
} }
/**
 * Writes the many-to-many (through-table) rows for a newly created/updated row.
 * Each entry in `relationships` carries a `tableId` naming the junction table,
 * plus the column/value pairs to insert into it; all inserts run in parallel.
 * @param {string} appId - app the query runs against
 * @param {object} json - the query json used for the main row operation; its
 *   endpoint (datasource info) is reused, but never mutated, for each insert
 * @param {object[]} relationships - junction-table rows to create
 * @returns {Promise<void>} resolves once every insert has completed
 */
async function insertManyRelationships(appId, json, relationships) {
  // nothing to do when the row has no many-to-many links
  if (!relationships || relationships.length === 0) {
    return
  }
  const promises = []
  for (let relationship of relationships) {
    // tableId identifies the "through" table; the remaining keys form the
    // row body - destructure instead of `delete` so the caller's objects
    // are left untouched
    const { tableId, ...body } = relationship
    const { tableName } = breakExternalTableId(tableId)
    const newJson = {
      // shallow-copy the endpoint so the caller's json.endpoint.entityId is
      // not clobbered (and each queued query keeps its own entityId)
      endpoint: { ...json.endpoint, entityId: tableName },
      body,
    }
    promises.push(makeExternalQuery(appId, newJson))
  }
  await Promise.all(promises)
}
function outputProcessing(rows, table, relationships, allTables) { function outputProcessing(rows, table, relationships, allTables) {
// if no rows this is what is returned? Might be PG only // if no rows this is what is returned? Might be PG only
if (rows[0].read === true) { if (rows[0].read === true) {
@ -178,7 +216,8 @@ async function handleRequest(
// clean up row on ingress using schema // clean up row on ingress using schema
filters = buildFilters(id, filters, table) filters = buildFilters(id, filters, table)
const relationships = buildRelationships(table, tables) const relationships = buildRelationships(table, tables)
row = inputProcessing(row, table) const processed = inputProcessing(row, table)
row = processed.row
if ( if (
operation === DataSourceOperation.DELETE && operation === DataSourceOperation.DELETE &&
(filters == null || Object.keys(filters).length === 0) (filters == null || Object.keys(filters).length === 0)
@ -207,6 +246,8 @@ async function handleRequest(
} }
// can't really use response right now // can't really use response right now
const response = await makeExternalQuery(appId, json) const response = await makeExternalQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
await insertManyRelationships(appId, json, processed.manyRelationships)
// we searched for rows in someway // we searched for rows in someway
if (operation === DataSourceOperation.READ && Array.isArray(response)) { if (operation === DataSourceOperation.READ && Array.isArray(response)) {
return outputProcessing(response, table, relationships, tables) return outputProcessing(response, table, relationships, tables)