Starting with some work towards a deeper enrichment of rows.
commit a93eb180ce
parent bec436f747
@@ -287,7 +287,7 @@ describe("/rows", () => {
       })).body
       const enriched = await outputProcessing(appId, table, [secondRow])
       expect(enriched[0].link.length).toBe(1)
-      expect(enriched[0].link[0]).toBe(firstRow._id)
+      expect(enriched[0].link[0]._id).toBe(firstRow._id)
     })
   })

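The assertion change above is the visible effect of this commit: when a row is enriched, a link column now contains the joined row documents rather than bare row IDs. A rough before/after sketch of that shape, assuming the test's two rows are related through a column named "link":

// before: enriched[0].link === [firstRow._id]
// after:  enriched[0].link === [{ _id: firstRow._id, tableId: firstRow.tableId, /* ...rest of firstRow */ }]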
@@ -6,6 +6,7 @@ const {
   getUniqueByProp,
 } = require("./linkUtils")
 const { flatten } = require("lodash")
+const CouchDB = require("../../db")

 /**
  * This functionality makes sure that when rows with links are created, updated or deleted they are processed
@@ -27,6 +28,30 @@ exports.IncludeDocs = IncludeDocs
 exports.getLinkDocuments = getLinkDocuments
 exports.createLinkView = createLinkView

+async function getLinksForRows(appId, rows) {
+  let tableIds = [...new Set(rows.map(el => el.tableId))]
+  // start by getting all the link values for performance reasons
+  let responses = flatten(
+    await Promise.all(
+      tableIds.map(tableId =>
+        getLinkDocuments({
+          appId,
+          tableId: tableId,
+          includeDocs: IncludeDocs.EXCLUDE,
+        })
+      )
+    )
+  )
+  // have to get unique as the previous table query can
+  // return duplicates, could be querying for both tables in a relation
+  return getUniqueByProp(
+    responses
+      // create a unique ID which we can use for getting only unique ones
+      .map(el => ({ ...el, unique: el.id + el.fieldName })),
+    "unique"
+  )
+}
+
 /**
  * Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
  * @param {string} eventType states what type of change which is occurring, means this can be expanded upon in the
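The new getLinksForRows helper batches the lookup: one getLinkDocuments query per distinct tableId found in the incoming rows, flattened and de-duplicated on id + fieldName, rather than one query per row. A minimal usage sketch, with the shape of a link value inferred only from the properties this diff reads off it (thisId, id and fieldName); the concrete values are hypothetical:

const rows = [{ _id: "row-1", tableId: "table-a" }]
const links = await getLinksForRows(appId, rows)
// each entry looks roughly like:
// { id: "row-2", thisId: "row-1", fieldName: "link", unique: "row-2link" }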
@@ -99,42 +124,56 @@ exports.attachLinkInfo = async (appId, rows) => {
     rows = [rows]
     wasArray = false
   }
-  let tableIds = [...new Set(rows.map(el => el.tableId))]
-  // start by getting all the link values for performance reasons
-  let responses = flatten(
-    await Promise.all(
-      tableIds.map(tableId =>
-        getLinkDocuments({
-          appId,
-          tableId: tableId,
-          includeDocs: IncludeDocs.EXCLUDE,
-        })
-      )
-    )
-  )
+  const links = await getLinksForRows(appId, rows)
   // now iterate through the rows and all field information
   for (let row of rows) {
-    // get all links for row, ignore fieldName for now
-    // have to get unique as the previous table query can
-    // return duplicates, could be querying for both tables in a relation
-    const linkVals = getUniqueByProp(
-      responses
-        // find anything that matches the row's ID we are searching for
-        .filter(el => el.thisId === row._id)
-        // create a unique ID which we can use for getting only unique ones
-        .map(el => ({ ...el, unique: el.id + el.fieldName })),
-      "unique"
-    )
-    for (let linkVal of linkVals) {
-      // work out which link pertains to this row
-      if (!(row[linkVal.fieldName] instanceof Array)) {
-        row[linkVal.fieldName] = [linkVal.id]
-      } else {
-        row[linkVal.fieldName].push(linkVal.id)
-      }
-    }
+    // find anything that matches the row's ID we are searching for and join it
+    links
+      .filter(el => el.thisId === row._id)
+      .forEach(link => {
+        if (row[link.fieldName] == null) {
+          row[link.fieldName] = []
+        }
+        row[link.fieldName].push(link.id)
+      })
   }
   // if it was an array when it came in then handle it as an array in response
   // otherwise return the first element as there was only one input
   return wasArray ? rows : rows[0]
 }

+exports.attachLinkedRows = async (appId, rows) => {
+  let wasArray = true
+  if (!(rows instanceof Array)) {
+    rows = [rows]
+    wasArray = false
+  }
+  const db = new CouchDB(appId)
+  const links = (await getLinksForRows(appId, rows)).filter(link =>
+    rows.some(row => row._id === link.thisId)
+  )
+  const linkedRows = (
+    await db.find({
+      selector: {
+        _id: {
+          $in: links.map(link => link.id),
+        },
+      },
+    })
+  ).docs
+  for (let row of rows) {
+    links
+      .filter(link => link.thisId === row._id)
+      .forEach(link => {
+        if (row[link.fieldName] == null) {
+          row[link.fieldName] = []
+        }
+        const linkedRow = linkedRows.find(row => row._id === link.id)
+        if (linkedRow) {
+          row[link.fieldName].push(linkedRow)
+        }
+      })
+  }
+  return wasArray ? rows : rows[0]
+}
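attachLinkInfo keeps its previous output (link columns become arrays of linked row IDs) but now delegates the lookup to getLinksForRows, while the new attachLinkedRows goes a step further: it opens the app database and fetches every linked document in one Mango query ($in over the collected link IDs), then pushes the full documents onto the matching link columns. A sketch of the difference in output, calling through the linkRows module as the consumer below does and reusing the hypothetical rows from the previous example:

const row = { _id: "row-1", tableId: "table-a" }
const withIds = await linkRows.attachLinkInfo(appId, { ...row })
// withIds.link is roughly ["row-2"]
const withDocs = await linkRows.attachLinkedRows(appId, { ...row })
// withDocs.link is roughly [{ _id: "row-2", tableId: "table-b", /* ...the linked row's fields */ }]

The single db.find keeps the extra cost of the deeper enrichment to one round trip for all linked rows, rather than one fetch per link.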
@@ -167,7 +167,7 @@ exports.inputProcessing = async (user, table, row) => {
  */
 exports.outputProcessing = async (appId, table, rows) => {
   // attach any linked row information
-  const outputRows = await linkRows.attachLinkInfo(appId, rows)
+  const outputRows = await linkRows.attachLinkedRows(appId, rows)
   // update the attachments URL depending on hosting
   if (env.CLOUD && env.SELF_HOSTED) {
     for (let [property, column] of Object.entries(table.schema)) {
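With outputProcessing switched over to attachLinkedRows, every read path that runs rows through output processing now gets the deeper enrichment, which is exactly what the updated test at the top of this commit asserts:

const enriched = await outputProcessing(appId, table, [secondRow])
// enriched[0].link[0] is now a full row object, hence the assertion on ._id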