Getting processing of SQS relationships working.

mike12345567 2024-08-23 18:30:29 +01:00
parent ab5f50d2b8
commit 80f3e5954b
3 changed files with 31 additions and 13 deletions

View File

@@ -1,5 +1,5 @@
 // need to handle table name + field or just field, depending on if relationships used
-import { FieldType, Row, Table } from "@budibase/types"
+import { FieldSchema, FieldType, Row, Table } from "@budibase/types"
 import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
 import { generateRowIdField } from "../../../../integrations/utils"
@@ -82,7 +82,7 @@ export function basicProcessing({
       value = value.toString()
     }
     // all responses include "select col as table.col" so that overlaps are handled
-    if (value != null) {
+    else if (value != null) {
       thisRow[fieldName] = value
     }
   }
@@ -93,12 +93,17 @@ export function basicProcessing({
   } else {
     const columns = Object.keys(table.schema)
     for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
-      thisRow[internalColumn] = extractFieldValue({
+      const schema: FieldSchema | undefined = table.schema[internalColumn]
+      let value = extractFieldValue({
         row,
         tableName: table._id!,
         fieldName: internalColumn,
         isLinked,
       })
+      if (sqs && schema?.type === FieldType.LINK && typeof value === "string") {
+        value = JSON.parse(value)
+      }
+      thisRow[internalColumn] = value
     }
   }
   return thisRow
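
A minimal sketch of the behaviour introduced above, assuming the SQS path serialises relationship values as JSON strings (as the hunk's parse suggests): LINK columns coming back from SQS are JSON text and must be parsed back into arrays before being copied onto the response row. The helper name parseSqsLinkValue is invented for illustration; the commit performs this check inline in basicProcessing.

import { FieldSchema, FieldType } from "@budibase/types"

// sketch only - the real commit inlines this logic in basicProcessing
function parseSqsLinkValue(
  schema: FieldSchema | undefined,
  value: any,
  sqs: boolean
): any {
  // SQS serialises related rows as a JSON string, so LINK values need parsing
  if (sqs && schema?.type === FieldType.LINK && typeof value === "string") {
    return JSON.parse(value)
  }
  // non-SQS values pass through untouched
  return value
}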

View File

@@ -147,7 +147,7 @@ export async function sqlOutputProcessing(
       row._id = rowId
     }
     // this is a relationship of some sort
-    if (finalRows[rowId]) {
+    if (!opts?.sqs && finalRows[rowId]) {
       finalRows = await updateRelationshipColumns(
         table,
         tables,
@@ -174,6 +174,7 @@ export async function sqlOutputProcessing(
     finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })

     // do this at end once its been added to the final rows
+    if (!opts?.sqs) {
       finalRows = await updateRelationshipColumns(
         table,
         tables,
@@ -183,6 +184,7 @@ export async function sqlOutputProcessing(
         opts
       )
     }
+    }

   // make sure all related rows are correct
   let finalRowArray = []
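
The two guards above share one intent: when the query ran through SQS, relationship columns are already present on each row (parsed in basicProcessing), so the manual updateRelationshipColumns pass is skipped. The wrapper function and options type below are hypothetical, written only to show that control flow.

// hypothetical wrapper illustrating the guard added by this commit
interface SqlOutputOpts {
  sqs?: boolean
}

async function applyRelationshipColumns(
  finalRows: Record<string, any>,
  updateRelationshipColumns: () => Promise<Record<string, any>>,
  opts?: SqlOutputOpts
): Promise<Record<string, any>> {
  if (opts?.sqs) {
    // SQS rows already carry their relationships as parsed JSON - nothing to do
    return finalRows
  }
  // the external SQL path still builds relationship columns row by row
  return await updateRelationshipColumns()
}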

View File

@@ -37,9 +37,9 @@ import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
 import { processRowCountResponse } from "../../utils"
 import {
-  updateFilterKeys,
   getRelationshipColumns,
   getTableIDList,
+  updateFilterKeys,
 } from "../filters"
 import {
   dataFilters,
@@ -368,6 +368,17 @@ export async function search(
       })
     )

+    // make sure relationships have columns reversed correctly
+    for (let columnName of Object.keys(table.schema)) {
+      if (table.schema[columnName].type !== FieldType.LINK) {
+        continue
+      }
+      // process the relationships (JSON generated by SQS)
+      for (let row of processed) {
+        row[columnName] = reverseUserColumnMapping(row[columnName])
+      }
+    }
+
     // check for pagination final row
     let nextRow: boolean = false
     if (paginate && params.limit && rows.length > params.limit) {
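
A standalone sketch of the new loop in search(): every LINK column holds related rows generated by SQS, and their user column names are mapped back before the response is returned. reverseUserColumnMapping is stubbed as a pass-through here because its real implementation lives elsewhere in this module; the function name reverseRelationshipColumns is invented for the example.

import { FieldType, Row, Table } from "@budibase/types"

// pass-through stub for illustration - the real reverseUserColumnMapping
// helper is defined elsewhere in this file
function reverseUserColumnMapping(value: any): any {
  return value
}

// sketch of the post-processing step added to search()
function reverseRelationshipColumns(table: Table, processed: Row[]): Row[] {
  for (const columnName of Object.keys(table.schema)) {
    // only relationship (LINK) columns contain SQS-generated JSON rows
    if (table.schema[columnName].type !== FieldType.LINK) {
      continue
    }
    for (const row of processed) {
      row[columnName] = reverseUserColumnMapping(row[columnName])
    }
  }
  return processed
}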