Handling JSON types within relationships; they need to be parsed as well.

mike12345567 2024-09-10 15:45:32 +01:00
parent f63c95e44c
commit 2fd5c1a99f
3 changed files with 47 additions and 56 deletions
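For context, a rough standalone sketch of the problem this commit addresses (the table and column names below are made up and this is not Budibase's real API): when SQLite/SQS returns a row, a relationship column arrives as a JSON string of related rows, and any JSON-typed column inside those related rows is itself still a serialised string, so a single JSON.parse is not enough.

// Made-up example of a raw row as it comes back from the SQL layer.
const rawRow = {
  name: "parent row",
  // relationship column: a JSON string containing the related rows
  tasks: '[{"title":"task 1","tags":"[\\"urgent\\",\\"home\\"]"}]',
}

// First parse: recovers the related rows, but their own JSON-typed
// columns (here "tags") are still serialised strings.
const related = JSON.parse(rawRow.tasks)
console.log(typeof related[0].tags) // "string"

// Second parse, per JSON-typed field, is what the new handling adds
// for related rows.
related[0].tags = JSON.parse(related[0].tags)
console.log(related[0].tags) // ["urgent", "home"]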

View File

@@ -1,5 +1,5 @@
 // need to handle table name + field or just field, depending on if relationships used
-import { FieldSchema, FieldType, Row, Table } from "@budibase/types"
+import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types"
 import {
   helpers,
   PROTECTED_EXTERNAL_COLUMNS,
@@ -62,6 +62,22 @@ export function generateIdForRow(
   return generateRowIdField(idParts)
 }
 
+function fixJsonTypes(row: Row, table: Table) {
+  for (let [fieldName, schema] of Object.entries(table.schema)) {
+    if (JsonTypes.includes(schema.type) && typeof row[fieldName] === "string") {
+      try {
+        row[fieldName] = JSON.parse(row[fieldName])
+      } catch (err) {
+        if (!helpers.schema.isDeprecatedSingleUserColumn(schema)) {
+          // couldn't convert back to array, ignore
+          delete row[fieldName]
+        }
+      }
+    }
+  }
+  return row
+}
+
 export function basicProcessing({
   row,
   table,
@@ -136,12 +152,15 @@ export function basicProcessing({
         const sortField =
           relatedTable.primaryDisplay || relatedTable.primary![0]!
         thisRow[col] = (thisRow[col] as Row[])
-          .map(relatedRow => {
-            relatedRow._id = relatedRow._id
-              ? relatedRow._id
-              : generateIdForRow(relatedRow, relatedTable)
-            return relatedRow
-          })
+          .map(relatedRow =>
+            basicProcessing({
+              row: relatedRow,
+              table: relatedTable,
+              tables,
+              isLinked: false,
+              sqs,
+            })
+          )
           .sort((a, b) => {
             const aField = a?.[sortField],
               bField = b?.[sortField]
@@ -157,24 +176,5 @@
       }
     }
   }
-  return thisRow
-}
-
-export function fixArrayTypes(row: Row, table: Table) {
-  for (let [fieldName, schema] of Object.entries(table.schema)) {
-    if (
-      [FieldType.ARRAY, FieldType.BB_REFERENCE].includes(schema.type) &&
-      typeof row[fieldName] === "string"
-    ) {
-      try {
-        row[fieldName] = JSON.parse(row[fieldName])
-      } catch (err) {
-        if (!helpers.schema.isDeprecatedSingleUserColumn(schema)) {
-          // couldn't convert back to array, ignore
-          delete row[fieldName]
-        }
-      }
-    }
-  }
-  return row
+  return fixJsonTypes(thisRow, table)
 }
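A condensed sketch of what the reworked related-row handling now amounts to (processRow, JSON_TYPES and the _id fallback below are illustrative stand-ins, not the real Budibase helpers): each related row gets the same per-row treatment as a top-level row, a fallback _id plus schema-driven parsing of JSON-typed fields, before the relationship is sorted.

type Row = Record<string, any>
type FieldType = "string" | "array" | "link"
interface Table {
  primaryDisplay?: string
  schema: Record<string, { type: FieldType }>
}

// Field types stored as serialised JSON in SQLite.
const JSON_TYPES: FieldType[] = ["array", "link"]

// Stand-in for the per-row processing: backfill an _id, then parse any
// JSON-typed field that is still a string (the fixJsonTypes step).
function processRow(row: Row, table: Table): Row {
  row._id = row._id ?? JSON.stringify(Object.values(row))
  for (const [field, schema] of Object.entries(table.schema)) {
    if (JSON_TYPES.includes(schema.type) && typeof row[field] === "string") {
      try {
        row[field] = JSON.parse(row[field])
      } catch {
        delete row[field] // not valid JSON, drop it
      }
    }
  }
  return row
}

const relatedTable: Table = {
  primaryDisplay: "title",
  schema: { title: { type: "string" }, tags: { type: "array" } },
}
const relationship: Row[] = [
  { title: "b", tags: '["x"]' },
  { title: "a", tags: '["y"]' },
]
const sortField = relatedTable.primaryDisplay!
const processed = relationship
  .map(row => processRow(row, relatedTable))
  .sort((a, b) => String(a[sortField]).localeCompare(String(b[sortField])))
console.log(processed.map(r => r.tags)) // [["y"], ["x"]]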

View File

@@ -14,12 +14,7 @@ import {
   processFormulas,
 } from "../../../../utilities/rowProcessor"
 import { isKnexEmptyReadResponse } from "./sqlUtils"
-import {
-  basicProcessing,
-  generateIdForRow,
-  fixArrayTypes,
-  getInternalRowId,
-} from "./basic"
+import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
 import sdk from "../../../../sdk"
 import { processStringSync } from "@budibase/string-templates"
 import validateJs from "validate.js"
@@ -149,16 +144,13 @@ export async function sqlOutputProcessing(
       rowId = generateIdForRow(row, table)
       row._id = rowId
     }
-    const thisRow = fixArrayTypes(
-      basicProcessing({
-        row,
-        table,
-        tables: Object.values(tables),
-        isLinked: false,
-        sqs: opts?.sqs,
-      }),
-      table
-    )
+    const thisRow = basicProcessing({
+      row,
+      table,
+      tables: Object.values(tables),
+      isLinked: false,
+      sqs: opts?.sqs,
+    })
     if (thisRow._id == null) {
       throw new Error("Unable to generate row ID for SQL rows")
     }

View File

@@ -18,6 +18,7 @@ import {
 } from "@budibase/types"
 import {
   buildInternalRelationships,
+  fixJsonTypes,
   sqlOutputProcessing,
 } from "../../../../../api/controllers/row/utils"
 import sdk from "../../../../index"
@@ -182,11 +183,20 @@ function buildTableMap(tables: Table[]) {
   return tableMap
 }
 
-function reverseUserColumnMapping(rows: Row[]) {
+// table is only needed to handle relationships
+function reverseUserColumnMapping(rows: Row[], table?: Table) {
   const prefixLength = USER_COLUMN_PREFIX.length
   return rows.map(row => {
     const finalRow: Row = {}
     for (let key of Object.keys(row)) {
+      // handle relationships
+      if (
+        table?.schema[key]?.type === FieldType.LINK &&
+        typeof row[key] === "string"
+      ) {
+        // no table required, relationship rows don't contain relationships
+        row[key] = reverseUserColumnMapping(JSON.parse(row[key]))
+      }
       // it should be the first prefix
       const index = key.indexOf(USER_COLUMN_PREFIX)
       if (index !== -1) {
@@ -261,7 +271,7 @@ async function runSqlQuery(
   if (opts?.countTotalRows) {
     return processRowCountResponse(response)
   } else if (Array.isArray(response)) {
-    return reverseUserColumnMapping(response)
+    return reverseUserColumnMapping(response, json.meta.table)
  }
   return response
 }
@@ -368,17 +378,6 @@ export async function search(
     })
   )
 
-  // make sure relationships have columns reversed correctly
-  for (let columnName of Object.keys(table.schema)) {
-    if (table.schema[columnName].type !== FieldType.LINK) {
-      continue
-    }
-    // process the relationships (JSON generated by SQS)
-    for (let row of processed) {
-      row[columnName] = reverseUserColumnMapping(row[columnName])
-    }
-  }
-
   // check for pagination final row
   let nextRow: boolean = false
   if (paginate && params.limit && rows.length > params.limit) {
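The reverseUserColumnMapping change, as a standalone sketch (the PREFIX value, the linkColumns argument and the helper name are illustrative, not the real implementation): SQS prefixes user-defined columns, and a relationship column arrives as a JSON string of rows carrying the same prefix, so the mapper now parses that string and recurses over the nested rows instead of fixing them up in a separate pass after search().

type Row = Record<string, any>

// Illustrative prefix - stands in for USER_COLUMN_PREFIX.
const PREFIX = "usr__"

// Strip the prefix from every key; for columns flagged as relationships,
// parse the JSON string and recurse (nested rows never contain further
// relationships, so no column list is needed on the recursive call).
function reversePrefix(rows: Row[], linkColumns: string[] = []): Row[] {
  return rows.map(row => {
    const out: Row = {}
    for (const key of Object.keys(row)) {
      let value = row[key]
      const unprefixed = key.startsWith(PREFIX) ? key.slice(PREFIX.length) : key
      if (linkColumns.includes(unprefixed) && typeof value === "string") {
        value = reversePrefix(JSON.parse(value))
      }
      out[unprefixed] = value
    }
    return out
  })
}

const raw: Row[] = [
  {
    usr__name: "parent",
    usr__tasks: '[{"usr__title":"task 1"}]',
  },
]
console.log(JSON.stringify(reversePrefix(raw, ["tasks"])))
// [{"name":"parent","tasks":[{"title":"task 1"}]}]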