Some work to correctly handle JSON columns from SQS as well.

mike12345567 2024-05-23 14:57:38 +01:00
parent 90d646facb
commit 0c28d05d40
3 changed files with 16 additions and 17 deletions
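For context, "SQS" in this commit appears to refer to Budibase's SQLite-backed internal tables (the search code below builds its queries with SqlClient.SQL_LITE). SQLite hands JSON-typed columns back as plain strings, so they have to be parsed before rows are returned. A rough sketch of that behaviour, with an invented row and column name purely for illustration:

// Illustrative only: "attachments" stands in for any JSON column; the data
// here is made up and not taken from the commit.
const rawRow: Record<string, any> = {
  name: "example row",
  attachments: '[{"key":"a.png"}]', // stringified JSON as returned by SQLite
}

if (typeof rawRow.attachments === "string") {
  rawRow.attachments = JSON.parse(rawRow.attachments)
}
// rawRow.attachments is now a real array of objects again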


@@ -768,11 +768,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     return results.length ? results : [{ [operation.toLowerCase()]: true }]
   }
 
-  convertJsonStringColumns(
+  convertJsonStringColumns<T extends Record<string, any>>(
     table: Table,
-    results: Record<string, any>[],
+    results: T[],
     aliases?: Record<string, string>
-  ): Record<string, any>[] {
+  ): T[] {
     const tableName = getTableName(table)
     for (const [name, field] of Object.entries(table.schema)) {
       if (!this._isJsonColumn(field)) {
@@ -781,11 +781,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
       const fullName = `${aliasedTableName}.${name}`
       for (let row of results) {
-        if (typeof row[fullName] === "string") {
-          row[fullName] = JSON.parse(row[fullName])
+        if (typeof row[fullName as keyof T] === "string") {
+          row[fullName as keyof T] = JSON.parse(row[fullName])
         }
-        if (typeof row[name] === "string") {
-          row[name] = JSON.parse(row[name])
+        if (typeof row[name as keyof T] === "string") {
+          row[name as keyof T] = JSON.parse(row[name])
         }
       }
     }
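The signature change above makes convertJsonStringColumns generic, so the JSON-parsing pass returns the same row type it was given instead of widening to Record<string, any>[]. A minimal usage sketch, assuming a caller-side Row type; the declarations below are placeholders rather than code from the repository:

// Sketch: T flows from the input rows to the return value, so callers keep
// their typed rows after the JSON columns have been parsed.
interface Row {
  _id?: string
  [key: string]: any
}

declare const builder: SqlQueryBuilder
declare const table: Table
declare const rows: Row[]

// Parses any JSON columns still held as strings (in place) and hands back
// the rows with their Row typing intact.
const parsed: Row[] = builder.convertJsonStringColumns<Row>(table, rows)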

@@ -1 +1 @@
-Subproject commit 58338686c65024eb4140bb53965b618d9d647ec0
+Subproject commit 2b023157c53adf97bb4f8d4df61cf07c4ad13b07


@@ -29,6 +29,8 @@ import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
 
+const builder = new sql.Sql(SqlClient.SQL_LITE)
+
 function buildInternalFieldList(
   table: Table,
   tables: Table[],
@@ -99,7 +101,6 @@ function buildTableMap(tables: Table[]) {
 }
 
 async function runSqlQuery(json: QueryJson, tables: Table[]) {
-  const builder = new sql.Sql(SqlClient.SQL_LITE)
   const alias = new AliasTables(tables.map(table => table.name))
   return await alias.queryWithAliasing(json, async json => {
     const query = builder._query(json, {
@@ -184,15 +185,13 @@ export async function search(
   try {
     const rows = await runSqlQuery(request, allTables)
 
-    // process from the format of tableId.column to expected format
-    const processed = await sqlOutputProcessing(
-      rows,
-      table!,
-      allTablesMap,
-      relationships,
-      {
-        sqs: true,
-      }
-    )
+    // process from the format of tableId.column to expected format also
+    // make sure JSON columns corrected
+    const processed = builder.convertJsonStringColumns<Row>(
+      table,
+      await sqlOutputProcessing(rows, table!, allTablesMap, relationships, {
+        sqs: true,
+      })
+    )
 
     // check for pagination final row
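For readability, the nested call introduced in search() above can be unrolled as below. The variable names are illustrative; the calls, arguments and options mirror the diff, and the builder is the one now created at module scope so search() can reach it:

// 1. run the aliased SQL query against the SQLite (SQS) datastore
const rawRows = await runSqlQuery(request, allTables)

// 2. map tableId.column keys back to the expected row format
const output = await sqlOutputProcessing(rawRows, table!, allTablesMap, relationships, {
  sqs: true,
})

// 3. parse any JSON columns that came back as strings
const processed = builder.convertJsonStringColumns<Row>(table, output)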