Making external rows more consistent with the internal API. The formulas should be processed by outputProcessing, but they were also being processed inside the external request handling, which caused an overwrite: the external system processed the formulas/relationships internally and then squashed the relationships. Once the result reached the external API it went through normal output processing again, which ran over the already-squashed rows and produced inconsistent behaviour.
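The practical upshot is that the external row endpoints now return raw rows from the request layer and leave squashing to the shared output processing step, the same way the internal API does. A minimal sketch of that flow, assuming the Row/Table types from @budibase/types and using declared stand-ins rather than the real helpers' signatures:

import { Row, Table } from "@budibase/types"

// hypothetical stand-ins for the helpers referenced in the diff below
declare function handleExternalRead(table: Table): Promise<Row[]>
declare function outputProcessing(
  table: Table,
  rows: Row[],
  opts: { squash?: boolean; preserveLinks?: boolean }
): Promise<Row[]>

export async function findExternalRows(table: Table): Promise<Row[]> {
  // the external request layer no longer squashes relationships itself
  const rows = await handleExternalRead(table)
  // squashing happens exactly once, in the shared output processing step
  return outputProcessing(table, rows, { squash: true, preserveLinks: true })
}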
This commit is contained in:
parent ce6de27714
commit 635af0f76a
@@ -106,7 +106,6 @@ export async function fetchDeployments(ctx: any) {
     }
     ctx.body = Object.values(deployments.history).reverse()
   } catch (err) {
-    console.error(err)
     ctx.body = []
   }
 }

@@ -23,7 +23,6 @@ import {
   breakRowIdField,
   convertRowId,
   generateRowIdField,
-  getPrimaryDisplay,
   isRowId,
   isSQL,
 } from "../../../integrations/utils"

@@ -237,7 +236,7 @@ function basicProcessing({
   thisRow._id = generateIdForRow(row, table, isLinked)
   thisRow.tableId = table._id
   thisRow._rev = "rev"
-  return processFormulas(table, thisRow)
+  return thisRow
 }

 function fixArrayTypes(row: Row, table: Table) {

@@ -392,7 +391,7 @@ export class ExternalRequest<T extends Operation> {
     return { row: newRow, manyRelationships }
   }

-  squashRelationshipColumns(
+  processRelationshipFields(
     table: Table,
     row: Row,
     relationships: RelationshipsJson[]

@@ -402,7 +401,6 @@ export class ExternalRequest<T extends Operation> {
       if (!linkedTable || !row[relationship.column]) {
         continue
       }
-      const display = linkedTable.primaryDisplay
       for (let key of Object.keys(row[relationship.column])) {
         let relatedRow: Row = row[relationship.column][key]
         // add this row as context for the relationship

@@ -411,15 +409,10 @@ export class ExternalRequest<T extends Operation> {
            relatedRow[col.name] = [row]
          }
        }
+        // process additional types
+        relatedRow = processDates(table, relatedRow)
        relatedRow = processFormulas(linkedTable, relatedRow)
-        let relatedDisplay
-        if (display) {
-          relatedDisplay = getPrimaryDisplay(relatedRow[display])
-        }
-        row[relationship.column][key] = {
-          primaryDisplay: relatedDisplay || "Invalid display column",
-          _id: relatedRow._id,
-        }
+        row[relationship.column][key] = relatedRow
      }
    }
    return row

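For context, "squashing" a relationship column means collapsing each full related row into a small { _id, primaryDisplay } stub, which is roughly what the removed block above did and what the squash option of outputProcessing now does in one place. A hedged sketch of that collapse, with an illustrative helper name:

import { Row } from "@budibase/types"

function squashRelated(relatedRows: Row[], displayColumn?: string): Row[] {
  return relatedRows.map(related => ({
    _id: related._id,
    primaryDisplay: displayColumn
      ? related[displayColumn]
      : "Invalid display column",
  }))
}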
@@ -521,14 +514,14 @@ export class ExternalRequest<T extends Operation> {
       )
     }

-    // Process some additional data types
-    let finalRowArray = Object.values(finalRows)
-    finalRowArray = processDates(table, finalRowArray)
-    finalRowArray = processFormulas(table, finalRowArray) as Row[]
-
-    return finalRowArray.map((row: Row) =>
-      this.squashRelationshipColumns(table, row, relationships)
+    // make sure all related rows are correct
+    let finalRowArray = Object.values(finalRows).map(row =>
+      this.processRelationshipFields(table, row, relationships)
     )

+    // process some additional types
+    finalRowArray = processDates(table, finalRowArray)
+
+    return finalRowArray
   }

   /**

@@ -663,7 +656,7 @@ export class ExternalRequest<T extends Operation> {
     linkPrimary,
     linkSecondary,
   }: {
-    row: { [key: string]: any }
+    row: Row
     linkPrimary: string
     linkSecondary?: string
   }) {

@@ -76,6 +76,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     relationships: true,
   })
   const enrichedRow = await outputProcessing(table, row, {
+    squash: true,
     preserveLinks: true,
   })
   return {

@@ -119,7 +120,10 @@ export async function save(ctx: UserCtx) {
     })
     return {
       ...response,
-      row: await outputProcessing(table, row, { preserveLinks: true }),
+      row: await outputProcessing(table, row, {
+        preserveLinks: true,
+        squash: true,
+      }),
     }
   } else {
     return response

@@ -140,7 +144,7 @@ export async function find(ctx: UserCtx): Promise<Row> {
   const table = await sdk.tables.getTable(tableId)
   // Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case
   return await outputProcessing(table, row, {
-    squash: false,
+    squash: true,
     preserveLinks: true,
   })
 }

@@ -207,7 +211,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     // don't support composite keys right now
     const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
     const primaryLink = linkedTable.primary?.[0] as string
-    row[fieldName] = await handleRequest(Operation.READ, linkedTableId!, {
+    const relatedRows = await handleRequest(Operation.READ, linkedTableId!, {
       tables,
       filters: {
         oneOf: {

@@ -216,6 +220,10 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
       },
       includeSqlRelationships: IncludeRelationship.INCLUDE,
     })
+    row[fieldName] = await outputProcessing(linkedTable, relatedRows, {
+      squash: true,
+      preserveLinks: true,
+    })
   }
   return row
 }

@@ -86,12 +86,12 @@ export async function updateAllFormulasInTable(table: Table) {
   const db = context.getAppDB()
   // start by getting the raw rows (which will be written back to DB after update)
   let rows = (
-    await db.allDocs(
+    await db.allDocs<Row>(
       getRowParams(table._id, null, {
         include_docs: true,
       })
     )
-  ).rows.map(row => row.doc)
+  ).rows.map(row => row.doc as Row)
   // now enrich the rows, note the clone so that we have the base state of the
   // rows so that we don't write any of the enriched information back
   let enrichedRows = await outputProcessing(table, cloneDeep(rows), {

@@ -101,12 +101,12 @@ export async function updateAllFormulasInTable(table: Table) {
   for (let row of rows) {
     // find the enriched row, if found process the formulas
     const enrichedRow = enrichedRows.find(
-      (enriched: any) => enriched._id === row._id
+      (enriched: Row) => enriched._id === row._id
     )
     if (enrichedRow) {
       const processed = processFormulas(table, cloneDeep(row), {
         dynamic: false,
-        contextRows: enrichedRow,
+        contextRows: [enrichedRow],
       })
       // values have changed, need to add to bulk docs to update
       if (!isEqual(processed, row)) {

@@ -139,7 +139,7 @@ export async function finaliseRow(
   // use enriched row to generate formulas for saving, specifically only use as context
   row = processFormulas(table, row, {
     dynamic: false,
-    contextRows: enrichedRow,
+    contextRows: [enrichedRow],
   })
   // don't worry about rev, tables handle rev/lastID updates
   // if another row has been written since processing this will

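In both call sites above, contextRows lines up index-for-index with the rows being processed: static formulas are evaluated against the enriched row, while the raw row stays the document that gets written back. A usage sketch under that assumption, with a simplified synchronous stand-in for the real helper:

import { Row, Table } from "@budibase/types"

// simplified stand-in for the helper shown in the processing diff further down
declare function processFormulas<T extends Row | Row[]>(
  table: Table,
  rows: T,
  opts?: { dynamic?: boolean; contextRows?: Row[] }
): T

function recalculateStaticFormulas(table: Table, raw: Row, enriched: Row): Row {
  // evaluate static formulas with the enriched row as template context,
  // but keep the raw row as the document that will be persisted
  return processFormulas(table, raw, {
    dynamic: false,
    contextRows: [enriched],
  })
}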
@@ -163,7 +163,9 @@ export async function finaliseRow(
   const response = await db.put(row)
   // for response, calculate the formulas for the enriched row
   enrichedRow._rev = response.rev
-  enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false })
+  enrichedRow = processFormulas(table, enrichedRow, {
+    dynamic: false,
+  })
   // this updates the related formulas in other rows based on the relations to this row
   if (updateFormula) {
     await updateRelatedFormula(table, enrichedRow)

@@ -2,7 +2,6 @@ import LinkController from "./LinkController"
 import {
   IncludeDocs,
   getLinkDocuments,
-  createLinkView,
   getUniqueByProp,
   getRelatedTableForField,
   getLinkedTableIDs,

@@ -8,6 +8,7 @@ import {
   LinkDocumentValue,
   Table,
 } from "@budibase/types"
+import sdk from "../../sdk"

 export { createLinkView } from "../views/staticViews"

@@ -110,12 +111,11 @@ export function getLinkedTableIDs(table: Table): string[] {
 }

 export async function getLinkedTable(id: string, tables: Table[]) {
-  const db = context.getAppDB()
   let linkedTable = tables.find(table => table._id === id)
   if (linkedTable) {
     return linkedTable
   }
-  linkedTable = await db.get(id)
+  linkedTable = await sdk.tables.getTable(id)
   if (linkedTable) {
     tables.push(linkedTable)
   }

@@ -80,7 +80,10 @@ export async function search(options: SearchParams) {
     rows = rows.map((r: any) => pick(r, fields))
   }

-  rows = await outputProcessing(table, rows, { preserveLinks: true })
+  rows = await outputProcessing(table, rows, {
+    preserveLinks: true,
+    squash: true,
+  })

   // need wrapper object for bookmarks etc when paginating
   return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }

@@ -258,7 +258,7 @@ export async function outputProcessing<T extends Row[] | Row>(
   }

   // process formulas after the complex types had been processed
-  enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
+  enriched = processFormulas(table, enriched, { dynamic: true })

   if (opts.squash) {
     enriched = (await linkRows.squashLinksToPrimaryDisplay(

@@ -12,6 +12,11 @@ import {
   Table,
 } from "@budibase/types"

+interface FormulaOpts {
+  dynamic?: boolean
+  contextRows?: Row[]
+}
+
 /**
  * If the subtype has been lost for any reason this works out what
  * subtype the auto column should be.

@@ -40,52 +45,50 @@ export function fixAutoColumnSubType(
 /**
  * Looks through the rows provided and finds formulas - which it then processes.
  */
-export function processFormulas(
+export function processFormulas<T extends Row | Row[]>(
   table: Table,
-  rows: Row[] | Row,
-  { dynamic, contextRows }: any = { dynamic: true }
-) {
-  const single = !Array.isArray(rows)
-  let rowArray: Row[]
-  if (single) {
-    rowArray = [rows]
-    contextRows = contextRows ? [contextRows] : contextRows
-  } else {
-    rowArray = rows
-  }
-  for (let [column, schema] of Object.entries(table.schema)) {
-    if (schema.type !== FieldTypes.FORMULA) {
-      continue
-    }
+  inputRows: T,
+  { dynamic, contextRows }: FormulaOpts = { dynamic: true }
+): Promise<T> {
+  const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
+  if (rows)
+    for (let [column, schema] of Object.entries(table.schema)) {
+      if (schema.type !== FieldTypes.FORMULA) {
+        continue
+      }

       const isStatic = schema.formulaType === FormulaTypes.STATIC

       if (
         schema.formula == null ||
         (dynamic && isStatic) ||
         (!dynamic && !isStatic)
       ) {
         continue
       }
       // iterate through rows and process formula
-      for (let i = 0; i < rowArray.length; i++) {
-        let row = rowArray[i]
+      for (let i = 0; i < rows.length; i++) {
+        let row = rows[i]
         let context = contextRows ? contextRows[i] : row
-        rowArray[i] = {
+        rows[i] = {
           ...row,
           [column]: processStringSync(schema.formula, context),
+        }
       }
     }
-  }
-  return single ? rowArray[0] : rowArray
+  return Array.isArray(inputRows) ? rows : rows[0]
 }

 /**
  * Processes any date columns and ensures that those without the ignoreTimezones
  * flag set are parsed as UTC rather than local time.
  */
-export function processDates(table: Table, rows: Row[]) {
-  let datesWithTZ = []
+export function processDates<T extends Row | Row[]>(
+  table: Table,
+  inputRows: T
+): T {
+  let rows = Array.isArray(inputRows) ? inputRows : [inputRows]
+  let datesWithTZ: string[] = []
   for (let [column, schema] of Object.entries(table.schema)) {
     if (schema.type !== FieldTypes.DATETIME) {
       continue

@@ -102,5 +105,6 @@ export function processDates(table: Table, rows: Row[]) {
       }
     }
   }
-  return rows
+
+  return Array.isArray(inputRows) ? rows : rows[0]
 }
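The refactor above gives processFormulas and processDates the same single-row/array contract: accept Row | Row[], work on an array internally, and hand back the shape that came in. A generic sketch of that pattern, with an illustrative helper name:

import { Row } from "@budibase/types"

function mapRows<T extends Row | Row[]>(inputRows: T, fn: (row: Row) => Row): T {
  const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
  const processed = rows.map(fn)
  // return a single row if a single row came in, otherwise the full array
  return (Array.isArray(inputRows) ? processed : processed[0]) as T
}

// callers no longer need to care whether they passed one row or many:
// mapRows(singleRow, addDefaults) -> Row
// mapRows(allRows, addDefaults)   -> Row[]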