Fixing issue uncovered by test case.
parent b95b53f648
commit 04ea2b99e9

@@ -19,6 +19,17 @@ import { cloneDeep, merge } from "lodash/fp"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"

+function mergeRows(row1: Row, row2: Row) {
+  const merged = merge(row1, row2)
+  // make sure any specifically undefined fields are removed
+  for (const key of Object.keys(row2)) {
+    if (row2[key] === undefined) {
+      delete merged[key]
+    }
+  }
+  return merged
+}
+
 /**
  * This function runs through a list of enriched rows, looks at the rows which
  * are related and then checks if they need the state of their formulas

@@ -164,7 +175,11 @@ export async function finaliseRow(

   await db.put(row)
   const retrieved = await db.tryGet<Row>(row._id)
-  enrichedRow = merge(retrieved, enrichedRow)
+  if (!retrieved) {
+    throw new Error(`Unable to retrieve row ${row._id} after saving.`)
+  }
+
+  enrichedRow = mergeRows(retrieved, enrichedRow)
   enrichedRow = await processFormulas(table, enrichedRow, {
     dynamic: false,
   })

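Note (not part of the commit): a plain lodash merge is not enough in finaliseRow because merge skips source properties that resolve to undefined, so a field the enriched row explicitly cleared would be resurrected from the copy just read back from the database. The sketch below, with an illustrative loose Row type and sample data, shows the difference the new mergeRows helper makes.

import { merge } from "lodash/fp"

type Row = Record<string, any>

// same shape as the helper added in the first hunk
function mergeRows(row1: Row, row2: Row): Row {
  const merged = merge(row1, row2)
  // drop any fields that row2 explicitly set to undefined
  for (const key of Object.keys(row2)) {
    if (row2[key] === undefined) {
      delete merged[key]
    }
  }
  return merged
}

// illustrative rows: the stored copy still has the user link,
// while the enriched row has had it explicitly cleared
const retrieved: Row = { _id: "ro_1", name: "foo", user: [{ _id: "us_1" }] }
const enriched: Row = { _id: "ro_1", name: "foo", user: undefined }

console.log(merge(retrieved, enriched).user)     // [{ _id: "us_1" }] – the cleared link comes back
console.log(mergeRows(retrieved, enriched).user) // undefined – the cleared link stays cleared
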
@@ -2571,14 +2571,12 @@ if (descriptions.length) {
     let tableId: string
     let o2mData: Row[]
     let m2mData: Row[]
-    let isRelationship: boolean

     beforeAll(async () => {
       const table = await config.api.table.save(
         defaultTable({ schema: relSchema() })
       )
       tableId = table._id!
-      isRelationship = relSchema().user.type === FieldType.LINK

       o2mData = [
         await dataGenerator(o2mTable._id!),

@@ -2755,19 +2753,8 @@ if (descriptions.length) {
         user: null,
         users: null,
       })
-      expect(updatedRow).toEqual({
-        name: "foo",
-        description: "bar",
-        tableId,
-        _id: row._id,
-        _rev: expect.any(String),
-        id: isInternal ? undefined : expect.any(Number),
-        type: isInternal ? "row" : undefined,
-        createdAt: isInternal ? new Date().toISOString() : undefined,
-        updatedAt: isInternal ? new Date().toISOString() : undefined,
-        users: isRelationship ? undefined : [],
-        user: isRelationship ? undefined : [],
-      })
+      expect(updatedRow.user).toBeUndefined()
+      expect(updatedRow.users).toBeUndefined()
     })

     it("fetch all will populate the relationships", async () => {

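Note (illustrative, not part of the diff): the reworked assertions above only pin down the relationship columns instead of matching the whole row shape. A condensed version of the scenario the test covers might look like the following; tableId and the field names come from the diff context, while config.api.row.save and the user fixture are assumptions.

it("removes user relationships that are patched to null", async () => {
  // save a row with both single- and multi-user links populated (assumed helpers)
  const row = await config.api.row.save(tableId, {
    name: "foo",
    description: "bar",
    user: [user],
    users: [user],
  })

  // clear both relationship columns
  const updatedRow = await config.api.row.save(tableId, {
    ...row,
    user: null,
    users: null,
  })

  // the cleared columns should be absent, not empty arrays
  expect(updatedRow.user).toBeUndefined()
  expect(updatedRow.users).toBeUndefined()
})
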
@@ -153,10 +153,10 @@ export async function processOutputBBReference(
 }

 export async function processOutputBBReferences(
-  value: string | null | undefined,
+  value: string | string[] | null | undefined,
   subtype: BBReferenceFieldSubType
 ): Promise<UserReferenceInfo[] | undefined> {
-  if (!value) {
+  if (!value || (Array.isArray(value) && value.length === 0)) {
     return undefined
   }
   const ids =
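
Note (a rough sketch, not the real Budibase implementation): the signature change above lets the output processor accept either a single string of user IDs or an already-parsed array, and the new guard treats an empty array the same as a missing value. Something along these lines captures the intent; the subtype parameter is omitted, and the user lookup and the comma-split of string values are assumptions, since the diff is truncated at the const ids = line.

type UserReferenceInfo = { _id: string; email?: string }

// placeholder for the real user lookup (assumption)
async function getUsersByIds(ids: string[]): Promise<UserReferenceInfo[]> {
  return ids.map(_id => ({ _id }))
}

export async function processOutputBBReferences(
  value: string | string[] | null | undefined
): Promise<UserReferenceInfo[] | undefined> {
  // no value, or an explicitly empty set of references, means "no users"
  if (!value || (Array.isArray(value) && value.length === 0)) {
    return undefined
  }
  // the diff truncates here; splitting comma-separated strings is an assumption
  const ids = Array.isArray(value) ? value : value.split(",")
  return getUsersByIds(ids)
}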