inputProcessing async

parent e4caf8b737
commit 9860023c9e

This commit converts inputProcessing from a synchronous function to an async one and propagates the change up its call chain: importToRows, tableImport, and buildDefaultDocs all become async, and every caller and test gains an await.
@@ -168,7 +168,7 @@ export const addSampleData = async (ctx: UserCtx) => {
     // Check if default datasource exists before creating it
     await sdk.datasources.get(DEFAULT_BB_DATASOURCE_ID)
   } catch (err: any) {
-    const defaultDbDocs = buildDefaultDocs()
+    const defaultDbDocs = await buildDefaultDocs()

     // add in the default db data docs - tables, datasource, rows and links
     await db.bulkDocs([...defaultDbDocs])
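buildDefaultDocs now returns a Promise, so addSampleData has to await it before spreading the result into db.bulkDocs. A minimal sketch of the failure mode the new await prevents (illustrative only, not code from the commit):

// Without the await, defaultDbDocs would be a Promise, and a Promise
// is not iterable - the spread below would throw at runtime:
//   const defaultDbDocs = buildDefaultDocs()
//   await db.bulkDocs([...defaultDbDocs]) // TypeError: not iterable
const defaultDbDocs = await buildDefaultDocs()
await db.bulkDocs([...defaultDbDocs]) // spreads the resolved doc array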
@@ -80,7 +80,7 @@ export async function save(ctx: UserCtx) {
   }

   const table = await sdk.tables.getTable(tableId)
-  const { table: updatedTable, row } = inputProcessing(
+  const { table: updatedTable, row } = await inputProcessing(
     ctx.user?._id,
     cloneDeep(table),
     inputs
@@ -59,7 +59,11 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const tableClone = cloneDeep(dbTable)

   // this returns the table and row incase they have been updated
-  let { table, row } = inputProcessing(ctx.user?._id, tableClone, combinedRow)
+  let { table, row } = await inputProcessing(
+    ctx.user?._id,
+    tableClone,
+    combinedRow
+  )
   const validateResult = await sdk.rows.utils.validate({
     row,
     table,
@@ -106,7 +110,7 @@ export async function save(ctx: UserCtx) {
   // need to copy the table so it can be differenced on way out
   const tableClone = cloneDeep(dbTable)

-  let { table, row } = inputProcessing(ctx.user?._id, tableClone, inputs)
+  let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)

   const validateResult = await sdk.rows.utils.validate({
     row,
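Each of the three row-controller call sites above gains an await in the same commit, which matters because a missed one would fail silently: destructuring a Promise does not throw, it just yields undefined fields. A sketch of the hazard (illustrative, not code from the commit):

// Destructuring straight off the un-awaited call compiles under loose
// typings and does not throw - table and row are simply undefined,
// and the failure surfaces later, far from the cause:
//   let { table, row } = inputProcessing(ctx.user?._id, tableClone, inputs)
let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)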
@@ -42,7 +42,7 @@ describe("utils", () => {

       const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]

-      const result = importToRows(data, table, config.user)
+      const result = await importToRows(data, table, config.user)
       expect(result).toEqual([
         expect.objectContaining({
           autoId: 1,
@@ -89,7 +89,7 @@ describe("utils", () => {

       const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]

-      const result = importToRows(data, table)
+      const result = await importToRows(data, table)
       expect(result).toHaveLength(3)
     })
   })
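Since the test bodies now await importToRows, the enclosing it() callbacks must be declared async so that Jest waits on the returned promise before evaluating the assertions. A sketch of the required shape (the test name here is assumed, not taken from the commit):

// An awaiting test body must itself be async; otherwise the await
// is a syntax error inside the callback.
it("imports rows with incrementing auto IDs", async () => {
  const result = await importToRows(data, table, config.user)
  expect(result).toHaveLength(3)
})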
@@ -99,7 +99,7 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
   return tableToSave
 }

-export function importToRows(
+export async function importToRows(
   data: any[],
   table: Table,
   user: ContextUser | null = null
@@ -113,7 +113,7 @@ export function importToRows(

     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = inputProcessing(user?._id, table, row, {
+    const processed = await inputProcessing(user?._id, table, row, {
       noAutoRelationships: true,
     })
     row = processed.row
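The comment in this hunk explains why the rows are awaited one at a time rather than mapped to promises: each inputProcessing call reads the auto-ID state from the shared table reference, and the updated table is carried into the next iteration. A sketch of the distinction, assuming the surrounding per-row loop looks roughly like the original:

// Serialized awaits keep the auto-ID counter on `table` advancing
// deterministically (1, 2, 3, ...), matching the autoId assertions
// in the tests above.
for (let row of data) {
  const processed = await inputProcessing(user?._id, table, row, {
    noAutoRelationships: true,
  })
  row = processed.row
  table = processed.table
}
// Promise.all(data.map(...)) would instead start every call from the
// same counter state and could hand out duplicate auto IDs.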
@@ -158,7 +158,7 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(rows, schema)

-  let finalData: any = importToRows(data, table, user)
+  let finalData: any = await importToRows(data, table, user)

   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {
@@ -34,9 +34,9 @@ function syncLastIds(table: Table, rowCount: number) {
   })
 }

-function tableImport(table: Table, data: Row[]) {
+async function tableImport(table: Table, data: Row[]) {
   const cloneTable = cloneDeep(table)
-  const rowDocs = importToRows(data, cloneTable)
+  const rowDocs = await importToRows(data, cloneTable)
   syncLastIds(cloneTable, rowDocs.length)
   return { rows: rowDocs, table: cloneTable }
 }
@@ -601,20 +601,20 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
   },
 }

-export function buildDefaultDocs() {
-  const inventoryData = tableImport(
+export async function buildDefaultDocs() {
+  const inventoryData = await tableImport(
     DEFAULT_INVENTORY_TABLE_SCHEMA,
     inventoryImport
   )

-  const employeeData = tableImport(
+  const employeeData = await tableImport(
     DEFAULT_EMPLOYEE_TABLE_SCHEMA,
     employeeImport
   )

-  const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
+  const jobData = await tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)

-  const expensesData = tableImport(
+  const expensesData = await tableImport(
     DEFAULT_EXPENSES_TABLE_SCHEMA,
     expensesImport
   )
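With tableImport async, buildDefaultDocs awaits the four sample-table imports one after another. An observation rather than part of the commit: since each tableImport works on its own cloned table, the four calls are independent and could equally be started together, as in this sketch:

// Sketch (assumed alternative, not what the commit does): the imports
// touch separate table clones, so they could be awaited as a group.
const [inventoryData, employeeData, jobData, expensesData] = await Promise.all([
  tableImport(DEFAULT_INVENTORY_TABLE_SCHEMA, inventoryImport),
  tableImport(DEFAULT_EMPLOYEE_TABLE_SCHEMA, employeeImport),
  tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport),
  tableImport(DEFAULT_EXPENSES_TABLE_SCHEMA, expensesImport),
])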
@@ -130,7 +130,7 @@ export function coerce(row: any, type: string) {
  * @param {object} opts some input processing options (like disabling auto-column relationships).
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-export function inputProcessing(
+export async function inputProcessing(
   userId: string | null | undefined,
   table: Table,
   row: Row,
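This last hunk is the root of the whole change: inputProcessing itself becomes async, so the object described by the @returns tag above is now delivered through a Promise. A minimal usage sketch assembled from the call sites in this commit (the argument comments are annotations, not source):

// Calling the now-async inputProcessing; the resolved value carries the
// prepared row plus the possibly-updated table, as the callers above
// destructure it.
const { table: updatedTable, row: preparedRow } = await inputProcessing(
  ctx.user?._id,                // userId: string | null | undefined
  cloneDeep(table),             // table: Table (cloned so it can be diffed)
  inputs,                       // row: Row
  { noAutoRelationships: true } // opts, as passed by importToRows
)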