inputProcessing async

Adria Navarro 2023-09-15 10:31:54 +02:00
parent 1cbfeafe39
commit edd8879d04
7 changed files with 21 additions and 17 deletions
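In short: this commit makes inputProcessing asynchronous, so every call site below gains an await, and the synchronous helpers layered on top of it (importToRows, tableImport, buildDefaultDocs) become async in turn. A minimal sketch of the call-site change, reusing names from the diff (not runnable on its own, since it leans on the repo's own functions and types):

    // before: synchronous call, destructured directly
    // let { table, row } = inputProcessing(ctx.user?._id, tableClone, combinedRow)

    // after: the function returns a Promise, so callers await before destructuring
    let { table, row } = await inputProcessing(ctx.user?._id, tableClone, combinedRow)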

View File

@@ -168,7 +168,7 @@ export const addSampleData = async (ctx: UserCtx) => {
     // Check if default datasource exists before creating it
     await sdk.datasources.get(DEFAULT_BB_DATASOURCE_ID)
   } catch (err: any) {
-    const defaultDbDocs = buildDefaultDocs()
+    const defaultDbDocs = await buildDefaultDocs()
     // add in the default db data docs - tables, datasource, rows and links
     await db.bulkDocs([...defaultDbDocs])

View File

@@ -92,7 +92,7 @@ export async function save(ctx: UserCtx) {
   }
   const table = await sdk.tables.getTable(tableId)
-  const { table: updatedTable, row } = inputProcessing(
+  const { table: updatedTable, row } = await inputProcessing(
     ctx.user?._id,
     cloneDeep(table),
     inputs

View File

@@ -59,7 +59,11 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
   const tableClone = cloneDeep(dbTable)
   // this returns the table and row incase they have been updated
-  let { table, row } = inputProcessing(ctx.user?._id, tableClone, combinedRow)
+  let { table, row } = await inputProcessing(
+    ctx.user?._id,
+    tableClone,
+    combinedRow
+  )
   const validateResult = await sdk.rows.utils.validate({
     row,
     table,
@@ -106,7 +110,7 @@ export async function save(ctx: UserCtx) {
   // need to copy the table so it can be differenced on way out
   const tableClone = cloneDeep(dbTable)
-  let { table, row } = inputProcessing(ctx.user?._id, tableClone, inputs)
+  let { table, row } = await inputProcessing(ctx.user?._id, tableClone, inputs)
   const validateResult = await sdk.rows.utils.validate({
     row,
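One reason every call site has to be updated in the same commit: with the old call shape left in place, table and row would be destructured off a pending Promise and both would come back undefined at runtime (TypeScript also rejects it once the return type becomes a Promise). A small self-contained sketch of that failure mode, with hypothetical names:

    async function prepare(): Promise<{ table: string; row: string }> {
      return { table: "t", row: "r" }
    }

    async function demo() {
      // Without await, prepare() is a Promise, so `{ table, row }` would both be
      // undefined (and a type error); awaiting first yields the real values.
      const { table, row } = await prepare()
      console.log(table, row) // "t" "r"
    }

    demo()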

View File

@@ -42,7 +42,7 @@ describe("utils", () => {
       const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
-      const result = importToRows(data, table, config.user)
+      const result = await importToRows(data, table, config.user)
       expect(result).toEqual([
         expect.objectContaining({
           autoId: 1,
@@ -89,7 +89,7 @@ describe("utils", () => {
       const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
-      const result = importToRows(data, table)
+      const result = await importToRows(data, table)
       expect(result).toHaveLength(3)
     })
   })
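For these awaits to be legal, the enclosing test callbacks must themselves be async; Jest then waits for the returned promise before settling the test. A hedged sketch of the shape the updated tests presumably take (the it(...) wrapper and its description are outside the visible hunk):

    it("imports rows", async () => {
      const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
      const result = await importToRows(data, table, config.user)
      expect(result).toHaveLength(3)
    })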

View File

@@ -99,7 +99,7 @@ export function makeSureTableUpToDate(table: any, tableToSave: any) {
   return tableToSave
 }
-export function importToRows(
+export async function importToRows(
   data: any[],
   table: Table,
   user: ContextUser | null = null
@@ -113,7 +113,7 @@ export function importToRows(
     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = inputProcessing(user?._id, table, row, {
+    const processed = await inputProcessing(user?._id, table, row, {
       noAutoRelationships: true,
     })
     row = processed.row
@@ -158,7 +158,7 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(rows, schema)
-  let finalData: any = importToRows(data, table, user)
+  let finalData: any = await importToRows(data, table, user)
   //Set IDs of finalData to match existing row if an update is expected
   if (identifierFields.length > 0) {
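The comment about auto IDs explains why importToRows awaits each row in sequence instead of firing them off in parallel: every call can advance the table's auto-increment counters, and the next row has to see the updated table. A self-contained sketch of that pattern (simplified, hypothetical names; the real inputProcessing does much more):

    interface CounterTable { lastId: number }
    interface ImportRow { autoId?: number; name?: string }

    // Stand-in for inputProcessing: hands out the next auto ID and returns the updated table.
    async function assignAutoId(table: CounterTable, row: ImportRow) {
      const nextId = table.lastId + 1
      return { table: { lastId: nextId }, row: { ...row, autoId: nextId } }
    }

    async function importAll(rows: ImportRow[], table: CounterTable) {
      const out: ImportRow[] = []
      for (const row of rows) {
        // Sequential awaits: with Promise.all every row would read the same lastId
        // and the generated auto IDs would collide.
        const processed = await assignAutoId(table, row)
        table = processed.table
        out.push(processed.row)
      }
      return out
    }

    importAll([{ name: "Alice" }, { name: "Bob" }], { lastId: 0 }).then(console.log)
    // [{ name: "Alice", autoId: 1 }, { name: "Bob", autoId: 2 }]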

View File

@@ -34,9 +34,9 @@ function syncLastIds(table: Table, rowCount: number) {
   })
 }
-function tableImport(table: Table, data: Row[]) {
+async function tableImport(table: Table, data: Row[]) {
   const cloneTable = cloneDeep(table)
-  const rowDocs = importToRows(data, cloneTable)
+  const rowDocs = await importToRows(data, cloneTable)
   syncLastIds(cloneTable, rowDocs.length)
   return { rows: rowDocs, table: cloneTable }
 }
@@ -601,20 +601,20 @@ export const DEFAULT_EXPENSES_TABLE_SCHEMA: Table = {
   },
 }
-export function buildDefaultDocs() {
-  const inventoryData = tableImport(
+export async function buildDefaultDocs() {
+  const inventoryData = await tableImport(
     DEFAULT_INVENTORY_TABLE_SCHEMA,
     inventoryImport
   )
-  const employeeData = tableImport(
+  const employeeData = await tableImport(
     DEFAULT_EMPLOYEE_TABLE_SCHEMA,
     employeeImport
   )
-  const jobData = tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
-  const expensesData = tableImport(
+  const jobData = await tableImport(DEFAULT_JOBS_TABLE_SCHEMA, jobsImport)
+  const expensesData = await tableImport(
     DEFAULT_EXPENSES_TABLE_SCHEMA,
     expensesImport
   )
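This file is the top of the chain the commit had to climb: importToRows awaits inputProcessing, tableImport awaits importToRows, buildDefaultDocs awaits tableImport, and the first hunk of the commit has addSampleData await buildDefaultDocs before bulk-writing the docs. A compressed sketch of that propagation, signatures only (bodies elided, parameter lists abbreviated):

    export async function inputProcessing(/* userId, table, row, opts */) { /* ... */ }
    export async function importToRows(/* data, table, user */) { /* awaits inputProcessing per row */ }
    async function tableImport(/* table, data */) { /* awaits importToRows */ }
    export async function buildDefaultDocs() { /* awaits tableImport for each default table */ }
    // addSampleData (controller) awaits buildDefaultDocs() and passes the result to db.bulkDocs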

View File

@@ -130,7 +130,7 @@ export function coerce(row: any, type: string) {
  * @param {object} opts some input processing options (like disabling auto-column relationships).
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-export function inputProcessing(
+export async function inputProcessing(
   userId: string | null | undefined,
   table: Table,
   row: Row,
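Putting the new signature together with the call sites above: the awaited result is destructured as { table, row }, and the options bag carries at least noAutoRelationships. A hedged usage sketch built only from what the diff shows (types and helpers are the repo's own):

    const { table: updatedTable, row: preparedRow } = await inputProcessing(
      ctx.user?._id,                  // userId
      cloneDeep(table),               // cloned so the original can be differenced on the way out
      row,                            // the incoming row
      { noAutoRelationships: true }   // option used by importToRows in this commit
    )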