Merge pull request #3019 from Budibase/fix/large-csv-import
Fix performance of importing large CSVs
commit 9b1da162d6
@@ -1,5 +1,4 @@
 const CouchDB = require("../../../db")
-const linkRows = require("../../../db/linkedRows")
 const csvParser = require("../../../utilities/csvParser")
 const {
   getRowParams,
@@ -93,19 +92,10 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
       }
     }

-    // make sure link rows are up to date
-    finalData.push(
-      linkRows.updateLinks({
-        appId,
-        eventType: linkRows.EventType.ROW_SAVE,
-        row,
-        tableId: row.tableId,
-        table,
-      })
-    )
+    finalData.push(row)
   }

-  await db.bulkDocs(await Promise.all(finalData))
+  await db.bulkDocs(finalData)
   let response = await db.put(table)
   table._rev = response._rev
 }
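For context, a minimal sketch of what handleDataImport looks like after this change: each imported row is collected as a plain object and written in a single bulkDocs call, instead of pushing one linkRows.updateLinks promise per row and awaiting them all before the bulk write. The parser call and row setup in the loop are assumptions (the diff does not show them); only the finalData.push(row) and db.bulkDocs(finalData) shape comes from the change above.

// Rough sketch, not the exact source. csvParser.transform() is assumed to
// yield an array of row objects for the table's schema.
const CouchDB = require("../../../db")
const csvParser = require("../../../utilities/csvParser")

exports.handleDataImport = async (appId, user, table, dataImport) => {
  const db = new CouchDB(appId)
  // assumption: the parser returns plain row objects ready to store
  const data = await csvParser.transform(dataImport)
  const finalData = []
  for (const row of data) {
    row.tableId = table._id
    // push the row itself; no per-row linkRows.updateLinks promise any more
    finalData.push(row)
  }
  // a single bulk write replaces awaiting one link-update promise per row
  await db.bulkDocs(finalData)
  let response = await db.put(table)
  table._rev = response._rev
}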