Performance improvements after testing - switching to temporary in-memory queries for cloud views.
parent 0651b874ac
commit 8f70ad9cdc
@@ -107,6 +107,7 @@
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",
+    "pouchdb-adapter-memory": "^7.2.1",
     "server-destroy": "1.0.1",
     "svelte": "^3.38.2",
     "to-json-schema": "0.2.5",
@@ -132,7 +133,6 @@
     "express": "^4.17.1",
     "jest": "^27.0.5",
     "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
     "prettier": "^2.3.1",
     "rimraf": "^3.0.2",
     "supertest": "^4.0.2",
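Together these two hunks move pouchdb-adapter-memory from devDependencies into dependencies: the memory adapter is now loaded at runtime by the in-memory view runner changed further down, so it can no longer be a test-only install. For reference, a minimal sketch of the registration the runner relies on (it mirrors the inMemoryView changes below):

    // the adapter must be registered before "memory" databases can be created
    const PouchDB = require("pouchdb")
    PouchDB.plugin(require("pouchdb-adapter-memory"))
    const Pouch = PouchDB.defaults({ adapter: "memory" }) // every new Pouch(...) now lives in memory
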
@@ -206,13 +206,7 @@ exports.fetchView = async ctx => {
   } else {
     const tableId = viewInfo.meta.tableId
     const data = await getRawTableData(ctx, db, tableId)
-    response = await inMemoryViews.runView(
-      appId,
-      viewInfo,
-      calculation,
-      group,
-      data
-    )
+    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
   }

   let rows
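Dropping the appId argument reflects the runner's new design: each query gets its own throwaway in-memory database (see the inMemoryView changes below), so there is no app-level database to address. Roughly, the arguments that remain are as follows (names as used in this commit; the shape of viewInfo is inferred from how the runner reads it):

    // viewInfo    - the stored view definition: { map, reduce?, meta }
    // calculation - truthy when the view computes a calculation rather than returning docs
    // group       - truthy when reduced results should be grouped by key
    // data        - the raw table rows fetched via getRawTableData
    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
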
@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
   // Populate the table with rows imported from CSV in a bulk update
   const data = await csvParser.transform(dataImport)

+  let finalData = []
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id)
     row.tableId = table._id
-    const processed = inputProcessing(user, table, row)
+    const processed = inputProcessing(user, table, row, {
+      noAutoRelationships: true,
+    })
     table = processed.table
     row = processed.row
-
-    // make sure link rows are up to date
-    row = await linkRows.updateLinks({
-      appId,
-      eventType: linkRows.EventType.ROW_SAVE,
-      row,
-      tableId: row.tableId,
-      table,
-    })

     for (let [fieldName, schema] of Object.entries(table.schema)) {
       // check whether the options need to be updated for inclusion as part of the data import
       if (
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
         ]
       }
     }
-    data[i] = row
+
+    // make sure link rows are up to date
+    finalData.push(
+      linkRows.updateLinks({
+        appId,
+        eventType: linkRows.EventType.ROW_SAVE,
+        row,
+        tableId: row.tableId,
+        table,
+      })
+    )
   }

-  await db.bulkDocs(data)
+  await db.bulkDocs(await Promise.all(finalData))
   let response = await db.put(table)
   table._rev = response._rev
 }
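The import loop no longer awaits linkRows.updateLinks per row: the promises are collected in finalData and resolved with one Promise.all just before the bulk write, letting the link updates run concurrently. The underlying pattern, sketched in isolation (processRow and saveAll are hypothetical stand-ins for updateLinks and db.bulkDocs):

    // collect promises in the loop, resolve them together afterwards
    async function importRows(rows) {
      const pending = []
      for (const row of rows) {
        pending.push(processRow(row)) // hypothetical per-row async step - note: no await here
      }
      const finalRows = await Promise.all(pending) // all rows settle concurrently
      await saveAll(finalRows) // hypothetical bulk write, like db.bulkDocs
    }

One caveat of this pattern: Promise.all rejects as soon as any single update fails, which matches the previous behaviour of awaiting each call in turn.
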
@@ -1,5 +1,6 @@
 const PouchDB = require("pouchdb")
 const memory = require("pouchdb-adapter-memory")
+const newid = require("./newid")

 PouchDB.plugin(memory)
 const Pouch = PouchDB.defaults({
@@ -7,16 +8,11 @@ const Pouch = PouchDB.defaults({
   adapter: "memory",
 })

-exports.runView = async (appId, view, calculation, group, data) => {
-  // appId doesn't really do anything since its all in memory
-  // use it just incase multiple databases at the same time
-  const db = new Pouch(appId)
-  await db.put({
-    _id: "_design/database",
-    views: {
-      runner: view,
-    },
-  })
+exports.runView = async (view, calculation, group, data) => {
+  // use a different ID each time for the DB, make sure they
+  // are always unique for each query, don't want overlap
+  // which could cause 409s
+  const db = new Pouch(newid())
   // write all the docs to the in memory Pouch (remove revs)
   await db.bulkDocs(
     data.map(row => ({
@@ -24,7 +20,16 @@ exports.runView = async (appId, view, calculation, group, data) => {
       _rev: undefined,
     }))
   )
-  const response = await db.query("database/runner", {
+  let fn = (doc, emit) => emit(doc._id)
+  eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
+  const queryFns = {
+    meta: view.meta,
+    map: fn,
+  }
+  if (view.reduce) {
+    queryFns.reduce = view.reduce
+  }
+  const response = await db.query(queryFns, {
     include_docs: !calculation,
     group: !!group,
   })
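Two things change in the runner itself. First, every query now writes to a database named with a fresh newid(), because reusing one database name and re-putting the same _design/database document would eventually collide on document revisions (the 409 conflicts the new comment mentions). Second, the design document is gone entirely: the stored map function string is rewritten so that emit arrives as its second parameter, then handed straight to db.query as an ad-hoc { map, reduce } object, PouchDB's temporary-view form. The eval is needed because design-document map functions call a global emit, whereas a plain function passed to db.query receives emit as an argument. A minimal sketch of that temporary-view form, assuming a db already populated with rows:

    // query with inline map/reduce functions instead of a persisted design doc
    const response = await db.query(
      {
        map: (doc, emit) => emit(doc.tableId), // emit is passed in, not global
        reduce: "_count", // built-in reduce names work here too
      },
      { group: true }
    )
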
@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
   // create DBs
   const db = new CouchDB(appId)
   const linkedRowIds = links.map(link => link.id)
-  let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+  const uniqueRowIds = [...new Set(linkedRowIds)]
+  let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
     row => row.doc
   )
+  // convert the unique db rows back to a full list of linked rows
+  const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
   // need to handle users as specific cases
   let [users, other] = partition(linked, linkRow =>
     linkRow._id.startsWith(USER_METDATA_PREFIX)
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
   let linkController = new LinkController(args)
   try {
     if (
-      !(await linkController.doesTableHaveLinkedFields()) &&
+      !(await linkController.doesTableHaveLinkedFields(table)) &&
       (oldTable == null ||
         !(await linkController.doesTableHaveLinkedFields(oldTable)))
     ) {
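getFullLinkedDocs previously fetched one doc per link, repeats included; it now collapses the IDs through a Set before the allDocs call and re-expands afterwards, so the returned list still lines up one-to-one with the incoming links while the database is only asked for each unique ID once. (The second hunk is a related fix: doesTableHaveLinkedFields is now passed the table explicitly, matching the oldTable call below it.) The same pattern in isolation (fetchDocs is a hypothetical stand-in for the allDocs call; a Map lookup replaces the commit's dbRows.find for O(1) re-expansion):

    // de-duplicate before fetching, then expand back to the original order
    async function getDocsForIds(ids) {
      const uniqueIds = [...new Set(ids)] // drop repeats before hitting the DB
      const docs = await fetchDocs(uniqueIds) // hypothetical bulk fetch, one doc per unique id
      const byId = new Map(docs.map(doc => [doc._id, doc]))
      return ids.map(id => byId.get(id)) // one entry per original id, repeats restored
    }
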
@@ -93,7 +93,12 @@ const TYPE_TRANSFORM_MAP = {
 * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
 * for automatic ID purposes.
 */
-function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
+function processAutoColumn(
+  user,
+  table,
+  row,
+  opts = { reprocessing: false, noAutoRelationships: false }
+) {
   let now = new Date().toISOString()
   // if a row doesn't have a revision then it doesn't exist yet
   const creating = !row._rev
@@ -103,7 +108,7 @@ function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
     }
     switch (schema.subtype) {
       case AutoFieldSubTypes.CREATED_BY:
-        if (creating && !opts.reprocessing) {
+        if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
           row[key] = [user.userId]
         }
         break
@@ -113,7 +118,7 @@ function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
         }
         break
       case AutoFieldSubTypes.UPDATED_BY:
-        if (!opts.reprocessing) {
+        if (!opts.reprocessing && !opts.noAutoRelationships) {
           row[key] = [user.userId]
         }
         break
@@ -155,9 +160,15 @@ exports.coerce = (row, type) => {
 * @param {object} user the user which is performing the input.
 * @param {object} row the row which is being created/updated.
 * @param {object} table the table which the row is being saved to.
+ * @param {object} opts some input processing options (like disabling auto-column relationships).
 * @returns {object} the row which has been prepared to be written to the DB.
 */
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+  user = {},
+  table,
+  row,
+  opts = { noAutoRelationships: false }
+) => {
   let clonedRow = cloneDeep(row)
   // need to copy the table so it can be differenced on way out
   const copiedTable = cloneDeep(table)
@@ -180,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
     }
   }
   // handle auto columns - this returns an object like {table, row}
-  return processAutoColumn(user, copiedTable, clonedRow)
+  return processAutoColumn(user, copiedTable, clonedRow, opts)
 }

 /**
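The rowProcessor changes thread the new noAutoRelationships option from the public inputProcessing entry point down into processAutoColumn, letting the CSV import above skip the CREATED_BY/UPDATED_BY auto-columns (the ones that write user relationships) without changing behaviour for any other caller, since the flag defaults to false. Condensed, the threading looks like this (a sketch with a stand-in field name, not the file's full logic):

    // the option defaults to false, so existing callers keep their behaviour
    function inputProcessing(user = {}, table, row, opts = { noAutoRelationships: false }) {
      // ...type coercion over the schema happens here...
      return processAutoColumn(user, table, row, opts) // opts travels down unchanged
    }

    function processAutoColumn(user, table, row, opts) {
      if (!opts.reprocessing && !opts.noAutoRelationships) {
        row.createdBy = [user.userId] // stand-in for the CREATED_BY/UPDATED_BY cases
      }
      return { table, row }
    }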