Performance improvements after testing - switching to temporary in-memory queries for cloud views.
parent cf5b7b1a99
commit c8dbf02acf
@@ -107,6 +107,7 @@
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",
+    "pouchdb-adapter-memory": "^7.2.1",
     "server-destroy": "1.0.1",
     "svelte": "^3.38.2",
     "to-json-schema": "0.2.5",
@@ -132,7 +133,6 @@
     "express": "^4.17.1",
     "jest": "^27.0.5",
     "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
     "prettier": "^2.3.1",
     "rimraf": "^3.0.2",
     "supertest": "^4.0.2",
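Note: the two package.json hunks are a single move. "pouchdb-adapter-memory" leaves devDependencies and joins the regular dependencies, since the in-memory adapter is now used at runtime by the cloud view query path rather than only by tests.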
@@ -206,13 +206,7 @@ exports.fetchView = async ctx => {
   } else {
     const tableId = viewInfo.meta.tableId
    const data = await getRawTableData(ctx, db, tableId)
-    response = await inMemoryViews.runView(
-      appId,
-      viewInfo,
-      calculation,
-      group,
-      data
-    )
+    response = await inMemoryViews.runView(viewInfo, calculation, group, data)
  }

  let rows
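Note: appId is dropped from the runView call because the in-memory module no longer names its throwaway database after the app (see the newid() change below); with one fewer argument the call also collapses back onto a single line.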
@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
   // Populate the table with rows imported from CSV in a bulk update
   const data = await csvParser.transform(dataImport)

+  let finalData = []
   for (let i = 0; i < data.length; i++) {
     let row = data[i]
     row._id = generateRowID(table._id)
     row.tableId = table._id
-    const processed = inputProcessing(user, table, row)
+    const processed = inputProcessing(user, table, row, {
+      noAutoRelationships: true,
+    })
     table = processed.table
     row = processed.row

-    // make sure link rows are up to date
-    row = await linkRows.updateLinks({
-      appId,
-      eventType: linkRows.EventType.ROW_SAVE,
-      row,
-      tableId: row.tableId,
-      table,
-    })
-
     for (let [fieldName, schema] of Object.entries(table.schema)) {
       // check whether the options need to be updated for inclusion as part of the data import
       if (
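Note: two changes here. inputProcessing is now told noAutoRelationships: true, so imported rows skip the user auto-relationship columns (see the row processor changes below), and the per-row await of linkRows.updateLinks moves out of this spot; the next hunk re-adds it in batched form, collected into finalData.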
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
         ]
       }
     }
-    data[i] = row
+
+    // make sure link rows are up to date
+    finalData.push(
+      linkRows.updateLinks({
+        appId,
+        eventType: linkRows.EventType.ROW_SAVE,
+        row,
+        tableId: row.tableId,
+        table,
+      })
+    )
   }

-  await db.bulkDocs(data)
+  await db.bulkDocs(await Promise.all(finalData))
   let response = await db.put(table)
   table._rev = response._rev
 }
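The loop now starts every link update without awaiting it, then resolves the whole batch at once before the bulk write. A minimal sketch of that pattern, with a hypothetical updateLinksForRow standing in for the linkRows.updateLinks call:

async function importRows(db, rows, updateLinksForRow) {
  const pending = []
  for (const row of rows) {
    // returns a promise; nothing is awaited inside the loop
    pending.push(updateLinksForRow(row))
  }
  // all link updates run concurrently, then land in one bulk write
  const finalRows = await Promise.all(pending)
  await db.bulkDocs(finalRows)
}

Trading one awaited round trip per row for a single concurrent batch is presumably part of the performance win the commit message describes.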
@@ -1,5 +1,6 @@
 const PouchDB = require("pouchdb")
 const memory = require("pouchdb-adapter-memory")
+const newid = require("./newid")

 PouchDB.plugin(memory)
 const Pouch = PouchDB.defaults({
@@ -7,16 +8,11 @@ const Pouch = PouchDB.defaults({
   adapter: "memory",
 })

-exports.runView = async (appId, view, calculation, group, data) => {
-  // appId doesn't really do anything since its all in memory
-  // use it just incase multiple databases at the same time
-  const db = new Pouch(appId)
-  await db.put({
-    _id: "_design/database",
-    views: {
-      runner: view,
-    },
-  })
+exports.runView = async (view, calculation, group, data) => {
+  // use a different ID each time for the DB, make sure they
+  // are always unique for each query, don't want overlap
+  // which could cause 409s
+  const db = new Pouch(newid())
   // write all the docs to the in memory Pouch (remove revs)
   await db.bulkDocs(
     data.map(row => ({
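Note: previously every query opened an in-memory Pouch named after the app and wrote a _design/database doc into it, so two concurrent queries for the same app could race on that design doc and fail with 409 conflicts. Each call now gets a uniquely named throwaway database via newid() and writes no design doc at all; the view runs as a temporary query instead (next hunk).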
@@ -24,7 +20,16 @@ exports.runView = async (view, calculation, group, data) => {
       _rev: undefined,
     }))
   )
-  const response = await db.query("database/runner", {
+  let fn = (doc, emit) => emit(doc._id)
+  eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
+  const queryFns = {
+    meta: view.meta,
+    map: fn,
+  }
+  if (view.reduce) {
+    queryFns.reduce = view.reduce
+  }
+  const response = await db.query(queryFns, {
     include_docs: !calculation,
     group: !!group,
   })
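Stored views keep their map function as a source string ("function (doc) { ... }"). The eval line rebinds fn to that source with an explicit emit parameter, and PouchDB accepts the resulting { map, reduce } object in place of a "design/view" name, running it as a one-off temporary view. A self-contained sketch of those mechanics (the database name, sample docs, and map string are invented for illustration):

const PouchDB = require("pouchdb")
PouchDB.plugin(require("pouchdb-adapter-memory"))

async function demo() {
  const db = new PouchDB("scratch", { adapter: "memory" })
  await db.bulkDocs([
    { _id: "a", group: "x" },
    { _id: "b", group: "x" },
  ])

  // a stored map, as it would arrive from the view definition
  const storedMap = "function (doc) { emit(doc.group) }"

  // rebind fn to the stored source, adding an explicit emit parameter
  let fn = (doc, emit) => emit(doc._id)
  eval("fn = " + storedMap.replace("function (doc)", "function (doc, emit)"))

  // an ad-hoc { map, reduce } object runs as a temporary view,
  // so no _design document needs to be written first
  const res = await db.query({ map: fn, reduce: "_count" }, { group: true })
  console.log(res.rows) // [ { key: 'x', value: 2 } ]
}

demo()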
@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
   // create DBs
   const db = new CouchDB(appId)
   const linkedRowIds = links.map(link => link.id)
-  let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+  const uniqueRowIds = [...new Set(linkedRowIds)]
+  let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
     row => row.doc
   )
+  // convert the unique db rows back to a full list of linked rows
+  const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
   // need to handle users as specific cases
   let [users, other] = partition(linked, linkRow =>
     linkRow._id.startsWith(USER_METDATA_PREFIX)
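The old code passed the raw ID list to allDocs, so a row linked multiple times was fetched multiple times. The new code deduplicates the IDs before the fetch, then rebuilds the full (possibly duplicated) list by lookup. The same step in isolation, with invented sample data:

const linkedRowIds = ["row_1", "row_2", "row_1"]
const uniqueRowIds = [...new Set(linkedRowIds)] // ["row_1", "row_2"]

// pretend these docs came back from db.allDocs on the unique IDs
const dbRows = [{ _id: "row_1" }, { _id: "row_2" }]

// mapping the original ID list restores duplicates, in order
const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))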
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
   let linkController = new LinkController(args)
   try {
     if (
-      !(await linkController.doesTableHaveLinkedFields()) &&
+      !(await linkController.doesTableHaveLinkedFields(table)) &&
       (oldTable == null ||
         !(await linkController.doesTableHaveLinkedFields(oldTable)))
     ) {
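Note: doesTableHaveLinkedFields was previously called with no argument for the table being saved; passing table explicitly makes that first check operate on the same table the rest of the block uses, mirroring the existing oldTable call.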
@@ -93,7 +93,12 @@ const TYPE_TRANSFORM_MAP = {
  * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
  * for automatic ID purposes.
  */
-function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
+function processAutoColumn(
+  user,
+  table,
+  row,
+  opts = { reprocessing: false, noAutoRelationships: false }
+) {
   let now = new Date().toISOString()
   // if a row doesn't have a revision then it doesn't exist yet
   const creating = !row._rev
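Note: the only behavioural change here is the extra noAutoRelationships flag in the opts default; the signature is split across lines because it no longer fits on one.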
@@ -103,7 +108,7 @@ function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
       }
       switch (schema.subtype) {
         case AutoFieldSubTypes.CREATED_BY:
-          if (creating && !opts.reprocessing) {
+          if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
            row[key] = [user.userId]
          }
          break
@@ -113,7 +118,7 @@ function processAutoColumn(user, table, row, opts = { reprocessing: false }) {
         }
         break
       case AutoFieldSubTypes.UPDATED_BY:
-        if (!opts.reprocessing) {
+        if (!opts.reprocessing && !opts.noAutoRelationships) {
          row[key] = [user.userId]
        }
        break
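Note: both user-relationship subtypes (CREATED_BY and UPDATED_BY) now also check opts.noAutoRelationships, so a CSV import, which passes the flag, never stamps rows with links back to the importing user.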
@@ -155,9 +160,15 @@ exports.coerce = (row, type) => {
  * @param {object} user the user which is performing the input.
  * @param {object} row the row which is being created/updated.
  * @param {object} table the table which the row is being saved to.
+ * @param {object} opts some input processing options (like disabling auto-column relationships).
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+  user = {},
+  table,
+  row,
+  opts = { noAutoRelationships: false }
+) => {
   let clonedRow = cloneDeep(row)
   // need to copy the table so it can be differenced on way out
   const copiedTable = cloneDeep(table)
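The new opts parameter defaults to off, so existing callers are untouched. Usage as it appears in the CSV import path above:

const processed = inputProcessing(user, table, row, {
  noAutoRelationships: true,
})
table = processed.table
row = processed.row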
@@ -180,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
     }
   }
   // handle auto columns - this returns an object like {table, row}
-  return processAutoColumn(user, copiedTable, clonedRow)
+  return processAutoColumn(user, copiedTable, clonedRow, opts)
 }

 /**
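Note: forwarding opts here completes the chain from handleDataImport through inputProcessing down to processAutoColumn, so the noAutoRelationships flag set at import time actually reaches the auto-column logic.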