Some major performance optimisations: found that db.find is not fast unless it's indexed, and there's no point indexing for our internal relationship searching; instead we can use the allDocs call. This will likely be slow for very large calls (say 100K records), but for those sorts of calls we really need to paginate anyway.
This commit is contained in:
parent
5fab1529ec
commit
f080fa6537
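
The core of the change is swapping Mango find queries for keyed allDocs lookups. As a minimal sketch against plain PouchDB (Budibase's CouchDB class wraps the same API; fetchByIds and rowIds are illustrative names, not from this commit):

const PouchDB = require("pouchdb")
PouchDB.plugin(require("pouchdb-find"))

async function fetchByIds(db, rowIds) {
  // Before: a Mango query, which scans unless an index covers the selector
  // const found = await db.find({ selector: { _id: { $in: rowIds } } })
  // return found.docs

  // After: a keyed allDocs call, served by the built-in primary (_id) index
  const result = await db.allDocs({ keys: rowIds, include_docs: true })
  return result.rows.map(row => row.doc)
}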
@@ -24,7 +24,7 @@
   timeOnly: {
     hour: "numeric",
     minute: "numeric",
-    hour12: true,
+    hourCycle: "h12",
   },
 }
 const POLL_INTERVAL = 5000
@@ -14,6 +14,7 @@ const {
   outputProcessing,
 } = require("../../utilities/rowProcessor")
 const { FieldTypes } = require("../../constants")
+const { isEqual } = require("lodash")

 const TABLE_VIEW_BEGINS_WITH = `all${SEPARATOR}${DocumentTypes.TABLE}${SEPARATOR}`

@@ -68,7 +69,7 @@ exports.patch = async function(ctx) {
   }

   // this returns the table and row in case they have been updated
-  let { table, row } = await inputProcessing(ctx.user, dbTable, dbRow)
+  let { table, row } = inputProcessing(ctx.user, dbTable, dbRow)
   const validateResult = await validate({
     row,
     table,
@@ -101,6 +102,10 @@ exports.patch = async function(ctx) {
   }

   const response = await db.put(row)
+  // don't worry about rev, tables handle rev/lastID updates
+  if (!isEqual(dbTable, table)) {
+    await db.put(table)
+  }
   row._rev = response.rev
   row.type = "row"

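The isEqual guard above is what lets the row processor stop writing to the database itself (see the rowProcessor hunks further down): the caller snapshots the table, lets row processing update a copy (e.g. bumping an auto-ID column's lastID), and only pays for a table write when the deep compare detects a change. A standalone sketch of the pattern, with processRow standing in for inputProcessing and the table clone made explicit (an assumption here, needed for the compare to be meaningful):

const { cloneDeep } = require("lodash/fp")
const { isEqual } = require("lodash")

async function saveRowSketch(db, user, tableId, incomingRow) {
  const dbTable = await db.get(tableId) // snapshot before processing
  const table = cloneDeep(dbTable) // process a copy, keep the snapshot pristine
  const row = processRow(user, table, incomingRow) // may bump schema.<col>.lastID

  const response = await db.put(row)
  row._rev = response.rev
  if (!isEqual(dbTable, table)) {
    await db.put(table) // single extra write, only when something changed
  }
  return row
}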
@@ -136,11 +141,8 @@ exports.save = async function(ctx) {
   }

   // this returns the table and row in case they have been updated
-  let { table, row } = await inputProcessing(
-    ctx.user,
-    await db.get(inputs.tableId),
-    inputs
-  )
+  const dbTable = await db.get(inputs.tableId)
+  let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
   const validateResult = await validate({
     row,
     table,
@@ -174,6 +176,10 @@ exports.save = async function(ctx) {

   row.type = "row"
   const response = await db.put(row)
+  // don't worry about rev, tables handle rev/lastID updates
+  if (!isEqual(dbTable, table)) {
+    await db.put(table)
+  }
   row._rev = response.rev
   ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
   ctx.body = row
@@ -9,6 +9,7 @@ const {
 } = require("../../db/utils")
 const { isEqual } = require("lodash/fp")
 const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
+const { inputProcessing } = require("../../utilities/rowProcessor")

 async function checkForColumnUpdates(db, oldTable, updatedTable) {
   let updatedRows
@@ -61,6 +62,82 @@ function makeSureTableUpToDate(table, tableToSave) {
   return tableToSave
 }

+async function handleDataImport(user, table, dataImport) {
+  const db = new CouchDB(user.appId)
+  if (dataImport && dataImport.csvString) {
+    // Populate the table with rows imported from CSV in a bulk update
+    const data = await csvParser.transform(dataImport)
+
+    for (let i = 0; i < data.length; i++) {
+      let row = data[i]
+      row._id = generateRowID(table._id)
+      row.tableId = table._id
+      const processed = inputProcessing(user, table, row)
+      row = processed.row
+      // these auto-fields will never actually link anywhere (always builder)
+      for (let [fieldName, schema] of Object.entries(table.schema)) {
+        if (
+          schema.autocolumn &&
+          (schema.subtype === AutoFieldSubTypes.CREATED_BY ||
+            schema.subtype === AutoFieldSubTypes.UPDATED_BY)
+        ) {
+          delete row[fieldName]
+        }
+      }
+      table = processed.table
+      data[i] = row
+    }
+
+    await db.bulkDocs(data)
+    let response = await db.put(table)
+    table._rev = response._rev
+  }
+  return table
+}
+
+async function handleSearchIndexes(db, table) {
+  // create relevant search indexes
+  if (table.indexes && table.indexes.length > 0) {
+    const currentIndexes = await db.getIndexes()
+    const indexName = `search:${table._id}`
+
+    const existingIndex = currentIndexes.indexes.find(
+      existing => existing.name === indexName
+    )
+
+    if (existingIndex) {
+      const currentFields = existingIndex.def.fields.map(
+        field => Object.keys(field)[0]
+      )
+
+      // if index fields have changed, delete the original index
+      if (!isEqual(currentFields, table.indexes)) {
+        await db.deleteIndex(existingIndex)
+        // create/recreate the index with fields
+        await db.createIndex({
+          index: {
+            fields: table.indexes,
+            name: indexName,
+            ddoc: "search_ddoc",
+            type: "json",
+          },
+        })
+      }
+    } else {
+      // create/recreate the index with fields
+      await db.createIndex({
+        index: {
+          fields: table.indexes,
+          name: indexName,
+          ddoc: "search_ddoc",
+          type: "json",
+        },
+      })
+    }
+  }
+  return table
+}
+
 exports.fetch = async function(ctx) {
   const db = new CouchDB(ctx.user.appId)
   const body = await db.allDocs(
@@ -152,61 +229,12 @@ exports.save = async function(ctx) {
   const result = await db.post(tableToSave)
   tableToSave._rev = result.rev

-  // create relevant search indexes
-  if (tableToSave.indexes && tableToSave.indexes.length > 0) {
-    const currentIndexes = await db.getIndexes()
-    const indexName = `search:${result.id}`
-
-    const existingIndex = currentIndexes.indexes.find(
-      existing => existing.name === indexName
-    )
-
-    if (existingIndex) {
-      const currentFields = existingIndex.def.fields.map(
-        field => Object.keys(field)[0]
-      )
-
-      // if index fields have changed, delete the original index
-      if (!isEqual(currentFields, tableToSave.indexes)) {
-        await db.deleteIndex(existingIndex)
-        // create/recreate the index with fields
-        await db.createIndex({
-          index: {
-            fields: tableToSave.indexes,
-            name: indexName,
-            ddoc: "search_ddoc",
-            type: "json",
-          },
-        })
-      }
-    } else {
-      // create/recreate the index with fields
-      await db.createIndex({
-        index: {
-          fields: tableToSave.indexes,
-          name: indexName,
-          ddoc: "search_ddoc",
-          type: "json",
-        },
-      })
-    }
-  }
+  tableToSave = await handleSearchIndexes(db, tableToSave)
+  tableToSave = await handleDataImport(ctx.user, tableToSave, dataImport)

   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)

-  if (dataImport && dataImport.csvString) {
-    // Populate the table with rows imported from CSV in a bulk update
-    const data = await csvParser.transform(dataImport)
-
-    for (let row of data) {
-      row._id = generateRowID(tableToSave._id)
-      row.tableId = tableToSave._id
-    }
-
-    await db.bulkDocs(data)
-  }
-
   ctx.status = 200
   ctx.message = `Table ${ctx.request.body.name} saved successfully.`
   ctx.body = tableToSave
@@ -137,7 +137,7 @@ exports.addPermission = async (
 exports.createLinkedTable = async (request, appId) => {
   // get the ID to link to
   const table = await exports.createTable(request, appId)
-  table.displayName = "name"
+  table.primaryDisplay = "name"
   table.schema.link = {
     type: "link",
     fieldName: "link",
@@ -159,7 +159,7 @@ exports.attachLinkIDs = async (appId, rows) => {
  * @param {array<object>} rows The rows which are to be enriched with the linked display names/IDs.
  * @returns {Promise<Array>} The enriched rows after having display names/IDs attached to the linked fields.
  */
-exports.attachLinkedDisplayName = async (appId, table, rows) => {
+exports.attachLinkedPrimaryDisplay = async (appId, table, rows) => {
   const linkedTableIds = getLinkedTableIDs(table)
   if (linkedTableIds.length === 0) {
     return rows
@@ -170,18 +170,14 @@ exports.attachLinkedDisplayName = async (appId, table, rows) => {
     wasArray = false
   }
   const db = new CouchDB(appId)
-  const linkedTables = (await db.find(getMultiIDParams(linkedTableIds))).docs
+  const linkedTables = await Promise.all(linkedTableIds.map(id => db.get(id)))
   const links = (await getLinksForRows(appId, rows)).filter(link =>
     rows.some(row => row._id === link.thisId)
   )
-  const fields = [
-    "_id",
-    ...linkedTables
-      .filter(table => table.displayName != null)
-      .map(table => table.displayName),
-  ]
   const linkedRowIds = links.map(link => link.id)
-  const linked = (await db.find(getMultiIDParams(linkedRowIds, fields))).docs
+  const linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+    row => row.doc
+  )
   for (let row of rows) {
     links
       .filter(link => link.thisId === row._id)
@@ -194,10 +190,13 @@ exports.attachLinkedDisplayName = async (appId, table, rows) => {
       const linkedTable = linkedTables.find(
         table => table._id === linkedTableId
       )
-      if (linkedRow && linkedTable) {
-        row[link.fieldName].push(
-          linkedRow[linkedTable.displayName] || linkedRow._id
-        )
+      if (!linkedRow || !linkedTable) {
+        return
+      }
+      // need to handle an edge case where relationship just wasn't found
+      const value = linkedRow[linkedTable.primaryDisplay] || linkedRow._id
+      if (value) {
+        row[link.fieldName].push(value)
       }
     })
 }
@@ -279,18 +279,11 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
 }

 /**
- * This can be used with the db.find functionality to get a list of IDs
+ * This can be used with the db.allDocs to get a list of IDs
  */
-exports.getMultiIDParams = (ids, fields = null) => {
-  let config = {
-    selector: {
-      _id: {
-        $in: ids,
-      },
-    },
-  }
-  if (fields) {
-    config.fields = fields
-  }
-  return config
+exports.getMultiIDParams = ids => {
+  return {
+    keys: ids,
+    include_docs: true,
+  }
 }
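On the commit message's pagination point: when an ID list is too large for one call, allDocs results can be consumed in pages instead. A sketch using PouchDB's documented limit/startkey options (the helper name, page size, and loop shape are illustrative, not part of this commit):

async function allDocsPaged(db, onPage, pageSize = 1000) {
  let startkey
  for (;;) {
    const page = await db.allDocs({
      include_docs: true,
      limit: pageSize + 1, // fetch one extra row to learn the next page's start
      startkey,
    })
    const rows = page.rows.slice(0, pageSize)
    await onPage(rows.map(row => row.doc))
    if (page.rows.length <= pageSize) {
      break // no extra row came back, so this was the last page
    }
    startkey = page.rows[pageSize].key // startkey is inclusive; resume here
  }
}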
@@ -3,7 +3,6 @@ const { OBJ_STORE_DIRECTORY } = require("../constants")
 const linkRows = require("../db/linkedRows")
 const { cloneDeep } = require("lodash/fp")
 const { FieldTypes, AutoFieldSubTypes } = require("../constants")
-const CouchDB = require("../db")

 const BASE_AUTO_ID = 1

@@ -71,14 +70,13 @@ const TYPE_TRANSFORM_MAP = {
  * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
  * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
  * @param {Object} row The row which is to be updated with information for the auto columns.
- * @returns {Promise<{row: Object, table: Object}>} The updated row and table, the table may need to be updated
+ * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
  * for automatic ID purposes.
  */
-async function processAutoColumn(user, table, row) {
+function processAutoColumn(user, table, row) {
   let now = new Date().toISOString()
   // if a row doesn't have a revision then it doesn't exist yet
   const creating = !row._rev
-  let tableUpdated = false
   for (let [key, schema] of Object.entries(table.schema)) {
     if (!schema.autocolumn) {
       continue
@@ -104,17 +102,10 @@ async function processAutoColumn(user, table, row) {
       if (creating) {
         schema.lastID = !schema.lastID ? BASE_AUTO_ID : schema.lastID + 1
         row[key] = schema.lastID
-        tableUpdated = true
       }
       break
     }
   }
-  if (tableUpdated) {
-    const db = new CouchDB(user.appId)
-    const response = await db.put(table)
-    // update the revision
-    table._rev = response._rev
-  }
   return { table, row }
 }

@@ -143,7 +134,7 @@ exports.coerce = (row, type) => {
  * @param {object} table the table which the row is being saved to.
  * @returns {object} the row which has been prepared to be written to the DB.
  */
-exports.inputProcessing = async (user, table, row) => {
+exports.inputProcessing = (user, table, row) => {
   let clonedRow = cloneDeep(row)
   for (let [key, value] of Object.entries(clonedRow)) {
     const field = table.schema[key]
@@ -167,7 +158,11 @@
  */
 exports.outputProcessing = async (appId, table, rows) => {
   // attach any linked row information
-  const outputRows = await linkRows.attachLinkedDisplayName(appId, table, rows)
+  const outputRows = await linkRows.attachLinkedPrimaryDisplay(
+    appId,
+    table,
+    rows
+  )
   // update the attachments URL depending on hosting
   if (env.CLOUD && env.SELF_HOSTED) {
     for (let [property, column] of Object.entries(table.schema)) {