const env = require("../environment")
const {
  OBJ_STORE_DIRECTORY,
  FieldTypes,
  AutoFieldSubTypes,
} = require("../constants")
const linkRows = require("../db/linkedRows")
const { cloneDeep } = require("lodash/fp")

const BASE_AUTO_ID = 1

/**
 * A map of how row values are coerced for each column type. Literal keys ("", null,
 * undefined) map directly to a replacement value, and an optional parse function
 * handles any value that does not match one of those keys.
 */
const TYPE_TRANSFORM_MAP = {
  [FieldTypes.LINK]: {
    "": [],
    [null]: [],
    [undefined]: undefined,
    parse: link => {
      if (Array.isArray(link) && typeof link[0] === "object") {
        return link.map(el => (el && el._id ? el._id : el))
      }
      if (typeof link === "string") {
        return [link]
      }
      return link
    },
  },
  [FieldTypes.OPTIONS]: {
    "": null,
    [null]: "",
    [undefined]: undefined,
  },
  [FieldTypes.STRING]: {
    "": "",
    [null]: "",
    [undefined]: undefined,
  },
  [FieldTypes.LONGFORM]: {
    "": "",
    [null]: "",
    [undefined]: undefined,
  },
  [FieldTypes.NUMBER]: {
    "": null,
    [null]: null,
    [undefined]: undefined,
    parse: n => parseFloat(n),
  },
  [FieldTypes.DATETIME]: {
    "": null,
    [undefined]: undefined,
    [null]: null,
  },
  [FieldTypes.ATTACHMENT]: {
    "": [],
    [null]: [],
    [undefined]: undefined,
  },
  [FieldTypes.BOOLEAN]: {
    "": null,
    [null]: null,
    [undefined]: undefined,
    true: true,
    false: false,
  },
  [FieldTypes.AUTO]: {
    parse: () => undefined,
  },
}
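
// Note on the literal keys above: computed keys such as [null] and [undefined] are
// stringified to "null" and "undefined" when the object literal is built. That is what
// lets the hasOwnProperty lookup in coerce() below match raw null/undefined inputs,
// for example (illustrative only):
//   TYPE_TRANSFORM_MAP[FieldTypes.STRING].hasOwnProperty(null) // true, via the "null" key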

/**
 * Updates any auto columns found on the table with the correct values for the row,
 * based on the current time and the logged-in user making the request.
 * @param {Object} user The user making the request, used for the created/updated "by" columns.
 * @param {Object} table The table providing the schema; its auto ID counter may be incremented.
 * @param {Object} row The row to be updated with values for the auto columns.
 * @returns {{row: Object, table: Object}} The updated row and table; the table may need to be
 * written back to persist the new last auto ID.
 */
function processAutoColumn(user, table, row) {
  let now = new Date().toISOString()
  // if a row doesn't have a revision then it doesn't exist yet
  const creating = !row._rev
  for (let [key, schema] of Object.entries(table.schema)) {
    if (!schema.autocolumn) {
      continue
    }
    switch (schema.subtype) {
      case AutoFieldSubTypes.CREATED_BY:
        if (creating) {
          row[key] = [user.userId]
        }
        break
      case AutoFieldSubTypes.CREATED_AT:
        if (creating) {
          row[key] = now
        }
        break
      case AutoFieldSubTypes.UPDATED_BY:
        row[key] = [user.userId]
        break
      case AutoFieldSubTypes.UPDATED_AT:
        row[key] = now
        break
      case AutoFieldSubTypes.AUTO_ID:
        if (creating) {
          schema.lastID = !schema.lastID ? BASE_AUTO_ID : schema.lastID + 1
          row[key] = schema.lastID
        }
        break
    }
  }
  return { table, row }
}
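
// Illustrative sketch only (not executed by this module): for a table with a single
// AUTO_ID column and a row with no _rev, the call below would stamp the row and bump
// the counter on the table it was given. The user and column names are hypothetical.
//
//   const { table: updatedTable, row: updatedRow } = processAutoColumn(
//     { userId: "us_123" },
//     { schema: { ID: { autocolumn: true, subtype: AutoFieldSubTypes.AUTO_ID } } },
//     { name: "first row" } // no _rev, so this is treated as a create
//   )
//   // updatedRow.ID === 1 (BASE_AUTO_ID) and updatedTable.schema.ID.lastID === 1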

/**
 * Coerces a value to the correct type based on the type transform map.
 * @param {any} row The value to coerce.
 * @param {string} type The column type to coerce to.
 * @returns {any} The coerced value.
 */
exports.coerce = (row, type) => {
  // eslint-disable-next-line no-prototype-builtins
  if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(row)) {
    return TYPE_TRANSFORM_MAP[type][row]
  } else if (TYPE_TRANSFORM_MAP[type].parse) {
    return TYPE_TRANSFORM_MAP[type].parse(row)
  }

  return row
}
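
// Illustrative sketch only (not executed by this module): how a few inputs resolve,
// assuming the FieldTypes values imported from ../constants.
//
//   exports.coerce("", FieldTypes.NUMBER)    // -> null  (matches the "" literal key)
//   exports.coerce("5.5", FieldTypes.NUMBER) // -> 5.5   (falls through to parse)
//   exports.coerce(null, FieldTypes.STRING)  // -> ""    (matches the stringified "null" key)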

/**
 * Given an input row this function applies all the necessary pre-processing to it,
 * such as coercion of column values and adding auto-column values.
 * @param {object} user the user performing the input.
 * @param {object} table the table the row is being saved to.
 * @param {object} row the row being created/updated.
 * @returns {{row: object, table: object}} the row prepared to be written to the DB, along
 * with the table, which may have been updated (e.g. its auto ID counter).
 */
exports.inputProcessing = (user, table, row) => {
  let clonedRow = cloneDeep(row)
  // need to copy the table so it can be differenced on way out
  const copiedTable = cloneDeep(table)
  for (let [key, value] of Object.entries(clonedRow)) {
    const field = table.schema[key]
    if (!field) {
      continue
    }
    clonedRow[key] = exports.coerce(value, field.type)
  }
  // handle auto columns - this returns an object like {table, row}
  return processAutoColumn(user, copiedTable, clonedRow)
}
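
// Illustrative sketch only (not executed by this module): values are coerced against
// the table schema and auto columns are filled in, while keys with no schema entry are
// passed through untouched. The field and user here are hypothetical.
//
//   const { row, table: updatedTable } = exports.inputProcessing(
//     { userId: "us_123" },
//     { schema: { age: { type: FieldTypes.NUMBER } } },
//     { age: "30", notInSchema: "kept as-is" }
//   )
//   // row.age === 30 and row.notInSchema === "kept as-is"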

/**
 * This function enriches the input rows with anything they are supposed to contain,
 * for example linked row display values or attachment links.
 * @param {string} appId the ID of the application for which rows are being enriched.
 * @param {object} table the table these rows came from, used to determine their schema
 * and how to enrich them.
 * @param {object[]|object} rows the rows to be enriched; a single row is also accepted.
 * @returns {object[]|object} the enriched rows, in the same shape (array or single row)
 * as the input.
 */
exports.outputProcessing = async (appId, table, rows) => {
  let wasArray = true
  if (!(rows instanceof Array)) {
    rows = [rows]
    wasArray = false
  }
  // attach any linked row information
  const outputRows = await linkRows.attachLinkedPrimaryDisplay(
    appId,
    table,
    rows
  )
  // update the attachments URL depending on hosting
  if (env.CLOUD && env.SELF_HOSTED) {
    for (let [property, column] of Object.entries(table.schema)) {
      if (column.type === FieldTypes.ATTACHMENT) {
        for (let row of outputRows) {
          if (row[property] == null || row[property].length === 0) {
            continue
          }
          row[property].forEach(attachment => {
            attachment.url = `${OBJ_STORE_DIRECTORY}/${appId}/${attachment.url}`
            attachment.url = attachment.url.replace("//", "/")
          })
        }
      }
    }
  }
  return wasArray ? outputRows : outputRows[0]
}
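
// Illustrative sketch only (not executed by this module): the output shape mirrors the
// input shape, so callers can pass either a single row or an array. appId, table and
// the rows here are hypothetical.
//
//   const enriched = await exports.outputProcessing(appId, table, [row1, row2]) // -> array
//   const single = await exports.outputProcessing(appId, table, row1)           // -> single row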