diff --git a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
index faadbdeb49..b6bf9ec8d7 100644
--- a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
@@ -6,8 +6,6 @@
Input,
Label,
ModalContent,
- Button,
- Spacer,
Toggle,
} from "@budibase/bbui"
import TableDataImport from "../TableDataImport.svelte"
@@ -28,6 +26,14 @@
let dataImport
let error = ""
let createAutoscreens = true
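+ // auto-generated columns the user can opt into for the new table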
+ let autoColumns = {
+ createdBy: false,
+ createdAt: false,
+ updatedBy: false,
+ updatedAt: false,
+ autoNumber: false,
+ }
function checkValid(evt) {
const tableName = evt.target.value
@@ -42,6 +47,7 @@
let newTable = {
name,
schema: dataImport.schema || {},
+ autoColumns,
dataImport,
}
@@ -93,6 +99,30 @@
on:input={checkValid}
bind:value={name}
{error} />
+
@@ -101,3 +131,25 @@
+
+
diff --git a/packages/server/src/api/controllers/row.js b/packages/server/src/api/controllers/row.js
index 857d1dd2ad..03af91a2b6 100644
--- a/packages/server/src/api/controllers/row.js
+++ b/packages/server/src/api/controllers/row.js
@@ -9,7 +9,10 @@ const {
ViewNames,
} = require("../../db/utils")
const usersController = require("./user")
-const { coerceRowValues, enrichRows } = require("../../utilities")
+const {
+ inputProcessing,
+ outputProcessing,
+} = require("../../utilities/rowProcessor")
const TABLE_VIEW_BEGINS_WITH = `all${SEPARATOR}${DocumentTypes.TABLE}${SEPARATOR}`
@@ -64,7 +67,7 @@ exports.patch = async function(ctx) {
row[key] = patchfields[key]
}
- row = coerceRowValues(row, table)
+ row = inputProcessing(ctx.user, table, row)
const validateResult = await validate({
row,
@@ -134,7 +137,7 @@ exports.save = async function(ctx) {
const table = await db.get(row.tableId)
- row = coerceRowValues(row, table)
+ row = inputProcessing(ctx.user, table, row)
const validateResult = await validate({
row,
@@ -204,7 +207,7 @@ exports.fetchView = async function(ctx) {
schema: {},
}
}
- ctx.body = await enrichRows(appId, table, response.rows)
+ ctx.body = await outputProcessing(appId, table, response.rows)
}
if (calculation === CALCULATION_TYPES.STATS) {
@@ -247,7 +250,7 @@ exports.fetchTableRows = async function(ctx) {
)
rows = response.rows.map(row => row.doc)
}
- ctx.body = await enrichRows(appId, table, rows)
+ ctx.body = await outputProcessing(appId, table, rows)
}
exports.find = async function(ctx) {
@@ -256,7 +259,7 @@ exports.find = async function(ctx) {
try {
const table = await db.get(ctx.params.tableId)
const row = await findRow(db, appId, ctx.params.tableId, ctx.params.rowId)
- ctx.body = await enrichRows(appId, table, row)
+ ctx.body = await outputProcessing(appId, table, row)
} catch (err) {
ctx.throw(400, err)
}
@@ -341,7 +344,7 @@ exports.fetchEnrichedRow = async function(ctx) {
keys: linkVals.map(linkVal => linkVal.id),
})
// need to include the IDs in these rows for any links they may have
- let linkedRows = await enrichRows(
+ let linkedRows = await outputProcessing(
appId,
table,
response.rows.map(row => row.doc)
diff --git a/packages/server/src/api/routes/table.js b/packages/server/src/api/routes/table.js
index da5c753b83..0e6b916c24 100644
--- a/packages/server/src/api/routes/table.js
+++ b/packages/server/src/api/routes/table.js
@@ -7,9 +7,33 @@ const {
PermissionLevels,
PermissionTypes,
} = require("../../utilities/security/permissions")
+const joiValidator = require("../../middleware/joi-validator")
+const Joi = require("joi")
const router = Router()
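+// Joi schema for the table body sent to the save endpoint; unknown top-level keys are allowed through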
+function generateSaveValidator() {
+ // prettier-ignore
+ return joiValidator.body(Joi.object({
+ _id: Joi.string(),
+ _rev: Joi.string(),
+ type: Joi.string().valid("table"),
+ primaryDisplay: Joi.string(),
+ schema: Joi.object().required(),
+ name: Joi.string().required(),
+ views: Joi.object(),
+ autoColumns: Joi.object({
+ createdBy: Joi.boolean(),
+ createdAt: Joi.boolean(),
+ updatedBy: Joi.boolean(),
+ updatedAt: Joi.boolean(),
+ autoNumber: Joi.boolean(),
+ }),
+ dataImport: Joi.object(),
+ }).unknown(true))
+}
+
router
.get("/api/tables", authorized(BUILDER), tableController.fetch)
.get(
@@ -23,6 +45,7 @@ router
// allows control over updating a table
bodyResource("_id"),
authorized(BUILDER),
+ generateSaveValidator(),
tableController.save
)
.post(
diff --git a/packages/server/src/automations/triggers.js b/packages/server/src/automations/triggers.js
index 6634016e3f..e4c91e5610 100644
--- a/packages/server/src/automations/triggers.js
+++ b/packages/server/src/automations/triggers.js
@@ -2,7 +2,7 @@ const CouchDB = require("../db")
const emitter = require("../events/index")
const InMemoryQueue = require("../utilities/queue/inMemoryQueue")
const { getAutomationParams } = require("../db/utils")
-const { coerceValue } = require("../utilities")
+const { coerce } = require("../utilities/rowProcessor")
let automationQueue = new InMemoryQueue("automationQueue")
@@ -240,8 +240,8 @@ module.exports.externalTrigger = async function(automation, params) {
// values are likely to be submitted as strings, so we shall convert to correct type
const coercedFields = {}
const fields = automation.definition.trigger.inputs.fields
- for (let key in fields) {
- coercedFields[key] = coerceValue(params.fields[key], fields[key])
+ for (let key of Object.keys(fields)) {
+ coercedFields[key] = coerce(params.fields[key], fields[key])
}
params.fields = coercedFields
}
diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js
index 31cc74b5e6..4cf01dc836 100644
--- a/packages/server/src/utilities/index.js
+++ b/packages/server/src/utilities/index.js
@@ -3,60 +3,9 @@ const { DocumentTypes, SEPARATOR } = require("../db/utils")
const fs = require("fs")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../db")
-const { OBJ_STORE_DIRECTORY } = require("../constants")
-const linkRows = require("../db/linkedRows")
const APP_PREFIX = DocumentTypes.APP + SEPARATOR
-/**
- * A map of how we convert various properties in rows to each other based on the row type.
- */
-const TYPE_TRANSFORM_MAP = {
- link: {
- "": [],
- [null]: [],
- [undefined]: undefined,
- },
- options: {
- "": "",
- [null]: "",
- [undefined]: undefined,
- },
- string: {
- "": "",
- [null]: "",
- [undefined]: undefined,
- },
- longform: {
- "": "",
- [null]: "",
- [undefined]: undefined,
- },
- number: {
- "": null,
- [null]: null,
- [undefined]: undefined,
- parse: n => parseFloat(n),
- },
- datetime: {
- "": null,
- [undefined]: undefined,
- [null]: null,
- },
- attachment: {
- "": [],
- [null]: [],
- [undefined]: undefined,
- },
- boolean: {
- "": null,
- [null]: null,
- [undefined]: undefined,
- true: true,
- false: false,
- },
-}
-
function confirmAppId(possibleAppId) {
return possibleAppId && possibleAppId.startsWith(APP_PREFIX)
? possibleAppId
@@ -159,43 +108,6 @@ exports.walkDir = (dirPath, callback) => {
}
}
-/**
- * This will coerce a value to the correct types based on the type transform map
- * @param {object} row The value to coerce
- * @param {object} type The type fo coerce to
- * @returns {object} The coerced value
- */
-exports.coerceValue = (value, type) => {
- // eslint-disable-next-line no-prototype-builtins
- if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(value)) {
- return TYPE_TRANSFORM_MAP[type][value]
- } else if (TYPE_TRANSFORM_MAP[type].parse) {
- return TYPE_TRANSFORM_MAP[type].parse(value)
- }
-
- return value
-}
-
-/**
- * This will coerce the values in a row to the correct types based on the type transform map and the
- * table schema.
- * @param {object} row The row which is to be coerced to correct values based on schema, this input
- * row will not be updated.
- * @param {object} table The table that has been retrieved from DB, this must contain the expected
- * schema for the rows.
- * @returns {object} The updated row will be returned with all values coerced.
- */
-exports.coerceRowValues = (row, table) => {
- const clonedRow = cloneDeep(row)
- for (let [key, value] of Object.entries(clonedRow)) {
- const field = table.schema[key]
- if (!field) continue
-
- clonedRow[key] = exports.coerceValue(value, field.type)
- }
- return clonedRow
-}
-
exports.getLogoUrl = () => {
return "https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg"
}
@@ -213,34 +125,3 @@ exports.getAllApps = async () => {
.map(({ value }) => value)
}
}
-
-/**
- * This function "enriches" the input rows with anything they are supposed to contain, for example
- * link records or attachment links.
- * @param {string} appId the ID of the application for which rows are being enriched.
- * @param {object} table the table from which these rows came from originally, this is used to determine
- * the schema of the rows and then enrich.
- * @param {object[]} rows the rows which are to be enriched.
- * @returns {object[]} the enriched rows will be returned.
- */
-exports.enrichRows = async (appId, table, rows) => {
- // attach any linked row information
- const enriched = await linkRows.attachLinkInfo(appId, rows)
- // update the attachments URL depending on hosting
- if (env.CLOUD && env.SELF_HOSTED) {
- for (let [property, column] of Object.entries(table.schema)) {
- if (column.type === "attachment") {
- for (let row of enriched) {
- if (row[property] == null || row[property].length === 0) {
- continue
- }
- row[property].forEach(attachment => {
- attachment.url = `${OBJ_STORE_DIRECTORY}/${appId}/${attachment.url}`
- attachment.url = attachment.url.replace("//", "/")
- })
- }
- }
- }
- }
- return enriched
-}
diff --git a/packages/server/src/utilities/rowProcessor.js b/packages/server/src/utilities/rowProcessor.js
new file mode 100644
index 0000000000..270f636aae
--- /dev/null
+++ b/packages/server/src/utilities/rowProcessor.js
@@ -0,0 +1,137 @@
+const env = require("../environment")
+const { OBJ_STORE_DIRECTORY } = require("../constants")
+const linkRows = require("../db/linkedRows")
+const { cloneDeep } = require("lodash/fp")
+
+/**
+ * A map describing how raw row property values are converted to the correct form for each column type.
+ */
+const TYPE_TRANSFORM_MAP = {
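+ // NOTE: the computed keys [null] and [undefined] become the literal property
+ // names "null"/"undefined", which is what coerce() below relies on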
+ link: {
+ "": [],
+ [null]: [],
+ [undefined]: undefined,
+ },
+ options: {
+ "": "",
+ [null]: "",
+ [undefined]: undefined,
+ },
+ string: {
+ "": "",
+ [null]: "",
+ [undefined]: undefined,
+ },
+ longform: {
+ "": "",
+ [null]: "",
+ [undefined]: undefined,
+ },
+ number: {
+ "": null,
+ [null]: null,
+ [undefined]: undefined,
+ parse: n => parseFloat(n),
+ },
+ datetime: {
+ "": null,
+ [undefined]: undefined,
+ [null]: null,
+ },
+ attachment: {
+ "": [],
+ [null]: [],
+ [undefined]: undefined,
+ },
+ boolean: {
+ "": null,
+ [null]: null,
+ [undefined]: undefined,
+ true: true,
+ false: false,
+ },
+}
+
+/**
+ * This will coerce a value to the correct type based on the type transform map
+ * @param {any} value The value to coerce
+ * @param {string} type The type to coerce to
+ * @returns {any} The coerced value
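+ * @example
+ * // illustrative only, based on TYPE_TRANSFORM_MAP above: unrecognised values for
+ * // a "number" column fall through to parseFloat, while "" maps straight to null
+ * coerce("53", "number") // => 53
+ * coerce("", "number") // => null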
+ */
+exports.coerce = (value, type) => {
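+ // values that appear as keys in the map ("", null, undefined, true/false) translate
+ // directly; anything else is parsed if the type defines a parse function, or returned unchanged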
+ // eslint-disable-next-line no-prototype-builtins
+ if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(value)) {
+ return TYPE_TRANSFORM_MAP[type][value]
+ } else if (TYPE_TRANSFORM_MAP[type].parse) {
+ return TYPE_TRANSFORM_MAP[type].parse(value)
+ }
+ return value
+}
+
+/**
+ * Given an input row this function will apply all the necessary pre-processing to it, such as coercion
+ * of column values or adding auto-column values.
+ * @param {object} user the user performing the save.
+ * @param {object} table the table the row is being saved to.
+ * @param {object} row the row which is being created/updated.
+ * @returns {object} the row which has been prepared to be written to the DB.
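+ * @example
+ * // illustrative only, assuming "age" is a number column in table.schema:
+ * // inputProcessing(ctx.user, table, { age: "30", other: "x" })
+ * // => { age: 30, other: "x" } (keys not present in the schema are passed through as-is)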
+ */
+exports.inputProcessing = (user, table, row) => {
+ const clonedRow = cloneDeep(row)
+ for (let [key, value] of Object.entries(clonedRow)) {
+ const field = table.schema[key]
+ if (!field) continue
+
+ clonedRow[key] = exports.coerce(value, field.type)
+ }
+ return clonedRow
+}
+
+/**
+ * This function enriches the input rows with anything they are supposed to contain, for example
+ * link records or attachment links.
+ * @param {string} appId the ID of the application for which rows are being enriched.
+ * @param {object} table the table these rows originally came from; this is used to determine
+ * the schema of the rows so they can be enriched.
+ * @param {object[]} rows the rows which are to be enriched.
+ * @returns {object[]} the enriched rows will be returned.
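+ * @example
+ * // illustrative only:
+ * // const rows = await outputProcessing(appId, table, response.rows)
+ * // rows now carry link info and, on self-hosted cloud, object-store attachment URLs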
+ */
+exports.outputProcessing = async (appId, table, rows) => {
+ // attach any linked row information
+ const outputRows = await linkRows.attachLinkInfo(appId, rows)
+ // update the attachments URL depending on hosting
+ if (env.CLOUD && env.SELF_HOSTED) {
+ for (let [property, column] of Object.entries(table.schema)) {
+ if (column.type === "attachment") {
+ for (let row of outputRows) {
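+ // skip empty attachment cells, then point each file at this app's object store path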
+ if (row[property] == null || row[property].length === 0) {
+ continue
+ }
+ row[property].forEach(attachment => {
+ attachment.url = `${OBJ_STORE_DIRECTORY}/${appId}/${attachment.url}`
+ attachment.url = attachment.url.replace("//", "/")
+ })
+ }
+ }
+ }
+ }
+ return outputRows
+}