diff --git a/packages/builder/src/components/backend/DataTable/RowFieldControl.svelte b/packages/builder/src/components/backend/DataTable/RowFieldControl.svelte index 3390b95288..075b556134 100644 --- a/packages/builder/src/components/backend/DataTable/RowFieldControl.svelte +++ b/packages/builder/src/components/backend/DataTable/RowFieldControl.svelte @@ -11,8 +11,9 @@ import { capitalise } from "../../../helpers" import LinkedRowSelector from "components/common/LinkedRowSelector.svelte" + export let defaultValue export let meta - export let value = meta.type === "boolean" ? false : "" + export let value = defaultValue || (meta.type === "boolean" ? false : "") export let readonly $: type = meta.type diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 1e3f820d69..ae19489575 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -7,7 +7,6 @@ Select, Toggle, Radio, - } from "@budibase/bbui" import { cloneDeep } from "lodash/fp" import { backendUiStore } from "builderStore" @@ -38,12 +37,14 @@ $backendUiStore.selectedTable.primaryDisplay === field.name let relationshipTypes = [ - {text: 'Many to many (N:N)', value: 'many-to-many',}, - {text: 'One to many (1:N)', value: 'one-to-many',} + { text: "Many to many (N:N)", value: "many-to-many" }, + { text: "One to many (1:N)", value: "one-to-many" }, ] - let types = ['Many to many (N:N)', 'One to many (1:N)'] + let types = ["Many to many (N:N)", "One to many (1:N)"] - let selectedRelationshipType = relationshipTypes.find(type => type.value === field.relationshipType)?.text || 'Many to many (N:N)' + let selectedRelationshipType = + relationshipTypes.find(type => type.value === field.relationshipType) + ?.text || "Many to many (N:N)" let indexes = 
[...($backendUiStore.selectedTable.indexes || [])] let confirmDeleteDialog @@ -68,10 +69,12 @@ field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_COL async function saveColumn() { - // Set relationship type if it's - if (field.type === 'link') { - field.relationshipType = relationshipTypes.find(type => type.text === selectedRelationshipType).value - } + // Set relationship type if it's a link + if (field.type === "link") { + field.relationshipType = relationshipTypes.find( + type => type.text === selectedRelationshipType + ).value + } if (field.type === AUTO_COL) { field = buildAutoColumn( @@ -228,11 +231,15 @@ label="Max Value" bind:value={field.constraints.numericality.lessThanOrEqualTo} /> {:else if field.type === 'link'} -
+
{#each types as type} - + {/each} @@ -282,7 +289,7 @@ .radio-buttons { display: flex; gap: var(--spacing-m); - font-size: var(--font-size-xs) + font-size: var(--font-size-xs); } .actions { display: grid; diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte index aac515e74b..5753b7d5bb 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditUser.svelte @@ -29,6 +29,7 @@ let customSchema = { ...schema } delete customSchema["email"] delete customSchema["roleId"] + delete customSchema["status"] return Object.entries(customSchema) } @@ -79,7 +80,13 @@ {/each} + {#each customSchemaKeys as [key, meta]} - + {#if !meta.autocolumn} + + {/if} {/each} diff --git a/packages/builder/src/components/common/LinkedRowSelector.svelte b/packages/builder/src/components/common/LinkedRowSelector.svelte index a0f5af052d..ac660d4a71 100644 --- a/packages/builder/src/components/common/LinkedRowSelector.svelte +++ b/packages/builder/src/components/common/LinkedRowSelector.svelte @@ -42,10 +42,17 @@ {:else} {#if schema.relationshipType === 'one-to-many'} - (linkedRows = [e.target.value])} + name={label} + {label}> {#each rows as row} - + {/each} {:else} diff --git a/packages/client/src/api/api.js b/packages/client/src/api/api.js index 817f14896c..8b2328c6a2 100644 --- a/packages/client/src/api/api.js +++ b/packages/client/src/api/api.js @@ -1,7 +1,7 @@ /** * API cache for cached request responses. 
*/ -import { notificationStore } from "../store/notification" +import { notificationStore } from "../store" let cache = {} /** @@ -34,6 +34,9 @@ const makeApiCall = async ({ method, url, body, json = true }) => { switch (response.status) { case 200: return response.json() + case 401: + notificationStore.danger("Invalid credentials") + return handleError(`Invalid credentials`) case 404: notificationStore.danger("Not found") return handleError(`${url}: Not Found`) diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js index 1dfe6f12d8..1cc6db3185 100644 --- a/packages/server/src/api/controllers/auth.js +++ b/packages/server/src/api/controllers/auth.js @@ -7,6 +7,9 @@ const { generateUserID } = require("../../db/utils") const { setCookie } = require("../../utilities") const { outputProcessing } = require("../../utilities/rowProcessor") const { ViewNames } = require("../../db/utils") +const { UserStatus } = require("../../constants") + +const INVALID_ERR = "Invalid Credentials" exports.authenticate = async ctx => { const appId = ctx.appId @@ -27,7 +30,12 @@ exports.authenticate = async ctx => { } catch (_) { // do not want to throw a 404 - as this could be // used to determine valid emails - ctx.throw(401, "Invalid Credentials") + ctx.throw(401, INVALID_ERR) + } + + // check that the user is currently inactive, if this is the case throw invalid + if (dbUser.status === UserStatus.INACTIVE) { + ctx.throw(401, INVALID_ERR) } // authenticate @@ -56,7 +64,7 @@ exports.authenticate = async ctx => { appId, } } else { - ctx.throw(401, "Invalid credentials.") + ctx.throw(401, INVALID_ERR) } } diff --git a/packages/server/src/api/controllers/row.js b/packages/server/src/api/controllers/row.js index 3c4c19eed2..43d831dc60 100644 --- a/packages/server/src/api/controllers/row.js +++ b/packages/server/src/api/controllers/row.js @@ -96,7 +96,10 @@ exports.patch = async function(ctx) { // Creation of a new user goes to the user 
controller if (row.tableId === ViewNames.USERS) { // the row has been updated, need to put it into the ctx - ctx.request.body = row + ctx.request.body = { + ...row, + password: ctx.request.body.password, + } await usersController.update(ctx) return } diff --git a/packages/server/src/api/controllers/table.js b/packages/server/src/api/controllers/table.js deleted file mode 100644 index fb18210821..0000000000 --- a/packages/server/src/api/controllers/table.js +++ /dev/null @@ -1,285 +0,0 @@ -const CouchDB = require("../../db") -const linkRows = require("../../db/linkedRows") -const csvParser = require("../../utilities/csvParser") -const { - getRowParams, - getTableParams, - generateTableID, - generateRowID, -} = require("../../db/utils") -const { isEqual } = require("lodash/fp") -const { FieldTypes, AutoFieldSubTypes } = require("../../constants") -const { inputProcessing } = require("../../utilities/rowProcessor") - -async function checkForColumnUpdates(db, oldTable, updatedTable) { - let updatedRows - const rename = updatedTable._rename - let deletedColumns = [] - if (oldTable && oldTable.schema && updatedTable.schema) { - deletedColumns = Object.keys(oldTable.schema).filter( - colName => updatedTable.schema[colName] == null - ) - } - // check for renaming of columns or deleted columns - if (rename || deletedColumns.length !== 0) { - const rows = await db.allDocs( - getRowParams(updatedTable._id, null, { - include_docs: true, - }) - ) - updatedRows = rows.rows.map(({ doc }) => { - if (rename) { - doc[rename.updated] = doc[rename.old] - delete doc[rename.old] - } else if (deletedColumns.length !== 0) { - deletedColumns.forEach(colName => delete doc[colName]) - } - return doc - }) - delete updatedTable._rename - } - return updatedRows -} - -// makes sure the passed in table isn't going to reset the auto ID -function makeSureTableUpToDate(table, tableToSave) { - if (!table) { - return tableToSave - } - // sure sure rev is up to date - tableToSave._rev = table._rev - // 
make sure auto IDs are always updated - these are internal - // so the client may not know they have changed - for (let [field, column] of Object.entries(table.schema)) { - if ( - column.autocolumn && - column.subtype === AutoFieldSubTypes.AUTO_ID && - tableToSave.schema[field] - ) { - tableToSave.schema[field].lastID = column.lastID - } - } - return tableToSave -} - -async function handleDataImport(user, table, dataImport) { - const db = new CouchDB(user.appId) - if (dataImport && dataImport.csvString) { - // Populate the table with rows imported from CSV in a bulk update - const data = await csvParser.transform(dataImport) - - for (let i = 0; i < data.length; i++) { - let row = data[i] - row._id = generateRowID(table._id) - row.tableId = table._id - const processed = inputProcessing(user, table, row) - row = processed.row - // these auto-fields will never actually link anywhere (always builder) - for (let [fieldName, schema] of Object.entries(table.schema)) { - if ( - schema.autocolumn && - (schema.subtype === AutoFieldSubTypes.CREATED_BY || - schema.subtype === AutoFieldSubTypes.UPDATED_BY) - ) { - delete row[fieldName] - } - } - table = processed.table - data[i] = row - } - - await db.bulkDocs(data) - let response = await db.put(table) - table._rev = response._rev - } - return table -} - -async function handleSearchIndexes(db, table) { - // create relevant search indexes - if (table.indexes && table.indexes.length > 0) { - const currentIndexes = await db.getIndexes() - const indexName = `search:${table._id}` - - const existingIndex = currentIndexes.indexes.find( - existing => existing.name === indexName - ) - - if (existingIndex) { - const currentFields = existingIndex.def.fields.map( - field => Object.keys(field)[0] - ) - - // if index fields have changed, delete the original index - if (!isEqual(currentFields, table.indexes)) { - await db.deleteIndex(existingIndex) - // create/recreate the index with fields - await db.createIndex({ - index: { - fields: 
table.indexes, - name: indexName, - ddoc: "search_ddoc", - type: "json", - }, - }) - } - } else { - // create/recreate the index with fields - await db.createIndex({ - index: { - fields: table.indexes, - name: indexName, - ddoc: "search_ddoc", - type: "json", - }, - }) - } - } - return table -} - -exports.fetch = async function(ctx) { - const db = new CouchDB(ctx.user.appId) - const body = await db.allDocs( - getTableParams(null, { - include_docs: true, - }) - ) - ctx.body = body.rows.map(row => row.doc) -} - -exports.find = async function(ctx) { - const db = new CouchDB(ctx.user.appId) - ctx.body = await db.get(ctx.params.id) -} - -exports.save = async function(ctx) { - const appId = ctx.user.appId - const db = new CouchDB(appId) - const { dataImport, ...rest } = ctx.request.body - let tableToSave = { - type: "table", - _id: generateTableID(), - views: {}, - ...rest, - } - - // if the table obj had an _id then it will have been retrieved - let oldTable - if (ctx.request.body && ctx.request.body._id) { - oldTable = await db.get(ctx.request.body._id) - tableToSave = makeSureTableUpToDate(oldTable, tableToSave) - } - - // make sure that types don't change of a column, have to remove - // the column if you want to change the type - if (oldTable && oldTable.schema) { - for (let propKey of Object.keys(tableToSave.schema)) { - let column = tableToSave.schema[propKey] - let oldColumn = oldTable.schema[propKey] - if (oldColumn && oldColumn.type !== column.type) { - ctx.throw(400, "Cannot change the type of a column") - } - } - } - - // Don't rename if the name is the same - let { _rename } = tableToSave - if (_rename && _rename.old === _rename.updated) { - _rename = null - delete tableToSave._rename - } - - // rename row fields when table column is renamed - if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) { - ctx.throw(400, "Cannot rename a linked column.") - } else if (_rename && tableToSave.primaryDisplay === _rename.old) { - ctx.throw(400, 
"Cannot rename the display column.") - } - - let updatedRows = await checkForColumnUpdates(db, oldTable, tableToSave) - - // update schema of non-statistics views when new columns are added - for (let view in tableToSave.views) { - const tableView = tableToSave.views[view] - if (!tableView) continue - - if (tableView.schema.group || tableView.schema.field) continue - tableView.schema = tableToSave.schema - } - - // update linked rows - const linkResp = await linkRows.updateLinks({ - appId, - eventType: oldTable - ? linkRows.EventType.TABLE_UPDATED - : linkRows.EventType.TABLE_SAVE, - table: tableToSave, - oldTable: oldTable, - }) - if (linkResp != null && linkResp._rev) { - tableToSave._rev = linkResp._rev - } - - // don't perform any updates until relationships have been - // checked by the updateLinks function - if (updatedRows && updatedRows.length !== 0) { - await db.bulkDocs(updatedRows) - } - const result = await db.post(tableToSave) - tableToSave._rev = result.rev - - tableToSave = await handleSearchIndexes(db, tableToSave) - tableToSave = await handleDataImport(ctx.user, tableToSave, dataImport) - - ctx.eventEmitter && - ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave) - - ctx.status = 200 - ctx.message = `Table ${ctx.request.body.name} saved successfully.` - ctx.body = tableToSave -} - -exports.destroy = async function(ctx) { - const appId = ctx.user.appId - const db = new CouchDB(appId) - const tableToDelete = await db.get(ctx.params.tableId) - - // Delete all rows for that table - const rows = await db.allDocs( - getRowParams(ctx.params.tableId, null, { - include_docs: true, - }) - ) - await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true }))) - - // update linked rows - await linkRows.updateLinks({ - appId, - eventType: linkRows.EventType.TABLE_DELETE, - table: tableToDelete, - }) - - // don't remove the table itself until very end - await db.remove(tableToDelete) - - // remove table search index - const currentIndexes = await 
db.getIndexes() - const existingIndex = currentIndexes.indexes.find( - existing => existing.name === `search:${ctx.params.tableId}` - ) - if (existingIndex) { - await db.deleteIndex(existingIndex) - } - - ctx.eventEmitter && - ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete) - ctx.status = 200 - ctx.message = `Table ${ctx.params.tableId} deleted.` -} - -exports.validateCSVSchema = async function(ctx) { - const { csvString, schema = {} } = ctx.request.body - const result = await csvParser.parse(csvString, schema) - ctx.body = { schema: result } -} diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js new file mode 100644 index 0000000000..b8e3e56e3a --- /dev/null +++ b/packages/server/src/api/controllers/table/index.js @@ -0,0 +1,165 @@ +const CouchDB = require("../../../db") +const linkRows = require("../../../db/linkedRows") +const csvParser = require("../../../utilities/csvParser") +const { + getRowParams, + getTableParams, + generateTableID, +} = require("../../../db/utils") +const { FieldTypes } = require("../../../constants") +const { TableSaveFunctions } = require("./utils") + +exports.fetch = async function(ctx) { + const db = new CouchDB(ctx.user.appId) + const body = await db.allDocs( + getTableParams(null, { + include_docs: true, + }) + ) + ctx.body = body.rows.map(row => row.doc) +} + +exports.find = async function(ctx) { + const db = new CouchDB(ctx.user.appId) + ctx.body = await db.get(ctx.params.id) +} + +exports.save = async function(ctx) { + const appId = ctx.user.appId + const db = new CouchDB(appId) + const { dataImport, ...rest } = ctx.request.body + let tableToSave = { + type: "table", + _id: generateTableID(), + views: {}, + ...rest, + } + + // if the table obj had an _id then it will have been retrieved + let oldTable + if (ctx.request.body && ctx.request.body._id) { + oldTable = await db.get(ctx.request.body._id) + } + + // saving a table is a complex operation, 
involving many different steps, this + // has been broken out into a utility to make it more obvious/easier to manipulate + const tableSaveFunctions = new TableSaveFunctions({ + db, + ctx, + oldTable, + dataImport, + }) + tableToSave = await tableSaveFunctions.before(tableToSave) + + // make sure that types don't change of a column, have to remove + // the column if you want to change the type + if (oldTable && oldTable.schema) { + for (let propKey of Object.keys(tableToSave.schema)) { + let column = tableToSave.schema[propKey] + let oldColumn = oldTable.schema[propKey] + if (oldColumn && oldColumn.type !== column.type) { + ctx.throw(400, "Cannot change the type of a column") + } + } + } + + // Don't rename if the name is the same + let { _rename } = tableToSave + if (_rename && _rename.old === _rename.updated) { + _rename = null + delete tableToSave._rename + } + + // rename row fields when table column is renamed + if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) { + ctx.throw(400, "Cannot rename a linked column.") + } else if (_rename && tableToSave.primaryDisplay === _rename.old) { + ctx.throw(400, "Cannot rename the display column.") + } + + tableToSave = await tableSaveFunctions.mid(tableToSave) + + // update schema of non-statistics views when new columns are added + for (let view in tableToSave.views) { + const tableView = tableToSave.views[view] + if (!tableView) continue + + if (tableView.schema.group || tableView.schema.field) continue + tableView.schema = tableToSave.schema + } + + // update linked rows + const linkResp = await linkRows.updateLinks({ + appId, + eventType: oldTable + ? 
linkRows.EventType.TABLE_UPDATED + : linkRows.EventType.TABLE_SAVE, + table: tableToSave, + oldTable: oldTable, + }) + if (linkResp != null && linkResp._rev) { + tableToSave._rev = linkResp._rev + } + + // don't perform any updates until relationships have been + // checked by the updateLinks function + const updatedRows = tableSaveFunctions.getUpdatedRows() + if (updatedRows && updatedRows.length !== 0) { + await db.bulkDocs(updatedRows) + } + const result = await db.post(tableToSave) + tableToSave._rev = result.rev + + tableToSave = await tableSaveFunctions.after(tableToSave) + + ctx.eventEmitter && + ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave) + + ctx.status = 200 + ctx.message = `Table ${ctx.request.body.name} saved successfully.` + ctx.body = tableToSave +} + +exports.destroy = async function(ctx) { + const appId = ctx.user.appId + const db = new CouchDB(appId) + const tableToDelete = await db.get(ctx.params.tableId) + + // Delete all rows for that table + const rows = await db.allDocs( + getRowParams(ctx.params.tableId, null, { + include_docs: true, + }) + ) + await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true }))) + + // update linked rows + await linkRows.updateLinks({ + appId, + eventType: linkRows.EventType.TABLE_DELETE, + table: tableToDelete, + }) + + // don't remove the table itself until very end + await db.remove(tableToDelete) + + // remove table search index + const currentIndexes = await db.getIndexes() + const existingIndex = currentIndexes.indexes.find( + existing => existing.name === `search:${ctx.params.tableId}` + ) + if (existingIndex) { + await db.deleteIndex(existingIndex) + } + + ctx.eventEmitter && + ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete) + ctx.status = 200 + ctx.message = `Table ${ctx.params.tableId} deleted.` +} + +exports.validateCSVSchema = async function(ctx) { + const { csvString, schema = {} } = ctx.request.body + const result = await csvParser.parse(csvString, schema) + 
ctx.body = { schema: result } +} diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js new file mode 100644 index 0000000000..73e6e60551 --- /dev/null +++ b/packages/server/src/api/controllers/table/utils.js @@ -0,0 +1,195 @@ +const CouchDB = require("../../../db") +const csvParser = require("../../../utilities/csvParser") +const { getRowParams, generateRowID, ViewNames } = require("../../../db/utils") +const { isEqual } = require("lodash/fp") +const { AutoFieldSubTypes } = require("../../../constants") +const { inputProcessing } = require("../../../utilities/rowProcessor") +const { USERS_TABLE_SCHEMA } = require("../../../constants") + +exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => { + let updatedRows = [] + const rename = updatedTable._rename + let deletedColumns = [] + if (oldTable && oldTable.schema && updatedTable.schema) { + deletedColumns = Object.keys(oldTable.schema).filter( + colName => updatedTable.schema[colName] == null + ) + } + // check for renaming of columns or deleted columns + if (rename || deletedColumns.length !== 0) { + const rows = await db.allDocs( + getRowParams(updatedTable._id, null, { + include_docs: true, + }) + ) + updatedRows = rows.rows.map(({ doc }) => { + if (rename) { + doc[rename.updated] = doc[rename.old] + delete doc[rename.old] + } else if (deletedColumns.length !== 0) { + deletedColumns.forEach(colName => delete doc[colName]) + } + return doc + }) + delete updatedTable._rename + } + return { rows: updatedRows, table: updatedTable } +} + +// makes sure the passed in table isn't going to reset the auto ID +exports.makeSureTableUpToDate = (table, tableToSave) => { + if (!table) { + return tableToSave + } + // make sure rev is up to date + tableToSave._rev = table._rev + // make sure auto IDs are always updated - these are internal + // so the client may not know they have changed + for (let [field, column] of Object.entries(table.schema)) { + 
if ( + column.autocolumn && + column.subtype === AutoFieldSubTypes.AUTO_ID && + tableToSave.schema[field] + ) { + tableToSave.schema[field].lastID = column.lastID + } + } + return tableToSave +} + +exports.handleDataImport = async (user, table, dataImport) => { + const db = new CouchDB(user.appId) + if (dataImport && dataImport.csvString) { + // Populate the table with rows imported from CSV in a bulk update + const data = await csvParser.transform(dataImport) + + for (let i = 0; i < data.length; i++) { + let row = data[i] + row._id = generateRowID(table._id) + row.tableId = table._id + const processed = inputProcessing(user, table, row) + row = processed.row + // these auto-fields will never actually link anywhere (always builder) + for (let [fieldName, schema] of Object.entries(table.schema)) { + if ( + schema.autocolumn && + (schema.subtype === AutoFieldSubTypes.CREATED_BY || + schema.subtype === AutoFieldSubTypes.UPDATED_BY) + ) { + delete row[fieldName] + } + } + table = processed.table + data[i] = row + } + + await db.bulkDocs(data) + let response = await db.put(table) + table._rev = response._rev + } + return table +} + +exports.handleSearchIndexes = async (db, table) => { + // create relevant search indexes + if (table.indexes && table.indexes.length > 0) { + const currentIndexes = await db.getIndexes() + const indexName = `search:${table._id}` + + const existingIndex = currentIndexes.indexes.find( + existing => existing.name === indexName + ) + + if (existingIndex) { + const currentFields = existingIndex.def.fields.map( + field => Object.keys(field)[0] + ) + + // if index fields have changed, delete the original index + if (!isEqual(currentFields, table.indexes)) { + await db.deleteIndex(existingIndex) + // create/recreate the index with fields + await db.createIndex({ + index: { + fields: table.indexes, + name: indexName, + ddoc: "search_ddoc", + type: "json", + }, + }) + } + } else { + // create/recreate the index with fields + await db.createIndex({ + 
index: { + fields: table.indexes, + name: indexName, + ddoc: "search_ddoc", + type: "json", + }, + }) + } + } + return table +} + +exports.checkStaticTables = table => { + // check user schema has all required elements + if (table._id === ViewNames.USERS) { + for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) { + // check if the schema exists on the table to be created/updated + if (table.schema[key] == null) { + table.schema[key] = schema + } + } + } + return table +} + +class TableSaveFunctions { + constructor({ db, ctx, oldTable, dataImport }) { + this.db = db + this.ctx = ctx + this.oldTable = oldTable + this.dataImport = dataImport + // any rows that need to be updated + this.rows = [] + } + + // before anything is done + async before(table) { + if (this.oldTable) { + table = exports.makeSureTableUpToDate(this.oldTable, table) + } + table = exports.checkStaticTables(table) + return table + } + + // when confirmed valid + async mid(table) { + let response = await exports.checkForColumnUpdates( + this.db, + this.oldTable, + table + ) + this.rows = this.rows.concat(response.rows) + return table + } + + // after saving + async after(table) { + table = await exports.handleSearchIndexes(this.db, table) + table = await exports.handleDataImport( + this.ctx.user, + table, + this.dataImport + ) + return table + } + + getUpdatedRows() { + return this.rows + } +} + +exports.TableSaveFunctions = TableSaveFunctions diff --git a/packages/server/src/api/controllers/user.js b/packages/server/src/api/controllers/user.js index fcb4c34319..c100f43d88 100644 --- a/packages/server/src/api/controllers/user.js +++ b/packages/server/src/api/controllers/user.js @@ -2,6 +2,7 @@ const CouchDB = require("../../db") const bcrypt = require("../../utilities/bcrypt") const { generateUserID, getUserParams, ViewNames } = require("../../db/utils") const { getRole } = require("../../utilities/security/roles") +const { UserStatus } = require("../../constants") exports.fetch = async 
function(ctx) { const database = new CouchDB(ctx.user.appId) @@ -42,6 +43,10 @@ exports.create = async function(ctx) { password: hashedPassword, tableId: ViewNames.USERS, } + // add the active status to a user if it's not provided + if (user.status == null) { + user.status = UserStatus.ACTIVE + } try { const response = await db.post(user) @@ -64,13 +69,21 @@ exports.create = async function(ctx) { exports.update = async function(ctx) { const db = new CouchDB(ctx.user.appId) const user = ctx.request.body + let dbUser + // get the existing user in case the password was removed + if (user._id) { + dbUser = await db.get(user._id) + } if (user.password) { user.password = await bcrypt.hash(user.password) } else { delete user.password } - const response = await db.put(user) + const response = await db.put({ + password: dbUser && dbUser.password, + ...user, + }) user._rev = response.rev ctx.status = 200 diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js index 314e220a97..ff2e6f19f0 100644 --- a/packages/server/src/constants/index.js +++ b/packages/server/src/constants/index.js @@ -1,44 +1,5 @@ const { BUILTIN_ROLE_IDS } = require("../utilities/security/roles") -const AuthTypes = { - APP: "app", - BUILDER: "builder", - EXTERNAL: "external", -} - -const USERS_TABLE_SCHEMA = { - _id: "ta_users", - type: "table", - views: {}, - name: "Users", - schema: { - email: { - type: "string", - constraints: { - type: "string", - email: true, - length: { - maximum: "", - }, - presence: true, - }, - fieldName: "email", - name: "email", - }, - roleId: { - fieldName: "roleId", - name: "roleId", - type: "options", - constraints: { - type: "string", - presence: false, - inclusion: Object.values(BUILTIN_ROLE_IDS), - }, - }, - }, - primaryDisplay: "email", -} - exports.FieldTypes = { STRING: "string", LONGFORM: "longform", @@ -51,6 +12,60 @@ exports.FieldTypes = { AUTO: "auto", } +exports.AuthTypes = { + APP: "app", + BUILDER: "builder", + EXTERNAL: "external", +} + +exports.UserStatus 
= { + ACTIVE: "active", + INACTIVE: "inactive", +} + +exports.USERS_TABLE_SCHEMA = { + _id: "ta_users", + type: "table", + views: {}, + name: "Users", + schema: { + email: { + type: exports.FieldTypes.STRING, + constraints: { + type: exports.FieldTypes.STRING, + email: true, + length: { + maximum: "", + }, + presence: true, + }, + fieldName: "email", + name: "email", + }, + roleId: { + fieldName: "roleId", + name: "roleId", + type: exports.FieldTypes.OPTIONS, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + inclusion: Object.values(BUILTIN_ROLE_IDS), + }, + }, + status: { + fieldName: "status", + name: "status", + type: exports.FieldTypes.OPTIONS, + constraints: { + type: exports.FieldTypes.STRING, + presence: false, + inclusion: Object.values(exports.UserStatus), + }, + }, + }, + primaryDisplay: "email", +} + exports.AutoFieldSubTypes = { CREATED_BY: "createdBy", CREATED_AT: "createdAt", @@ -59,8 +74,6 @@ exports.AutoFieldSubTypes = { AUTO_ID: "autoID", } -exports.AuthTypes = AuthTypes -exports.USERS_TABLE_SCHEMA = USERS_TABLE_SCHEMA exports.BUILDER_CONFIG_DB = "builder-config-db" exports.HOSTING_DOC = "hosting-doc" exports.OBJ_STORE_DIRECTORY = "/app-assets/assets" diff --git a/packages/standard-components/src/forms/RelationshipField.svelte b/packages/standard-components/src/forms/RelationshipField.svelte index 5c4c7f4bc9..f4f5244fd1 100644 --- a/packages/standard-components/src/forms/RelationshipField.svelte +++ b/packages/standard-components/src/forms/RelationshipField.svelte @@ -18,25 +18,25 @@ let options = [] let tableDefinition let fieldText = "" - - const setFieldText = (value) => { - if (fieldSchema?.relationshipType === 'one-to-many') { + + const setFieldText = value => { + if (fieldSchema?.relationshipType === "one-to-many") { if (value?.length && options?.length) { const row = options.find(row => row._id === value[0]) return row.name } else { - return placeholder || 'Choose an option' - } + return placeholder || "Choose an 
option" + } } else { if (value?.length) { return `${value?.length ?? 0} selected rows` } else { - return placeholder || 'Choose some options' - } + return placeholder || "Choose some options" + } } } - $: options, fieldText = setFieldText($fieldState?.value) + $: options, (fieldText = setFieldText($fieldState?.value)) $: valueLookupMap = getValueLookupMap($fieldState?.value) $: isOptionSelected = option => valueLookupMap[option] === true $: linkedTableId = fieldSchema?.tableId @@ -74,14 +74,14 @@ } const toggleOption = option => { - if (fieldSchema.type === 'one-to-many') { + if (fieldSchema.type === "one-to-many") { fieldApi.setValue([option]) } else { if ($fieldState.value.includes(option)) { - fieldApi.setValue($fieldState.value.filter(x => x !== option)) - } else { - fieldApi.setValue([...$fieldState.value, option]) - } + fieldApi.setValue($fieldState.value.filter(x => x !== option)) + } else { + fieldApi.setValue([...$fieldState.value, option]) + } } }