Merge pull request #1152 from Budibase/user-active-and-bugs
Some fixes and the ability to set a user to inactive
This commit is contained in: commit adaabf5441
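The core change in this pull request is a user `status` flag ("active"/"inactive") that is checked at login. A minimal sketch of that behaviour, condensed from the server hunks further down; the helper name `rejectInactiveUser` is illustrative only, the real check lives inline in `exports.authenticate`:

// Sketch only - condensed from the authentication controller changes below.
const UserStatus = { ACTIVE: "active", INACTIVE: "inactive" }
const INVALID_ERR = "Invalid Credentials"

function rejectInactiveUser(ctx, dbUser) {
  // an inactive user gets the same 401 as a bad password, so the endpoint
  // does not reveal which emails exist
  if (dbUser.status === UserStatus.INACTIVE) {
    ctx.throw(401, INVALID_ERR)
  }
}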
@@ -11,8 +11,9 @@
 import { capitalise } from "../../../helpers"
 import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"

+export let defaultValue
 export let meta
-export let value = meta.type === "boolean" ? false : ""
+export let value = defaultValue || (meta.type === "boolean" ? false : "")
 export let readonly

 $: type = meta.type
@@ -7,7 +7,6 @@
   Select,
   Toggle,
   Radio,
-
 } from "@budibase/bbui"
 import { cloneDeep } from "lodash/fp"
 import { backendUiStore } from "builderStore"
@@ -38,12 +37,14 @@
     $backendUiStore.selectedTable.primaryDisplay === field.name

 let relationshipTypes = [
-  {text: 'Many to many (N:N)', value: 'many-to-many',},
+  { text: "Many to many (N:N)", value: "many-to-many" },
-  {text: 'One to many (1:N)', value: 'one-to-many',}
+  { text: "One to many (1:N)", value: "one-to-many" },
 ]
-let types = ['Many to many (N:N)', 'One to many (1:N)']
+let types = ["Many to many (N:N)", "One to many (1:N)"]

-let selectedRelationshipType = relationshipTypes.find(type => type.value === field.relationshipType)?.text || 'Many to many (N:N)'
+let selectedRelationshipType =
+  relationshipTypes.find(type => type.value === field.relationshipType)
+    ?.text || "Many to many (N:N)"

 let indexes = [...($backendUiStore.selectedTable.indexes || [])]
 let confirmDeleteDialog
@@ -69,8 +70,10 @@

   async function saveColumn() {
     // Set relationship type if it's
-    if (field.type === 'link') {
+    if (field.type === "link") {
-      field.relationshipType = relationshipTypes.find(type => type.text === selectedRelationshipType).value
+      field.relationshipType = relationshipTypes.find(
+        type => type.text === selectedRelationshipType
+      ).value
     }

     if (field.type === AUTO_COL) {
@@ -232,7 +235,11 @@
     <Label grey extraSmall>Select relationship type</Label>
     <div class="radio-buttons">
       {#each types as type}
-        <Radio disabled={originalName} name="Relationship type" value={type} bind:group={selectedRelationshipType}>
+        <Radio
+          disabled={originalName}
+          name="Relationship type"
+          value={type}
+          bind:group={selectedRelationshipType}>
           <label for={type}>{type}</label>
         </Radio>
       {/each}
@@ -282,7 +289,7 @@
   .radio-buttons {
     display: flex;
     gap: var(--spacing-m);
-    font-size: var(--font-size-xs)
+    font-size: var(--font-size-xs);
   }
   .actions {
     display: grid;
@@ -29,6 +29,7 @@
     let customSchema = { ...schema }
     delete customSchema["email"]
     delete customSchema["roleId"]
+    delete customSchema["status"]
     return Object.entries(customSchema)
   }

@@ -79,7 +80,13 @@
       <option value={role._id}>{role.name}</option>
     {/each}
   </Select>
+  <RowFieldControl
+    meta={{ name: 'status', type: 'options', constraints: { inclusion: ['active', 'inactive'] } }}
+    bind:value={row.status}
+    defaultValue={'active'} />
   {#each customSchemaKeys as [key, meta]}
+    {#if !meta.autocolumn}
       <RowFieldControl {meta} bind:value={row[key]} {creating} />
+    {/if}
   {/each}
 </ModalContent>
@@ -42,10 +42,17 @@
   </Label>
 {:else}
   {#if schema.relationshipType === 'one-to-many'}
-    <Select thin secondary on:change={e => linkedRows = [e.target.value]} name={label} {label}>
+    <Select
+      thin
+      secondary
+      on:change={e => (linkedRows = [e.target.value])}
+      name={label}
+      {label}>
       <option value="">Choose an option</option>
       {#each rows as row}
-        <option selected={row._id === linkedRows[0]} value={row._id}>{getPrettyName(row)}</option>
+        <option selected={row._id === linkedRows[0]} value={row._id}>
+          {getPrettyName(row)}
+        </option>
       {/each}
     </Select>
   {:else}
@@ -1,7 +1,7 @@
 /**
  * API cache for cached request responses.
  */
-import { notificationStore } from "../store/notification"
+import { notificationStore } from "../store"
 let cache = {}

 /**
@@ -34,6 +34,9 @@ const makeApiCall = async ({ method, url, body, json = true }) => {
   switch (response.status) {
     case 200:
       return response.json()
+    case 401:
+      notificationStore.danger("Invalid credentials")
+      return handleError(`Invalid credentials`)
     case 404:
       notificationStore.danger("Not found")
       return handleError(`${url}: Not Found`)
@@ -7,6 +7,9 @@ const { generateUserID } = require("../../db/utils")
 const { setCookie } = require("../../utilities")
 const { outputProcessing } = require("../../utilities/rowProcessor")
 const { ViewNames } = require("../../db/utils")
+const { UserStatus } = require("../../constants")
+
+const INVALID_ERR = "Invalid Credentials"

 exports.authenticate = async ctx => {
   const appId = ctx.appId
@@ -27,7 +30,12 @@ exports.authenticate = async ctx => {
   } catch (_) {
     // do not want to throw a 404 - as this could be
     // used to determine valid emails
-    ctx.throw(401, "Invalid Credentials")
+    ctx.throw(401, INVALID_ERR)
+  }
+
+  // check that the user is currently inactive, if this is the case throw invalid
+  if (dbUser.status === UserStatus.INACTIVE) {
+    ctx.throw(401, INVALID_ERR)
   }

   // authenticate
@@ -56,7 +64,7 @@ exports.authenticate = async ctx => {
       appId,
     }
   } else {
-    ctx.throw(401, "Invalid credentials.")
+    ctx.throw(401, INVALID_ERR)
   }
 }
@@ -96,7 +96,10 @@ exports.patch = async function(ctx) {
   // Creation of a new user goes to the user controller
   if (row.tableId === ViewNames.USERS) {
     // the row has been updated, need to put it into the ctx
-    ctx.request.body = row
+    ctx.request.body = {
+      ...row,
+      password: ctx.request.body.password,
+    }
     await usersController.update(ctx)
     return
   }
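For context on the hunk above: when a row in the users table is patched, the update is forwarded to the users controller with the password taken from the raw request body rather than from the merged row, presumably so that a newly submitted password is hashed once and the stored hash is never re-hashed. A minimal sketch of the hand-off; `row` and `usersController` come from the diff, the surrounding handler is elided:

// Sketch: forwarding a user-row patch to the users controller.
// `row` is the merged user document, so its password field is the stored hash.
ctx.request.body = {
  ...row,
  // keep only the password the client just submitted (may be undefined) and
  // let the users controller decide whether to hash it or keep the old one
  password: ctx.request.body.password,
}
await usersController.update(ctx)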
@@ -1,285 +0,0 @@
-const CouchDB = require("../../db")
-const linkRows = require("../../db/linkedRows")
-const csvParser = require("../../utilities/csvParser")
-const {
-  getRowParams,
-  getTableParams,
-  generateTableID,
-  generateRowID,
-} = require("../../db/utils")
-const { isEqual } = require("lodash/fp")
-const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
-const { inputProcessing } = require("../../utilities/rowProcessor")
-
-async function checkForColumnUpdates(db, oldTable, updatedTable) {
-  let updatedRows
-  const rename = updatedTable._rename
-  let deletedColumns = []
-  if (oldTable && oldTable.schema && updatedTable.schema) {
-    deletedColumns = Object.keys(oldTable.schema).filter(
-      colName => updatedTable.schema[colName] == null
-    )
-  }
-  // check for renaming of columns or deleted columns
-  if (rename || deletedColumns.length !== 0) {
-    const rows = await db.allDocs(
-      getRowParams(updatedTable._id, null, {
-        include_docs: true,
-      })
-    )
-    updatedRows = rows.rows.map(({ doc }) => {
-      if (rename) {
-        doc[rename.updated] = doc[rename.old]
-        delete doc[rename.old]
-      } else if (deletedColumns.length !== 0) {
-        deletedColumns.forEach(colName => delete doc[colName])
-      }
-      return doc
-    })
-    delete updatedTable._rename
-  }
-  return updatedRows
-}
-
-// makes sure the passed in table isn't going to reset the auto ID
-function makeSureTableUpToDate(table, tableToSave) {
-  if (!table) {
-    return tableToSave
-  }
-  // sure sure rev is up to date
-  tableToSave._rev = table._rev
-  // make sure auto IDs are always updated - these are internal
-  // so the client may not know they have changed
-  for (let [field, column] of Object.entries(table.schema)) {
-    if (
-      column.autocolumn &&
-      column.subtype === AutoFieldSubTypes.AUTO_ID &&
-      tableToSave.schema[field]
-    ) {
-      tableToSave.schema[field].lastID = column.lastID
-    }
-  }
-  return tableToSave
-}
-
-async function handleDataImport(user, table, dataImport) {
-  const db = new CouchDB(user.appId)
-  if (dataImport && dataImport.csvString) {
-    // Populate the table with rows imported from CSV in a bulk update
-    const data = await csvParser.transform(dataImport)
-
-    for (let i = 0; i < data.length; i++) {
-      let row = data[i]
-      row._id = generateRowID(table._id)
-      row.tableId = table._id
-      const processed = inputProcessing(user, table, row)
-      row = processed.row
-      // these auto-fields will never actually link anywhere (always builder)
-      for (let [fieldName, schema] of Object.entries(table.schema)) {
-        if (
-          schema.autocolumn &&
-          (schema.subtype === AutoFieldSubTypes.CREATED_BY ||
-            schema.subtype === AutoFieldSubTypes.UPDATED_BY)
-        ) {
-          delete row[fieldName]
-        }
-      }
-      table = processed.table
-      data[i] = row
-    }
-
-    await db.bulkDocs(data)
-    let response = await db.put(table)
-    table._rev = response._rev
-  }
-  return table
-}
-
-async function handleSearchIndexes(db, table) {
-  // create relevant search indexes
-  if (table.indexes && table.indexes.length > 0) {
-    const currentIndexes = await db.getIndexes()
-    const indexName = `search:${table._id}`
-
-    const existingIndex = currentIndexes.indexes.find(
-      existing => existing.name === indexName
-    )
-
-    if (existingIndex) {
-      const currentFields = existingIndex.def.fields.map(
-        field => Object.keys(field)[0]
-      )
-
-      // if index fields have changed, delete the original index
-      if (!isEqual(currentFields, table.indexes)) {
-        await db.deleteIndex(existingIndex)
-        // create/recreate the index with fields
-        await db.createIndex({
-          index: {
-            fields: table.indexes,
-            name: indexName,
-            ddoc: "search_ddoc",
-            type: "json",
-          },
-        })
-      }
-    } else {
-      // create/recreate the index with fields
-      await db.createIndex({
-        index: {
-          fields: table.indexes,
-          name: indexName,
-          ddoc: "search_ddoc",
-          type: "json",
-        },
-      })
-    }
-  }
-  return table
-}
-
-exports.fetch = async function(ctx) {
-  const db = new CouchDB(ctx.user.appId)
-  const body = await db.allDocs(
-    getTableParams(null, {
-      include_docs: true,
-    })
-  )
-  ctx.body = body.rows.map(row => row.doc)
-}
-
-exports.find = async function(ctx) {
-  const db = new CouchDB(ctx.user.appId)
-  ctx.body = await db.get(ctx.params.id)
-}
-
-exports.save = async function(ctx) {
-  const appId = ctx.user.appId
-  const db = new CouchDB(appId)
-  const { dataImport, ...rest } = ctx.request.body
-  let tableToSave = {
-    type: "table",
-    _id: generateTableID(),
-    views: {},
-    ...rest,
-  }
-
-  // if the table obj had an _id then it will have been retrieved
-  let oldTable
-  if (ctx.request.body && ctx.request.body._id) {
-    oldTable = await db.get(ctx.request.body._id)
-    tableToSave = makeSureTableUpToDate(oldTable, tableToSave)
-  }
-
-  // make sure that types don't change of a column, have to remove
-  // the column if you want to change the type
-  if (oldTable && oldTable.schema) {
-    for (let propKey of Object.keys(tableToSave.schema)) {
-      let column = tableToSave.schema[propKey]
-      let oldColumn = oldTable.schema[propKey]
-      if (oldColumn && oldColumn.type !== column.type) {
-        ctx.throw(400, "Cannot change the type of a column")
-      }
-    }
-  }
-
-  // Don't rename if the name is the same
-  let { _rename } = tableToSave
-  if (_rename && _rename.old === _rename.updated) {
-    _rename = null
-    delete tableToSave._rename
-  }
-
-  // rename row fields when table column is renamed
-  if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
-    ctx.throw(400, "Cannot rename a linked column.")
-  } else if (_rename && tableToSave.primaryDisplay === _rename.old) {
-    ctx.throw(400, "Cannot rename the display column.")
-  }
-
-  let updatedRows = await checkForColumnUpdates(db, oldTable, tableToSave)
-
-  // update schema of non-statistics views when new columns are added
-  for (let view in tableToSave.views) {
-    const tableView = tableToSave.views[view]
-    if (!tableView) continue
-
-    if (tableView.schema.group || tableView.schema.field) continue
-    tableView.schema = tableToSave.schema
-  }
-
-  // update linked rows
-  const linkResp = await linkRows.updateLinks({
-    appId,
-    eventType: oldTable
-      ? linkRows.EventType.TABLE_UPDATED
-      : linkRows.EventType.TABLE_SAVE,
-    table: tableToSave,
-    oldTable: oldTable,
-  })
-  if (linkResp != null && linkResp._rev) {
-    tableToSave._rev = linkResp._rev
-  }
-
-  // don't perform any updates until relationships have been
-  // checked by the updateLinks function
-  if (updatedRows && updatedRows.length !== 0) {
-    await db.bulkDocs(updatedRows)
-  }
-  const result = await db.post(tableToSave)
-  tableToSave._rev = result.rev
-
-  tableToSave = await handleSearchIndexes(db, tableToSave)
-  tableToSave = await handleDataImport(ctx.user, tableToSave, dataImport)
-
-  ctx.eventEmitter &&
-    ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)
-
-  ctx.status = 200
-  ctx.message = `Table ${ctx.request.body.name} saved successfully.`
-  ctx.body = tableToSave
-}
-
-exports.destroy = async function(ctx) {
-  const appId = ctx.user.appId
-  const db = new CouchDB(appId)
-  const tableToDelete = await db.get(ctx.params.tableId)
-
-  // Delete all rows for that table
-  const rows = await db.allDocs(
-    getRowParams(ctx.params.tableId, null, {
-      include_docs: true,
-    })
-  )
-  await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
-
-  // update linked rows
-  await linkRows.updateLinks({
-    appId,
-    eventType: linkRows.EventType.TABLE_DELETE,
-    table: tableToDelete,
-  })
-
-  // don't remove the table itself until very end
-  await db.remove(tableToDelete)
-
-  // remove table search index
-  const currentIndexes = await db.getIndexes()
-  const existingIndex = currentIndexes.indexes.find(
-    existing => existing.name === `search:${ctx.params.tableId}`
-  )
-  if (existingIndex) {
-    await db.deleteIndex(existingIndex)
-  }
-
-  ctx.eventEmitter &&
-    ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
-  ctx.status = 200
-  ctx.message = `Table ${ctx.params.tableId} deleted.`
-}
-
-exports.validateCSVSchema = async function(ctx) {
-  const { csvString, schema = {} } = ctx.request.body
-  const result = await csvParser.parse(csvString, schema)
-  ctx.body = { schema: result }
-}
@@ -0,0 +1,165 @@
+const CouchDB = require("../../../db")
+const linkRows = require("../../../db/linkedRows")
+const csvParser = require("../../../utilities/csvParser")
+const {
+  getRowParams,
+  getTableParams,
+  generateTableID,
+} = require("../../../db/utils")
+const { FieldTypes } = require("../../../constants")
+const { TableSaveFunctions } = require("./utils")
+
+exports.fetch = async function(ctx) {
+  const db = new CouchDB(ctx.user.appId)
+  const body = await db.allDocs(
+    getTableParams(null, {
+      include_docs: true,
+    })
+  )
+  ctx.body = body.rows.map(row => row.doc)
+}
+
+exports.find = async function(ctx) {
+  const db = new CouchDB(ctx.user.appId)
+  ctx.body = await db.get(ctx.params.id)
+}
+
+exports.save = async function(ctx) {
+  const appId = ctx.user.appId
+  const db = new CouchDB(appId)
+  const { dataImport, ...rest } = ctx.request.body
+  let tableToSave = {
+    type: "table",
+    _id: generateTableID(),
+    views: {},
+    ...rest,
+  }
+
+  // if the table obj had an _id then it will have been retrieved
+  let oldTable
+  if (ctx.request.body && ctx.request.body._id) {
+    oldTable = await db.get(ctx.request.body._id)
+  }
+
+  // saving a table is a complex operation, involving many different steps, this
+  // has been broken out into a utility to make it more obvious/easier to manipulate
+  const tableSaveFunctions = new TableSaveFunctions({
+    db,
+    ctx,
+    oldTable,
+    dataImport,
+  })
+  tableToSave = await tableSaveFunctions.before(tableToSave)
+
+  // make sure that types don't change of a column, have to remove
+  // the column if you want to change the type
+  if (oldTable && oldTable.schema) {
+    for (let propKey of Object.keys(tableToSave.schema)) {
+      let column = tableToSave.schema[propKey]
+      let oldColumn = oldTable.schema[propKey]
+      if (oldColumn && oldColumn.type !== column.type) {
+        ctx.throw(400, "Cannot change the type of a column")
+      }
+    }
+  }
+
+  // Don't rename if the name is the same
+  let { _rename } = tableToSave
+  if (_rename && _rename.old === _rename.updated) {
+    _rename = null
+    delete tableToSave._rename
+  }
+
+  // rename row fields when table column is renamed
+  if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
+    ctx.throw(400, "Cannot rename a linked column.")
+  } else if (_rename && tableToSave.primaryDisplay === _rename.old) {
+    ctx.throw(400, "Cannot rename the display column.")
+  }
+
+  tableToSave = await tableSaveFunctions.mid(tableToSave)
+
+  // update schema of non-statistics views when new columns are added
+  for (let view in tableToSave.views) {
+    const tableView = tableToSave.views[view]
+    if (!tableView) continue
+
+    if (tableView.schema.group || tableView.schema.field) continue
+    tableView.schema = tableToSave.schema
+  }
+
+  // update linked rows
+  const linkResp = await linkRows.updateLinks({
+    appId,
+    eventType: oldTable
+      ? linkRows.EventType.TABLE_UPDATED
+      : linkRows.EventType.TABLE_SAVE,
+    table: tableToSave,
+    oldTable: oldTable,
+  })
+  if (linkResp != null && linkResp._rev) {
+    tableToSave._rev = linkResp._rev
+  }
+
+  // don't perform any updates until relationships have been
+  // checked by the updateLinks function
+  const updatedRows = tableSaveFunctions.getUpdatedRows()
+  if (updatedRows && updatedRows.length !== 0) {
+    await db.bulkDocs(updatedRows)
+  }
+  const result = await db.post(tableToSave)
+  tableToSave._rev = result.rev
+
+  tableToSave = await tableSaveFunctions.after(tableToSave)
+
+  ctx.eventEmitter &&
+    ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)
+
+  ctx.status = 200
+  ctx.message = `Table ${ctx.request.body.name} saved successfully.`
+  ctx.body = tableToSave
+}
+
+exports.destroy = async function(ctx) {
+  const appId = ctx.user.appId
+  const db = new CouchDB(appId)
+  const tableToDelete = await db.get(ctx.params.tableId)
+
+  // Delete all rows for that table
+  const rows = await db.allDocs(
+    getRowParams(ctx.params.tableId, null, {
+      include_docs: true,
+    })
+  )
+  await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
+
+  // update linked rows
+  await linkRows.updateLinks({
+    appId,
+    eventType: linkRows.EventType.TABLE_DELETE,
+    table: tableToDelete,
+  })
+
+  // don't remove the table itself until very end
+  await db.remove(tableToDelete)
+
+  // remove table search index
+  const currentIndexes = await db.getIndexes()
+  const existingIndex = currentIndexes.indexes.find(
+    existing => existing.name === `search:${ctx.params.tableId}`
+  )
+  if (existingIndex) {
+    await db.deleteIndex(existingIndex)
+  }
+
+  ctx.eventEmitter &&
+    ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
+  ctx.status = 200
+  ctx.message = `Table ${ctx.params.tableId} deleted.`
+}
+
+exports.validateCSVSchema = async function(ctx) {
+  const { csvString, schema = {} } = ctx.request.body
+  const result = await csvParser.parse(csvString, schema)
+  ctx.body = { schema: result }
+}
@@ -0,0 +1,195 @@
+const CouchDB = require("../../../db")
+const csvParser = require("../../../utilities/csvParser")
+const { getRowParams, generateRowID, ViewNames } = require("../../../db/utils")
+const { isEqual } = require("lodash/fp")
+const { AutoFieldSubTypes } = require("../../../constants")
+const { inputProcessing } = require("../../../utilities/rowProcessor")
+const { USERS_TABLE_SCHEMA } = require("../../../constants")
+
+exports.checkForColumnUpdates = async (db, oldTable, updatedTable) => {
+  let updatedRows = []
+  const rename = updatedTable._rename
+  let deletedColumns = []
+  if (oldTable && oldTable.schema && updatedTable.schema) {
+    deletedColumns = Object.keys(oldTable.schema).filter(
+      colName => updatedTable.schema[colName] == null
+    )
+  }
+  // check for renaming of columns or deleted columns
+  if (rename || deletedColumns.length !== 0) {
+    const rows = await db.allDocs(
+      getRowParams(updatedTable._id, null, {
+        include_docs: true,
+      })
+    )
+    updatedRows = rows.rows.map(({ doc }) => {
+      if (rename) {
+        doc[rename.updated] = doc[rename.old]
+        delete doc[rename.old]
+      } else if (deletedColumns.length !== 0) {
+        deletedColumns.forEach(colName => delete doc[colName])
+      }
+      return doc
+    })
+    delete updatedTable._rename
+  }
+  return { rows: updatedRows, table: updatedTable }
+}
+
+// makes sure the passed in table isn't going to reset the auto ID
+exports.makeSureTableUpToDate = (table, tableToSave) => {
+  if (!table) {
+    return tableToSave
+  }
+  // sure sure rev is up to date
+  tableToSave._rev = table._rev
+  // make sure auto IDs are always updated - these are internal
+  // so the client may not know they have changed
+  for (let [field, column] of Object.entries(table.schema)) {
+    if (
+      column.autocolumn &&
+      column.subtype === AutoFieldSubTypes.AUTO_ID &&
+      tableToSave.schema[field]
+    ) {
+      tableToSave.schema[field].lastID = column.lastID
+    }
+  }
+  return tableToSave
+}
+
+exports.handleDataImport = async (user, table, dataImport) => {
+  const db = new CouchDB(user.appId)
+  if (dataImport && dataImport.csvString) {
+    // Populate the table with rows imported from CSV in a bulk update
+    const data = await csvParser.transform(dataImport)
+
+    for (let i = 0; i < data.length; i++) {
+      let row = data[i]
+      row._id = generateRowID(table._id)
+      row.tableId = table._id
+      const processed = inputProcessing(user, table, row)
+      row = processed.row
+      // these auto-fields will never actually link anywhere (always builder)
+      for (let [fieldName, schema] of Object.entries(table.schema)) {
+        if (
+          schema.autocolumn &&
+          (schema.subtype === AutoFieldSubTypes.CREATED_BY ||
+            schema.subtype === AutoFieldSubTypes.UPDATED_BY)
+        ) {
+          delete row[fieldName]
+        }
+      }
+      table = processed.table
+      data[i] = row
+    }
+
+    await db.bulkDocs(data)
+    let response = await db.put(table)
+    table._rev = response._rev
+  }
+  return table
+}
+
+exports.handleSearchIndexes = async (db, table) => {
+  // create relevant search indexes
+  if (table.indexes && table.indexes.length > 0) {
+    const currentIndexes = await db.getIndexes()
+    const indexName = `search:${table._id}`
+
+    const existingIndex = currentIndexes.indexes.find(
+      existing => existing.name === indexName
+    )
+
+    if (existingIndex) {
+      const currentFields = existingIndex.def.fields.map(
+        field => Object.keys(field)[0]
+      )
+
+      // if index fields have changed, delete the original index
+      if (!isEqual(currentFields, table.indexes)) {
+        await db.deleteIndex(existingIndex)
+        // create/recreate the index with fields
+        await db.createIndex({
+          index: {
+            fields: table.indexes,
+            name: indexName,
+            ddoc: "search_ddoc",
+            type: "json",
+          },
+        })
+      }
+    } else {
+      // create/recreate the index with fields
+      await db.createIndex({
+        index: {
+          fields: table.indexes,
+          name: indexName,
+          ddoc: "search_ddoc",
+          type: "json",
+        },
+      })
+    }
+  }
+  return table
+}
+
+exports.checkStaticTables = table => {
+  // check user schema has all required elements
+  if (table._id === ViewNames.USERS) {
+    for (let [key, schema] of Object.entries(USERS_TABLE_SCHEMA.schema)) {
+      // check if the schema exists on the table to be created/updated
+      if (table.schema[key] == null) {
+        table.schema[key] = schema
+      }
+    }
+  }
+  return table
+}
+
+class TableSaveFunctions {
+  constructor({ db, ctx, oldTable, dataImport }) {
+    this.db = db
+    this.ctx = ctx
+    this.oldTable = oldTable
+    this.dataImport = dataImport
+    // any rows that need updated
+    this.rows = []
+  }
+
+  // before anything is done
+  async before(table) {
+    if (this.oldTable) {
+      table = exports.makeSureTableUpToDate(this.oldTable, table)
+    }
+    table = exports.checkStaticTables(table)
+    return table
+  }
+
+  // when confirmed valid
+  async mid(table) {
+    let response = await exports.checkForColumnUpdates(
+      this.db,
+      this.oldTable,
+      table
+    )
+    this.rows = this.rows.concat(response.rows)
+    return table
+  }
+
+  // after saving
+  async after(table) {
+    table = await exports.handleSearchIndexes(this.db, table)
+    table = await exports.handleDataImport(
+      this.ctx.user,
+      table,
+      this.dataImport
+    )
+    return table
+  }
+
+  getUpdatedRows() {
+    return this.rows
+  }
+}
+
+exports.TableSaveFunctions = TableSaveFunctions
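The new `TableSaveFunctions` helper above splits a table save into three phases. A minimal usage sketch, assuming the exports shown in this file; the wrapper function `saveTable` and its signature are hypothetical, the call order mirrors `exports.save` in the new table controller:

// Sketch: intended lifecycle of TableSaveFunctions during a table save.
const { TableSaveFunctions } = require("./utils")

async function saveTable(ctx, db, oldTable, dataImport, tableToSave) {
  const fns = new TableSaveFunctions({ db, ctx, oldTable, dataImport })
  tableToSave = await fns.before(tableToSave) // rev/auto-ID sync + static users table check
  // ...column type and rename validation happens here in exports.save...
  tableToSave = await fns.mid(tableToSave) // collects rows affected by renames/deletions
  // ...links are updated and the table document is posted here...
  tableToSave = await fns.after(tableToSave) // search indexes + CSV data import
  return tableToSave
}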
@@ -2,6 +2,7 @@ const CouchDB = require("../../db")
 const bcrypt = require("../../utilities/bcrypt")
 const { generateUserID, getUserParams, ViewNames } = require("../../db/utils")
 const { getRole } = require("../../utilities/security/roles")
+const { UserStatus } = require("../../constants")

 exports.fetch = async function(ctx) {
   const database = new CouchDB(ctx.user.appId)
@@ -42,6 +43,10 @@ exports.create = async function(ctx) {
     password: hashedPassword,
     tableId: ViewNames.USERS,
   }
+  // add the active status to a user if its not provided
+  if (user.status == null) {
+    user.status = UserStatus.ACTIVE
+  }

   try {
     const response = await db.post(user)
@@ -64,13 +69,21 @@ exports.create = async function(ctx) {
 exports.update = async function(ctx) {
   const db = new CouchDB(ctx.user.appId)
   const user = ctx.request.body
+  let dbUser
+  // get user incase password removed
+  if (user._id) {
+    dbUser = await db.get(user._id)
+  }
   if (user.password) {
     user.password = await bcrypt.hash(user.password)
   } else {
     delete user.password
   }

-  const response = await db.put(user)
+  const response = await db.put({
+    password: dbUser.password,
+    ...user,
+  })
   user._rev = response.rev

   ctx.status = 200
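With the change above, `status` is optional when a user is created and defaults to active. A minimal sketch of what `exports.create` now guarantees about the stored document; the example field values are hypothetical, the field names come from the diff:

// Sketch: user document shape after exports.create applies the default.
const user = {
  email: "person@example.com",
  password: "<bcrypt hash>",
  roleId: "BASIC",
  tableId: "ta_users", // ViewNames.USERS
}
if (user.status == null) {
  user.status = UserStatus.ACTIVE // "active"
}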
@@ -1,44 +1,5 @@
 const { BUILTIN_ROLE_IDS } = require("../utilities/security/roles")
-
-const AuthTypes = {
-  APP: "app",
-  BUILDER: "builder",
-  EXTERNAL: "external",
-}
-
-const USERS_TABLE_SCHEMA = {
-  _id: "ta_users",
-  type: "table",
-  views: {},
-  name: "Users",
-  schema: {
-    email: {
-      type: "string",
-      constraints: {
-        type: "string",
-        email: true,
-        length: {
-          maximum: "",
-        },
-        presence: true,
-      },
-      fieldName: "email",
-      name: "email",
-    },
-    roleId: {
-      fieldName: "roleId",
-      name: "roleId",
-      type: "options",
-      constraints: {
-        type: "string",
-        presence: false,
-        inclusion: Object.values(BUILTIN_ROLE_IDS),
-      },
-    },
-  },
-  primaryDisplay: "email",
-}

 exports.FieldTypes = {
   STRING: "string",
   LONGFORM: "longform",
@@ -51,6 +12,60 @@ exports.FieldTypes = {
   AUTO: "auto",
 }
+
+exports.AuthTypes = {
+  APP: "app",
+  BUILDER: "builder",
+  EXTERNAL: "external",
+}
+
+exports.UserStatus = {
+  ACTIVE: "active",
+  INACTIVE: "inactive",
+}
+
+exports.USERS_TABLE_SCHEMA = {
+  _id: "ta_users",
+  type: "table",
+  views: {},
+  name: "Users",
+  schema: {
+    email: {
+      type: exports.FieldTypes.STRING,
+      constraints: {
+        type: exports.FieldTypes.STRING,
+        email: true,
+        length: {
+          maximum: "",
+        },
+        presence: true,
+      },
+      fieldName: "email",
+      name: "email",
+    },
+    roleId: {
+      fieldName: "roleId",
+      name: "roleId",
+      type: exports.FieldTypes.OPTIONS,
+      constraints: {
+        type: exports.FieldTypes.STRING,
+        presence: false,
+        inclusion: Object.values(BUILTIN_ROLE_IDS),
+      },
+    },
+    status: {
+      fieldName: "status",
+      name: "status",
+      type: exports.FieldTypes.OPTIONS,
+      constraints: {
+        type: exports.FieldTypes.STRING,
+        presence: false,
+        inclusion: Object.values(exports.UserStatus),
+      },
+    },
+  },
+  primaryDisplay: "email",
+}

 exports.AutoFieldSubTypes = {
   CREATED_BY: "createdBy",
   CREATED_AT: "createdAt",
@@ -59,8 +74,6 @@ exports.AutoFieldSubTypes = {
   AUTO_ID: "autoID",
 }

-exports.AuthTypes = AuthTypes
-exports.USERS_TABLE_SCHEMA = USERS_TABLE_SCHEMA
 exports.BUILDER_CONFIG_DB = "builder-config-db"
 exports.HOSTING_DOC = "hosting-doc"
 exports.OBJ_STORE_DIRECTORY = "/app-assets/assets"
@@ -19,24 +19,24 @@
   let tableDefinition
   let fieldText = ""

-  const setFieldText = (value) => {
+  const setFieldText = value => {
-    if (fieldSchema?.relationshipType === 'one-to-many') {
+    if (fieldSchema?.relationshipType === "one-to-many") {
       if (value?.length && options?.length) {
         const row = options.find(row => row._id === value[0])
         return row.name
       } else {
-        return placeholder || 'Choose an option'
+        return placeholder || "Choose an option"
       }
     } else {
       if (value?.length) {
         return `${value?.length ?? 0} selected rows`
       } else {
-        return placeholder || 'Choose some options'
+        return placeholder || "Choose some options"
       }
     }
   }

-  $: options, fieldText = setFieldText($fieldState?.value)
+  $: options, (fieldText = setFieldText($fieldState?.value))
   $: valueLookupMap = getValueLookupMap($fieldState?.value)
   $: isOptionSelected = option => valueLookupMap[option] === true
   $: linkedTableId = fieldSchema?.tableId
@@ -74,7 +74,7 @@
   }

   const toggleOption = option => {
-    if (fieldSchema.type === 'one-to-many') {
+    if (fieldSchema.type === "one-to-many") {
       fieldApi.setValue([option])
     } else {
       if ($fieldState.value.includes(option)) {