diff --git a/packages/builder/cypress/integration/createTable.spec.js b/packages/builder/cypress/integration/createTable.spec.js
index d309f4fcb9..067505cfab 100644
--- a/packages/builder/cypress/integration/createTable.spec.js
+++ b/packages/builder/cypress/integration/createTable.spec.js
@@ -30,7 +30,7 @@ context("Create a Table", () => {
     // Unset table display column
     cy.contains("display column").click()
     cy.contains("Save Column").click()
-    cy.contains("nameupdated").should("have.text", "nameupdated")
+    cy.contains("nameupdated ").should("have.text", "nameupdated ")
   })

   it("edits a row", () => {
diff --git a/packages/builder/cypress/integration/createView.spec.js b/packages/builder/cypress/integration/createView.spec.js
index bdc89bec53..cfafdc6705 100644
--- a/packages/builder/cypress/integration/createView.spec.js
+++ b/packages/builder/cypress/integration/createView.spec.js
@@ -1,3 +1,11 @@
+function removeSpacing(headers) {
+  let newHeaders = []
+  for (let header of headers) {
+    newHeaders.push(header.replace(/\s\s+/g, " "))
+  }
+  return newHeaders
+}
+
 context("Create a View", () => {
   before(() => {
     cy.visit("localhost:4001/_builder")
@@ -28,7 +36,7 @@ context("Create a View", () => {
       const headers = Array.from($headers).map(header =>
        header.textContent.trim()
      )
-      expect(headers).to.deep.eq([ 'rating', 'age', 'group' ])
+      expect(removeSpacing(headers)).to.deep.eq([ "rating Number", "age Number", "group Text" ])
    })
  })
@@ -60,13 +68,19 @@ context("Create a View", () => {
      const headers = Array.from($headers).map(header =>
        header.textContent.trim()
      )
-      expect(headers).to.deep.eq([ 'avg', 'sumsqr', 'count', 'max', 'min', 'sum', 'field' ])
+      expect(removeSpacing(headers)).to.deep.eq([ "avg Number",
+        "sumsqr Number",
+        "count Number",
+        "max Number",
+        "min Number",
+        "sum Number",
+        "field Text" ])
    })
    cy.get(".ag-cell").then($values => {
      let values = Array.from($values).map(header =>
        header.textContent.trim()
      )
-      expect(values).to.deep.eq([ '31', '5347', '5', '49', '20', '155', 'age' ])
+      expect(values).to.deep.eq([ "31", "5347", "5", "49", "20", "155", "age" ])
    })
  })
@@ -85,7 +99,7 @@ context("Create a View", () => {
      .find(".ag-cell")
      .then($values => {
        const values = Array.from($values).map(value => value.textContent)
-        expect(values).to.deep.eq([ 'Students', '23.333333333333332', '1650', '3', '25', '20', '70' ])
+        expect(values).to.deep.eq([ "Students", "23.333333333333332", "1650", "3", "25", "20", "70" ])
      })
  })
diff --git a/packages/builder/src/builderStore/dataBinding.js b/packages/builder/src/builderStore/dataBinding.js
index 55a6328ca3..b2eb30c55a 100644
--- a/packages/builder/src/builderStore/dataBinding.js
+++ b/packages/builder/src/builderStore/dataBinding.js
@@ -136,7 +136,7 @@ const getContextBindings = (asset, componentId) => {
      // Replace certain bindings with a new property to help display components
      let runtimeBoundKey = key
      if (fieldSchema.type === "link") {
-        runtimeBoundKey = `${key}_count`
+        runtimeBoundKey = `${key}_text`
      } else if (fieldSchema.type === "attachment") {
        runtimeBoundKey = `${key}_first`
      }
@@ -176,7 +176,7 @@ const getUserBindings = () => {
      // Replace certain bindings with a new property to help display components
      let runtimeBoundKey = key
      if (fieldSchema.type === "link") {
-        runtimeBoundKey = `${key}_count`
+        runtimeBoundKey = `${key}_text`
      } else if (fieldSchema.type === "attachment") {
        runtimeBoundKey = `${key}_first`
      }
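For context on the renamed binding keys above: relationship and attachment columns bind to derived properties that the row enrichment step attaches, so display components can bind to a plain string. A minimal sketch of the mapping (only the convention shown in this hunk, not the full dataBinding.js logic):

```js
// Sketch of the readable-binding convention used above.
function runtimeKeyFor(key, fieldSchema) {
  if (fieldSchema.type === "link") {
    // relationship columns now bind to a comma-joined string of display names
    return `${key}_text`
  }
  if (fieldSchema.type === "attachment") {
    // attachment columns bind to the first file's URL
    return `${key}_first`
  }
  return key
}
```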
diff --git a/packages/builder/src/components/backend/DataTable/Table.svelte b/packages/builder/src/components/backend/DataTable/Table.svelte
index 195876cf05..fa04829634 100644
--- a/packages/builder/src/components/backend/DataTable/Table.svelte
+++ b/packages/builder/src/components/backend/DataTable/Table.svelte
@@ -301,4 +301,13 @@
     padding-top: var(--spacing-xs);
     padding-bottom: var(--spacing-xs);
   }
+
+  :global(.ag-header) {
+    height: 61px !important;
+    min-height: 61px !important;
+  }
+
+  :global(.ag-header-row) {
+    height: 60px !important;
+  }
diff --git a/packages/builder/src/components/backend/DataTable/TableHeader/TableHeader.svelte b/packages/builder/src/components/backend/DataTable/TableHeader/TableHeader.svelte
index 2d65010a62..e929bbc206 100644
--- a/packages/builder/src/components/backend/DataTable/TableHeader/TableHeader.svelte
+++ b/packages/builder/src/components/backend/DataTable/TableHeader/TableHeader.svelte
@@ -2,6 +2,7 @@
   import { onMount, onDestroy } from "svelte"
   import { Modal, ModalContent } from "@budibase/bbui"
   import CreateEditColumn from "../modals/CreateEditColumn.svelte"
+  import { FIELDS } from "constants/backend"

   const SORT_ICON_MAP = {
     asc: "ri-arrow-down-fill",
@@ -51,6 +52,8 @@
     column.removeEventListener("sortChanged", setSort)
     column.removeEventListener("filterActiveChanged", setFilterActive)
   })
+
+  $: type = FIELDS[field?.type?.toUpperCase()]?.name
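The new reactive `type` statement resolves the column's internal type to a human-readable name for the header, which is what the Cypress expectations above ("age Number", "group Text") assert. A rough sketch of the lookup; the exact shape of `FIELDS` in `constants/backend` is assumed here, only the lookup pattern comes from the diff:

```js
// Assumed shape of FIELDS (illustrative only).
const FIELDS = {
  STRING: { name: "Text", type: "string" },
  NUMBER: { name: "Number", type: "number" },
  LINK: { name: "Relationship", type: "link" },
}

// Mirrors `$: type = FIELDS[field?.type?.toUpperCase()]?.name`
const field = { type: "number" }
const type = FIELDS[field?.type?.toUpperCase()]?.name // "Number"
```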
     (hovered = true)} on:mouseleave={() => (hovered = false)}>
-
-
-      {#if field.autocolumn}{/if}
-      {displayName}
+
+
+
+      {displayName}
+      {#if field.autocolumn}{/if}
+
+      {#if type}
+        {type}
+      {/if}
-
+

-
-  selectRelationship(row, columnName)}>
-    {count}
-    related row(s)
+  selectRelationship(row, columnName)}>
+    {#each items as item}
+      {item}
+    {/each}
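The relationship cell in the builder grid now lists each linked row instead of showing a count. The array it iterates over is produced by the enrichment added later in this diff; roughly, assuming a hypothetical contacts relationship:

```js
// Illustrative only: shape of the enriched row the cell now consumes.
const row = {
  _id: "ro_ta_123",                        // hypothetical row ID
  contacts: ["Test Contact", "Jane Doe"],  // linked rows' primary display values,
                                           // rendered one per item by the {#each} block
  contacts_text: "Test Contact, Jane Doe", // joined string used by display bindings
}
```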
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
index cea802d522..1e3f820d69 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
@@ -20,6 +20,7 @@
   import ConfirmDialog from "components/common/ConfirmDialog.svelte"

   const AUTO_COL = "auto"
+  const LINK_TYPE = FIELDS.LINK.type
   let fieldDefinitions = cloneDeep(FIELDS)
   export let onClosed
@@ -55,15 +56,16 @@
   $: uneditable =
     $backendUiStore.selectedTable?._id === TableNames.USERS &&
     UNEDITABLE_USER_FIELDS.includes(field.name)
+  $: invalid = field.type === FIELDS.LINK.type && !field.tableId

   // used to select what different options can be displayed for column type
   $: canBeSearched =
-    field.type !== "link" &&
+    field.type !== LINK_TYPE &&
     field.subtype !== AUTO_COLUMN_SUB_TYPES.CREATED_BY &&
     field.subtype !== AUTO_COLUMN_SUB_TYPES.UPDATED_BY
-  $: canBeDisplay = field.type !== "link" && field.type !== AUTO_COL
+  $: canBeDisplay = field.type !== LINK_TYPE && field.type !== AUTO_COL
   $: canBeRequired =
-    field.type !== "link" && !uneditable && field.type !== AUTO_COL
+    field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_COL

   async function saveColumn() {
     // Set relationship type if it's
@@ -100,13 +102,17 @@
     }
   }

-  function handleFieldConstraints(event) {
+  function handleTypeChange(event) {
     const definition = fieldDefinitions[event.target.value.toUpperCase()]
     if (!definition) {
       return
     }
     field.type = definition.type
     field.constraints = definition.constraints
+    // remove any extra fields that may not be related to this type
+    delete field.autocolumn
+    delete field.subtype
+    delete field.tableId
   }

   function onChangeRequired(e) {
@@ -154,7 +160,7 @@
     secondary
     thin
     label="Type"
-    on:change={handleFieldConstraints}
+    on:change={handleTypeChange}
     bind:value={field.type}>
     {#each Object.values(fieldDefinitions) as field}
@@ -255,7 +261,9 @@
     Delete Column
   {/if}
-
+

 {
   for (let key of keys) {
     const type = schema[key].type
     if (type === "link") {
-      // Enrich row with the count of any relationship fields
-      row[`${key}_count`] = Array.isArray(row[key]) ? row[key].length : 0
+      // Enrich row with a string join of relationship fields
+      row[`${key}_text`] = row[key]?.join(", ") || ""
     } else if (type === "attachment") {
       // Enrich row with the first image URL for any attachment fields
       let url = null
diff --git a/packages/server/src/api/controllers/auth.js b/packages/server/src/api/controllers/auth.js
index 5c3674d1b9..1dfe6f12d8 100644
--- a/packages/server/src/api/controllers/auth.js
+++ b/packages/server/src/api/controllers/auth.js
@@ -5,6 +5,8 @@
 const env = require("../../environment")
 const { getAPIKey } = require("../../utilities/usageQuota")
 const { generateUserID } = require("../../db/utils")
 const { setCookie } = require("../../utilities")
+const { outputProcessing } = require("../../utilities/rowProcessor")
+const { ViewNames } = require("../../db/utils")

 exports.authenticate = async ctx => {
   const appId = ctx.appId
@@ -62,12 +64,14 @@ exports.fetchSelf = async ctx => {
   const { userId, appId } = ctx.user
   if (!userId || !appId) {
     ctx.body = {}
-  } else {
-    const database = new CouchDB(appId)
-    const user = await database.get(userId)
-    if (user) {
-      delete user.password
-    }
-    ctx.body = user
+    return
   }
+  const db = new CouchDB(appId)
+  const user = await db.get(userId)
+  const userTable = await db.get(ViewNames.USERS)
+  if (user) {
+    delete user.password
+  }
+  // specifically needs to make sure the user is enriched
+  ctx.body = await outputProcessing(appId, userTable, user)
 }
diff --git a/packages/server/src/api/controllers/row.js b/packages/server/src/api/controllers/row.js
index 4f195ad4e8..3c4c19eed2 100644
--- a/packages/server/src/api/controllers/row.js
+++ b/packages/server/src/api/controllers/row.js
@@ -14,6 +14,7 @@ const {
   outputProcessing,
 } = require("../../utilities/rowProcessor")
 const { FieldTypes } = require("../../constants")
+const { isEqual } = require("lodash")

 const TABLE_VIEW_BEGINS_WITH = `all${SEPARATOR}${DocumentTypes.TABLE}${SEPARATOR}`
@@ -68,7 +69,7 @@ exports.patch = async function(ctx) {
   }

   // this returns the table and row incase they have been updated
-  let { table, row } = await inputProcessing(ctx.user, dbTable, dbRow)
+  let { table, row } = inputProcessing(ctx.user, dbTable, dbRow)
   const validateResult = await validate({
     row,
     table,
@@ -101,6 +102,10 @@ exports.patch = async function(ctx) {
   }

   const response = await db.put(row)
+  // don't worry about rev, tables handle rev/lastID updates
+  if (!isEqual(dbTable, table)) {
+    await db.put(table)
+  }
   row._rev = response.rev
   row.type = "row"
@@ -136,11 +141,8 @@ exports.save = async function(ctx) {
   }

   // this returns the table and row incase they have been updated
-  let { table, row } = await inputProcessing(
-    ctx.user,
-    await db.get(inputs.tableId),
-    inputs
-  )
+  const dbTable = await db.get(inputs.tableId)
+  let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
   const validateResult = await validate({
     row,
     table,
@@ -174,6 +176,10 @@ exports.save = async function(ctx) {
   row.type = "row"
   const response = await db.put(row)
+  // don't worry about rev, tables handle rev/lastID updates
+  if (!isEqual(dbTable, table)) {
+    await db.put(table)
+  }
   row._rev = response.rev
   ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
   ctx.body = row
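Both `patch` and `save` now write the table back only when `inputProcessing` actually mutated it, for example when an auto-ID column bumps `schema.lastID`. A minimal sketch of the pattern, using names from the hunks above (`inputProcessing` is the rowProcessor helper changed later in this diff):

```js
const { isEqual } = require("lodash")

// Sketch of the new save path, not the full controller.
async function saveRow(db, user, dbTable, inputs) {
  // inputProcessing is now synchronous; it may bump schema.lastID for
  // auto-ID columns and return an updated copy of the table.
  const { table, row } = inputProcessing(user, dbTable, inputs)
  const response = await db.put(row)
  // Only persist the table when it actually changed, avoiding needless
  // writes and revision churn.
  if (!isEqual(dbTable, table)) {
    await db.put(table)
  }
  row._rev = response.rev
  return row
}
```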
diff --git a/packages/server/src/api/controllers/table.js b/packages/server/src/api/controllers/table.js
index 88f06f14ce..fb18210821 100644
--- a/packages/server/src/api/controllers/table.js
+++ b/packages/server/src/api/controllers/table.js
@@ -9,6 +9,7 @@
 } = require("../../db/utils")
 const { isEqual } = require("lodash/fp")
 const { FieldTypes, AutoFieldSubTypes } = require("../../constants")
+const { inputProcessing } = require("../../utilities/rowProcessor")

 async function checkForColumnUpdates(db, oldTable, updatedTable) {
   let updatedRows
@@ -61,6 +62,82 @@ function makeSureTableUpToDate(table, tableToSave) {
   return tableToSave
 }

+async function handleDataImport(user, table, dataImport) {
+  const db = new CouchDB(user.appId)
+  if (dataImport && dataImport.csvString) {
+    // Populate the table with rows imported from CSV in a bulk update
+    const data = await csvParser.transform(dataImport)
+
+    for (let i = 0; i < data.length; i++) {
+      let row = data[i]
+      row._id = generateRowID(table._id)
+      row.tableId = table._id
+      const processed = inputProcessing(user, table, row)
+      row = processed.row
+      // these auto-fields will never actually link anywhere (always builder)
+      for (let [fieldName, schema] of Object.entries(table.schema)) {
+        if (
+          schema.autocolumn &&
+          (schema.subtype === AutoFieldSubTypes.CREATED_BY ||
+            schema.subtype === AutoFieldSubTypes.UPDATED_BY)
+        ) {
+          delete row[fieldName]
+        }
+      }
+      table = processed.table
+      data[i] = row
+    }
+
+    await db.bulkDocs(data)
+    let response = await db.put(table)
+    table._rev = response._rev
+  }
+  return table
+}
+
+async function handleSearchIndexes(db, table) {
+  // create relevant search indexes
+  if (table.indexes && table.indexes.length > 0) {
+    const currentIndexes = await db.getIndexes()
+    const indexName = `search:${table._id}`
+
+    const existingIndex = currentIndexes.indexes.find(
+      existing => existing.name === indexName
+    )
+
+    if (existingIndex) {
+      const currentFields = existingIndex.def.fields.map(
+        field => Object.keys(field)[0]
+      )
+
+      // if index fields have changed, delete the original index
+      if (!isEqual(currentFields, table.indexes)) {
+        await db.deleteIndex(existingIndex)
+        // create/recreate the index with fields
+        await db.createIndex({
+          index: {
+            fields: table.indexes,
+            name: indexName,
+            ddoc: "search_ddoc",
+            type: "json",
+          },
+        })
+      }
+    } else {
+      // create/recreate the index with fields
+      await db.createIndex({
+        index: {
+          fields: table.indexes,
+          name: indexName,
+          ddoc: "search_ddoc",
+          type: "json",
+        },
+      })
+    }
+  }
+  return table
+}
+
 exports.fetch = async function(ctx) {
   const db = new CouchDB(ctx.user.appId)
   const body = await db.allDocs(
@@ -152,61 +229,12 @@ exports.save = async function(ctx) {
   const result = await db.post(tableToSave)
   tableToSave._rev = result.rev

-  // create relevant search indexes
-  if (tableToSave.indexes && tableToSave.indexes.length > 0) {
-    const currentIndexes = await db.getIndexes()
-    const indexName = `search:${result.id}`
-
-    const existingIndex = currentIndexes.indexes.find(
-      existing => existing.name === indexName
-    )
-
-    if (existingIndex) {
-      const currentFields = existingIndex.def.fields.map(
-        field => Object.keys(field)[0]
-      )
-
-      // if index fields have changed, delete the original index
-      if (!isEqual(currentFields, tableToSave.indexes)) {
-        await db.deleteIndex(existingIndex)
-        // create/recreate the index with fields
-        await db.createIndex({
-          index: {
-            fields: tableToSave.indexes,
-            name: indexName,
-            ddoc: "search_ddoc",
-            type: "json",
-          },
-        })
-      }
-    } else {
-      // create/recreate the index with fields
-      await db.createIndex({
-        index: {
-          fields: tableToSave.indexes,
-          name: indexName,
-          ddoc: "search_ddoc",
-          type: "json",
-        },
-      })
-    }
-  }
+  tableToSave = await handleSearchIndexes(db, tableToSave)
+  tableToSave = await handleDataImport(ctx.user, tableToSave, dataImport)

   ctx.eventEmitter &&
     ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)

-  if (dataImport && dataImport.csvString) {
-    // Populate the table with rows imported from CSV in a bulk update
-    const data = await csvParser.transform(dataImport)
-
-    for (let row of data) {
-      row._id = generateRowID(tableToSave._id)
-      row.tableId = tableToSave._id
-    }
-
-    await db.bulkDocs(data)
-  }
-
   ctx.status = 200
   ctx.message = `Table ${ctx.request.body.name} saved successfully.`
   ctx.body = tableToSave
diff --git a/packages/server/src/api/routes/tests/couchTestUtils.js b/packages/server/src/api/routes/tests/couchTestUtils.js
index d89dce4087..90d15612f0 100644
--- a/packages/server/src/api/routes/tests/couchTestUtils.js
+++ b/packages/server/src/api/routes/tests/couchTestUtils.js
@@ -137,6 +137,7 @@ exports.addPermission = async (
 exports.createLinkedTable = async (request, appId) => {
   // get the ID to link to
   const table = await exports.createTable(request, appId)
+  table.primaryDisplay = "name"
   table.schema.link = {
     type: "link",
     fieldName: "link",
diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js
index b8f74c103e..1c11369ae5 100644
--- a/packages/server/src/api/routes/tests/row.spec.js
+++ b/packages/server/src/api/routes/tests/row.spec.js
@@ -287,7 +287,7 @@ describe("/rows", () => {
       })).body
       const enriched = await outputProcessing(appId, table, [secondRow])
       expect(enriched[0].link.length).toBe(1)
-      expect(enriched[0].link[0]).toBe(firstRow._id)
+      expect(enriched[0].link[0]).toBe("Test Contact")
     })
   })
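The two test changes above work together: `createLinkedTable` now sets `primaryDisplay` on the linked table, so the enrichment added later in this diff returns that column's value for each linked row instead of its `_id`. Roughly, with hypothetical document shapes:

```js
// Illustrative only: what primaryDisplay changes in the enriched output.
const contactTable = { primaryDisplay: "name" /* ...schema... */ }
const contactRow = { _id: "ro_ta_contact_1", name: "Test Contact" } // hypothetical _id

// before this PR:  enriched[0].link[0] === contactRow._id
// after this PR:   enriched[0].link[0] === "Test Contact"
```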
diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js
index 5ea758a976..aa795284cb 100644
--- a/packages/server/src/db/linkedRows/index.js
+++ b/packages/server/src/db/linkedRows/index.js
@@ -4,8 +4,13 @@ const {
   getLinkDocuments,
   createLinkView,
   getUniqueByProp,
+  getRelatedTableForField,
+  getLinkedTableIDs,
+  getLinkedTable,
 } = require("./linkUtils")
 const { flatten } = require("lodash")
+const CouchDB = require("../../db")
+const { getMultiIDParams } = require("../../db/utils")

 /**
  * This functionality makes sure that when rows with links are created, updated or deleted they are processed
@@ -27,6 +32,30 @@
 exports.IncludeDocs = IncludeDocs
 exports.getLinkDocuments = getLinkDocuments
 exports.createLinkView = createLinkView

+async function getLinksForRows(appId, rows) {
+  const tableIds = [...new Set(rows.map(el => el.tableId))]
+  // start by getting all the link values for performance reasons
+  const responses = flatten(
+    await Promise.all(
+      tableIds.map(tableId =>
+        getLinkDocuments({
+          appId,
+          tableId: tableId,
+          includeDocs: IncludeDocs.EXCLUDE,
+        })
+      )
+    )
+  )
+  // have to get unique as the previous table query can
+  // return duplicates, could be querying for both tables in a relation
+  return getUniqueByProp(
+    responses
+      // create a unique ID which we can use for getting only unique ones
+      .map(el => ({ ...el, unique: el.id + el.fieldName })),
+    "unique"
+  )
+}
+
 /**
  * Update link documents for a row or table - this is to be called by the API controller when a change is occurring.
  * @param {string} eventType states what type of change which is occurring, means this can be expanded upon in the
@@ -92,49 +121,66 @@ exports.updateLinks = async function({
  * @returns {Promise} The updated row (this may be the same if no links were found). If an array was input
  * then an array will be output, object input -> object output.
  */
-exports.attachLinkInfo = async (appId, rows) => {
-  // handle a single row as well as multiple
-  let wasArray = true
-  if (!(rows instanceof Array)) {
-    rows = [rows]
-    wasArray = false
-  }
-  let tableIds = [...new Set(rows.map(el => el.tableId))]
-  // start by getting all the link values for performance reasons
-  let responses = flatten(
-    await Promise.all(
-      tableIds.map(tableId =>
-        getLinkDocuments({
-          appId,
-          tableId: tableId,
-          includeDocs: IncludeDocs.EXCLUDE,
-        })
-      )
-    )
-  )
+exports.attachLinkIDs = async (appId, rows) => {
+  const links = await getLinksForRows(appId, rows)
   // now iterate through the rows and all field information
   for (let row of rows) {
-    // get all links for row, ignore fieldName for now
-    // have to get unique as the previous table query can
-    // return duplicates, could be querying for both tables in a relation
-    const linkVals = getUniqueByProp(
-      responses
-        // find anything that matches the row's ID we are searching for
-        .filter(el => el.thisId === row._id)
-        // create a unique ID which we can use for getting only unique ones
-        .map(el => ({ ...el, unique: el.id + el.fieldName })),
-      "unique"
-    )
-    for (let linkVal of linkVals) {
-      // work out which link pertains to this row
-      if (!(row[linkVal.fieldName] instanceof Array)) {
-        row[linkVal.fieldName] = [linkVal.id]
-      } else {
-        row[linkVal.fieldName].push(linkVal.id)
-      }
-    }
+    // find anything that matches the row's ID we are searching for and join it
+    links
+      .filter(el => el.thisId === row._id)
+      .forEach(link => {
+        if (row[link.fieldName] == null) {
+          row[link.fieldName] = []
+        }
+        row[link.fieldName].push(link.id)
+      })
   }
   // if it was an array when it came in then handle it as an array in response
   // otherwise return the first element as there was only one input
-  return wasArray ? rows : rows[0]
+  return rows
+}
+
+/**
+ * Given information about the table we can extract the display name from the linked rows, this
+ * is what we do for showing the display name of each linked row when in a table format.
+ * @param {string} appId The app in which the tables/rows/links exist.
+ * @param {object} table The table from which the rows originated.
+ * @param {array} rows The rows which are to be enriched with the linked display names/IDs.
+ * @returns {Promise} The enriched rows after having display names/IDs attached to the linked fields.
+ */
+exports.attachLinkedPrimaryDisplay = async (appId, table, rows) => {
+  const linkedTableIds = getLinkedTableIDs(table)
+  if (linkedTableIds.length === 0) {
+    return rows
+  }
+  const db = new CouchDB(appId)
+  const links = (await getLinksForRows(appId, rows)).filter(link =>
+    rows.some(row => row._id === link.thisId)
+  )
+  const linkedRowIds = links.map(link => link.id)
+  const linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+    row => row.doc
+  )
+  // will populate this as we find them
+  const linkedTables = []
+  for (let row of rows) {
+    for (let link of links.filter(link => link.thisId === row._id)) {
+      if (row[link.fieldName] == null) {
+        row[link.fieldName] = []
+      }
+      const linkedRow = linked.find(row => row._id === link.id)
+      const linkedTableId =
+        linkedRow.tableId || getRelatedTableForField(table, link.fieldName)
+      const linkedTable = await getLinkedTable(db, linkedTableId, linkedTables)
+      if (!linkedRow || !linkedTable) {
+        continue
+      }
+      // need to handle an edge case where relationship just wasn't found
+      const value = linkedRow[linkedTable.primaryDisplay] || linkedRow._id
+      if (value) {
+        row[link.fieldName].push(value)
+      }
+    }
+  }
+  return rows
+}
diff --git a/packages/server/src/db/linkedRows/linkUtils.js b/packages/server/src/db/linkedRows/linkUtils.js
index fba12aec0b..157f9e4e63 100644
--- a/packages/server/src/db/linkedRows/linkUtils.js
+++ b/packages/server/src/db/linkedRows/linkUtils.js
@@ -1,6 +1,7 @@
 const CouchDB = require("../index")
 const Sentry = require("@sentry/node")
 const { ViewNames, getQueryIndex } = require("../utils")
+const { FieldTypes } = require("../../constants")

 /**
  * Only needed so that boolean parameters are being used for includeDocs
@@ -120,3 +121,35 @@ exports.getUniqueByProp = (array, prop) => {
     return arr.map(mapObj => mapObj[prop]).indexOf(obj[prop]) === pos
   })
 }
+
+exports.getLinkedTableIDs = table => {
+  return Object.values(table.schema)
+    .filter(column => column.type === FieldTypes.LINK)
+    .map(column => column.tableId)
+}
+
+exports.getLinkedTable = async (db, id, tables) => {
+  let linkedTable = tables.find(table => table._id === id)
+  if (linkedTable) {
+    return linkedTable
+  }
+  linkedTable = await db.get(id)
+  if (linkedTable) {
+    tables.push(linkedTable)
+  }
+  return linkedTable
+}
+
+exports.getRelatedTableForField = (table, fieldName) => {
+  // look to see if its on the table, straight in the schema
+  const field = table.schema[fieldName]
+  if (field != null) {
+    return field.tableId
+  }
+  for (let column of Object.values(table.schema)) {
+    if (column.type === FieldTypes.LINK && column.fieldName === fieldName) {
+      return column.tableId
+    }
+  }
+  return null
+}
diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js
index 6ca55b6336..2d0722d83a 100644
--- a/packages/server/src/db/utils.js
+++ b/packages/server/src/db/utils.js
@@ -277,3 +277,13 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
     otherProps
   )
 }
+
+/**
+ * This can be used with the db.allDocs to get a list of IDs
+ */
+exports.getMultiIDParams = ids => {
+  return {
+    keys: ids,
+    include_docs: true,
+  }
+}
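`getMultiIDParams` is a small helper for bulk-fetching documents by ID; `attachLinkedPrimaryDisplay` uses it to load all linked rows in one `allDocs` call rather than one `get` per link. Roughly, with a hypothetical ID list and a PouchDB/CouchDB-style `db`:

```js
const ids = ["ro_ta_contact_1", "ro_ta_contact_2"] // hypothetical row IDs
const response = await db.allDocs(getMultiIDParams(ids))
// include_docs: true means each result row carries the full document
const linkedRows = response.rows.map(row => row.doc)
```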
require("lodash/fp") const { FieldTypes, AutoFieldSubTypes } = require("../constants") -const CouchDB = require("../db") const BASE_AUTO_ID = 1 @@ -71,14 +70,13 @@ const TYPE_TRANSFORM_MAP = { * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields. * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing. * @param {Object} row The row which is to be updated with information for the auto columns. - * @returns {Promise<{row: Object, table: Object}>} The updated row and table, the table may need to be updated + * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated * for automatic ID purposes. */ -async function processAutoColumn(user, table, row) { +function processAutoColumn(user, table, row) { let now = new Date().toISOString() // if a row doesn't have a revision then it doesn't exist yet const creating = !row._rev - let tableUpdated = false for (let [key, schema] of Object.entries(table.schema)) { if (!schema.autocolumn) { continue @@ -104,17 +102,10 @@ async function processAutoColumn(user, table, row) { if (creating) { schema.lastID = !schema.lastID ? BASE_AUTO_ID : schema.lastID + 1 row[key] = schema.lastID - tableUpdated = true } break } } - if (tableUpdated) { - const db = new CouchDB(user.appId) - const response = await db.put(table) - // update the revision - table._rev = response._rev - } return { table, row } } @@ -143,7 +134,7 @@ exports.coerce = (row, type) => { * @param {object} table the table which the row is being saved to. * @returns {object} the row which has been prepared to be written to the DB. */ -exports.inputProcessing = async (user, table, row) => { +exports.inputProcessing = (user, table, row) => { let clonedRow = cloneDeep(row) for (let [key, value] of Object.entries(clonedRow)) { const field = table.schema[key] @@ -166,8 +157,17 @@ exports.inputProcessing = async (user, table, row) => { * @returns {object[]} the enriched rows will be returned. */ exports.outputProcessing = async (appId, table, rows) => { + let wasArray = true + if (!(rows instanceof Array)) { + rows = [rows] + wasArray = false + } // attach any linked row information - const outputRows = await linkRows.attachLinkInfo(appId, rows) + const outputRows = await linkRows.attachLinkedPrimaryDisplay( + appId, + table, + rows + ) // update the attachments URL depending on hosting if (env.CLOUD && env.SELF_HOSTED) { for (let [property, column] of Object.entries(table.schema)) { @@ -184,5 +184,5 @@ exports.outputProcessing = async (appId, table, rows) => { } } } - return outputRows + return wasArray ? outputRows : outputRows[0] } diff --git a/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte b/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte index dfc10295f0..6a96a0b90b 100644 --- a/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte +++ b/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte @@ -2,13 +2,14 @@ export let columnName export let row - $: count = - row && columnName && Array.isArray(row[columnName]) - ? row[columnName].length - : 0 + $: items = row?.[columnName] || [] -
diff --git a/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte b/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte
index dfc10295f0..6a96a0b90b 100644
--- a/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte
+++ b/packages/standard-components/src/grid/Relationship/RelationshipCount.svelte
@@ -2,13 +2,14 @@
   export let columnName
   export let row

-  $: count =
-    row && columnName && Array.isArray(row[columnName])
-      ? row[columnName].length
-      : 0
+  $: items = row?.[columnName] || []

-  {count} related row(s)
+
+  {#each items as item}
+    {item}
+  {/each}
+
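Taken together, the relationship display pipeline after this diff looks roughly like the sketch below (hypothetical data, not code from the PR): the server enriches link fields with display names, the client fetch adds a joined text property for bindings, and the components render those values.

```js
// 1. Server: outputProcessing -> attachLinkedPrimaryDisplay replaces link IDs
//    with each linked row's primaryDisplay value.
let row = { _id: "ro_1", contacts: ["Test Contact", "Jane Doe"] }

// 2. Client fetch: relationship columns also get a joined text property for
//    display bindings ("link" -> `${key}_text`).
row.contacts_text = row.contacts?.join(", ") || "" // "Test Contact, Jane Doe"

// 3. Components: grid cells iterate the array; text bindings use the joined string.
```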