diff --git a/hosting/couchdb/runner.sh b/hosting/couchdb/runner.sh
index 19c612a17e..e56b8e0e7f 100644
--- a/hosting/couchdb/runner.sh
+++ b/hosting/couchdb/runner.sh
@@ -37,10 +37,11 @@ elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then
   # image.
   sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini
   sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini
+  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
 elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then
   # In Kubernetes the directory /opt/couchdb/data has a persistent volume
   # mount for storing database data.
-  sed -i "s#DATA_DIR#/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
+  sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini
 
   # We remove the database_dir and view_index_dir settings from the local.ini
   # in Kubernetes because it will default to /opt/couchdb/data which is what
diff --git a/packages/bbui/src/DetailSummary/DetailSummary.svelte b/packages/bbui/src/DetailSummary/DetailSummary.svelte
index daa9f3f5ca..e5d6fda86b 100644
--- a/packages/bbui/src/DetailSummary/DetailSummary.svelte
+++ b/packages/bbui/src/DetailSummary/DetailSummary.svelte
@@ -1,20 +1,17 @@
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
index 028300be9f..c9010c0c1a 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
@@ -171,7 +171,7 @@
         }
       }
     }
-    if (!savingColumn) {
+    if (!savingColumn && !originalName) {
      let highestNumber = 0
      Object.keys(table.schema).forEach(columnName => {
        const columnNumber = extractColumnNumber(columnName)
diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
index 6093d2a45e..df97561013 100644
--- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
+++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
@@ -32,6 +32,7 @@
     const generalSettings = settings.filter(
       setting => !setting.section && setting.tag === tag
     )
+
     const customSections = settings.filter(
       setting => setting.section && setting.tag === tag
     )
@@ -151,7 +152,7 @@
     {#if section.visible}
       {#if section.info}
diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts
index 0907c22f0e..a251724b0a 100644
--- a/packages/server/src/api/controllers/row/internal.ts
+++ b/packages/server/src/api/controllers/row/internal.ts
@@ -2,7 +2,7 @@ import * as linkRows from "../../../db/linkedRows"
 import { generateRowID, InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import {
-  cleanupAttachments,
+  AttachmentCleanup,
   inputProcessing,
   outputProcessing,
 } from "../../../utilities/rowProcessor"
@@ -79,7 +79,7 @@ export async function patch(ctx: UserCtx) {
     table,
   })) as Row
   // check if any attachments removed
-  await cleanupAttachments(table, { oldRow, row })
+  await AttachmentCleanup.rowUpdate(table, { row, oldRow })
 
   if (isUserTable) {
     // the row has been updated, need to put it into the ctx
@@ -119,7 +119,7 @@ export async function save(ctx: UserCtx) {
     throw { validation: validateResult.errors }
   }
 
-  // make sure link rows are up to date
+  // make sure link rows are up-to-date
   row = (await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_SAVE,
     row,
@@ -165,7 +165,7 @@ export async function destroy(ctx: UserCtx) {
     tableId,
   })
 
   // remove any attachments that were on the row from object storage
-  await cleanupAttachments(table, { row })
+  await AttachmentCleanup.rowDelete(table, [row])
   // remove any static formula
   await updateRelatedFormula(table, row)
@@ -216,7 +216,7 @@ export async function bulkDestroy(ctx: UserCtx) {
     await db.bulkDocs(processedRows.map(row => ({ ...row, _deleted: true })))
   }
   // remove any attachments that were on the rows from object storage
-  await cleanupAttachments(table, { rows: processedRows })
+  await AttachmentCleanup.rowDelete(table, processedRows)
   await updateRelatedFormula(table, processedRows)
   await Promise.all(updates)
   return { response: { ok: true }, rows: processedRows }
diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts
index b52b50c628..f821801d20 100644
--- a/packages/server/src/api/controllers/table/utils.ts
+++ b/packages/server/src/api/controllers/table/utils.ts
@@ -11,7 +11,7 @@ import {
 } from "../../../constants"
 import {
   inputProcessing,
-  cleanupAttachments,
+  AttachmentCleanup,
 } from "../../../utilities/rowProcessor"
 import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
@@ -82,7 +82,10 @@ export async function checkForColumnUpdates(
     })
 
     // cleanup any attachments from object storage for deleted attachment columns
-    await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
+    await AttachmentCleanup.tableUpdate(updatedTable, rawRows, {
+      oldTable,
+      rename: columnRename,
+    })
     // Update views
     await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
   }
diff --git a/packages/server/src/sdk/app/tables/internal/index.ts b/packages/server/src/sdk/app/tables/internal/index.ts
index 1092c0e32d..4da90e211d 100644
--- a/packages/server/src/sdk/app/tables/internal/index.ts
+++ b/packages/server/src/sdk/app/tables/internal/index.ts
@@ -19,11 +19,10 @@ import { context } from "@budibase/backend-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
-import sdk from "../../../index"
 import { getRowParams } from "../../../../db/utils"
 import { quotas } from "@budibase/pro"
 import env from "../../../../environment"
-import { cleanupAttachments } from "../../../../utilities/rowProcessor"
"../../../../utilities/rowProcessor" +import { AttachmentCleanup } from "../../../../utilities/rowProcessor" export async function save( table: Table, @@ -164,9 +163,10 @@ export async function destroy(table: Table) { await runStaticFormulaChecks(table, { deletion: true, }) - await cleanupAttachments(table, { - rows: rowsData.rows.map((row: any) => row.doc), - }) + await AttachmentCleanup.tableDelete( + table, + rowsData.rows.map((row: any) => row.doc) + ) return { table } } diff --git a/packages/server/src/utilities/rowProcessor/attachments.ts b/packages/server/src/utilities/rowProcessor/attachments.ts new file mode 100644 index 0000000000..3b670cbc05 --- /dev/null +++ b/packages/server/src/utilities/rowProcessor/attachments.ts @@ -0,0 +1,99 @@ +import { FieldTypes, ObjectStoreBuckets } from "../../constants" +import { context, db as dbCore, objectStore } from "@budibase/backend-core" +import { RenameColumn, Row, RowAttachment, Table } from "@budibase/types" + +export class AttachmentCleanup { + static async coreCleanup(fileListFn: () => string[]): Promise { + const appId = context.getAppId() + if (!dbCore.isProdAppID(appId)) { + const prodAppId = dbCore.getProdAppID(appId!) + // if prod exists, then don't allow deleting + const exists = await dbCore.dbExists(prodAppId) + if (exists) { + return + } + } + const files = fileListFn() + if (files.length > 0) { + await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files) + } + } + + private static async tableChange( + table: Table, + rows: Row[], + opts: { oldTable?: Table; rename?: RenameColumn; deleting?: boolean } + ) { + return AttachmentCleanup.coreCleanup(() => { + let files: string[] = [] + const tableSchema = opts.oldTable?.schema || table.schema + for (let [key, schema] of Object.entries(tableSchema)) { + if (schema.type !== FieldTypes.ATTACHMENT) { + continue + } + const columnRemoved = opts.oldTable && !table.schema[key] + const renaming = opts.rename?.old === key + // old table had this column, new table doesn't - delete it + if ((columnRemoved && !renaming) || opts.deleting) { + rows.forEach(row => { + files = files.concat( + row[key].map((attachment: any) => attachment.key) + ) + }) + } + } + return files + }) + } + + static async tableDelete(table: Table, rows: Row[]) { + return AttachmentCleanup.tableChange(table, rows, { deleting: true }) + } + + static async tableUpdate( + table: Table, + rows: Row[], + opts: { oldTable?: Table; rename?: RenameColumn } + ) { + return AttachmentCleanup.tableChange(table, rows, opts) + } + + static async rowDelete(table: Table, rows: Row[]) { + return AttachmentCleanup.coreCleanup(() => { + let files: string[] = [] + for (let [key, schema] of Object.entries(table.schema)) { + if (schema.type !== FieldTypes.ATTACHMENT) { + continue + } + rows.forEach(row => { + files = files.concat( + row[key].map((attachment: any) => attachment.key) + ) + }) + } + return files + }) + } + + static rowUpdate(table: Table, opts: { row: Row; oldRow: Row }) { + return AttachmentCleanup.coreCleanup(() => { + let files: string[] = [] + for (let [key, schema] of Object.entries(table.schema)) { + if (schema.type !== FieldTypes.ATTACHMENT) { + continue + } + const oldKeys = + opts.oldRow[key]?.map( + (attachment: RowAttachment) => attachment.key + ) || [] + const newKeys = + opts.row[key]?.map((attachment: RowAttachment) => attachment.key) || + [] + files = files.concat( + oldKeys.filter((key: string) => newKeys.indexOf(key) === -1) + ) + } + return files + }) + } +} diff --git 
diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts
index a6817ddf19..65012d64e1 100644
--- a/packages/server/src/utilities/rowProcessor/index.ts
+++ b/packages/server/src/utilities/rowProcessor/index.ts
@@ -1,16 +1,7 @@
 import * as linkRows from "../../db/linkedRows"
-import {
-  FieldTypes,
-  AutoFieldSubTypes,
-  ObjectStoreBuckets,
-} from "../../constants"
+import { FieldTypes, AutoFieldSubTypes } from "../../constants"
 import { processFormulas, fixAutoColumnSubType } from "./utils"
-import {
-  context,
-  db as dbCore,
-  objectStore,
-  utils,
-} from "@budibase/backend-core"
+import { objectStore, utils } from "@budibase/backend-core"
 import { InternalTables } from "../../db/utils"
 import { TYPE_TRANSFORM_MAP } from "./map"
 import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
@@ -22,6 +13,7 @@ import {
 import { isExternalTableID } from "../../integrations/utils"
 
 export * from "./utils"
+export * from "./attachments"
 
 type AutoColumnProcessingOpts = {
   reprocessing?: boolean
@@ -30,27 +22,6 @@
 
 const BASE_AUTO_ID = 1
 
-/**
- * Given the old state of the row and the new one after an update, this will
- * find the keys that have been removed in the updated row.
- */
-function getRemovedAttachmentKeys(
-  oldRow: Row,
-  row: Row,
-  attachmentKey: string
-) {
-  if (!oldRow[attachmentKey]) {
-    return []
-  }
-  const oldKeys = oldRow[attachmentKey].map((attachment: any) => attachment.key)
-  // no attachments in new row, all removed
-  if (!row[attachmentKey]) {
-    return oldKeys
-  }
-  const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
-  return oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
-}
-
 /**
  * This will update any auto columns that are found on the row/table with the correct information based on
  * time now and the current logged in user making the request.
@@ -288,59 +259,3 @@
   }
   return (wasArray ? enriched : enriched[0]) as T
 }
-
-/**
- * Clean up any attachments that were attached to a row.
- * @param table The table from which a row is being removed.
- * @param row optional - the row being removed.
- * @param rows optional - if multiple rows being deleted can do this in bulk.
- * @param oldRow optional - if updating a row this will determine the difference.
- * @param oldTable optional - if updating a table, can supply the old table to look for
- * deleted attachment columns.
- * @return When all attachments have been removed this will return.
- */
-export async function cleanupAttachments(
-  table: Table,
-  {
-    row,
-    rows,
-    oldRow,
-    oldTable,
-  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable?: Table }
-): Promise<void> {
-  const appId = context.getAppId()
-  if (!dbCore.isProdAppID(appId)) {
-    const prodAppId = dbCore.getProdAppID(appId!)
-    // if prod exists, then don't allow deleting
-    const exists = await dbCore.dbExists(prodAppId)
-    if (exists) {
-      return
-    }
-  }
-  let files: string[] = []
-  function addFiles(row: Row, key: string) {
-    if (row[key]) {
-      files = files.concat(row[key].map((attachment: any) => attachment.key))
-    }
-  }
-  const schemaToUse = oldTable ? oldTable.schema : table.schema
-  for (let [key, schema] of Object.entries(schemaToUse)) {
-    if (schema.type !== FieldTypes.ATTACHMENT) {
-      continue
-    }
-    // old table had this column, new table doesn't - delete it
-    if (rows && oldTable && !table.schema[key]) {
-      rows.forEach(row => addFiles(row, key))
-    } else if (oldRow && row) {
-      // if updating, need to manage the differences
-      files = files.concat(getRemovedAttachmentKeys(oldRow, row, key))
-    } else if (row) {
-      addFiles(row, key)
-    } else if (rows) {
-      rows.forEach(row => addFiles(row, key))
-    }
-  }
-  if (files.length > 0) {
-    await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
-  }
-}
diff --git a/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts b/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts
new file mode 100644
index 0000000000..762ec3bb8c
--- /dev/null
+++ b/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts
@@ -0,0 +1,110 @@
+import { AttachmentCleanup } from "../attachments"
+import { FieldType, Table, Row, TableSourceType } from "@budibase/types"
+import { DEFAULT_BB_DATASOURCE_ID } from "../../../constants"
+import { objectStore } from "@budibase/backend-core"
+
+const BUCKET = "prod-budi-app-assets"
+const FILE_NAME = "file/thing.jpg"
+
+jest.mock("@budibase/backend-core", () => {
+  const actual = jest.requireActual("@budibase/backend-core")
+  return {
+    ...actual,
+    objectStore: {
+      deleteFiles: jest.fn(),
+      ObjectStoreBuckets: actual.objectStore.ObjectStoreBuckets,
+    },
+    db: {
+      isProdAppID: () => jest.fn(() => false),
+      dbExists: () => jest.fn(() => false),
+    },
+  }
+})
+
+const mockedDeleteFiles = objectStore.deleteFiles as jest.MockedFunction<
+  typeof objectStore.deleteFiles
+>
+
+function table(): Table {
+  return {
+    name: "table",
+    sourceId: DEFAULT_BB_DATASOURCE_ID,
+    sourceType: TableSourceType.INTERNAL,
+    type: "table",
+    schema: {
+      attach: {
+        name: "attach",
+        type: FieldType.ATTACHMENT,
+        constraints: {},
+      },
+    },
+  }
+}
+
+function row(fileKey: string = FILE_NAME): Row {
+  return {
+    attach: [
+      {
+        size: 1,
+        extension: "jpg",
+        key: fileKey,
+      },
+    ],
+  }
+}
+
+describe("attachment cleanup", () => {
+  beforeEach(() => {
+    mockedDeleteFiles.mockClear()
+  })
+
+  it("should be able to cleanup a table update", async () => {
+    const originalTable = table()
+    delete originalTable.schema["attach"]
+    await AttachmentCleanup.tableUpdate(originalTable, [row()], {
+      oldTable: table(),
+    })
+    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
+  })
+
+  it("should be able to cleanup a table deletion", async () => {
+    await AttachmentCleanup.tableDelete(table(), [row()])
+    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
+  })
+
+  it("should handle table column renaming", async () => {
+    const updatedTable = table()
+    updatedTable.schema.attach2 = updatedTable.schema.attach
+    delete updatedTable.schema.attach
+    await AttachmentCleanup.tableUpdate(updatedTable, [row()], {
+      oldTable: table(),
+      rename: { old: "attach", updated: "attach2" },
+    })
+    expect(mockedDeleteFiles).not.toBeCalled()
+  })
+
+  it("shouldn't cleanup if no table changes", async () => {
+    await AttachmentCleanup.tableUpdate(table(), [row()], { oldTable: table() })
+    expect(mockedDeleteFiles).not.toBeCalled()
+  })
+
+  it("should handle row updates", async () => {
+    const updatedRow = row()
+    delete updatedRow.attach
+    await AttachmentCleanup.rowUpdate(table(), {
+      row: updatedRow,
+      oldRow: row(),
+    })
+    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
+  })
+
+  it("should handle row deletion", async () => {
+    await AttachmentCleanup.rowDelete(table(), [row()])
+    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
+  })
+
+  it("shouldn't cleanup attachments if row not updated", async () => {
+    await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
+    expect(mockedDeleteFiles).not.toBeCalled()
+  })
+})