Merge pull request #12525 from Budibase/fix/attachment-improvements

Fix attachment cleanup when in development mode
Michael Drury 2023-12-08 14:53:20 +00:00 committed by GitHub
commit ac0e0dfce7
7 changed files with 228 additions and 101 deletions
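In summary: the row and table controllers now route all attachment deletion through a single AttachmentCleanup class (replacing the old cleanupAttachments helper in the row processor), renaming an attachment column is no longer treated as removing it, and the behaviour is covered by new unit tests. The existing guard that skips deletion while a development app still has a published counterpart lives on in AttachmentCleanup.coreCleanup. The sketch below collects the new call sites as they appear in this diff; the wrapper function names and the import path (the controller-relative one) are illustrative only:

```ts
import { AttachmentCleanup } from "../../../utilities/rowProcessor"
import { RenameColumn, Row, Table } from "@budibase/types"

// row PATCH: delete only the attachment keys present on the old row but missing from the new one
async function afterRowPatch(table: Table, row: Row, oldRow: Row) {
  await AttachmentCleanup.rowUpdate(table, { row, oldRow })
}

// row DELETE / bulk delete: delete every attachment key on the removed rows
async function afterRowDelete(table: Table, rows: Row[]) {
  await AttachmentCleanup.rowDelete(table, rows)
}

// table schema update: delete attachments for dropped columns, but skip renamed ones
async function afterTableUpdate(
  updatedTable: Table,
  rawRows: Row[],
  oldTable: Table,
  columnRename?: RenameColumn
) {
  await AttachmentCleanup.tableUpdate(updatedTable, rawRows, {
    oldTable,
    rename: columnRename,
  })
}

// table deletion: delete attachments for every row that belonged to the table
async function afterTableDelete(table: Table, rows: Row[]) {
  await AttachmentCleanup.tableDelete(table, rows)
}
```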

View File

@@ -171,7 +171,7 @@
       }
     }
   }
-  if (!savingColumn) {
+  if (!savingColumn && !originalName) {
     let highestNumber = 0
     Object.keys(table.schema).forEach(columnName => {
       const columnNumber = extractColumnNumber(columnName)

View File

@@ -2,7 +2,7 @@ import * as linkRows from "../../../db/linkedRows"
 import { generateRowID, InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import {
-  cleanupAttachments,
+  AttachmentCleanup,
   inputProcessing,
   outputProcessing,
 } from "../../../utilities/rowProcessor"
@@ -79,7 +79,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     table,
   })) as Row
   // check if any attachments removed
-  await cleanupAttachments(table, { oldRow, row })
+  await AttachmentCleanup.rowUpdate(table, { row, oldRow })
   if (isUserTable) {
     // the row has been updated, need to put it into the ctx
@@ -119,7 +119,7 @@ export async function save(ctx: UserCtx) {
     throw { validation: validateResult.errors }
   }
-  // make sure link rows are up to date
+  // make sure link rows are up-to-date
   row = (await linkRows.updateLinks({
     eventType: linkRows.EventType.ROW_SAVE,
     row,
@@ -165,7 +165,7 @@ export async function destroy(ctx: UserCtx) {
     tableId,
   })
   // remove any attachments that were on the row from object storage
-  await cleanupAttachments(table, { row })
+  await AttachmentCleanup.rowDelete(table, [row])
   // remove any static formula
   await updateRelatedFormula(table, row)
@@ -216,7 +216,7 @@ export async function bulkDestroy(ctx: UserCtx) {
     await db.bulkDocs(processedRows.map(row => ({ ...row, _deleted: true })))
   }
   // remove any attachments that were on the rows from object storage
-  await cleanupAttachments(table, { rows: processedRows })
+  await AttachmentCleanup.rowDelete(table, processedRows)
   await updateRelatedFormula(table, processedRows)
   await Promise.all(updates)
   return { response: { ok: true }, rows: processedRows }

View File

@@ -11,7 +11,7 @@ import {
 } from "../../../constants"
 import {
   inputProcessing,
-  cleanupAttachments,
+  AttachmentCleanup,
 } from "../../../utilities/rowProcessor"
 import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
@@ -82,7 +82,10 @@ export async function checkForColumnUpdates(
   })
   // cleanup any attachments from object storage for deleted attachment columns
-  await cleanupAttachments(updatedTable, { oldTable, rows: rawRows })
+  await AttachmentCleanup.tableUpdate(updatedTable, rawRows, {
+    oldTable,
+    rename: columnRename,
+  })
   // Update views
   await checkForViewUpdates(updatedTable, deletedColumns, columnRename)
 }

View File

@@ -19,11 +19,10 @@ import { context } from "@budibase/backend-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
-import sdk from "../../../index"
 import { getRowParams } from "../../../../db/utils"
 import { quotas } from "@budibase/pro"
 import env from "../../../../environment"
-import { cleanupAttachments } from "../../../../utilities/rowProcessor"
+import { AttachmentCleanup } from "../../../../utilities/rowProcessor"

 export async function save(
   table: Table,
@@ -164,9 +163,10 @@ export async function destroy(table: Table) {
   await runStaticFormulaChecks(table, {
     deletion: true,
   })
-  await cleanupAttachments(table, {
-    rows: rowsData.rows.map((row: any) => row.doc),
-  })
+  await AttachmentCleanup.tableDelete(
+    table,
+    rowsData.rows.map((row: any) => row.doc)
+  )
   return { table }
 }

View File

@@ -0,0 +1,99 @@
import { FieldTypes, ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { RenameColumn, Row, RowAttachment, Table } from "@budibase/types"

export class AttachmentCleanup {
  static async coreCleanup(fileListFn: () => string[]): Promise<void> {
    const appId = context.getAppId()
    if (!dbCore.isProdAppID(appId)) {
      const prodAppId = dbCore.getProdAppID(appId!)
      // if prod exists, then don't allow deleting
      const exists = await dbCore.dbExists(prodAppId)
      if (exists) {
        return
      }
    }
    const files = fileListFn()
    if (files.length > 0) {
      await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
    }
  }

  private static async tableChange(
    table: Table,
    rows: Row[],
    opts: { oldTable?: Table; rename?: RenameColumn; deleting?: boolean }
  ) {
    return AttachmentCleanup.coreCleanup(() => {
      let files: string[] = []
      const tableSchema = opts.oldTable?.schema || table.schema
      for (let [key, schema] of Object.entries(tableSchema)) {
        if (schema.type !== FieldTypes.ATTACHMENT) {
          continue
        }
        const columnRemoved = opts.oldTable && !table.schema[key]
        const renaming = opts.rename?.old === key
        // old table had this column, new table doesn't - delete it
        if ((columnRemoved && !renaming) || opts.deleting) {
          rows.forEach(row => {
            files = files.concat(
              row[key].map((attachment: any) => attachment.key)
            )
          })
        }
      }
      return files
    })
  }

  static async tableDelete(table: Table, rows: Row[]) {
    return AttachmentCleanup.tableChange(table, rows, { deleting: true })
  }

  static async tableUpdate(
    table: Table,
    rows: Row[],
    opts: { oldTable?: Table; rename?: RenameColumn }
  ) {
    return AttachmentCleanup.tableChange(table, rows, opts)
  }

  static async rowDelete(table: Table, rows: Row[]) {
    return AttachmentCleanup.coreCleanup(() => {
      let files: string[] = []
      for (let [key, schema] of Object.entries(table.schema)) {
        if (schema.type !== FieldTypes.ATTACHMENT) {
          continue
        }
        rows.forEach(row => {
          files = files.concat(
            row[key].map((attachment: any) => attachment.key)
          )
        })
      }
      return files
    })
  }

  static rowUpdate(table: Table, opts: { row: Row; oldRow: Row }) {
    return AttachmentCleanup.coreCleanup(() => {
      let files: string[] = []
      for (let [key, schema] of Object.entries(table.schema)) {
        if (schema.type !== FieldTypes.ATTACHMENT) {
          continue
        }
        const oldKeys =
          opts.oldRow[key]?.map(
            (attachment: RowAttachment) => attachment.key
          ) || []
        const newKeys =
          opts.row[key]?.map((attachment: RowAttachment) => attachment.key) ||
          []
        files = files.concat(
          oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
        )
      }
      return files
    })
  }
}
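
The guard at the top of coreCleanup is what keeps this safe to run while editing: deleting files that a published app still references would break the live app, so for a development app the cleanup only proceeds when no production copy of the app exists. A minimal sketch of that decision, using a hypothetical shouldDeleteFiles helper and assuming Budibase's convention that development app IDs carry an "app_dev" prefix while published apps use the plain "app" prefix:

```ts
import { db as dbCore } from "@budibase/backend-core"

// returns true when it is safe to delete attachment files for this app
async function shouldDeleteFiles(appId: string): Promise<boolean> {
  if (dbCore.isProdAppID(appId)) {
    return true // already the published app - cleanup is always allowed
  }
  // e.g. "app_dev_abc123" -> "app_abc123" (prefix convention assumed)
  const prodAppId = dbCore.getProdAppID(appId)
  // if a published copy exists it may still reference these files - leave them alone
  return !(await dbCore.dbExists(prodAppId))
}
```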

View File

@@ -1,16 +1,7 @@
 import * as linkRows from "../../db/linkedRows"
-import {
-  FieldTypes,
-  AutoFieldSubTypes,
-  ObjectStoreBuckets,
-} from "../../constants"
+import { FieldTypes, AutoFieldSubTypes } from "../../constants"
 import { processFormulas, fixAutoColumnSubType } from "./utils"
-import {
-  context,
-  db as dbCore,
-  objectStore,
-  utils,
-} from "@budibase/backend-core"
+import { objectStore, utils } from "@budibase/backend-core"
 import { InternalTables } from "../../db/utils"
 import { TYPE_TRANSFORM_MAP } from "./map"
 import { FieldSubtype, Row, RowAttachment, Table } from "@budibase/types"
@@ -22,6 +13,7 @@ import {
 import { isExternalTableID } from "../../integrations/utils"

 export * from "./utils"
+export * from "./attachments"

 type AutoColumnProcessingOpts = {
   reprocessing?: boolean
@@ -30,27 +22,6 @@ type AutoColumnProcessingOpts = {
 const BASE_AUTO_ID = 1

-/**
- * Given the old state of the row and the new one after an update, this will
- * find the keys that have been removed in the updated row.
- */
-function getRemovedAttachmentKeys(
-  oldRow: Row,
-  row: Row,
-  attachmentKey: string
-) {
-  if (!oldRow[attachmentKey]) {
-    return []
-  }
-  const oldKeys = oldRow[attachmentKey].map((attachment: any) => attachment.key)
-  // no attachments in new row, all removed
-  if (!row[attachmentKey]) {
-    return oldKeys
-  }
-  const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
-  return oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
-}
-
 /**
  * This will update any auto columns that are found on the row/table with the correct information based on
  * time now and the current logged in user making the request.
@@ -288,59 +259,3 @@
   }
   return (wasArray ? enriched : enriched[0]) as T
 }
-
-/**
- * Clean up any attachments that were attached to a row.
- * @param table The table from which a row is being removed.
- * @param row optional - the row being removed.
- * @param rows optional - if multiple rows being deleted can do this in bulk.
- * @param oldRow optional - if updating a row this will determine the difference.
- * @param oldTable optional - if updating a table, can supply the old table to look for
- *   deleted attachment columns.
- * @return When all attachments have been removed this will return.
- */
-export async function cleanupAttachments(
-  table: Table,
-  {
-    row,
-    rows,
-    oldRow,
-    oldTable,
-  }: { row?: Row; rows?: Row[]; oldRow?: Row; oldTable?: Table }
-): Promise<any> {
-  const appId = context.getAppId()
-  if (!dbCore.isProdAppID(appId)) {
-    const prodAppId = dbCore.getProdAppID(appId!)
-    // if prod exists, then don't allow deleting
-    const exists = await dbCore.dbExists(prodAppId)
-    if (exists) {
-      return
-    }
-  }
-  let files: string[] = []
-  function addFiles(row: Row, key: string) {
-    if (row[key]) {
-      files = files.concat(row[key].map((attachment: any) => attachment.key))
-    }
-  }
-  const schemaToUse = oldTable ? oldTable.schema : table.schema
-  for (let [key, schema] of Object.entries(schemaToUse)) {
-    if (schema.type !== FieldTypes.ATTACHMENT) {
-      continue
-    }
-    // old table had this column, new table doesn't - delete it
-    if (rows && oldTable && !table.schema[key]) {
-      rows.forEach(row => addFiles(row, key))
-    } else if (oldRow && row) {
-      // if updating, need to manage the differences
-      files = files.concat(getRemovedAttachmentKeys(oldRow, row, key))
-    } else if (row) {
-      addFiles(row, key)
-    } else if (rows) {
-      rows.forEach(row => addFiles(row, key))
-    }
-  }
-  if (files.length > 0) {
-    await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
-  }
-}

View File

@@ -0,0 +1,110 @@
import { AttachmentCleanup } from "../attachments"
import { FieldType, Table, Row, TableSourceType } from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../../../constants"
import { objectStore } from "@budibase/backend-core"

const BUCKET = "prod-budi-app-assets"
const FILE_NAME = "file/thing.jpg"

jest.mock("@budibase/backend-core", () => {
  const actual = jest.requireActual("@budibase/backend-core")
  return {
    ...actual,
    objectStore: {
      deleteFiles: jest.fn(),
      ObjectStoreBuckets: actual.objectStore.ObjectStoreBuckets,
    },
    db: {
      isProdAppID: () => jest.fn(() => false),
      dbExists: () => jest.fn(() => false),
    },
  }
})

const mockedDeleteFiles = objectStore.deleteFiles as jest.MockedFunction<
  typeof objectStore.deleteFiles
>

function table(): Table {
  return {
    name: "table",
    sourceId: DEFAULT_BB_DATASOURCE_ID,
    sourceType: TableSourceType.INTERNAL,
    type: "table",
    schema: {
      attach: {
        name: "attach",
        type: FieldType.ATTACHMENT,
        constraints: {},
      },
    },
  }
}

function row(fileKey: string = FILE_NAME): Row {
  return {
    attach: [
      {
        size: 1,
        extension: "jpg",
        key: fileKey,
      },
    ],
  }
}

describe("attachment cleanup", () => {
  beforeEach(() => {
    mockedDeleteFiles.mockClear()
  })

  it("should be able to cleanup a table update", async () => {
    const originalTable = table()
    delete originalTable.schema["attach"]
    await AttachmentCleanup.tableUpdate(originalTable, [row()], {
      oldTable: table(),
    })
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should be able to cleanup a table deletion", async () => {
    await AttachmentCleanup.tableDelete(table(), [row()])
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should handle table column renaming", async () => {
    const updatedTable = table()
    updatedTable.schema.attach2 = updatedTable.schema.attach
    delete updatedTable.schema.attach
    await AttachmentCleanup.tableUpdate(updatedTable, [row()], {
      oldTable: table(),
      rename: { old: "attach", updated: "attach2" },
    })
    expect(mockedDeleteFiles).not.toBeCalled()
  })

  it("shouldn't cleanup if no table changes", async () => {
    await AttachmentCleanup.tableUpdate(table(), [row()], { oldTable: table() })
    expect(mockedDeleteFiles).not.toBeCalled()
  })

  it("should handle row updates", async () => {
    const updatedRow = row()
    delete updatedRow.attach
    await AttachmentCleanup.rowUpdate(table(), {
      row: updatedRow,
      oldRow: row(),
    })
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("should handle row deletion", async () => {
    await AttachmentCleanup.rowDelete(table(), [row()])
    expect(mockedDeleteFiles).toBeCalledWith(BUCKET, [FILE_NAME])
  })

  it("shouldn't cleanup attachments if row not updated", async () => {
    await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
    expect(mockedDeleteFiles).not.toBeCalled()
  })
})