- {#each $visibleColumns as column, columnIdx}
+ {#each $visibleColumns as column}
{@const cellId = `new-${column.name}`}
= $columnHorizontalInversionIndex}
- {invertY}
hidden={!$columnRenderMap[column.name]}
>
{#if column?.schema?.autocolumn}
diff --git a/packages/frontend-core/src/components/grid/lib/renderers.js b/packages/frontend-core/src/components/grid/lib/renderers.js
index 8911d68cba..2e22ee1871 100644
--- a/packages/frontend-core/src/components/grid/lib/renderers.js
+++ b/packages/frontend-core/src/components/grid/lib/renderers.js
@@ -13,6 +13,7 @@ import JSONCell from "../cells/JSONCell.svelte"
import AttachmentCell from "../cells/AttachmentCell.svelte"
import AttachmentSingleCell from "../cells/AttachmentSingleCell.svelte"
import BBReferenceCell from "../cells/BBReferenceCell.svelte"
+import SignatureCell from "../cells/SignatureCell.svelte"
import BBReferenceSingleCell from "../cells/BBReferenceSingleCell.svelte"
const TypeComponentMap = {
@@ -20,6 +21,7 @@ const TypeComponentMap = {
[FieldType.OPTIONS]: OptionsCell,
[FieldType.DATETIME]: DateCell,
[FieldType.BARCODEQR]: TextCell,
+ [FieldType.SIGNATURE_SINGLE]: SignatureCell,
[FieldType.LONGFORM]: LongFormCell,
[FieldType.ARRAY]: MultiSelectCell,
[FieldType.NUMBER]: NumberCell,
diff --git a/packages/frontend-core/src/components/grid/stores/viewport.js b/packages/frontend-core/src/components/grid/stores/viewport.js
index 96a5a954ee..a5079d3c11 100644
--- a/packages/frontend-core/src/components/grid/stores/viewport.js
+++ b/packages/frontend-core/src/components/grid/stores/viewport.js
@@ -1,9 +1,5 @@
import { derived } from "svelte/store"
-import {
- MaxCellRenderOverflow,
- MinColumnWidth,
- ScrollBarSize,
-} from "../lib/constants"
+import { MinColumnWidth } from "../lib/constants"
export const deriveStores = context => {
const {
@@ -85,51 +81,10 @@ export const deriveStores = context => {
}
)
- // Determine the row index at which we should start vertically inverting cell
- // dropdowns
- const rowVerticalInversionIndex = derived(
- [height, rowHeight, scrollTop],
- ([$height, $rowHeight, $scrollTop]) => {
- const offset = $scrollTop % $rowHeight
-
- // Compute the last row index with space to render popovers below it
- const minBottom =
- $height - ScrollBarSize * 3 - MaxCellRenderOverflow + offset
- const lastIdx = Math.floor(minBottom / $rowHeight)
-
- // Compute the first row index with space to render popovers above it
- const minTop = MaxCellRenderOverflow + offset
- const firstIdx = Math.ceil(minTop / $rowHeight)
-
- // Use the greater of the two indices so that we prefer content below,
- // unless there is room to render the entire popover above
- return Math.max(lastIdx, firstIdx)
- }
- )
-
- // Determine the column index at which we should start horizontally inverting
- // cell dropdowns
- const columnHorizontalInversionIndex = derived(
- [visibleColumns, scrollLeft, width],
- ([$visibleColumns, $scrollLeft, $width]) => {
- const cutoff = $width + $scrollLeft - ScrollBarSize * 3
- let inversionIdx = $visibleColumns.length
- for (let i = $visibleColumns.length - 1; i >= 0; i--, inversionIdx--) {
- const rightEdge = $visibleColumns[i].left + $visibleColumns[i].width
- if (rightEdge + MaxCellRenderOverflow <= cutoff) {
- break
- }
- }
- return inversionIdx
- }
- )
-
return {
scrolledRowCount,
visualRowCapacity,
renderedRows,
columnRenderMap,
- rowVerticalInversionIndex,
- columnHorizontalInversionIndex,
}
}
diff --git a/packages/frontend-core/src/components/index.js b/packages/frontend-core/src/components/index.js
index 0d4ff8ea35..06e1348988 100644
--- a/packages/frontend-core/src/components/index.js
+++ b/packages/frontend-core/src/components/index.js
@@ -1,5 +1,6 @@
export { default as SplitPage } from "./SplitPage.svelte"
export { default as TestimonialPage } from "./TestimonialPage.svelte"
+export { default as SignatureModal } from "./SignatureModal.svelte"
export { default as Testimonial } from "./Testimonial.svelte"
export { default as UserAvatar } from "./UserAvatar.svelte"
export { default as UserAvatars } from "./UserAvatars.svelte"
diff --git a/packages/frontend-core/src/constants.js b/packages/frontend-core/src/constants.js
index bd05fafd33..9178628360 100644
--- a/packages/frontend-core/src/constants.js
+++ b/packages/frontend-core/src/constants.js
@@ -121,6 +121,7 @@ export const TypeIconMap = {
[FieldType.OPTIONS]: "Dropdown",
[FieldType.DATETIME]: "Calendar",
[FieldType.BARCODEQR]: "Camera",
+ [FieldType.SIGNATURE_SINGLE]: "AnnotatePen",
[FieldType.LONGFORM]: "TextAlignLeft",
[FieldType.ARRAY]: "Duplicate",
[FieldType.NUMBER]: "123",
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 83041cbef6..934a838e6a 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -309,6 +309,11 @@ describe.each([
name: "attachments",
constraints: { type: "array", presence: false },
}
+ const signature: FieldSchema = {
+ type: FieldType.SIGNATURE_SINGLE,
+ name: "signature",
+ constraints: { presence: false },
+ }
const bool: FieldSchema = {
type: FieldType.BOOLEAN,
name: "boolean",
@@ -375,6 +380,8 @@ describe.each([
attachmentListUndefined: attachmentList,
attachmentListEmpty: attachmentList,
attachmentListEmptyArrayStr: attachmentList,
+ signatureNull: signature,
+ signatureUndefined: signature,
arrayFieldEmptyArrayStr: arrayField,
arrayFieldArrayStrKnown: arrayField,
arrayFieldNull: arrayField,
@@ -416,6 +423,8 @@ describe.each([
attachmentListUndefined: undefined,
attachmentListEmpty: "",
attachmentListEmptyArrayStr: "[]",
+ signatureNull: null,
+ signatureUndefined: undefined,
arrayFieldEmptyArrayStr: "[]",
arrayFieldUndefined: undefined,
arrayFieldNull: null,
@@ -450,6 +459,8 @@ describe.each([
expect(row.attachmentListUndefined).toBe(undefined)
expect(row.attachmentListEmpty).toEqual([])
expect(row.attachmentListEmptyArrayStr).toEqual([])
+ expect(row.signatureNull).toEqual(null)
+ expect(row.signatureUndefined).toBe(undefined)
expect(row.arrayFieldEmptyArrayStr).toEqual([])
expect(row.arrayFieldNull).toEqual([])
expect(row.arrayFieldUndefined).toEqual(undefined)
@@ -894,70 +905,91 @@ describe.each([
})
isInternal &&
- describe("attachments", () => {
- it("should allow enriching single attachment rows", async () => {
- const table = await config.api.table.save(
+ describe("attachments and signatures", () => {
+ const coreAttachmentEnrichment = async (
+ schema: any,
+ field: string,
+ attachmentCfg: string | string[]
+ ) => {
+ const testTable = await config.api.table.save(
defaultTable({
- schema: {
- attachment: {
- type: FieldType.ATTACHMENT_SINGLE,
- name: "attachment",
- constraints: { presence: false },
- },
- },
+ schema,
})
)
- const attachmentId = `${uuid.v4()}.csv`
- const row = await config.api.row.save(table._id!, {
+ const attachmentToStoreKey = (attachmentId: string) => {
+ return {
+ key: `${config.getAppId()}/attachments/${attachmentId}`,
+ }
+ }
+ const draftRow = {
name: "test",
description: "test",
- attachment: {
- key: `${config.getAppId()}/attachments/${attachmentId}`,
- },
+ [field]:
+ typeof attachmentCfg === "string"
+ ? attachmentToStoreKey(attachmentCfg)
+ : attachmentCfg.map(attachmentToStoreKey),
+ tableId: testTable._id,
+ }
+ const row = await config.api.row.save(testTable._id!, draftRow)
- tableId: table._id,
- })
await config.withEnv({ SELF_HOSTED: "true" }, async () => {
return context.doInAppContext(config.getAppId(), async () => {
- const enriched = await outputProcessing(table, [row])
- expect((enriched as Row[])[0].attachment.url.split("?")[0]).toBe(
- `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
- )
+ const enriched: Row[] = await outputProcessing(testTable, [row])
+ const [targetRow] = enriched
+ const attachmentEntries = Array.isArray(targetRow[field])
+ ? targetRow[field]
+ : [targetRow[field]]
+
+ for (const entry of attachmentEntries) {
+ const attachmentId = entry.key.split("/").pop()
+ expect(entry.url.split("?")[0]).toBe(
+ `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
+ )
+ }
})
})
+ }
+
+ it("should allow enriching single attachment rows", async () => {
+ await coreAttachmentEnrichment(
+ {
+ attachment: {
+ type: FieldType.ATTACHMENT_SINGLE,
+ name: "attachment",
+ constraints: { presence: false },
+ },
+ },
+ "attachment",
+ `${uuid.v4()}.csv`
+ )
})
it("should allow enriching attachment list rows", async () => {
- const table = await config.api.table.save(
- defaultTable({
- schema: {
- attachment: {
- type: FieldType.ATTACHMENTS,
- name: "attachment",
- constraints: { type: "array", presence: false },
- },
+ await coreAttachmentEnrichment(
+ {
+ attachments: {
+ type: FieldType.ATTACHMENTS,
+ name: "attachments",
+ constraints: { type: "array", presence: false },
},
- })
+ },
+ "attachments",
+ [`${uuid.v4()}.csv`]
)
- const attachmentId = `${uuid.v4()}.csv`
- const row = await config.api.row.save(table._id!, {
- name: "test",
- description: "test",
- attachment: [
- {
- key: `${config.getAppId()}/attachments/${attachmentId}`,
+ })
+
+ it("should allow enriching signature rows", async () => {
+ await coreAttachmentEnrichment(
+ {
+ signature: {
+ type: FieldType.SIGNATURE_SINGLE,
+ name: "signature",
+ constraints: { presence: false },
},
- ],
- tableId: table._id,
- })
- await config.withEnv({ SELF_HOSTED: "true" }, async () => {
- return context.doInAppContext(config.getAppId(), async () => {
- const enriched = await outputProcessing(table, [row])
- expect((enriched as Row[])[0].attachment[0].url.split("?")[0]).toBe(
- `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
- )
- })
- })
+ },
+ "signature",
+ `${uuid.v4()}.png`
+ )
})
})
diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts
index 87d0aa72c7..0dc244cfee 100644
--- a/packages/server/src/api/routes/tests/search.spec.ts
+++ b/packages/server/src/api/routes/tests/search.spec.ts
@@ -21,9 +21,6 @@ import _ from "lodash"
import tk from "timekeeper"
import { encodeJSBinding } from "@budibase/string-templates"
-const serverTime = new Date("2024-05-06T00:00:00.000Z")
-tk.freeze(serverTime)
-
describe.each([
["lucene", undefined],
["sqs", undefined],
@@ -251,8 +248,14 @@ describe.each([
describe("bindings", () => {
let globalUsers: any = []
- const future = new Date(serverTime.getTime())
- future.setDate(future.getDate() + 30)
+ const serverTime = new Date()
+
+ // In MariaDB and MySQL we only store dates to second precision, so we need
+ // to remove milliseconds from the server time to ensure searches work as
+ // expected.
+ serverTime.setMilliseconds(0)
+
+ const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
const rows = (currentUser: User) => {
return [
@@ -358,20 +361,22 @@ describe.each([
})
it("should parse the date binding and return all rows after the resolved value", async () => {
- await expectQuery({
- range: {
- appointment: {
- low: "{{ [now] }}",
- high: "9999-00-00T00:00:00.000Z",
+ await tk.withFreeze(serverTime, async () => {
+ await expectQuery({
+ range: {
+ appointment: {
+ low: "{{ [now] }}",
+ high: "9999-00-00T00:00:00.000Z",
+ },
},
- },
- }).toContainExactly([
- {
- name: config.getUser().firstName,
- appointment: future.toISOString(),
- },
- { name: "serverDate", appointment: serverTime.toISOString() },
- ])
+ }).toContainExactly([
+ {
+ name: config.getUser().firstName,
+ appointment: future.toISOString(),
+ },
+ { name: "serverDate", appointment: serverTime.toISOString() },
+ ])
+ })
})
it("should parse the date binding and return all rows before the resolved value", async () => {
@@ -407,8 +412,7 @@ describe.each([
})
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
- const jsBinding =
- "const currentTime = new Date()\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();"
+ const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();`
const encodedBinding = encodeJSBinding(jsBinding)
await expectQuery({
diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts
index cb09f860da..de6e1b3d88 100644
--- a/packages/server/src/automations/automationUtils.ts
+++ b/packages/server/src/automations/automationUtils.ts
@@ -113,7 +113,8 @@ export async function sendAutomationAttachmentsToStorage(
const schema = table.schema[prop]
if (
schema?.type === FieldType.ATTACHMENTS ||
- schema?.type === FieldType.ATTACHMENT_SINGLE
+ schema?.type === FieldType.ATTACHMENT_SINGLE ||
+ schema?.type === FieldType.SIGNATURE_SINGLE
) {
attachmentRows[prop] = value
}
diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts
index a82a9fcea8..5e6ce75bbe 100644
--- a/packages/server/src/integrations/base/sqlTable.ts
+++ b/packages/server/src/integrations/base/sqlTable.ts
@@ -125,6 +125,7 @@ function generateSchema(
break
case FieldType.ATTACHMENTS:
case FieldType.ATTACHMENT_SINGLE:
+ case FieldType.SIGNATURE_SINGLE:
case FieldType.AUTO:
case FieldType.JSON:
case FieldType.INTERNAL:
diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts
index d8a0dc9e65..2da0c2c201 100644
--- a/packages/server/src/integrations/googlesheets.ts
+++ b/packages/server/src/integrations/googlesheets.ts
@@ -72,6 +72,7 @@ const isTypeAllowed: Record = {
[FieldType.JSON]: false,
[FieldType.INTERNAL]: false,
[FieldType.BIGINT]: false,
+ [FieldType.SIGNATURE_SINGLE]: false,
}
const ALLOWED_TYPES = Object.entries(isTypeAllowed)
diff --git a/packages/server/src/integrations/utils/utils.ts b/packages/server/src/integrations/utils/utils.ts
index 892d8ae034..a15cb246ef 100644
--- a/packages/server/src/integrations/utils/utils.ts
+++ b/packages/server/src/integrations/utils/utils.ts
@@ -381,6 +381,7 @@ function copyExistingPropsOver(
case FieldType.ARRAY:
case FieldType.ATTACHMENTS:
case FieldType.ATTACHMENT_SINGLE:
+ case FieldType.SIGNATURE_SINGLE:
case FieldType.JSON:
case FieldType.BB_REFERENCE:
case FieldType.BB_REFERENCE_SINGLE:
diff --git a/packages/server/src/sdk/app/backups/imports.ts b/packages/server/src/sdk/app/backups/imports.ts
index 1c08c8c3bf..a16bfb418d 100644
--- a/packages/server/src/sdk/app/backups/imports.ts
+++ b/packages/server/src/sdk/app/backups/imports.ts
@@ -68,7 +68,8 @@ export async function updateAttachmentColumns(prodAppId: string, db: Database) {
rewriteAttachmentUrl(prodAppId, attachment)
)
} else if (
- columnType === FieldType.ATTACHMENT_SINGLE &&
+ (columnType === FieldType.ATTACHMENT_SINGLE ||
+ columnType === FieldType.SIGNATURE_SINGLE) &&
row[column]
) {
row[column] = rewriteAttachmentUrl(prodAppId, row[column])
diff --git a/packages/server/src/sdk/app/rows/attachments.ts b/packages/server/src/sdk/app/rows/attachments.ts
index ee816ea1fc..0bcfcbe4b4 100644
--- a/packages/server/src/sdk/app/rows/attachments.ts
+++ b/packages/server/src/sdk/app/rows/attachments.ts
@@ -32,7 +32,8 @@ export async function getRowsWithAttachments(appId: string, table: Table) {
for (let [key, column] of Object.entries(table.schema)) {
if (
column.type === FieldType.ATTACHMENTS ||
- column.type === FieldType.ATTACHMENT_SINGLE
+ column.type === FieldType.ATTACHMENT_SINGLE ||
+ column.type === FieldType.SIGNATURE_SINGLE
) {
attachmentCols.push(key)
}
diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts
index 5ecfd9692e..2b91c5cfa7 100644
--- a/packages/server/src/sdk/app/tables/internal/sqs.ts
+++ b/packages/server/src/sdk/app/tables/internal/sqs.ts
@@ -42,6 +42,7 @@ const FieldTypeMap: Record = {
[FieldType.BARCODEQR]: SQLiteType.BLOB,
[FieldType.ATTACHMENTS]: SQLiteType.BLOB,
[FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
+ [FieldType.SIGNATURE_SINGLE]: SQLiteType.BLOB,
[FieldType.ARRAY]: SQLiteType.BLOB,
[FieldType.LINK]: SQLiteType.BLOB,
[FieldType.BIGINT]: SQLiteType.TEXT,
diff --git a/packages/server/src/sdk/tests/attachments.spec.ts b/packages/server/src/sdk/tests/attachments.spec.ts
index 46165f68fc..12c808d3a5 100644
--- a/packages/server/src/sdk/tests/attachments.spec.ts
+++ b/packages/server/src/sdk/tests/attachments.spec.ts
@@ -23,13 +23,35 @@ describe("should be able to re-write attachment URLs", () => {
await config.init()
})
- it("should update URLs on a number of rows over the limit", async () => {
+ const coreBehaviour = async (tblSchema: any, row: any) => {
const table = await config.api.table.save({
name: "photos",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
- schema: {
+ schema: tblSchema,
+ })
+
+ for (let i = 0; i < FIND_LIMIT * 4; i++) {
+ await config.api.row.save(table._id!, {
+ ...row,
+ })
+ }
+
+ const db = dbCore.getDB(config.getAppId())
+ await sdk.backups.updateAttachmentColumns(db.name, db)
+
+ return {
+ db,
+ rows: (await sdk.rows.getAllInternalRows(db.name)).filter(
+ row => row.tableId === table._id
+ ),
+ }
+ }
+
+ it("Attachment field, should update URLs on a number of rows over the limit", async () => {
+ const { rows, db } = await coreBehaviour(
+ {
photo: {
type: FieldType.ATTACHMENT_SINGLE,
name: "photo",
@@ -43,21 +65,11 @@ describe("should be able to re-write attachment URLs", () => {
name: "otherCol",
},
},
- })
-
- for (let i = 0; i < FIND_LIMIT * 4; i++) {
- await config.api.row.save(table._id!, {
+ {
photo: { ...attachment },
gallery: [{ ...attachment }, { ...attachment }],
otherCol: "string",
- })
- }
-
- const db = dbCore.getDB(config.getAppId())
- await sdk.backups.updateAttachmentColumns(db.name, db)
-
- const rows = (await sdk.rows.getAllInternalRows(db.name)).filter(
- row => row.tableId === table._id
+ }
)
for (const row of rows) {
expect(row.otherCol).toBe("string")
@@ -69,4 +81,27 @@ describe("should be able to re-write attachment URLs", () => {
expect(row.gallery[1].key).toBe(`${db.name}/attachments/a.png`)
}
})
+ it("Signature field, should update URLs on a number of rows over the limit", async () => {
+ const { rows, db } = await coreBehaviour(
+ {
+ signature: {
+ type: FieldType.SIGNATURE_SINGLE,
+ name: "signature",
+ },
+ otherCol: {
+ type: FieldType.STRING,
+ name: "otherCol",
+ },
+ },
+ {
+ signature: { ...attachment },
+ otherCol: "string",
+ }
+ )
+ for (const row of rows) {
+ expect(row.otherCol).toBe("string")
+ expect(row.signature.url).toBe("")
+ expect(row.signature.key).toBe(`${db.name}/attachments/a.png`)
+ }
+ })
})
diff --git a/packages/server/src/utilities/rowProcessor/attachments.ts b/packages/server/src/utilities/rowProcessor/attachments.ts
index bfa216c25b..4b0cc38cb1 100644
--- a/packages/server/src/utilities/rowProcessor/attachments.ts
+++ b/packages/server/src/utilities/rowProcessor/attachments.ts
@@ -31,7 +31,8 @@ export class AttachmentCleanup {
): string[] {
if (
type !== FieldType.ATTACHMENTS &&
- type !== FieldType.ATTACHMENT_SINGLE
+ type !== FieldType.ATTACHMENT_SINGLE &&
+ type !== FieldType.SIGNATURE_SINGLE
) {
return []
}
@@ -62,7 +63,8 @@ export class AttachmentCleanup {
for (let [key, schema] of Object.entries(tableSchema)) {
if (
schema.type !== FieldType.ATTACHMENTS &&
- schema.type !== FieldType.ATTACHMENT_SINGLE
+ schema.type !== FieldType.ATTACHMENT_SINGLE &&
+ schema.type !== FieldType.SIGNATURE_SINGLE
) {
continue
}
@@ -100,10 +102,12 @@ export class AttachmentCleanup {
for (let [key, schema] of Object.entries(table.schema)) {
if (
schema.type !== FieldType.ATTACHMENTS &&
- schema.type !== FieldType.ATTACHMENT_SINGLE
+ schema.type !== FieldType.ATTACHMENT_SINGLE &&
+ schema.type !== FieldType.SIGNATURE_SINGLE
) {
continue
}
+
rows.forEach(row => {
files = files.concat(
AttachmentCleanup.extractAttachmentKeys(schema.type, row[key])
@@ -120,7 +124,8 @@ export class AttachmentCleanup {
for (let [key, schema] of Object.entries(table.schema)) {
if (
schema.type !== FieldType.ATTACHMENTS &&
- schema.type !== FieldType.ATTACHMENT_SINGLE
+ schema.type !== FieldType.ATTACHMENT_SINGLE &&
+ schema.type !== FieldType.SIGNATURE_SINGLE
) {
continue
}
diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts
index 7bbf57964e..73176af6d8 100644
--- a/packages/server/src/utilities/rowProcessor/index.ts
+++ b/packages/server/src/utilities/rowProcessor/index.ts
@@ -158,7 +158,10 @@ export async function inputProcessing(
delete attachment.url
})
}
- } else if (field.type === FieldType.ATTACHMENT_SINGLE) {
+ } else if (
+ field.type === FieldType.ATTACHMENT_SINGLE ||
+ field.type === FieldType.SIGNATURE_SINGLE
+ ) {
const attachment = clonedRow[key]
if (attachment?.url) {
delete clonedRow[key].url
@@ -230,7 +233,8 @@ export async function outputProcessing(
for (let [property, column] of Object.entries(table.schema)) {
if (
column.type === FieldType.ATTACHMENTS ||
- column.type === FieldType.ATTACHMENT_SINGLE
+ column.type === FieldType.ATTACHMENT_SINGLE ||
+ column.type === FieldType.SIGNATURE_SINGLE
) {
for (let row of enriched) {
if (row[property] == null) {
diff --git a/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts b/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts
index 3ef8c71afc..a39fc23152 100644
--- a/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts
+++ b/packages/server/src/utilities/rowProcessor/tests/attachments.spec.ts
@@ -1,22 +1,30 @@
import { AttachmentCleanup } from "../attachments"
import { FieldType, Table, Row, TableSourceType } from "@budibase/types"
import { DEFAULT_BB_DATASOURCE_ID } from "../../../constants"
-import { objectStore } from "@budibase/backend-core"
+import { objectStore, db, context } from "@budibase/backend-core"
+import * as uuid from "uuid"
const BUCKET = "prod-budi-app-assets"
const FILE_NAME = "file/thing.jpg"
+const DEV_APPID = "abc_dev_123"
+const PROD_APPID = "abc_123"
jest.mock("@budibase/backend-core", () => {
const actual = jest.requireActual("@budibase/backend-core")
return {
...actual,
+ context: {
+ ...actual.context,
+ getAppId: jest.fn(),
+ },
objectStore: {
deleteFiles: jest.fn(),
ObjectStoreBuckets: actual.objectStore.ObjectStoreBuckets,
},
db: {
- isProdAppID: () => jest.fn(() => false),
- dbExists: () => jest.fn(() => false),
+ isProdAppID: jest.fn(),
+ getProdAppID: jest.fn(),
+ dbExists: jest.fn(),
},
}
})
@@ -27,12 +35,18 @@ const mockedDeleteFiles = objectStore.deleteFiles as jest.MockedFunction<
const rowGenerators: [
string,
- FieldType.ATTACHMENT_SINGLE | FieldType.ATTACHMENTS,
+ (
+ | FieldType.ATTACHMENT_SINGLE
+ | FieldType.ATTACHMENTS
+ | FieldType.SIGNATURE_SINGLE
+ ),
+ string,
(fileKey?: string) => Row
][] = [
[
"row with a attachment list column",
FieldType.ATTACHMENTS,
+ "attach",
function rowWithAttachments(fileKey: string = FILE_NAME): Row {
return {
attach: [
@@ -48,6 +62,7 @@ const rowGenerators: [
[
"row with a single attachment column",
FieldType.ATTACHMENT_SINGLE,
+ "attach",
function rowWithAttachments(fileKey: string = FILE_NAME): Row {
return {
attach: {
@@ -58,11 +73,25 @@ const rowGenerators: [
}
},
],
+ [
+ "row with a single signature column",
+ FieldType.SIGNATURE_SINGLE,
+ "signature",
+ function rowWithSignature(): Row {
+ return {
+ signature: {
+ size: 1,
+ extension: "png",
+ key: `${uuid.v4()}.png`,
+ },
+ }
+ },
+ ],
]
describe.each(rowGenerators)(
"attachment cleanup",
- (_, attachmentFieldType, rowGenerator) => {
+ (_, attachmentFieldType, colKey, rowGenerator) => {
function tableGenerator(): Table {
return {
name: "table",
@@ -75,97 +104,158 @@ describe.each(rowGenerators)(
type: attachmentFieldType,
constraints: {},
},
+ signature: {
+ name: "signature",
+ type: FieldType.SIGNATURE_SINGLE,
+ constraints: {},
+ },
},
}
}
+ const getRowKeys = (row: any, col: string) => {
+ return Array.isArray(row[col])
+ ? row[col].map((entry: any) => entry.key)
+ : [row[col]?.key]
+ }
+
beforeEach(() => {
mockedDeleteFiles.mockClear()
+ jest.resetAllMocks()
+
+ jest.spyOn(context, "getAppId").mockReturnValue(DEV_APPID)
+ jest.spyOn(db, "isProdAppID").mockReturnValue(false)
+ jest.spyOn(db, "getProdAppID").mockReturnValue(PROD_APPID)
+ jest.spyOn(db, "dbExists").mockReturnValue(Promise.resolve(false))
})
- it("should be able to cleanup a table update", async () => {
+ // Ignore calls to prune attachments when app is in production.
+ it(`${attachmentFieldType} - should not attempt to delete attachments/signatures if a published app exists`, async () => {
+ jest.spyOn(db, "dbExists").mockReturnValue(Promise.resolve(true))
const originalTable = tableGenerator()
- delete originalTable.schema["attach"]
+ delete originalTable.schema[colKey]
await AttachmentCleanup.tableUpdate(originalTable, [rowGenerator()], {
oldTable: tableGenerator(),
})
- expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, [FILE_NAME])
- })
-
- it("should be able to cleanup a table deletion", async () => {
- await AttachmentCleanup.tableDelete(tableGenerator(), [rowGenerator()])
- expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, [FILE_NAME])
- })
-
- it("should handle table column renaming", async () => {
- const updatedTable = tableGenerator()
- updatedTable.schema.attach2 = updatedTable.schema.attach
- delete updatedTable.schema.attach
- await AttachmentCleanup.tableUpdate(updatedTable, [rowGenerator()], {
- oldTable: tableGenerator(),
- rename: { old: "attach", updated: "attach2" },
- })
expect(mockedDeleteFiles).not.toHaveBeenCalled()
})
- it("shouldn't cleanup if no table changes", async () => {
+ it(`${attachmentFieldType} - should be able to cleanup a table update`, async () => {
+ const originalTable = tableGenerator()
+ delete originalTable.schema[colKey]
+ const targetRow = rowGenerator()
+
+ await AttachmentCleanup.tableUpdate(originalTable, [targetRow], {
+ oldTable: tableGenerator(),
+ })
+
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(
+ BUCKET,
+ getRowKeys(targetRow, colKey)
+ )
+ })
+
+ it(`${attachmentFieldType} - should be able to cleanup a table deletion`, async () => {
+ const targetRow = rowGenerator()
+ await AttachmentCleanup.tableDelete(tableGenerator(), [targetRow])
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(
+ BUCKET,
+ getRowKeys(targetRow, colKey)
+ )
+ })
+
+ it(`${attachmentFieldType} - should handle table column renaming`, async () => {
+ const updatedTable = tableGenerator()
+ updatedTable.schema.col2 = updatedTable.schema[colKey]
+ delete updatedTable.schema[colKey]
+ await AttachmentCleanup.tableUpdate(updatedTable, [rowGenerator()], {
+ oldTable: tableGenerator(),
+ rename: { old: colKey, updated: "col2" },
+ })
+ expect(mockedDeleteFiles).not.toHaveBeenCalled()
+ })
+
+ it(`${attachmentFieldType} - shouldn't cleanup if no table changes`, async () => {
await AttachmentCleanup.tableUpdate(tableGenerator(), [rowGenerator()], {
oldTable: tableGenerator(),
})
expect(mockedDeleteFiles).not.toHaveBeenCalled()
})
- it("should handle row updates", async () => {
+ it(`${attachmentFieldType} - should handle row updates`, async () => {
const updatedRow = rowGenerator()
- delete updatedRow.attach
+ delete updatedRow[colKey]
+
+ const targetRow = rowGenerator()
await AttachmentCleanup.rowUpdate(tableGenerator(), {
row: updatedRow,
- oldRow: rowGenerator(),
+ oldRow: targetRow,
})
- expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, [FILE_NAME])
+
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(
+ BUCKET,
+ getRowKeys(targetRow, colKey)
+ )
})
- it("should handle row deletion", async () => {
- await AttachmentCleanup.rowDelete(tableGenerator(), [rowGenerator()])
- expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, [FILE_NAME])
+ it(`${attachmentFieldType} - should handle row deletion`, async () => {
+ const targetRow = rowGenerator()
+ await AttachmentCleanup.rowDelete(tableGenerator(), [targetRow])
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(
+ BUCKET,
+ getRowKeys(targetRow, colKey)
+ )
})
- it("should handle row deletion and not throw when attachments are undefined", async () => {
+ it(`${attachmentFieldType} - should handle row deletion, prune signature`, async () => {
+ const targetRow = rowGenerator()
+ await AttachmentCleanup.rowDelete(tableGenerator(), [targetRow])
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(
+ BUCKET,
+ getRowKeys(targetRow, colKey)
+ )
+ })
+
+ it(`${attachmentFieldType} - should handle row deletion and not throw when attachments are undefined`, async () => {
await AttachmentCleanup.rowDelete(tableGenerator(), [
{
- multipleAttachments: undefined,
+ [colKey]: undefined,
},
])
})
- it("shouldn't cleanup attachments if row not updated", async () => {
+ it(`${attachmentFieldType} - shouldn't cleanup attachments if row not updated`, async () => {
+ const targetRow = rowGenerator()
await AttachmentCleanup.rowUpdate(tableGenerator(), {
- row: rowGenerator(),
- oldRow: rowGenerator(),
+ row: targetRow,
+ oldRow: targetRow,
})
expect(mockedDeleteFiles).not.toHaveBeenCalled()
})
- it("should be able to cleanup a column and not throw when attachments are undefined", async () => {
+ it(`${attachmentFieldType} - should be able to cleanup a column and not throw when attachments are undefined`, async () => {
const originalTable = tableGenerator()
- delete originalTable.schema["attach"]
+ delete originalTable.schema[colKey]
+ const row1 = rowGenerator("file 1")
+ const row2 = rowGenerator("file 2")
await AttachmentCleanup.tableUpdate(
originalTable,
- [rowGenerator("file 1"), { attach: undefined }, rowGenerator("file 2")],
+ [row1, { [colKey]: undefined }, row2],
{
oldTable: tableGenerator(),
}
)
+ const expectedKeys = [row1, row2].reduce((acc: string[], row) => {
+ acc = [...acc, ...getRowKeys(row, colKey)]
+ return acc
+ }, [])
expect(mockedDeleteFiles).toHaveBeenCalledTimes(1)
- expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, [
- "file 1",
- "file 2",
- ])
+ expect(mockedDeleteFiles).toHaveBeenCalledWith(BUCKET, expectedKeys)
})
- it("should be able to cleanup a column and not throw when ALL attachments are undefined", async () => {
+ it(`${attachmentFieldType} - should be able to cleanup a column and not throw when ALL attachments are undefined`, async () => {
const originalTable = tableGenerator()
- delete originalTable.schema["attach"]
+ delete originalTable.schema[colKey]
await AttachmentCleanup.tableUpdate(
originalTable,
[{}, { attach: undefined }],
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 4f0feb3c93..c3230d238c 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -151,7 +151,8 @@ export function parse(rows: Rows, schema: TableSchema): Rows {
parsedRow[columnName] = parsedValue?._id
} else if (
(columnType === FieldType.ATTACHMENTS ||
- columnType === FieldType.ATTACHMENT_SINGLE) &&
+ columnType === FieldType.ATTACHMENT_SINGLE ||
+ columnType === FieldType.SIGNATURE_SINGLE) &&
typeof columnData === "string"
) {
parsedRow[columnName] = parseCsvExport(columnData)
diff --git a/packages/shared-core/src/table.ts b/packages/shared-core/src/table.ts
index 2b3586932a..7706b78037 100644
--- a/packages/shared-core/src/table.ts
+++ b/packages/shared-core/src/table.ts
@@ -15,6 +15,7 @@ const allowDisplayColumnByType: Record = {
[FieldType.ARRAY]: false,
[FieldType.ATTACHMENTS]: false,
[FieldType.ATTACHMENT_SINGLE]: false,
+ [FieldType.SIGNATURE_SINGLE]: false,
[FieldType.LINK]: false,
[FieldType.JSON]: false,
[FieldType.BB_REFERENCE]: false,
@@ -33,10 +34,10 @@ const allowSortColumnByType: Record = {
[FieldType.BIGINT]: true,
[FieldType.BOOLEAN]: true,
[FieldType.JSON]: true,
-
[FieldType.FORMULA]: false,
[FieldType.ATTACHMENTS]: false,
[FieldType.ATTACHMENT_SINGLE]: false,
+ [FieldType.SIGNATURE_SINGLE]: false,
[FieldType.ARRAY]: false,
[FieldType.LINK]: false,
[FieldType.BB_REFERENCE]: false,
diff --git a/packages/types/src/documents/app/row.ts b/packages/types/src/documents/app/row.ts
index 45436cc9c1..27d7df09fd 100644
--- a/packages/types/src/documents/app/row.ts
+++ b/packages/types/src/documents/app/row.ts
@@ -93,6 +93,11 @@ export enum FieldType {
* type is found. The column will contain the contents of any barcode scanned.
*/
BARCODEQR = "barcodeqr",
+ /**
+ * a JSON type, called Signature within Budibase. This type functions much the same as ATTACHMENTS but restricted
+ * only to signatures.
+ */
+ SIGNATURE_SINGLE = "signature_single",
/**
* a string type, this allows representing very large integers, but they are held/managed within Budibase as
* strings. When stored in external databases Budibase will attempt to use a real big integer type and depend