Merge branch 'master' into feature/filter-bindings
commit c1874f3376
@@ -13,13 +13,14 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
 import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
+import { HeadObjectOutput } from "aws-sdk/clients/s3"
 
 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
 const STATE = {
   bucketCreationPromises: {},
 }
-const signedFilePrefix = "/files/signed"
+export const SIGNED_FILE_PREFIX = "/files/signed"
 
 type ListParams = {
   ContinuationToken?: string
@@ -40,8 +41,13 @@ type UploadParams = BaseUploadParams & {
   path?: string | PathLike
 }
 
-type StreamUploadParams = BaseUploadParams & {
-  stream: ReadStream
+export type StreamTypes =
+  | ReadStream
+  | NodeJS.ReadableStream
+  | ReadableStream<Uint8Array>
+
+export type StreamUploadParams = BaseUploadParams & {
+  stream?: StreamTypes
 }
 
 const CONTENT_TYPE_MAP: any = {
@@ -174,11 +180,9 @@ export async function upload({
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
 
-  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+  if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    if (objectStore.putBucketLifecycleConfiguration) {
-      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
-    }
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
 
   let contentType = type
@@ -222,11 +226,9 @@ export async function streamUpload({
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
 
-  if (ttl && (bucketCreated.created || bucketCreated.exists)) {
+  if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    if (objectStore.putBucketLifecycleConfiguration) {
-      await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
-    }
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
 
   // Set content type for certain known extensions
@@ -333,7 +335,7 @@ export function getPresignedUrl(
     const signedUrl = new URL(url)
     const path = signedUrl.pathname
     const query = signedUrl.search
-    return `${signedFilePrefix}${path}${query}`
+    return `${SIGNED_FILE_PREFIX}${path}${query}`
   }
 }
 
@@ -521,6 +523,26 @@ export async function getReadStream(
   return client.getObject(params).createReadStream()
 }
 
+export async function getObjectMetadata(
+  bucket: string,
+  path: string
+): Promise<HeadObjectOutput> {
+  bucket = sanitizeBucket(bucket)
+  path = sanitizeKey(path)
+
+  const client = ObjectStore(bucket)
+  const params = {
+    Bucket: bucket,
+    Key: path,
+  }
+
+  try {
+    return await client.headObject(params).promise()
+  } catch (err: any) {
+    throw new Error("Unable to retrieve metadata from object")
+  }
+}
+
 /*
   Given a signed url like '/files/signed/tmp-files-attachments/app_123456/myfile.txt' extract
   the bucket and the path from it
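
The getObjectMetadata helper added above is a thin wrapper over S3's headObject call. A minimal usage sketch (not part of the diff; the bucket and key values are invented, and the helper is assumed to be reachable through backend-core's objectStore namespace, as the automation utilities later in this diff use it):

import { objectStore } from "@budibase/backend-core"

// Read the size of an object without downloading it - headObject only
// returns metadata (ContentLength, ContentType, ETag, ...).
export async function getAttachmentSize(bucket: string, key: string) {
  const metadata = await objectStore.getObjectMetadata(bucket, key)
  return metadata.ContentLength
}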
@@ -530,7 +552,9 @@ export function extractBucketAndPath(
 ): { bucket: string; path: string } | null {
   const baseUrl = url.split("?")[0]
 
-  const regex = new RegExp(`^${signedFilePrefix}/(?<bucket>[^/]+)/(?<path>.+)$`)
+  const regex = new RegExp(
    `^${SIGNED_FILE_PREFIX}/(?<bucket>[^/]+)/(?<path>.+)$`
+  )
   const match = baseUrl.match(regex)
 
   if (match && match.groups) {
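
The reformatted regex above behaves exactly like the single-line version it replaces; only the constant name changes. A standalone sketch of the parsing, using the example URL from the code comment (illustrative only, not the library's implementation):

const SIGNED_FILE_PREFIX = "/files/signed"

function exampleExtract(url: string): { bucket: string; path: string } | null {
  // Drop any query string, then capture "<bucket>/<rest of path>" after the prefix.
  const baseUrl = url.split("?")[0]
  const regex = new RegExp(`^${SIGNED_FILE_PREFIX}/(?<bucket>[^/]+)/(?<path>.+)$`)
  const match = baseUrl.match(regex)
  return match?.groups
    ? { bucket: match.groups.bucket, path: match.groups.path }
    : null
}

// exampleExtract("/files/signed/tmp-files-attachments/app_123456/myfile.txt")
// -> { bucket: "tmp-files-attachments", path: "app_123456/myfile.txt" }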
@@ -1,9 +1,14 @@
-import { join } from "path"
+import path, { join } from "path"
 import { tmpdir } from "os"
 import fs from "fs"
 import env from "../environment"
 import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
-
+import * as objectStore from "./objectStore"
+import {
+  AutomationAttachment,
+  AutomationAttachmentContent,
+  BucketedContent,
+} from "@budibase/types"
 /****************************************************
  * NOTE: When adding a new bucket - name             *
  * sure that S3 usages (like budibase-infra)         *
@@ -55,3 +60,50 @@ export const bucketTTLConfig = (
 
   return params
 }
+
+async function processUrlAttachment(
+  attachment: AutomationAttachment
+): Promise<AutomationAttachmentContent> {
+  const response = await fetch(attachment.url)
+  if (!response.ok || !response.body) {
+    throw new Error(`Unexpected response ${response.statusText}`)
+  }
+  const fallbackFilename = path.basename(new URL(attachment.url).pathname)
+  return {
+    filename: attachment.filename || fallbackFilename,
+    content: response.body,
+  }
+}
+
+export async function processObjectStoreAttachment(
+  attachment: AutomationAttachment
+): Promise<BucketedContent> {
+  const result = objectStore.extractBucketAndPath(attachment.url)
+
+  if (result === null) {
+    throw new Error("Invalid signed URL")
+  }
+
+  const { bucket, path: objectPath } = result
+  const readStream = await objectStore.getReadStream(bucket, objectPath)
+  const fallbackFilename = path.basename(objectPath)
+  return {
+    bucket,
+    path: objectPath,
+    filename: attachment.filename || fallbackFilename,
+    content: readStream,
+  }
+}
+
+export async function processAutomationAttachment(
+  attachment: AutomationAttachment
+): Promise<AutomationAttachmentContent | BucketedContent> {
+  const isFullyFormedUrl =
+    attachment.url?.startsWith("http://") ||
+    attachment.url?.startsWith("https://")
+  if (isFullyFormedUrl) {
+    return await processUrlAttachment(attachment)
+  } else {
+    return await processObjectStoreAttachment(attachment)
+  }
+}
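
processAutomationAttachment, added above, picks a strategy from the URL shape: fully formed http(s) URLs are fetched, anything else is treated as a signed object-store URL. A hedged usage sketch (the attachment values are invented; the helper is assumed to be exposed via backend-core's objectStore export, as the email utility change later in this diff relies on):

import { objectStore } from "@budibase/backend-core"
import { AutomationAttachment } from "@budibase/types"

export async function resolveAttachmentContent(attachment: AutomationAttachment) {
  // http(s) URL -> fetched, resolves to { filename, content } (a readable stream)
  // signed URL  -> streamed from the bucket, resolves to { bucket, path, filename, content }
  return await objectStore.processAutomationAttachment(attachment)
}

// resolveAttachmentContent({ url: "https://example.com/report.pdf", filename: "report.pdf" })
// resolveAttachmentContent({
//   url: "/files/signed/tmp-files-attachments/app_123456/myfile.txt",
//   filename: "myfile.txt",
// })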
@@ -358,7 +358,8 @@
       value.customType !== "cron" &&
       value.customType !== "triggerSchema" &&
       value.customType !== "automationFields" &&
-      value.type !== "attachment"
+      value.type !== "attachment" &&
+      value.type !== "attachment_single"
     )
   }
 
@@ -2,6 +2,8 @@
   import { tables } from "stores/builder"
   import { Select, Checkbox, Label } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
+  import { FieldType } from "@budibase/types"
+
   import RowSelectorTypes from "./RowSelectorTypes.svelte"
   import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
   import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
@@ -14,7 +16,6 @@
   export let bindings
   export let isTestModal
-  export let isUpdateRow
 
   $: parsedBindings = bindings.map(binding => {
     let clone = Object.assign({}, binding)
     clone.icon = "ShareAndroid"
@@ -26,15 +27,19 @@
-
   $: {
     table = $tables.list.find(table => table._id === value?.tableId)
-    schemaFields = Object.entries(table?.schema ?? {})
-    // surface the schema so the user can see it in the json
-    schemaFields.map(([, schema]) => {
+
+    // Just sorting attachment types to the bottom here for a cleaner UX
+    schemaFields = Object.entries(table?.schema ?? {}).sort(
+      ([, schemaA], [, schemaB]) =>
+        (schemaA.type === "attachment") - (schemaB.type === "attachment")
+    )
+
+    schemaFields.forEach(([, schema]) => {
       if (!schema.autocolumn && !value[schema.name]) {
         value[schema.name] = ""
       }
     })
   }
 
   const onChangeTable = e => {
     value["tableId"] = e.detail
     dispatch("change", value)
@@ -114,10 +119,16 @@
   </div>
   {#if schemaFields.length}
     {#each schemaFields as [field, schema]}
-      {#if !schema.autocolumn && schema.type !== "attachment"}
-        <div class="schema-fields">
+      {#if !schema.autocolumn}
+        <div
+          class:schema-fields={schema.type !== FieldType.ATTACHMENTS &&
+            schema.type !== FieldType.ATTACHMENT_SINGLE}
+        >
           <Label>{field}</Label>
-          <div class="field-width">
+          <div
+            class:field-width={schema.type !== FieldType.ATTACHMENTS &&
+              schema.type !== FieldType.ATTACHMENT_SINGLE}
+          >
             {#if isTestModal}
               <RowSelectorTypes
                 {isTestModal}
@@ -1,10 +1,12 @@
 <script>
   import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
+  import { FieldType } from "@budibase/types"
   import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
   import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
   import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
   import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
   import Editor from "components/integration/QueryEditor.svelte"
+  import KeyValueBuilder from "components/integration/KeyValueBuilder.svelte"
 
   export let onChange
   export let field
@@ -22,6 +24,27 @@
   function schemaHasOptions(schema) {
     return !!schema.constraints?.inclusion?.length
   }
+
+  const handleAttachmentParams = keyValuObj => {
+    let params = {}
+
+    if (
+      schema.type === FieldType.ATTACHMENT_SINGLE &&
+      Object.keys(keyValuObj).length === 0
+    ) {
+      return []
+    }
+    if (!Array.isArray(keyValuObj)) {
+      keyValuObj = [keyValuObj]
+    }
+
+    if (keyValuObj.length) {
+      for (let param of keyValuObj) {
+        params[param.url] = param.filename
+      }
+    }
+    return params
+  }
 </script>
 
 {#if schemaHasOptions(schema) && schema.type !== "array"}
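
handleAttachmentParams above reshapes the bound attachment value into the { url: filename } object that KeyValueBuilder renders. A standalone sketch of the same mapping with invented values (the real helper lives inside the component and closes over schema):

type Attachment = { url: string; filename: string }

function toKeyValueObject(value: Attachment | Attachment[]) {
  // Normalise single attachments to an array, then key each entry by its URL.
  const list = Array.isArray(value) ? value : [value]
  const params: Record<string, string> = {}
  for (const param of list) {
    params[param.url] = param.filename
  }
  return params
}

// toKeyValueObject([{ url: "https://example.com/a.txt", filename: "a.txt" }])
// -> { "https://example.com/a.txt": "a.txt" }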
@@ -77,6 +100,35 @@
     on:change={e => onChange(e, field)}
     useLabel={false}
   />
+{:else if schema.type === FieldType.ATTACHMENTS || schema.type === FieldType.ATTACHMENT_SINGLE}
+  <div class="attachment-field-spacinng">
+    <KeyValueBuilder
+      on:change={e =>
+        onChange(
+          {
+            detail:
+              schema.type === FieldType.ATTACHMENT_SINGLE
+                ? e.detail.length > 0
+                  ? { url: e.detail[0].name, filename: e.detail[0].value }
+                  : {}
+                : e.detail.map(({ name, value }) => ({
+                    url: name,
+                    filename: value,
+                  })),
+          },
+          field
+        )}
+      object={handleAttachmentParams(value[field])}
+      allowJS
+      {bindings}
+      keyBindings
+      customButtonText={"Add attachment"}
+      keyPlaceholder={"URL"}
+      valuePlaceholder={"Filename"}
+      actionButtonDisabled={schema.type === FieldType.ATTACHMENT_SINGLE &&
+        Object.keys(value[field]).length >= 1}
+    />
+  </div>
 {:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
   <svelte:component
     this={isTestModal ? ModalBindableInput : DrawerBindableInput}
@@ -90,3 +142,10 @@
     title={schema.name}
   />
 {/if}
+
+<style>
+  .attachment-field-spacinng {
+    margin-top: var(--spacing-s);
+    margin-bottom: var(--spacing-l);
+  }
+</style>
@@ -4,6 +4,7 @@
     readableToRuntimeBinding,
     runtimeToReadableBinding,
   } from "dataBinding"
+  import { FieldType } from "@budibase/types"
 
   import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
   import { createEventDispatcher, setContext } from "svelte"
@@ -102,6 +103,8 @@
     longform: value => !isJSBinding(value),
     json: value => !isJSBinding(value),
     boolean: isValidBoolean,
+    attachment: false,
+    attachment_single: false,
   }
 
   const isValid = value => {
|
|||
if (type === "json" && !isJSBinding(value)) {
|
||||
return "json-slot-icon"
|
||||
}
|
||||
if (!["string", "number", "bigint", "barcodeqr"].includes(type)) {
|
||||
if (
|
||||
![
|
||||
"string",
|
||||
"number",
|
||||
"bigint",
|
||||
"barcodeqr",
|
||||
"attachment",
|
||||
"attachment_single",
|
||||
].includes(type)
|
||||
) {
|
||||
return "slot-icon"
|
||||
}
|
||||
return ""
|
||||
|
@@ -157,7 +169,7 @@
     {updateOnChange}
   />
 {/if}
-{#if !disabled && type !== "formula"}
+{#if !disabled && type !== "formula" && !disabled && type !== FieldType.ATTACHMENTS && !disabled && type !== FieldType.ATTACHMENT_SINGLE}
   <div
     class={`icon ${getIconClass(value, type)}`}
     on:click={() => {
@@ -37,6 +37,7 @@
   export let customButtonText = null
   export let keyBindings = false
   export let allowJS = false
+  export let actionButtonDisabled = false
   export let compare = (option, value) => option === value
 
   let fields = Object.entries(object || {}).map(([name, value]) => ({
@@ -189,7 +190,14 @@
     {/if}
     {#if !readOnly && !noAddButton}
       <div>
-        <ActionButton icon="Add" secondary thin outline on:click={addEntry}>
+        <ActionButton
+          disabled={actionButtonDisabled}
+          icon="Add"
+          secondary
+          thin
+          outline
+          on:click={addEntry}
+        >
           {#if customButtonText}
             {customButtonText}
           {:else}
@@ -4,8 +4,11 @@ import {
   encodeJSBinding,
 } from "@budibase/string-templates"
 import sdk from "../sdk"
-import { Row } from "@budibase/types"
+import { AutomationAttachment, FieldType, Row } from "@budibase/types"
 import { LoopInput, LoopStepType } from "../definitions/automations"
+import { objectStore, context } from "@budibase/backend-core"
+import * as uuid from "uuid"
+import path from "path"
 
 /**
  * When values are input to the system generally they will be of type string as this is required for template strings.
@@ -96,6 +99,98 @@ export function getError(err: any) {
   return typeof err !== "string" ? err.toString() : err
 }
 
+export async function sendAutomationAttachmentsToStorage(
+  tableId: string,
+  row: Row
+): Promise<Row> {
+  const table = await sdk.tables.getTable(tableId)
+  const attachmentRows: Record<
+    string,
+    AutomationAttachment[] | AutomationAttachment
+  > = {}
+
+  for (const [prop, value] of Object.entries(row)) {
+    const schema = table.schema[prop]
+    if (
+      schema?.type === FieldType.ATTACHMENTS ||
+      schema?.type === FieldType.ATTACHMENT_SINGLE
+    ) {
+      attachmentRows[prop] = value
+    }
+  }
+  for (const [prop, attachments] of Object.entries(attachmentRows)) {
+    if (Array.isArray(attachments)) {
+      if (attachments.length) {
+        row[prop] = await Promise.all(
+          attachments.map(attachment => generateAttachmentRow(attachment))
+        )
+      }
+    } else if (Object.keys(row[prop]).length > 0) {
+      row[prop] = await generateAttachmentRow(attachments)
+    }
+  }
+
+  return row
+}
+
+async function generateAttachmentRow(attachment: AutomationAttachment) {
+  const prodAppId = context.getProdAppId()
+
+  async function uploadToS3(
+    extension: string,
+    content: objectStore.StreamTypes
+  ) {
+    const fileName = `${uuid.v4()}${extension}`
+    const s3Key = `${prodAppId}/attachments/${fileName}`
+
+    await objectStore.streamUpload({
+      bucket: objectStore.ObjectStoreBuckets.APPS,
+      stream: content,
+      filename: s3Key,
+    })
+
+    return s3Key
+  }
+
+  async function getSize(s3Key: string) {
+    return (
+      await objectStore.getObjectMetadata(
+        objectStore.ObjectStoreBuckets.APPS,
+        s3Key
+      )
+    ).ContentLength
+  }
+
+  try {
+    const { filename } = attachment
+    const extension = path.extname(filename)
+    const attachmentResult = await objectStore.processAutomationAttachment(
+      attachment
+    )
+
+    let s3Key = ""
+    if (
+      "path" in attachmentResult &&
+      attachmentResult.path.startsWith(`${prodAppId}/attachments/`)
+    ) {
+      s3Key = attachmentResult.path
+    } else {
+      s3Key = await uploadToS3(extension, attachmentResult.content)
+    }
+
+    const size = await getSize(s3Key)
+
+    return {
+      size,
+      name: filename,
+      extension,
+      key: s3Key,
+    }
+  } catch (error) {
+    console.error("Failed to process attachment:", error)
+    throw error
+  }
+}
 export function substituteLoopStep(hbsString: string, substitute: string) {
   let checkForJS = isJSBinding(hbsString)
   let substitutedHbsString = ""
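
sendAutomationAttachmentsToStorage walks the row, uses the table schema to find attachment columns, and replaces each { url, filename } with the stored object's { size, name, extension, key }. A sketch of how the row steps below call it (the relative import path is taken from the Create Row step change that follows; the row value and comments are illustrative):

import { sendAutomationAttachmentsToStorage } from "../automationUtils"

async function exampleUploadRowAttachments(row: any) {
  // Before: row.file_attachment = [{ url: "...", filename: "invoice.pdf" }]
  row = await sendAutomationAttachmentsToStorage(row.tableId, row)
  // After: each entry looks like { size, name, extension, key: "<appId>/attachments/<uuid><ext>" }
  return row
}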
@@ -1,5 +1,9 @@
 import { save } from "../../api/controllers/row"
-import { cleanUpRow, getError } from "../automationUtils"
+import {
+  cleanUpRow,
+  getError,
+  sendAutomationAttachmentsToStorage,
+} from "../automationUtils"
 import { buildCtx } from "./utils"
 import {
   AutomationActionStepId,
@@ -89,6 +93,10 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
 
   try {
     inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
+    inputs.row = await sendAutomationAttachmentsToStorage(
+      inputs.row.tableId,
+      inputs.row
+    )
     await save(ctx)
     return {
       row: inputs.row,
@@ -108,7 +108,15 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
 
   try {
     if (tableId) {
-      inputs.row = await automationUtils.cleanUpRow(tableId, inputs.row)
+      inputs.row = await automationUtils.cleanUpRow(
+        inputs.row.tableId,
+        inputs.row
+      )
+
+      inputs.row = await automationUtils.sendAutomationAttachmentsToStorage(
+        inputs.row.tableId,
+        inputs.row
+      )
     }
     await rowController.patch(ctx)
     return {
@@ -1,5 +1,18 @@
 import * as setup from "./utilities"
+import { basicTableWithAttachmentField } from "../../tests/utilities/structures"
+import { objectStore } from "@budibase/backend-core"
 
+async function uploadTestFile(filename: string) {
+  let bucket = "testbucket"
+  await objectStore.upload({
+    bucket,
+    filename,
+    body: Buffer.from("test data"),
+  })
+  let presignedUrl = await objectStore.getPresignedUrl(bucket, filename, 60000)
+
+  return presignedUrl
+}
 describe("test the create row action", () => {
   let table: any
   let row: any
@@ -43,4 +56,76 @@ describe("test the create row action", () => {
     const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {})
     expect(res.success).toEqual(false)
   })
+
+  it("should check that an attachment field is sent to storage and parsed", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test1.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = [
+      {
+        url: presignedUrl,
+        filename,
+      },
+    ]
+
+    attachmentRow.file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(true)
+    expect(res.row.file_attachment[0]).toHaveProperty("key")
+    let s3Key = res.row.file_attachment[0].key
+
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
+
+    expect(objectData).toBeDefined()
+    expect(objectData.ContentLength).toBeGreaterThan(0)
+  })
+
+  it("should check that an single attachment field is sent to storage and parsed", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test2.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = {
+      url: presignedUrl,
+      filename,
+    }
+
+    attachmentRow.single_file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(true)
+    expect(res.row.single_file_attachment).toHaveProperty("key")
+    let s3Key = res.row.single_file_attachment.key
+
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
+
+    expect(objectData).toBeDefined()
+    expect(objectData.ContentLength).toBeGreaterThan(0)
+  })
 })
@@ -139,13 +139,13 @@ class RestIntegration implements IntegrationBase {
     const contentType = response.headers.get("content-type") || ""
     const contentDisposition = response.headers.get("content-disposition") || ""
     if (
      contentDisposition.includes("filename") ||
      contentDisposition.includes("attachment") ||
      contentDisposition.includes("form-data")
    ) {
      filename =
        path.basename(parse(contentDisposition).parameters?.filename) || ""
    }
-
+    try {
      if (filename) {
        return handleFileResponse(response, filename, this.startTimeMs)
@@ -78,6 +78,32 @@ export function basicTable(
   )
 }
 
+export function basicTableWithAttachmentField(
+  datasource?: Datasource,
+  ...extra: Partial<Table>[]
+): Table {
+  return tableForDatasource(
+    datasource,
+    {
+      name: "TestTable",
+      schema: {
+        file_attachment: {
+          type: FieldType.ATTACHMENTS,
+          name: "description",
+          constraints: {
+            type: "array",
+          },
+        },
+        single_file_attachment: {
+          type: FieldType.ATTACHMENT_SINGLE,
+          name: "description",
+        },
+      },
+    },
+    ...extra
+  )
+}
+
 export function basicView(tableId: string) {
   return {
     tableId,
@@ -234,7 +234,7 @@ export async function outputProcessing<T extends Row[] | Row>(
       }
     } else if (column.type === FieldType.ATTACHMENT_SINGLE) {
       for (let row of enriched) {
-        if (!row[property]) {
+        if (!row[property] || Object.keys(row[property]).length === 0) {
          continue
        }
 
@@ -1,6 +1,7 @@
 import { Document } from "../document"
 import { EventEmitter } from "events"
 import { User } from "../global"
+import { ReadStream } from "fs"
 
 export enum AutomationIOType {
   OBJECT = "object",
@@ -235,3 +236,18 @@ export interface AutomationMetadata extends Document {
   errorCount?: number
   automationChainCount?: number
 }
+
+export type AutomationAttachment = {
+  url: string
+  filename: string
+}
+
+export type AutomationAttachmentContent = {
+  filename: string
+  content: ReadStream | NodeJS.ReadableStream | ReadableStream<Uint8Array>
+}
+
+export type BucketedContent = AutomationAttachmentContent & {
+  bucket: string
+  path: string
+}
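
Example values for the new types (illustrative data only; the bucket name and file paths are made up, and fs.createReadStream satisfies the ReadStream member of the content union):

import { createReadStream } from "fs"
import { AutomationAttachment, BucketedContent } from "@budibase/types"

const attachment: AutomationAttachment = {
  url: "https://example.com/invoice.pdf",
  filename: "invoice.pdf",
}

const bucketed: BucketedContent = {
  bucket: "example-app-assets",
  path: "app_123456/attachments/invoice.pdf",
  filename: "invoice.pdf",
  content: createReadStream("/tmp/invoice.pdf"),
}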
@@ -6,8 +6,7 @@ import { processString } from "@budibase/string-templates"
 import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types"
 import { configs, cache, objectStore } from "@budibase/backend-core"
 import ical from "ical-generator"
-import fetch from "node-fetch"
-import path from "path"
+import _ from "lodash"
 
 const nodemailer = require("nodemailer")
 
@@ -165,39 +164,12 @@ export async function sendEmail(
     }),
   }
   if (opts?.attachments) {
-    const attachments = await Promise.all(
-      opts.attachments?.map(async attachment => {
-        const isFullyFormedUrl =
-          attachment.url.startsWith("http://") ||
-          attachment.url.startsWith("https://")
-        if (isFullyFormedUrl) {
-          const response = await fetch(attachment.url)
-          if (!response.ok) {
-            throw new Error(`unexpected response ${response.statusText}`)
-          }
-          const fallbackFilename = path.basename(
-            new URL(attachment.url).pathname
-          )
-          return {
-            filename: attachment.filename || fallbackFilename,
-            content: response?.body,
-          }
-        } else {
-          const url = attachment.url
-          const result = objectStore.extractBucketAndPath(url)
-          if (result === null) {
-            throw new Error("Invalid signed URL")
-          }
-          const { bucket, path } = result
-          const readStream = await objectStore.getReadStream(bucket, path)
-          const fallbackFilename = path.split("/").pop() || ""
-          return {
-            filename: attachment.filename || fallbackFilename,
-            content: readStream,
-          }
-        }
-      })
+    let attachments = await Promise.all(
+      opts.attachments?.map(objectStore.processAutomationAttachment)
     )
+    attachments = attachments.map(attachment => {
+      return _.omit(attachment, "path")
+    })
     message = { ...message, attachments }
   }
 
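
The rewritten email path above defers to processAutomationAttachment and then strips the object-store path, which nodemailer has no use for, before handing the attachments to the message. A sketch of that final mapping with invented values:

import _ from "lodash"

const processed = [
  {
    bucket: "example-app-assets",
    path: "app_123456/attachments/a.txt",
    filename: "a.txt",
    content: "file contents here",
  },
]

// Only "path" is dropped; filename and content are what the mail client consumes.
const mailAttachments = processed.map(attachment => _.omit(attachment, "path"))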