diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts index 35316b1329..9bf9a958ec 100644 --- a/packages/backend-core/src/objectStore/objectStore.ts +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -32,7 +32,7 @@ type UploadParams = { metadata?: { [key: string]: string | undefined } - body?: Buffer + body?: ReadableStream | Buffer } const CONTENT_TYPE_MAP: any = { @@ -43,6 +43,7 @@ const CONTENT_TYPE_MAP: any = { json: "application/json", gz: "application/gzip", svg: "image/svg+xml", + form: "multipart/form-data", } const STRING_CONTENT_TYPES = [ @@ -107,11 +108,8 @@ export function ObjectStore( * Given an object store and a bucket name this will make sure the bucket exists, * if it does not exist then it will create it. */ -export async function makeSureBucketExists( - client: any, - bucketName: string, - addLifecycleConfig: boolean = true -) { + +export async function makeSureBucketExists(client: any, bucketName: string) { bucketName = sanitizeBucket(bucketName) try { await client @@ -127,38 +125,12 @@ export async function makeSureBucketExists( await promises[bucketName] } else if (doesntExist || noAccess) { if (doesntExist) { - // bucket doesn't exist, create it + // bucket doesn't exist create it promises[bucketName] = client .createBucket({ Bucket: bucketName, }) .promise() - .then(() => { - if (addLifecycleConfig) { - return client - .putBucketLifecycleConfiguration({ - Bucket: bucketName, - LifecycleConfiguration: { - Rules: [ - { - ID: "TTL Rule", - Status: "Enabled", - NoncurrentVersionExpiration: { - NoncurrentDays: 1, - }, - Filter: { - Prefix: "", - }, - AbortIncompleteMultipartUpload: { - DaysAfterInitiation: 1, - }, - }, - ], - }, - }) - .promise() - } - }) await promises[bucketName] delete promises[bucketName] } @@ -178,7 +150,8 @@ export async function upload({ type, metadata, body, -}: UploadParams) { + ttl, +}: UploadParams & { ttl?: number }) { const extension = 
filename.split(".").pop() const fileBytes = path ? fs.readFileSync(path) : body const objectStore = ObjectStore(bucketName) @@ -205,7 +178,20 @@ export async function upload({ } config.Metadata = metadata } - return objectStore.upload(config).promise() + + // Honour the ttl param (seconds, default 30 minutes). NOTE: S3 "Expires" only sets the HTTP Expires header; true object TTL needs a bucket lifecycle rule + const currentDate = new Date() + currentDate.setSeconds(currentDate.getSeconds() + (ttl ?? 1800)) + + config.Expires = currentDate + + return objectStore.upload(config).promise() } /** @@ -233,7 +219,6 @@ export async function streamUpload( ContentType: "image", } } - const params = { Bucket: sanitizeBucket(bucketName), Key: sanitizeKey(filename), diff --git a/packages/server/src/integrations/rest.ts b/packages/server/src/integrations/rest.ts index 67ca917fd2..f27dfa1e02 100644 --- a/packages/server/src/integrations/rest.ts +++ b/packages/server/src/integrations/rest.ts @@ -21,8 +21,9 @@ import { performance } from "perf_hooks" import FormData from "form-data" import { URLSearchParams } from "url" import { blacklist, context, objectStore } from "@budibase/backend-core" -import * as uuid from "uuid" - +const multipart = require("parse-multipart-data") +import path from "path" +import { v4 } from "uuid" const BodyTypes = { NONE: "none", FORM_DATA: "form", @@ -130,68 +131,86 @@ class RestIntegration implements IntegrationBase { } async parseResponse(response: any, pagination: PaginationConfig | null) { - let data, raw, headers, presignedUrl, fileExtension + let data, raw, headers, presignedUrl, fileExtension, filename const contentType = response.headers.get("content-type") || "" const contentDisposition = response.headers.get("content-disposition") || "" + const matches = + /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(contentDisposition) || [] + filename = matches[1]?.replace(/['"]/g, "") || "" - const filenameMatch = contentDisposition.match(/filename="?(.+)"?/i) - if (filenameMatch) { - const 
filename = filenameMatch[1] - const lastDotIndex = filename.lastIndexOf(".") - if (lastDotIndex !== -1) { - fileExtension = filename.slice(lastDotIndex + 1) - } - } try { - if (response.status === 204) { - data = [] - raw = [] - } else if (contentType.includes("application/json")) { - data = await response.json() - raw = JSON.stringify(data) - } else if ( - contentType.includes("text/xml") || - contentType.includes("application/xml") - ) { - const rawXml = await response.text() - data = - (await xmlParser(rawXml, { - explicitArray: false, - trim: true, - explicitRoot: false, - })) || {} - // there is only one structure, its an array, return the array so it appears as rows - const keys = Object.keys(data) - if (keys.length === 1 && Array.isArray(data[keys[0]])) { - data = data[keys[0]] - } - raw = rawXml - } else if (/^(image|video|audio|application|text)\//.test(contentType)) { - const data = await response.arrayBuffer() - let bucketName = `tmp-bucket-attachments-${context.getTenantId()}` + if (filename) { + const responseBuffer = await response.arrayBuffer() + fileExtension = path.extname(filename).slice(1) - const processedFileName = `${uuid.v4()}.${ - fileExtension || contentType.split("/")[1] - }` + const processedFileName = `${v4()}.${fileExtension}` const key = `${context.getProdAppId()}/attachments/${processedFileName}` await objectStore.upload({ - bucket: bucketName, + bucket: objectStore.ObjectStoreBuckets.APPS, filename: key, - type: contentType, - body: Buffer.from(data), + body: Buffer.from(responseBuffer), + ttl: 1800, }) - presignedUrl = await objectStore.getPresignedUrl(bucketName, key, 600) - raw = Buffer.from(data) + presignedUrl = await objectStore.getPresignedUrl(objectStore.ObjectStoreBuckets.APPS, key, 600) + raw = Buffer.from(responseBuffer).toString() + + return { + data: { + size: responseBuffer.byteLength, + name: processedFileName, + url: presignedUrl, + extension: fileExtension, + key: key, + }, + info: { + code: response.status, + size: 
formatBytes(responseBuffer.byteLength), + time: `${Math.round(performance.now() - this.startTimeMs)}ms`, + }, + extra: { + headers: response.headers.raw(), + }, + pagination: { + cursor: null, + }, + } } else { - data = await response.text() - raw = data + if (response.status === 204) { + data = [] + raw = [] + } else if (contentType.includes("application/json")) { + data = await response.json() + raw = JSON.stringify(data) + } else if ( + contentType.includes("text/xml") || + contentType.includes("application/xml") + ) { + const rawXml = await response.text() + data = + (await xmlParser(rawXml, { + explicitArray: false, + trim: true, + explicitRoot: false, + })) || {} + // there is only one structure, its an array, return the array so it appears as rows + const keys = Object.keys(data) + if (keys.length === 1 && Array.isArray(data[keys[0]])) { + data = data[keys[0]] + } + raw = rawXml + } else { + data = await response.text() + raw = data + } } } catch (err) { + console.error("Failed to parse response body", err) throw "Failed to parse response body." } + const size = formatBytes( response.headers.get("content-length") || Buffer.byteLength(raw, "utf8") ) @@ -206,6 +225,7 @@ class RestIntegration implements IntegrationBase { if (pagination?.responseParam) { nextCursor = get(data, pagination.responseParam) } + return { data, info: { @@ -216,7 +236,6 @@ extra: { raw, headers, - presignedUrl, }, pagination: { cursor: nextCursor,