further updates to handle files in rest connector
parent cf82ef057f
commit f47bdaa8f5
@@ -32,7 +32,7 @@ type UploadParams = {
   metadata?: {
     [key: string]: string | undefined
   }
-  body?: Buffer
+  body?: ReadableStream | Buffer
 }
 
 const CONTENT_TYPE_MAP: any = {
@@ -43,6 +43,7 @@ const CONTENT_TYPE_MAP: any = {
   json: "application/json",
   gz: "application/gzip",
   svg: "image/svg+xml",
+  form: "multipart/form-data",
 }
 
 const STRING_CONTENT_TYPES = [
@@ -107,11 +108,8 @@ export function ObjectStore(
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export async function makeSureBucketExists(
-  client: any,
-  bucketName: string,
-  addLifecycleConfig: boolean = true
-) {
+export async function makeSureBucketExists(client: any, bucketName: string) {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -127,38 +125,12 @@ export async function makeSureBucketExists
       await promises[bucketName]
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        // bucket doesn't exist, create it
+        // bucket doesn't exist create it
         promises[bucketName] = client
           .createBucket({
             Bucket: bucketName,
           })
           .promise()
-          .then(() => {
-            if (addLifecycleConfig) {
-              return client
-                .putBucketLifecycleConfiguration({
-                  Bucket: bucketName,
-                  LifecycleConfiguration: {
-                    Rules: [
-                      {
-                        ID: "TTL Rule",
-                        Status: "Enabled",
-                        NoncurrentVersionExpiration: {
-                          NoncurrentDays: 1,
-                        },
-                        Filter: {
-                          Prefix: "",
-                        },
-                        AbortIncompleteMultipartUpload: {
-                          DaysAfterInitiation: 1,
-                        },
-                      },
-                    ],
-                  },
-                })
-                .promise()
-            }
-          })
         await promises[bucketName]
         delete promises[bucketName]
       }
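For reference, a minimal sketch of how the simplified helper is now invoked; the caller shown here is illustrative rather than taken from this diff, and assumes the usual pattern of building an S3 client via ObjectStore first:

    // illustrative caller: a lifecycle configuration is no longer requested here
    const client = ObjectStore(bucketName)
    await makeSureBucketExists(client, bucketName)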
@@ -178,7 +150,8 @@ export async function upload({
   type,
   metadata,
   body,
-}: UploadParams) {
+  ttl,
+}: UploadParams & { ttl?: number }) {
   const extension = filename.split(".").pop()
   const fileBytes = path ? fs.readFileSync(path) : body
   const objectStore = ObjectStore(bucketName)
@@ -205,7 +178,20 @@ export async function upload({
     }
     config.Metadata = metadata
   }
-  return objectStore.upload(config).promise()
+
+  /* Playing around here trying to get TTL working */
+  const currentDate = new Date()
+  currentDate.setMinutes(currentDate.getMinutes() + 30)
+
+  return objectStore
+    .upload(config, {
+      params: {
+        Expires: currentDate,
+        Bucket: bucketName,
+        Key: sanitizeKey(filename),
+      },
+    })
+    .promise()
 }
 
 /**
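A minimal sketch of how the new ttl option on upload is exercised later in this commit (the REST connector passes ttl: 1800 and then requests a presigned URL). The wrapper function and its name are hypothetical, and the unit of ttl is presumably seconds given the 30-minute Expires experiment above:

    // hypothetical caller, mirroring the REST integration change further down
    import { context, objectStore } from "@budibase/backend-core"
    import { v4 } from "uuid"

    async function storeTemporaryFile(buffer: Buffer, extension: string) {
      const key = `${context.getProdAppId()}/attachments/${v4()}.${extension}`
      await objectStore.upload({
        bucket: objectStore.ObjectStoreBuckets.APPS,
        filename: key,
        body: buffer,
        ttl: 1800, // new optional field from this commit; presumably seconds (30 minutes)
      })
      // short-lived download link for the stored object (600 seconds)
      return objectStore.getPresignedUrl(objectStore.ObjectStoreBuckets.APPS, key, 600)
    }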
@@ -233,7 +219,6 @@ export async function streamUpload(
       ContentType: "image",
     }
   }
 
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),

@@ -21,8 +21,9 @@ import { performance } from "perf_hooks"
 import FormData from "form-data"
 import { URLSearchParams } from "url"
 import { blacklist, context, objectStore } from "@budibase/backend-core"
-import * as uuid from "uuid"
 
 const multipart = require("parse-multipart-data")
+import path from "path"
+import { v4 } from "uuid"
 
 const BodyTypes = {
   NONE: "none",
   FORM_DATA: "form",
@@ -130,20 +131,53 @@ class RestIntegration implements IntegrationBase {
   }
 
   async parseResponse(response: any, pagination: PaginationConfig | null) {
-    let data, raw, headers, presignedUrl, fileExtension
+    let data, raw, headers, presignedUrl, fileExtension, filename
 
     const contentType = response.headers.get("content-type") || ""
     const contentDisposition = response.headers.get("content-disposition") || ""
-    const filenameMatch = contentDisposition.match(/filename="?(.+)"?/i)
-    if (filenameMatch) {
-      const filename = filenameMatch[1]
-      const lastDotIndex = filename.lastIndexOf(".")
-      if (lastDotIndex !== -1) {
-        fileExtension = filename.slice(lastDotIndex + 1)
-      }
-    }
+    const matches =
+      /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(contentDisposition) || []
+    filename = matches[1]?.replace(/['"]/g, "") || ""
 
     try {
+      if (filename) {
+        const responseBuffer = await response.arrayBuffer()
+        fileExtension = path.extname(filename).slice(1)
+
+        const processedFileName = `${v4()}.${fileExtension}`
+        const key = `${context.getProdAppId()}/attachments/${processedFileName}`
+
+        await objectStore.upload({
+          bucket: objectStore.ObjectStoreBuckets.APPS,
+          filename: key,
+          body: Buffer.from(responseBuffer),
+          ttl: 1800,
+        })
+
+        presignedUrl = await objectStore.getPresignedUrl("test", key, 600)
+        raw = Buffer.from(responseBuffer).toString()
+
+        return {
+          data: {
+            size: responseBuffer.byteLength,
+            name: processedFileName,
+            url: presignedUrl,
+            extension: fileExtension,
+            key: key,
+          },
+          info: {
+            code: response.status,
+            size: formatBytes(responseBuffer.byteLength),
+            time: `${Math.round(performance.now() - this.startTimeMs)}ms`,
+          },
+          extra: {
+            headers: response.headers.raw(),
+          },
+          pagination: {
+            cursor: null,
+          },
+        }
+      } else {
       if (response.status === 204) {
         data = []
         raw = []
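As a standalone illustration of the filename handling added above (the input value here is made up), the new regex pulls the filename out of a Content-Disposition header and the extension is then derived with path.extname:

    // illustrative input, not taken from the diff
    import path from "path"

    const contentDisposition = 'attachment; filename="report.pdf"'
    const matches =
      /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(contentDisposition) || []
    const filename = matches[1]?.replace(/['"]/g, "") || "" // "report.pdf"
    const fileExtension = path.extname(filename).slice(1)   // "pdf"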
@@ -167,31 +201,16 @@ class RestIntegration implements IntegrationBase {
           data = data[keys[0]]
         }
         raw = rawXml
-      } else if (/^(image|video|audio|application|text)\//.test(contentType)) {
-        const data = await response.arrayBuffer()
-        let bucketName = `tmp-bucket-attachments-${context.getTenantId()}`
-        const processedFileName = `${uuid.v4()}.${
-          fileExtension || contentType.split("/")[1]
-        }`
-        const key = `${context.getProdAppId()}/attachments/${processedFileName}`
-        await objectStore.upload({
-          bucket: bucketName,
-          filename: key,
-          type: contentType,
-          body: Buffer.from(data),
-        })
-        presignedUrl = await objectStore.getPresignedUrl(bucketName, key, 600)
-        raw = Buffer.from(data)
       } else {
         data = await response.text()
         raw = data
       }
     }
   } catch (err) {
     console.log(err)
     throw "Failed to parse response body."
   }
 
   const size = formatBytes(
     response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
   )
@@ -206,6 +225,7 @@ class RestIntegration implements IntegrationBase {
     if (pagination?.responseParam) {
       nextCursor = get(data, pagination.responseParam)
     }
 
     return {
       data,
       info: {
@@ -216,7 +236,6 @@ class RestIntegration implements IntegrationBase {
       extra: {
         raw,
         headers,
-        presignedUrl,
       },
       pagination: {
         cursor: nextCursor,
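To summarise the new file-handling path, the object returned by parseResponse for a file response (the early return added in the first rest.ts hunk) has roughly the following shape; the type name is introduced here purely for illustration:

    // illustrative type, named for clarity; the diff returns a plain object
    type RestFileResponse = {
      data: {
        size: number      // responseBuffer.byteLength
        name: string      // `${uuid}.${extension}`
        url: string       // presigned URL, currently valid for 600 seconds
        extension: string
        key: string       // `${prodAppId}/attachments/${name}`
      }
      info: { code: number; size: string; time: string }
      extra: { headers: Record<string, string[]> }
      pagination: { cursor: null }
    }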