further updates to handle files in rest connector

parent cf82ef057f
commit f47bdaa8f5
@@ -32,7 +32,7 @@ type UploadParams = {
   metadata?: {
     [key: string]: string | undefined
   }
-  body?: Buffer
+  body?: ReadableStream | Buffer
 }
 
 const CONTENT_TYPE_MAP: any = {
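Review note: widening `body` to `ReadableStream | Buffer` means any downstream code that assumes a Buffer needs a normalisation step. A minimal sketch, not part of this commit, assuming Node >= 17 for `Readable.fromWeb`:

import { Readable } from "stream"

// Collapse either body shape into a Buffer before handing it to the S3 client.
async function toBuffer(body: ReadableStream | Buffer): Promise<Buffer> {
  if (Buffer.isBuffer(body)) {
    return body
  }
  const chunks: Buffer[] = []
  // Readable.fromWeb bridges a web ReadableStream into a Node stream.
  for await (const chunk of Readable.fromWeb(body as any)) {
    chunks.push(Buffer.from(chunk))
  }
  return Buffer.concat(chunks)
}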
@@ -43,6 +43,7 @@ const CONTENT_TYPE_MAP: any = {
   json: "application/json",
   gz: "application/gzip",
   svg: "image/svg+xml",
+  form: "multipart/form-data",
 }
 
 const STRING_CONTENT_TYPES = [
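Review note: a quick sketch of how a map like CONTENT_TYPE_MAP is typically consumed — extension in, MIME type out, with a binary-safe fallback. The map literal below is an abbreviated stand-in, not the full map from this file:

const CONTENT_TYPE_MAP: Record<string, string> = {
  json: "application/json",
  gz: "application/gzip",
  svg: "image/svg+xml",
  form: "multipart/form-data",
}

// Look up the MIME type for a filename, defaulting to a safe binary type.
function contentTypeFor(filename: string): string {
  const extension = filename.split(".").pop()?.toLowerCase() ?? ""
  return CONTENT_TYPE_MAP[extension] || "application/octet-stream"
}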
@@ -107,11 +108,8 @@ export function ObjectStore(
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export async function makeSureBucketExists(
-  client: any,
-  bucketName: string,
-  addLifecycleConfig: boolean = true
-) {
+export async function makeSureBucketExists(client: any, bucketName: string) {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -127,38 +125,12 @@ export async function makeSureBucketExists(
       await promises[bucketName]
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        // bucket doesn't exist, create it
+        // bucket doesn't exist create it
         promises[bucketName] = client
           .createBucket({
             Bucket: bucketName,
           })
          .promise()
-          .then(() => {
-            if (addLifecycleConfig) {
-              return client
-                .putBucketLifecycleConfiguration({
-                  Bucket: bucketName,
-                  LifecycleConfiguration: {
-                    Rules: [
-                      {
-                        ID: "TTL Rule",
-                        Status: "Enabled",
-                        NoncurrentVersionExpiration: {
-                          NoncurrentDays: 1,
-                        },
-                        Filter: {
-                          Prefix: "",
-                        },
-                        AbortIncompleteMultipartUpload: {
-                          DaysAfterInitiation: 1,
-                        },
-                      },
-                    ],
-                  },
-                })
-                .promise()
-            }
-          })
         await promises[bucketName]
         delete promises[bucketName]
       }
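Review note: the removed block attached a bucket-wide lifecycle configuration at creation time; with Filter: { Prefix: "" } the rule matched every key in the bucket, so it could not express a per-object TTL. If bucket-level expiry is wanted again later, a tag-scoped rule is one alternative — a hedged sketch assuming aws-sdk v2, as used above:

import AWS from "aws-sdk"

const s3 = new AWS.S3()

// Expire only objects explicitly tagged ttl=1d, instead of the whole bucket.
async function addScopedTtlRule(bucket: string) {
  await s3
    .putBucketLifecycleConfiguration({
      Bucket: bucket,
      LifecycleConfiguration: {
        Rules: [
          {
            ID: "TTL Rule",
            Status: "Enabled",
            Filter: { Tag: { Key: "ttl", Value: "1d" } },
            Expiration: { Days: 1 },
          },
        ],
      },
    })
    .promise()
}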
@@ -178,7 +150,8 @@ export async function upload({
   type,
   metadata,
   body,
-}: UploadParams) {
+  ttl,
+}: UploadParams & { ttl?: number }) {
   const extension = filename.split(".").pop()
   const fileBytes = path ? fs.readFileSync(path) : body
   const objectStore = ObjectStore(bucketName)
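Review note: the inline intersection `UploadParams & { ttl?: number }` adds the optional field at this one boundary without editing the shared type. A self-contained sketch of the pattern (names hypothetical):

type Params = {
  bucketName: string
  filename: string
  body?: Buffer
}

// ttl exists only at this call boundary; `rest` stays a plain Params.
async function doUpload({ ttl, ...rest }: Params & { ttl?: number }) {
  console.log(rest.filename, ttl ?? "no ttl")
}

doUpload({ bucketName: "apps", filename: "a.txt", ttl: 1800 })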
@@ -205,7 +178,20 @@ export async function upload({
     }
     config.Metadata = metadata
   }
-  return objectStore.upload(config).promise()
+
+  /* Playing around here trying to get TTL working */
+  const currentDate = new Date()
+  currentDate.setMinutes(currentDate.getMinutes() + 30)
+
+  return objectStore
+    .upload(config, {
+      params: {
+        Expires: currentDate,
+        Bucket: bucketName,
+        Key: sanitizeKey(filename),
+      },
+    })
+    .promise()
 }
 
 /**
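Review note on the "playing around" block: in the S3 API, `Expires` is stored with the object as an HTTP `Expires` response header — a caching hint — and does not schedule deletion; only lifecycle rules or explicit deletes remove objects. Also, the new `ttl` parameter is accepted by upload but the expiry here is hardcoded to 30 minutes. If the header is what's wanted, it can go straight into the upload params — a sketch, aws-sdk v2 assumed:

import AWS from "aws-sdk"

const s3 = new AWS.S3()

async function uploadWithExpiresHeader(bucket: string, key: string, body: Buffer) {
  const expires = new Date()
  expires.setMinutes(expires.getMinutes() + 30)

  await s3
    .upload({
      Bucket: bucket,
      Key: key,
      Body: body,
      Expires: expires, // HTTP Expires header only; S3 will not delete the object
    })
    .promise()
}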
@@ -233,7 +219,6 @@ export async function streamUpload(
       ContentType: "image",
     }
   }
 
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),
@@ -21,8 +21,9 @@ import { performance } from "perf_hooks"
 import FormData from "form-data"
 import { URLSearchParams } from "url"
 import { blacklist, context, objectStore } from "@budibase/backend-core"
-import * as uuid from "uuid"
+const multipart = require("parse-multipart-data")
+import path from "path"
+import { v4 } from "uuid"
 const BodyTypes = {
   NONE: "none",
   FORM_DATA: "form",
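Review note: `parse-multipart-data` is only imported here, nothing in this diff uses it yet. Its usual usage (hedged, from the package's documented API) is boundary extraction plus part splitting:

const multipart = require("parse-multipart-data")

// Split a raw multipart body into parts: { name?, filename?, type?, data }.
function parseParts(contentTypeHeader: string, body: Buffer) {
  const boundary = multipart.getBoundary(contentTypeHeader)
  return multipart.parse(body, boundary)
}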
@@ -130,20 +131,53 @@ class RestIntegration implements IntegrationBase {
   }
 
   async parseResponse(response: any, pagination: PaginationConfig | null) {
-    let data, raw, headers, presignedUrl, fileExtension
+    let data, raw, headers, presignedUrl, fileExtension, filename
 
     const contentType = response.headers.get("content-type") || ""
     const contentDisposition = response.headers.get("content-disposition") || ""
-    const filenameMatch = contentDisposition.match(/filename="?(.+)"?/i)
-    if (filenameMatch) {
-      const filename = filenameMatch[1]
-      const lastDotIndex = filename.lastIndexOf(".")
-      if (lastDotIndex !== -1) {
-        fileExtension = filename.slice(lastDotIndex + 1)
-      }
-    }
-
+    const matches =
+      /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(contentDisposition) || []
+    filename = matches[1]?.replace(/['"]/g, "") || ""
+
     try {
+      if (filename) {
+        const responseBuffer = await response.arrayBuffer()
+        fileExtension = path.extname(filename).slice(1)
+
+        const processedFileName = `${v4()}.${fileExtension}`
+        const key = `${context.getProdAppId()}/attachments/${processedFileName}`
+
+        await objectStore.upload({
+          bucket: objectStore.ObjectStoreBuckets.APPS,
+          filename: key,
+          body: Buffer.from(responseBuffer),
+          ttl: 1800,
+        })
+
+        presignedUrl = await objectStore.getPresignedUrl("test", key, 600)
+        raw = Buffer.from(responseBuffer).toString()
+
+        return {
+          data: {
+            size: responseBuffer.byteLength,
+            name: processedFileName,
+            url: presignedUrl,
+            extension: fileExtension,
+            key: key,
+          },
+          info: {
+            code: response.status,
+            size: formatBytes(responseBuffer.byteLength),
+            time: `${Math.round(performance.now() - this.startTimeMs)}ms`,
+          },
+          extra: {
+            headers: response.headers.raw(),
+          },
+          pagination: {
+            cursor: null,
+          },
+        }
+      } else {
       if (response.status === 204) {
         data = []
         raw = []
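Review note: a worked example of the new Content-Disposition parsing (same regex and quote-stripping as above) against common header shapes; the RFC 5987 filename*= form is matched but not decoded. Separately, the upload above targets objectStore.ObjectStoreBuckets.APPS while the presigned URL is generated for a bucket literally named "test" — the two need to agree before this ships.

const pattern = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/

for (const header of [
  'attachment; filename="report.pdf"', // -> report.pdf
  "attachment; filename=data.csv", // -> data.csv
  "inline; filename*=UTF-8''na%C3%AFve.txt", // -> UTF-8na%C3%AFve.txt (quotes stripped, not decoded)
]) {
  const matches = pattern.exec(header) || []
  console.log(matches[1]?.replace(/['"]/g, "") || "")
}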
@@ -167,31 +201,16 @@ class RestIntegration implements IntegrationBase {
           data = data[keys[0]]
         }
         raw = rawXml
-      } else if (/^(image|video|audio|application|text)\//.test(contentType)) {
-        const data = await response.arrayBuffer()
-        let bucketName = `tmp-bucket-attachments-${context.getTenantId()}`
-        const processedFileName = `${uuid.v4()}.${
-          fileExtension || contentType.split("/")[1]
-        }`
-        const key = `${context.getProdAppId()}/attachments/${processedFileName}`
-
-        await objectStore.upload({
-          bucket: bucketName,
-          filename: key,
-          type: contentType,
-          body: Buffer.from(data),
-        })
-
-        presignedUrl = await objectStore.getPresignedUrl(bucketName, key, 600)
-        raw = Buffer.from(data)
       } else {
         data = await response.text()
         raw = data
       }
+      }
     } catch (err) {
+      console.log(err)
       throw "Failed to parse response body."
     }
 
     const size = formatBytes(
       response.headers.get("content-length") || Buffer.byteLength(raw, "utf8")
     )
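Review note: the added console.log(err) helps while debugging, but the string throw it precedes discards the stack. A sketch of an alternative that keeps the original error via Error#cause (Node 16.9+):

async function parseBody(response: { json(): Promise<unknown> }) {
  try {
    return await response.json()
  } catch (err) {
    console.log(err)
    // Error#cause keeps the underlying stack while preserving the message.
    throw new Error("Failed to parse response body.", { cause: err })
  }
}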
@@ -206,6 +225,7 @@ class RestIntegration implements IntegrationBase {
     if (pagination?.responseParam) {
       nextCursor = get(data, pagination.responseParam)
     }
 
     return {
       data,
       info: {
@@ -216,7 +236,6 @@ class RestIntegration implements IntegrationBase {
       extra: {
         raw,
         headers,
-        presignedUrl,
       },
       pagination: {
         cursor: nextCursor,