First pass at migrating to AWS SDK v3.
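
The pattern repeated across every file below: v3 clients return promises natively, so the v2 `.promise()` suffix disappears, presigned URLs move to @aws-sdk/s3-request-presigner, and managed uploads move to @aws-sdk/lib-storage. A minimal before/after sketch of the call-pattern change (bucket, key, and region are illustrative):

// v2: promisify each call
import AWS from "aws-sdk"
const s3v2 = new AWS.S3({ region: "eu-west-1" })
await s3v2.headObject({ Bucket: "my-bucket", Key: "file.txt" }).promise()

// v3: operations return promises directly; presigning is a separate helper
import { S3, GetObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
const s3 = new S3({ region: "eu-west-1" })
await s3.headObject({ Bucket: "my-bucket", Key: "file.txt" })
const url = await getSignedUrl(
s3,
new GetObjectCommand({ Bucket: "my-bucket", Key: "file.txt" }),
{ expiresIn: 3600 }
)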

This commit is contained in:
mike12345567 2024-12-12 18:14:53 +00:00
parent cf98669c2a
commit 525e249d41
19 changed files with 1352 additions and 269 deletions

View File

@ -30,6 +30,9 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-storage": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@budibase/nano": "10.1.5",
"@budibase/pouchdb-replication-stream": "1.2.11",
"@budibase/shared-core": "*",
@ -76,6 +79,7 @@
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/lodash": "4.14.200",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.2",
@ -83,7 +87,6 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"@types/koa": "2.13.4",
"chance": "1.1.8",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",

View File

@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
* due to issues with the domain we were unable to continue doing this - keeping
* in case we are able to switch back to the CDN path again in the future.
*/
export function clientLibraryCDNUrl(appId: string, version: string) {
export async function clientLibraryCDNUrl(appId: string, version: string) {
let file = clientLibraryPath(appId)
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
@ -24,7 +24,7 @@ export function clientLibraryCDNUrl(appId: string, version: string) {
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
}
@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
return `/api/assets/client?${qs.encode(qsParams)}`
}
export function getAppFileUrl(s3Key: string) {
export async function getAppFileUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
}
}

View File

@ -5,7 +5,11 @@ import * as cloudfront from "../cloudfront"
// URLs
export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
export const getGlobalFileUrl = async (
type: string,
name: string,
etag?: string
) => {
let file = getGlobalFileS3Key(type, name)
if (env.CLOUDFRONT_CDN) {
if (etag) {
@ -13,7 +17,7 @@ export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
}
return cloudfront.getPresignedUrl(file)
} else {
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
}
}

View File

@ -6,23 +6,25 @@ import { Plugin } from "@budibase/types"
// URLS
export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const jsUrl = getPluginJSUrl(plugin)
const iconUrl = getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
return await Promise.all(
plugins.map(async plugin => {
const jsUrl = await getPluginJSUrl(plugin)
const iconUrl = await getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
)
}
function getPluginJSUrl(plugin: Plugin) {
async function getPluginJSUrl(plugin: Plugin) {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}
function getPluginIconUrl(plugin: Plugin): string | undefined {
async function getPluginIconUrl(plugin: Plugin) {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
@ -30,11 +32,11 @@ function getPluginIconUrl(plugin: Plugin): string | undefined {
return getPluginUrl(s3Key)
}
function getPluginUrl(s3Key: string) {
async function getPluginUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
}
}

View File

@ -109,9 +109,9 @@ describe("app", () => {
)
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getAppFileUrl()
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
@ -146,8 +146,8 @@ describe("app", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -24,9 +24,9 @@ describe("global", () => {
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
@ -59,8 +59,8 @@ describe("global", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -1,6 +1,15 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import {
HeadObjectCommandOutput,
PutObjectCommandInput,
S3,
S3ClientConfig,
GetObjectCommand,
_Object as S3Object,
} from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import stream, { Readable } from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
@ -13,7 +22,6 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
import fsp from "fs/promises"
import { HeadObjectOutput } from "aws-sdk/clients/s3"
import { ReadableStream } from "stream/web"
const streamPipeline = promisify(stream.pipeline)
@ -84,26 +92,24 @@ export function sanitizeBucket(input: string) {
* @constructor
*/
export function ObjectStore(
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) {
const config: AWS.S3.ClientConfiguration = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
const config: S3ClientConfig = {
forcePathStyle: true,
credentials: {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
},
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
}
}
// for AWS Credentials using temporary session token
if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
config.sessionToken = env.AWS_SESSION_TOKEN
config.credentials = {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
sessionToken: env.AWS_SESSION_TOKEN,
}
}
// custom S3 is in use i.e. minio
@ -119,7 +125,7 @@ export function ObjectStore(
}
}
return new AWS.S3(config)
return new S3(config)
}
/**
@ -132,7 +138,9 @@ export async function createBucketIfNotExists(
): Promise<{ created: boolean; exists: boolean }> {
bucketName = sanitizeBucket(bucketName)
try {
await client
.headBucket({
Bucket: bucketName,
})
@ -147,11 +155,13 @@ export async function createBucketIfNotExists(
return { created: false, exists: true }
} else if (doesntExist || noAccess) {
if (doesntExist) {
promises[bucketName] = client
.createBucket({
Bucket: bucketName,
})
.promise()
promises[bucketName] = client.createBucket({
Bucket: bucketName,
})
await promises[bucketName]
delete promises[bucketName]
return { created: true, exists: false }
@ -180,12 +190,12 @@ export async function upload({
const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
let contentType = type
@ -194,11 +204,12 @@ export async function upload({
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}
const config: any = {
const config: PutObjectCommandInput = {
// windows file paths need to be converted to forward slashes for s3
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: fileBytes,
ContentType: contentType,
Body: fileBytes as stream.Readable | Buffer,
ContentType: contentType!,
}
if (metadata && typeof metadata === "object") {
// remove any nullish keys from the metadata object, as these may be considered invalid
@ -207,10 +218,15 @@ export async function upload({
delete metadata[key]
}
}
config.Metadata = metadata
config.Metadata = metadata as Record<string, string>
}
return objectStore.upload(config).promise()
const upload = new Upload({
client: objectStore,
params: config,
})
return upload.done()
}
/**
@ -229,12 +245,12 @@ export async function streamUpload({
throw new Error("Stream to upload is invalid/undefined")
}
const extension = filename.split(".").pop()
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
// Set content type for certain known extensions
@ -267,13 +283,15 @@ export async function streamUpload({
...extra,
}
const details = await objectStore.upload(params).promise()
const headDetails = await objectStore
.headObject({
Bucket: bucket,
Key: objKey,
})
.promise()
const upload = new Upload({
client: objectStore,
params,
})
const details = await upload.done()
const headDetails = await objectStore.headObject({
Bucket: bucket,
Key: objKey,
})
return {
...details,
ContentLength: headDetails.ContentLength,
@ -284,35 +302,42 @@ export async function streamUpload({
* retrieves the contents of a file from the object store, if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export async function retrieve(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
export async function retrieve(
bucketName: string,
filepath: string
): Promise<string | stream.Readable> {
const objectStore = ObjectStore()
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filepath),
}
const response: any = await objectStore.getObject(params).promise()
const response = await objectStore.getObject(params)
if (!response.Body) {
throw new Error("Unable to retrieve object")
}
// currently these are all strings
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
return response.Body.toString("utf8")
// Body is a stream in v3; transformToString drains it into a string
return await response.Body.transformToString()
} else {
return response.Body
return response.Body as stream.Readable
}
}
export async function listAllObjects(bucketName: string, path: string) {
const objectStore = ObjectStore(bucketName)
export async function listAllObjects(
bucketName: string,
path: string
): Promise<S3Object[]> {
const objectStore = ObjectStore()
const list = (params: ListParams = {}) => {
return objectStore
.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
.promise()
return objectStore.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
}
let isTruncated = false,
token,
objects: AWS.S3.Types.Object[] = []
objects: S3Object[] = []
do {
let params: ListParams = {}
if (token) {
@ -331,18 +356,19 @@ export async function listAllObjects(bucketName: string, path: string) {
/**
* Generate a presigned url with a default TTL of 1 hour
*/
export function getPresignedUrl(
export async function getPresignedUrl(
bucketName: string,
key: string,
durationSeconds = 3600
) {
const objectStore = ObjectStore(bucketName, { presigning: true })
const objectStore = ObjectStore({ presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(key),
Expires: durationSeconds,
}
const url = objectStore.getSignedUrl("getObject", params)
const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
expiresIn: durationSeconds,
})
if (!env.MINIO_ENABLED) {
// return the full URL to the client
@ -366,7 +392,11 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
const outputPath = join(budibaseTempDir(), v4())
fs.writeFileSync(outputPath, data)
if (data instanceof stream.Readable) {
// wait until the file is fully written before returning the path
await streamPipeline(data, fs.createWriteStream(outputPath))
} else {
fs.writeFileSync(outputPath, data)
}
return outputPath
}
@ -408,17 +438,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
* Delete a single file.
*/
export async function deleteFile(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Key: sanitizeKey(filepath),
}
return objectStore.deleteObject(params).promise()
return objectStore.deleteObject(params)
}
export async function deleteFiles(bucketName: string, filepaths: string[]) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
@ -426,7 +456,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
},
}
return objectStore.deleteObjects(params).promise()
return objectStore.deleteObjects(params)
}
/**
@ -438,13 +468,16 @@ export async function deleteFolder(
): Promise<any> {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const listParams = {
Bucket: bucketName,
Prefix: folder,
}
const existingObjectsResponse = await client.listObjects(listParams).promise()
const existingObjectsResponse = await client.listObjects(listParams)
if (existingObjectsResponse.Contents?.length === 0) {
return
}
@ -459,7 +492,7 @@ export async function deleteFolder(
deleteParams.Delete.Objects.push({ Key: content.Key })
})
const deleteResponse = await client.deleteObjects(deleteParams).promise()
const deleteResponse = await client.deleteObjects(deleteParams)
// can only empty 1000 items at once
if (deleteResponse.Deleted?.length === 1000) {
return deleteFolder(bucketName, folder)
@ -534,29 +567,33 @@ export async function getReadStream(
): Promise<Readable> {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const params = {
Bucket: bucketName,
Key: path,
}
return client.getObject(params).createReadStream()
const response = await client.getObject(params)
if (!response.Body || !(response.Body instanceof stream.Readable)) {
throw new Error("Unable to retrieve stream - invalid response")
}
return response.Body
}
export async function getObjectMetadata(
bucket: string,
path: string
): Promise<HeadObjectOutput> {
): Promise<HeadObjectCommandOutput> {
bucket = sanitizeBucket(bucket)
path = sanitizeKey(path)
const client = ObjectStore(bucket)
const client = ObjectStore()
const params = {
Bucket: bucket,
Key: path,
}
try {
return await client.headObject(params).promise()
return await client.headObject(params)
} catch (err: any) {
throw new Error("Unable to retrieve metadata from object")
}
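
Several hunks above deal with GetObject's Body now being a stream rather than a Buffer. In the Node runtime the v3 SDK mixes transform helpers onto the Body; a sketch (bucket and key are illustrative, client config omitted):

import { S3 } from "@aws-sdk/client-s3"

async function readObjectAsText(bucket: string, key: string) {
const s3 = new S3({})
const res = await s3.getObject({ Bucket: bucket, Key: key })
// Body is an SdkStream in v3; transformToString drains it into a string
return res.Body!.transformToString("utf-8")
}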

View File

@ -2,7 +2,10 @@ import path, { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"
import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
import {
LifecycleRule,
PutBucketLifecycleConfigurationCommandInput,
} from "@aws-sdk/client-s3"
import * as objectStore from "./objectStore"
import {
AutomationAttachment,
@ -43,8 +46,8 @@ export function budibaseTempDir() {
export const bucketTTLConfig = (
bucketName: string,
days: number
): PutBucketLifecycleConfigurationRequest => {
const lifecycleRule = {
): PutBucketLifecycleConfigurationCommandInput => {
const lifecycleRule: LifecycleRule = {
ID: `${bucketName}-ExpireAfter${days}days`,
Prefix: "",
Status: "Enabled",

View File

@ -3,6 +3,7 @@ import fs from "fs"
import { join } from "path"
import { TEMP_DIR, MINIO_DIR } from "./utils"
import { progressBar } from "../utils"
import * as stream from "node:stream"
const {
ObjectStoreBuckets,
@ -20,15 +21,21 @@ export async function exportObjects() {
let fullList: any[] = []
let errorCount = 0
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
const client = ObjectStore()
try {
await client.headBucket().promise()
await client.headBucket({
Bucket: bucket,
})
} catch (err) {
errorCount++
continue
}
const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
const list = await client.listObjectsV2({
Bucket: bucket,
})
fullList = fullList.concat(
list.Contents?.map(el => ({ ...el, bucket })) || []
)
}
if (errorCount === bucketList.length) {
throw new Error("Unable to access MinIO/S3 - check environment config.")
@ -43,7 +50,13 @@ export async function exportObjects() {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
if (data instanceof stream.Readable) {
// wait for the object to be fully written before advancing the progress bar
await stream.promises.pipeline(
data,
fs.createWriteStream(join(path, object.bucket, ...possiblePath))
)
} else {
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
}
bar.update(++count)
}
bar.stop()
@ -60,7 +73,7 @@ export async function importObjects() {
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
const client = ObjectStore()
await createBucketIfNotExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length

View File

@ -50,6 +50,10 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@aws-sdk/client-dynamodb": "3.709.0",
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-dynamodb": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "*",
"@budibase/client": "*",
@ -70,7 +74,6 @@
"airtable": "0.12.2",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bson": "^6.9.0",

View File

@ -225,7 +225,7 @@ export async function fetchAppPackage(
const license = await licensing.cache.getCachedLicense()
// Enrich plugin URLs
application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins = await objectStore.enrichPluginURLs(
application.usedPlugins
)

View File

@ -18,7 +18,8 @@ import {
objectStore,
utils,
} from "@budibase/backend-core"
import AWS from "aws-sdk"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
@ -128,9 +129,9 @@ export const uploadFile = async function (
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
url: await objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key,
key: response.Key!,
}
})
)
@ -210,11 +211,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
usedPlugins: plugins,
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
logo:
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
? await objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
nonce: ctx.state.nonce,
@ -243,7 +244,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
metaDescription: branding?.metaDescription || "",
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
})
@ -334,16 +335,17 @@ export const getSignedUploadURL = async function (
ctx.throw(400, "bucket and key values are required")
}
try {
const s3 = new AWS.S3({
const s3 = new S3({
region: awsRegion,
endpoint: datasource?.config?.endpoint || undefined,
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
apiVersion: "2006-03-01",
signatureVersion: "v4",
credentials: {
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
},
})
const params = { Bucket: bucket, Key: key }
signedUrl = s3.getSignedUrl("putObject", params)
signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
if (datasource?.config?.endpoint) {
publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
} else {
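
A consumer of the returned signedUrl PUTs the file body directly to it; a hypothetical client-side sketch (signedUrl and fileBody are supplied by the caller):

async function uploadViaSignedUrl(signedUrl: string, fileBody: Blob) {
const res = await fetch(signedUrl, { method: "PUT", body: fileBody })
if (!res.ok) throw new Error(`Upload failed: ${res.status}`)
}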

View File

@ -154,11 +154,12 @@ describe("test the create row action", () => {
expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
let s3Key = result.steps[1].outputs.row.file_attachment[0].key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)
@ -229,11 +230,12 @@ describe("test the create row action", () => {
)
let s3Key = result.steps[1].outputs.row.single_file_attachment.key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)

View File

@ -7,9 +7,15 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import {
DynamoDBDocument,
PutCommandInput,
GetCommandInput,
UpdateCommandInput,
DeleteCommandInput,
} from "@aws-sdk/lib-dynamodb"
import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { AWS_REGION } from "../constants"
import { DocumentClient } from "aws-sdk/clients/dynamodb"
interface DynamoDBConfig {
region: string
@ -151,7 +157,7 @@ class DynamoDBIntegration implements IntegrationBase {
region: config.region || AWS_REGION,
endpoint: config.endpoint || undefined,
}
this.client = new AWS.DynamoDB.DocumentClient(this.config)
this.client = DynamoDBDocument.from(new DynamoDB(this.config))
}
async testConnection() {
@ -159,8 +165,8 @@ class DynamoDBIntegration implements IntegrationBase {
connected: false,
}
try {
const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
response.connected = !!scanRes.$response
const scanRes = await new DynamoDB(this.config).listTables()
response.connected = !!scanRes.$metadata
} catch (e: any) {
response.error = e.message as string
}
@ -169,13 +175,13 @@ class DynamoDBIntegration implements IntegrationBase {
async create(query: {
table: string
json: Omit<DocumentClient.PutItemInput, "TableName">
json: Omit<PutCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.put(params).promise()
return this.client.put(params)
}
async read(query: { table: string; json: object; index: null | string }) {
@ -184,7 +190,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.query(params).promise()
const response = await this.client.query(params)
if (response.Items) {
return response.Items
}
@ -197,7 +203,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.scan(params).promise()
const response = await this.client.scan(params)
if (response.Items) {
return response.Items
}
@ -208,40 +214,40 @@ class DynamoDBIntegration implements IntegrationBase {
const params = {
TableName: query.table,
}
return new AWS.DynamoDB(this.config).describeTable(params).promise()
return new DynamoDB(this.config).describeTable(params)
}
async get(query: {
table: string
json: Omit<DocumentClient.GetItemInput, "TableName">
json: Omit<GetCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.get(params).promise()
return this.client.get(params)
}
async update(query: {
table: string
json: Omit<DocumentClient.UpdateItemInput, "TableName">
json: Omit<UpdateCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.update(params).promise()
return this.client.update(params)
}
async delete(query: {
table: string
json: Omit<DocumentClient.DeleteItemInput, "TableName">
json: Omit<DeleteCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.delete(params).promise()
return this.client.delete(params)
}
}
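
DynamoDBDocument.from wraps the base client so items are plain JS values instead of AttributeValue maps, which is why the methods above can spread query.json straight into the params. A sketch against a hypothetical users table:

import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"

async function demo() {
const doc = DynamoDBDocument.from(new DynamoDB({ region: "eu-west-1" }))
// no manual marshalling: strings, numbers, arrays and objects pass through
await doc.put({ TableName: "users", Item: { id: "u1", tags: ["a", "b"] } })
const { Item } = await doc.get({ TableName: "users", Key: { id: "u1" } })
return Item
}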

View File

@ -7,8 +7,9 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import { S3 } from "@aws-sdk/client-s3"
import csv from "csvtojson"
import stream from "stream"
interface S3Config {
region: string
@ -167,7 +168,7 @@ class S3Integration implements IntegrationBase {
delete this.config.endpoint
}
this.client = new AWS.S3(this.config)
this.client = new S3(this.config)
}
async testConnection() {
@ -175,7 +176,7 @@ class S3Integration implements IntegrationBase {
connected: false,
}
try {
await this.client.listBuckets().promise()
await this.client.listBuckets()
response.connected = true
} catch (e: any) {
response.error = e.message as string
@ -209,7 +210,7 @@ class S3Integration implements IntegrationBase {
LocationConstraint: query.location,
}
}
return await this.client.createBucket(params).promise()
return await this.client.createBucket(params)
}
async read(query: {
@ -220,37 +221,39 @@ class S3Integration implements IntegrationBase {
maxKeys: number
prefix: string
}) {
const response = await this.client
.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
.promise()
const response = await this.client.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
return response.Contents
}
async readCsv(query: { bucket: string; key: string }) {
const stream = this.client
.getObject({
Bucket: query.bucket,
Key: query.key,
})
.createReadStream()
const response = await this.client.getObject({
Bucket: query.bucket,
Key: query.key,
})
// in the Node runtime the response Body is already a readable stream
const fileStream = response.Body
if (!fileStream || !(fileStream instanceof stream.Readable)) {
throw new Error("Unable to retrieve CSV - invalid stream")
}
let csvError = false
return new Promise((resolve, reject) => {
stream.on("error", (err: Error) => {
fileStream.on("error", (err: Error) => {
reject(err)
})
const response = csv()
.fromStream(stream)
.fromStream(fileStream)
.on("error", () => {
csvError = true
})
stream.on("finish", () => {
fileStream.on("finish", () => {
resolve(response)
})
}).catch(err => {
@ -263,12 +266,10 @@ class S3Integration implements IntegrationBase {
}
async delete(query: { bucket: string; delete: string }) {
return await this.client
.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
.promise()
return await this.client.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
}
}

View File

@ -430,7 +430,7 @@ export async function handleFileResponse(
size = details.ContentLength
}
}
presignedUrl = objectStore.getPresignedUrl(bucket, key)
presignedUrl = await objectStore.getPresignedUrl(bucket, key)
return {
data: {
size,

View File

@ -359,9 +359,9 @@ export async function coreOutputProcessing(
if (row[property] == null) {
continue
}
const process = (attachment: RowAttachment) => {
const process = async (attachment: RowAttachment) => {
if (!attachment.url && attachment.key) {
attachment.url = objectStore.getAppFileUrl(attachment.key)
attachment.url = await objectStore.getAppFileUrl(attachment.key)
}
return attachment
}
@ -369,11 +369,13 @@ export async function coreOutputProcessing(
row[property] = JSON.parse(row[property])
}
if (Array.isArray(row[property])) {
row[property].forEach((attachment: RowAttachment) => {
process(attachment)
})
await Promise.all(
row[property].map((attachment: RowAttachment) =>
process(attachment)
)
)
} else {
process(row[property])
await process(row[property])
}
}
} else if (

View File

@ -322,27 +322,27 @@ export async function save(
}
}
function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
if (!oidcLogos) {
return
}
oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
(acc: any, key: string) => {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
acc[key] = objectStoreUrl
} else {
acc[key] = oidcLogos.config[key]
}
return acc
},
{}
)
const newConfig: Record<string, string> = {}
const keys = Object.keys(oidcLogos.config || {})
for (const key of keys) {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = await objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
newConfig[key] = objectStoreUrl
} else {
newConfig[key] = oidcLogos.config[key]
}
}
oidcLogos.config = newConfig
}
export async function find(ctx: UserCtx<void, FindConfigResponse>) {
@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
async function handleConfigType(type: ConfigType, config: Config) {
if (type === ConfigType.OIDC_LOGOS) {
enrichOIDCLogos(config)
await enrichOIDCLogos(config)
} else if (type === ConfigType.AI) {
await handleAIConfig(config)
}
@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
const oidcCustomLogos = await configs.getOIDCLogosDoc()
if (oidcCustomLogos) {
enrichOIDCLogos(oidcCustomLogos)
await enrichOIDCLogos(oidcCustomLogos)
}
if (!oidcConfig) {
@ -427,7 +427,7 @@ export async function publicSettings(
// enrich the logo url - empty url means deleted
if (config.logoUrl && config.logoUrl !== "") {
config.logoUrl = objectStore.getGlobalFileUrl(
config.logoUrl = await objectStore.getGlobalFileUrl(
"settings",
"logoUrl",
config.logoUrlEtag
@ -437,7 +437,7 @@ export async function publicSettings(
// enrich the favicon url - empty url means deleted
const faviconUrl =
branding.faviconUrl && branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl(
? await objectStore.getGlobalFileUrl(
"settings",
"faviconUrl",
branding.faviconUrlEtag
@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
ctx.body = {
message: "File has been uploaded and url stored to config.",
url: objectStore.getGlobalFileUrl(type, name, etag),
url: await objectStore.getGlobalFileUrl(type, name, etag),
}
}

yarn.lock (1143 changed lines)

File diff suppressed because it is too large.