Merge pull request #15215 from Budibase/chore/aws-v2-to-v3

Update AWS SDK to V3
Commit 3a67f7a34d by Michael Drury, 2025-01-07 18:01:03 +00:00 (committed via GitHub)
31 changed files with 1537 additions and 364 deletions
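
Every file below follows the same mechanical translation: the monolithic `aws-sdk` import becomes a scoped `@aws-sdk/client-*` package, and the v2 `request.promise()` chain becomes a plain awaited call. A minimal sketch of the pattern (the bucket, key and region values are illustrative, and `readObject` is a hypothetical helper, not code from this PR):

import { S3 } from "@aws-sdk/client-s3"

// v2 (removed): await new AWS.S3().getObject({ ... }).promise()
async function readObject() {
  const s3 = new S3({ region: "eu-west-1" })
  // v3: every client method returns a promise directly - no .promise()
  return s3.getObject({ Bucket: "my-bucket", Key: "path/to/file" })
}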

View File

@ -0,0 +1,28 @@
export class S3 {
headBucket() {
return jest.fn().mockReturnThis()
}
deleteObject() {
return jest.fn().mockReturnThis()
}
deleteObjects() {
return jest.fn().mockReturnThis()
}
createBucket() {
return jest.fn().mockReturnThis()
}
getObject() {
return jest.fn().mockReturnThis()
}
listObject() {
return jest.fn().mockReturnThis()
}
promise() {
return jest.fn().mockReturnThis()
}
catch() {
return jest.fn()
}
}
export const GetObjectCommand = jest.fn(inputs => ({ inputs }))

View File

@ -0,0 +1,4 @@
export const getSignedUrl = jest.fn((_, cmd) => {
const { inputs } = cmd
return `http://s3.example.com/${inputs?.Bucket}/${inputs?.Key}`
})
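
This mock stands in for the real presigner, which is called with a client and a command rather than an operation name and params. A sketch of the v3 call shape it replaces (`presignDownload` and the region are illustrative):

import { S3, GetObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

async function presignDownload(bucket: string, key: string) {
  const client = new S3({ region: "eu-west-1" })
  // the command carries the Bucket/Key inputs; expiry is a presigner option
  return getSignedUrl(
    client,
    new GetObjectCommand({ Bucket: bucket, Key: key }),
    { expiresIn: 3600 }
  )
}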

View File

@ -1,19 +0,0 @@
const mockS3 = {
headBucket: jest.fn().mockReturnThis(),
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}
const AWS = {
S3: jest.fn(() => mockS3),
}
export default AWS

View File

@ -30,6 +30,9 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-storage": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@budibase/nano": "10.1.5",
"@budibase/pouchdb-replication-stream": "1.2.11",
"@budibase/shared-core": "*",
@ -71,11 +74,13 @@
"devDependencies": {
"@jest/types": "^29.6.3",
"@shopify/jest-koa-mocks": "5.1.1",
"@smithy/types": "4.0.0",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/lodash": "4.14.200",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.2",
@ -83,7 +88,6 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"@types/koa": "2.13.4",
"chance": "1.1.8",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",

View File

@ -154,7 +154,7 @@ const environment = {
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
AWS_REGION: process.env.AWS_REGION,
AWS_REGION: process.env.AWS_REGION || "eu-west-1",
MINIO_URL: process.env.MINIO_URL,
MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

View File

@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
 * due to issues with the domain we were unable to continue doing this - keeping
 * in case we are able to switch back to the CDN path again in future.
*/
export function clientLibraryCDNUrl(appId: string, version: string) {
export async function clientLibraryCDNUrl(appId: string, version: string) {
let file = clientLibraryPath(appId)
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
@ -24,7 +24,7 @@ export function clientLibraryCDNUrl(appId: string, version: string) {
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
}
@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
return `/api/assets/client?${qs.encode(qsParams)}`
}
export function getAppFileUrl(s3Key: string) {
export async function getAppFileUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
}
}
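
Because presigning in v3 is itself asynchronous, these URL helpers became async, so every former synchronous call site now has to await them; a hypothetical caller:

async function attachmentResponse(s3Key: string) {
  // getAppFileUrl now resolves to the URL instead of returning it directly
  const url = await getAppFileUrl(s3Key)
  return { url }
}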

View File

@ -5,7 +5,11 @@ import * as cloudfront from "../cloudfront"
// URLs
export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
export const getGlobalFileUrl = async (
type: string,
name: string,
etag?: string
) => {
let file = getGlobalFileS3Key(type, name)
if (env.CLOUDFRONT_CDN) {
if (etag) {
@ -13,7 +17,7 @@ export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
}
return cloudfront.getPresignedUrl(file)
} else {
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
}
}

View File

@ -6,23 +6,25 @@ import { Plugin } from "@budibase/types"
// URLS
export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const jsUrl = getPluginJSUrl(plugin)
const iconUrl = getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
return await Promise.all(
plugins.map(async plugin => {
const jsUrl = await getPluginJSUrl(plugin)
const iconUrl = await getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
)
}
function getPluginJSUrl(plugin: Plugin) {
async function getPluginJSUrl(plugin: Plugin) {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}
function getPluginIconUrl(plugin: Plugin): string | undefined {
async function getPluginIconUrl(plugin: Plugin) {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
@ -30,11 +32,11 @@ function getPluginIconUrl(plugin: Plugin): string | undefined {
return getPluginUrl(s3Key)
}
function getPluginUrl(s3Key: string) {
async function getPluginUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
}
}

View File

@ -93,25 +93,25 @@ describe("app", () => {
testEnv.multiTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const url = getAppFileUrl()
const url = await getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const url = getAppFileUrl()
const url = await getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getAppFileUrl()
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
@ -126,8 +126,8 @@ describe("app", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
@ -136,8 +136,8 @@ describe("app", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
@ -146,8 +146,8 @@ describe("app", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -3,7 +3,7 @@ import { testEnv } from "../../../../tests/extra"
describe("global", () => {
describe("getGlobalFileUrl", () => {
function getGlobalFileUrl() {
async function getGlobalFileUrl() {
return global.getGlobalFileUrl("settings", "logoUrl", "etag")
}
@ -12,21 +12,21 @@ describe("global", () => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
expect(url).toBe("/files/signed/global/settings/logoUrl")
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
@ -41,16 +41,16 @@ describe("global", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
})
})
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
expect(url).toBe(
`http://s3.example.com/global/${tenantId}/settings/logoUrl`
)
@ -59,8 +59,8 @@ describe("global", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -6,8 +6,8 @@ describe("plugins", () => {
describe("enrichPluginURLs", () => {
const plugin = structures.plugins.plugin()
function getEnrichedPluginUrls() {
const enriched = plugins.enrichPluginURLs([plugin])[0]
async function getEnrichedPluginUrls() {
const enriched = (await plugins.enrichPluginURLs([plugin]))[0]
return {
jsUrl: enriched.jsUrl!,
iconUrl: enriched.iconUrl!,
@ -19,9 +19,9 @@ describe("plugins", () => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${plugin.name}/plugin.min.js`
)
@ -30,9 +30,9 @@ describe("plugins", () => {
)
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
)
@ -41,9 +41,9 @@ describe("plugins", () => {
)
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
@ -65,8 +65,8 @@ describe("plugins", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
@ -78,8 +78,8 @@ describe("plugins", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
@ -91,8 +91,8 @@ describe("plugins", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(

View File

@ -1,6 +1,15 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import {
HeadObjectCommandOutput,
PutObjectCommandInput,
S3,
S3ClientConfig,
GetObjectCommand,
_Object as S3Object,
} from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import stream, { Readable } from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
@ -13,8 +22,8 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
import fsp from "fs/promises"
import { HeadObjectOutput } from "aws-sdk/clients/s3"
import { ReadableStream } from "stream/web"
import { NodeJsRuntimeStreamingBlobPayloadOutputTypes } from "@smithy/types"
const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
@ -84,26 +93,24 @@ export function sanitizeBucket(input: string) {
* @constructor
*/
export function ObjectStore(
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) {
const config: AWS.S3.ClientConfiguration = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
const config: S3ClientConfig = {
forcePathStyle: true,
credentials: {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
},
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
}
}
// for AWS Credentials using temporary session token
if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
config.sessionToken = env.AWS_SESSION_TOKEN
config.credentials = {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
sessionToken: env.AWS_SESSION_TOKEN,
}
}
// custom S3 is in use i.e. minio
@ -113,13 +120,13 @@ export function ObjectStore(
// Normally a signed url will need to be generated with a specified host in mind.
// To support dynamic hosts, e.g. some unknown self-hosted installation url,
// use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
config.endpoint = "minio-service"
config.endpoint = "http://minio-service"
} else {
config.endpoint = env.MINIO_URL
}
}
return new AWS.S3(config)
return new S3(config)
}
/**
@ -132,26 +139,25 @@ export async function createBucketIfNotExists(
): Promise<{ created: boolean; exists: boolean }> {
bucketName = sanitizeBucket(bucketName)
try {
await client
.headBucket({
Bucket: bucketName,
})
.promise()
await client.headBucket({
Bucket: bucketName,
})
return { created: false, exists: true }
} catch (err: any) {
const promises: any = STATE.bucketCreationPromises
const doesntExist = err.statusCode === 404,
noAccess = err.statusCode === 403
const statusCode = err.statusCode || err.$response?.statusCode
const promises: Record<string, Promise<any> | undefined> =
STATE.bucketCreationPromises
const doesntExist = statusCode === 404,
noAccess = statusCode === 403
if (promises[bucketName]) {
await promises[bucketName]
return { created: false, exists: true }
} else if (doesntExist || noAccess) {
if (doesntExist) {
promises[bucketName] = client
.createBucket({
Bucket: bucketName,
})
.promise()
promises[bucketName] = client.createBucket({
Bucket: bucketName,
})
await promises[bucketName]
delete promises[bucketName]
return { created: true, exists: false }
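
The `err.statusCode || err.$response?.statusCode` fallback above covers both mocked and real responses; in v3 the canonical home of the HTTP status is the exception's `$metadata`. A sketch of the idiomatic check (`bucketExists` is hypothetical, not part of this PR):

import { S3 } from "@aws-sdk/client-s3"

async function bucketExists(client: S3, bucket: string): Promise<boolean> {
  try {
    await client.headBucket({ Bucket: bucket })
    return true
  } catch (err: any) {
    // all v3 service exceptions expose the HTTP status via $metadata
    if (err.$metadata?.httpStatusCode === 404) {
      return false
    }
    throw err
  }
}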
@ -180,25 +186,26 @@ export async function upload({
const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
let contentType = type
if (!contentType) {
contentType = extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}
const config: any = {
const finalContentType = contentType
? contentType
: extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
const config: PutObjectCommandInput = {
// windows file paths need to be converted to forward slashes for s3
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: fileBytes,
ContentType: contentType,
Body: fileBytes as stream.Readable | Buffer,
ContentType: finalContentType,
}
if (metadata && typeof metadata === "object") {
// remove any nullish keys from the metadata object, as these may be considered invalid
@ -207,10 +214,15 @@ export async function upload({
delete metadata[key]
}
}
config.Metadata = metadata
config.Metadata = metadata as Record<string, string>
}
return objectStore.upload(config).promise()
const upload = new Upload({
client: objectStore,
params: config,
})
return upload.done()
}
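
v3 has no direct equivalent of `s3.upload(...).promise()`; the `Upload` helper from `@aws-sdk/lib-storage` used above provides the same managed upload, switching to multipart automatically for large bodies and emitting progress events. A standalone sketch (names and region illustrative):

import { S3 } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { createReadStream } from "fs"

async function uploadFile(bucket: string, key: string, file: string) {
  const upload = new Upload({
    client: new S3({ region: "eu-west-1" }),
    params: { Bucket: bucket, Key: key, Body: createReadStream(file) },
  })
  // optional: observe multipart progress as parts complete
  upload.on("httpUploadProgress", p => console.log(p.loaded, p.total))
  return upload.done()
}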
/**
@ -229,12 +241,12 @@ export async function streamUpload({
throw new Error("Stream to upload is invalid/undefined")
}
const extension = filename.split(".").pop()
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
// Set content type for certain known extensions
@ -267,13 +279,15 @@ export async function streamUpload({
...extra,
}
const details = await objectStore.upload(params).promise()
const headDetails = await objectStore
.headObject({
Bucket: bucket,
Key: objKey,
})
.promise()
const upload = new Upload({
client: objectStore,
params,
})
const details = await upload.done()
const headDetails = await objectStore.headObject({
Bucket: bucket,
Key: objKey,
})
return {
...details,
ContentLength: headDetails.ContentLength,
@ -284,35 +298,44 @@ export async function streamUpload({
 * retrieves the contents of a file from the object store; if it is a known content type it
 * will be converted to a string, otherwise it will be returned as a readable stream.
*/
export async function retrieve(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
export async function retrieve(
bucketName: string,
filepath: string
): Promise<string | stream.Readable> {
const objectStore = ObjectStore()
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filepath),
}
const response: any = await objectStore.getObject(params).promise()
const response = await objectStore.getObject(params)
if (!response.Body) {
throw new Error("Unable to retrieve object")
}
const nodeResponse =
response.Body as NodeJsRuntimeStreamingBlobPayloadOutputTypes
// currently these are all strings
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
return response.Body.toString("utf8")
return await nodeResponse.transformToString()
} else {
return response.Body
return nodeResponse
}
}
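
In v3 a `GetObjectCommandOutput.Body` is no longer a buffer: in Node it is a `Readable` augmented with the `SdkStreamMixin` helpers, which is what the string conversion above relies on. For illustration (`readText` is hypothetical):

import { S3 } from "@aws-sdk/client-s3"

async function readText(bucket: string, key: string) {
  const s3 = new S3({ region: "eu-west-1" })
  const res = await s3.getObject({ Bucket: bucket, Key: key })
  // transformToString/transformToByteArray/transformToWebStream are mixed in
  return res.Body!.transformToString("utf-8")
}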
export async function listAllObjects(bucketName: string, path: string) {
const objectStore = ObjectStore(bucketName)
export async function listAllObjects(
bucketName: string,
path: string
): Promise<S3Object[]> {
const objectStore = ObjectStore()
const list = (params: ListParams = {}) => {
return objectStore
.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
.promise()
return objectStore.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
}
let isTruncated = false,
token,
objects: AWS.S3.Types.Object[] = []
objects: S3Object[] = []
do {
let params: ListParams = {}
if (token) {
@ -331,18 +354,19 @@ export async function listAllObjects(bucketName: string, path: string) {
/**
* Generate a presigned url with a default TTL of 1 hour
*/
export function getPresignedUrl(
export async function getPresignedUrl(
bucketName: string,
key: string,
durationSeconds = 3600
) {
const objectStore = ObjectStore(bucketName, { presigning: true })
const objectStore = ObjectStore({ presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(key),
Expires: durationSeconds,
}
const url = objectStore.getSignedUrl("getObject", params)
const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
expiresIn: durationSeconds,
})
if (!env.MINIO_ENABLED) {
// return the full URL to the client
@ -366,7 +390,11 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
const outputPath = join(budibaseTempDir(), v4())
fs.writeFileSync(outputPath, data)
if (data instanceof stream.Readable) {
  // wait for the write to complete before handing back the temp path
  await streamPipeline(data, fs.createWriteStream(outputPath))
} else {
  fs.writeFileSync(outputPath, data)
}
return outputPath
}
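
The manual continuation-token loop in `listAllObjects` could equally use the paginator that `@aws-sdk/client-s3` ships; a sketch under the same imports (`listAll` is hypothetical, not this PR's code):

import { S3, paginateListObjectsV2, _Object } from "@aws-sdk/client-s3"

async function listAll(client: S3, bucket: string, prefix: string) {
  const objects: _Object[] = []
  // the paginator hides the ContinuationToken bookkeeping done above
  for await (const page of paginateListObjectsV2(
    { client },
    { Bucket: bucket, Prefix: prefix }
  )) {
    objects.push(...(page.Contents ?? []))
  }
  return objects
}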
@ -408,17 +436,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
* Delete a single file.
*/
export async function deleteFile(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Key: sanitizeKey(filepath),
}
return objectStore.deleteObject(params).promise()
return objectStore.deleteObject(params)
}
export async function deleteFiles(bucketName: string, filepaths: string[]) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
@ -426,7 +454,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
},
}
return objectStore.deleteObjects(params).promise()
return objectStore.deleteObjects(params)
}
/**
@ -438,13 +466,13 @@ export async function deleteFolder(
): Promise<any> {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const listParams = {
Bucket: bucketName,
Prefix: folder,
}
const existingObjectsResponse = await client.listObjects(listParams).promise()
const existingObjectsResponse = await client.listObjects(listParams)
if (existingObjectsResponse.Contents?.length === 0) {
return
}
@ -459,7 +487,7 @@ export async function deleteFolder(
deleteParams.Delete.Objects.push({ Key: content.Key })
})
const deleteResponse = await client.deleteObjects(deleteParams).promise()
const deleteResponse = await client.deleteObjects(deleteParams)
// can only empty 1000 items at once
if (deleteResponse.Deleted?.length === 1000) {
return deleteFolder(bucketName, folder)
@ -534,29 +562,33 @@ export async function getReadStream(
): Promise<Readable> {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const params = {
Bucket: bucketName,
Key: path,
}
return client.getObject(params).createReadStream()
const response = await client.getObject(params)
if (!response.Body || !(response.Body instanceof stream.Readable)) {
throw new Error("Unable to retrieve stream - invalid response")
}
return response.Body
}
export async function getObjectMetadata(
bucket: string,
path: string
): Promise<HeadObjectOutput> {
): Promise<HeadObjectCommandOutput> {
bucket = sanitizeBucket(bucket)
path = sanitizeKey(path)
const client = ObjectStore(bucket)
const client = ObjectStore()
const params = {
Bucket: bucket,
Key: path,
}
try {
return await client.headObject(params).promise()
return await client.headObject(params)
} catch (err: any) {
throw new Error("Unable to retrieve metadata from object")
}
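
One pattern runs through this whole file: v2 allowed binding a default `Bucket` to the client via `config.params`, which v3 removed, so `ObjectStore(bucket)` becomes `ObjectStore()` and every call names its bucket explicitly. A sketch of the consequence (`bucketIsReachable` is a hypothetical helper):

// v2 (removed): new AWS.S3({ params: { Bucket: "my-bucket" } })
//               then s3.headBucket().promise() with no arguments
async function bucketIsReachable(bucket: string) {
  const s3 = ObjectStore()
  // v3: the Bucket travels with every command instead
  return s3.headBucket({ Bucket: bucket })
}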

View File

@ -2,7 +2,10 @@ import path, { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"
import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
import {
LifecycleRule,
PutBucketLifecycleConfigurationCommandInput,
} from "@aws-sdk/client-s3"
import * as objectStore from "./objectStore"
import {
AutomationAttachment,
@ -43,8 +46,8 @@ export function budibaseTempDir() {
export const bucketTTLConfig = (
bucketName: string,
days: number
): PutBucketLifecycleConfigurationRequest => {
const lifecycleRule = {
): PutBucketLifecycleConfigurationCommandInput => {
const lifecycleRule: LifecycleRule = {
ID: `${bucketName}-ExpireAfter${days}days`,
Prefix: "",
Status: "Enabled",
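
The rule built above plugs straight into the v3 lifecycle call, as the object store does when a TTL bucket is first created (see `upload`/`streamUpload` earlier); a usage sketch, assuming `client` is an S3 instance from `ObjectStore()`:

// inside an async context, right after bucket creation
await client.putBucketLifecycleConfiguration(bucketTTLConfig(bucketName, 1))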

View File

@ -3,6 +3,7 @@ import fs from "fs"
import { join } from "path"
import { TEMP_DIR, MINIO_DIR } from "./utils"
import { progressBar } from "../utils"
import * as stream from "node:stream"
const {
ObjectStoreBuckets,
@ -20,15 +21,21 @@ export async function exportObjects() {
let fullList: any[] = []
let errorCount = 0
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
const client = ObjectStore()
try {
await client.headBucket().promise()
await client.headBucket({
Bucket: bucket,
})
} catch (err) {
errorCount++
continue
}
const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
const list = await client.listObjectsV2({
Bucket: bucket,
})
fullList = fullList.concat(
list.Contents?.map(el => ({ ...el, bucket })) || []
)
}
if (errorCount === bucketList.length) {
throw new Error("Unable to access MinIO/S3 - check environment config.")
@ -43,7 +50,13 @@ export async function exportObjects() {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
if (data instanceof stream.Readable) {
  // wait for the object to flush to disk before advancing the progress bar
  await stream.promises.pipeline(
    data,
    fs.createWriteStream(join(path, object.bucket, ...possiblePath))
  )
} else {
  fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
}
bar.update(++count)
}
bar.stop()
@ -60,7 +73,7 @@ export async function importObjects() {
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
const client = ObjectStore()
await createBucketIfNotExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length

View File

@ -50,6 +50,10 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@aws-sdk/client-dynamodb": "3.709.0",
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-dynamodb": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "*",
"@budibase/client": "*",
@ -70,7 +74,6 @@
"airtable": "0.12.2",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bson": "^6.9.0",

View File

@ -230,7 +230,7 @@ export async function fetchAppPackage(
const license = await licensing.cache.getCachedLicense()
// Enrich plugin URLs
application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins = await objectStore.enrichPluginURLs(
application.usedPlugins
)

View File

@ -18,7 +18,8 @@ import {
objectStore,
utils,
} from "@budibase/backend-core"
import AWS from "aws-sdk"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
@ -128,9 +129,9 @@ export const uploadFile = async function (
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
url: await objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key,
key: response.Key!,
}
})
)
@ -210,11 +211,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
usedPlugins: plugins,
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
logo:
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
? await objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
nonce: ctx.state.nonce,
@ -243,7 +244,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
metaDescription: branding?.metaDescription || "",
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
})
@ -334,16 +335,17 @@ export const getSignedUploadURL = async function (
ctx.throw(400, "bucket and key values are required")
}
try {
const s3 = new AWS.S3({
const s3 = new S3({
region: awsRegion,
endpoint: datasource?.config?.endpoint || undefined,
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
apiVersion: "2006-03-01",
signatureVersion: "v4",
credentials: {
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
},
})
const params = { Bucket: bucket, Key: key }
signedUrl = s3.getSignedUrl("putObject", params)
signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
if (datasource?.config?.endpoint) {
publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
} else {

View File

@ -1,12 +1,10 @@
// Directly mock the AWS SDK
jest.mock("aws-sdk", () => ({
S3: jest.fn(() => ({
getSignedUrl: jest.fn(
(operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
),
upload: jest.fn(() => ({ Contents: {} })),
})),
jest.mock("@aws-sdk/s3-request-presigner", () => ({
getSignedUrl: jest.fn(() => {
return `http://example.com`
}),
}))
jest.mock("@aws-sdk/client-s3")
import { Datasource, SourceName } from "@budibase/types"
import { setEnv } from "../../../environment"
@ -77,7 +75,10 @@ describe("/static", () => {
type: "datasource",
name: "Test",
source: SourceName.S3,
config: {},
config: {
accessKeyId: "bb",
secretAccessKey: "bb",
},
},
})
})
@ -91,7 +92,7 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
expect(res.body.signedUrl).toEqual("http://example.com")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)

View File

@ -154,11 +154,12 @@ describe("test the create row action", () => {
expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
let s3Key = result.steps[1].outputs.row.file_attachment[0].key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)
@ -229,11 +230,12 @@ describe("test the create row action", () => {
)
let s3Key = result.steps[1].outputs.row.single_file_attachment.key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)

View File

@ -7,9 +7,15 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import {
DynamoDBDocument,
PutCommandInput,
GetCommandInput,
UpdateCommandInput,
DeleteCommandInput,
} from "@aws-sdk/lib-dynamodb"
import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { AWS_REGION } from "../constants"
import { DocumentClient } from "aws-sdk/clients/dynamodb"
interface DynamoDBConfig {
region: string
@ -151,7 +157,7 @@ class DynamoDBIntegration implements IntegrationBase {
region: config.region || AWS_REGION,
endpoint: config.endpoint || undefined,
}
this.client = new AWS.DynamoDB.DocumentClient(this.config)
this.client = DynamoDBDocument.from(new DynamoDB(this.config))
}
async testConnection() {
@ -159,8 +165,8 @@ class DynamoDBIntegration implements IntegrationBase {
connected: false,
}
try {
const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
response.connected = !!scanRes.$response
const scanRes = await new DynamoDB(this.config).listTables()
response.connected = !!scanRes.$metadata
} catch (e: any) {
response.error = e.message as string
}
@ -169,13 +175,13 @@ class DynamoDBIntegration implements IntegrationBase {
async create(query: {
table: string
json: Omit<DocumentClient.PutItemInput, "TableName">
json: Omit<PutCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.put(params).promise()
return this.client.put(params)
}
async read(query: { table: string; json: object; index: null | string }) {
@ -184,7 +190,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.query(params).promise()
const response = await this.client.query(params)
if (response.Items) {
return response.Items
}
@ -197,7 +203,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.scan(params).promise()
const response = await this.client.scan(params)
if (response.Items) {
return response.Items
}
@ -208,40 +214,40 @@ class DynamoDBIntegration implements IntegrationBase {
const params = {
TableName: query.table,
}
return new AWS.DynamoDB(this.config).describeTable(params).promise()
return new DynamoDB(this.config).describeTable(params)
}
async get(query: {
table: string
json: Omit<DocumentClient.GetItemInput, "TableName">
json: Omit<GetCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.get(params).promise()
return this.client.get(params)
}
async update(query: {
table: string
json: Omit<DocumentClient.UpdateItemInput, "TableName">
json: Omit<UpdateCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.update(params).promise()
return this.client.update(params)
}
async delete(query: {
table: string
json: Omit<DocumentClient.DeleteItemInput, "TableName">
json: Omit<DeleteCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.delete(params).promise()
return this.client.delete(params)
}
}
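
`DynamoDBDocument.from(new DynamoDB(config))` is the v3 replacement for `AWS.DynamoDB.DocumentClient` and keeps the plain-JavaScript marshalling. A standalone sketch (table name, item values and region are illustrative):

import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"

async function roundTrip() {
  const doc = DynamoDBDocument.from(new DynamoDB({ region: "eu-west-1" }))
  // plain JS objects in and out - no AttributeValue wrapping needed
  await doc.put({ TableName: "users", Item: { id: "1", name: "Test" } })
  const { Item } = await doc.get({ TableName: "users", Key: { id: "1" } })
  return Item
}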

View File

@ -7,8 +7,9 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import { S3 } from "@aws-sdk/client-s3"
import csv from "csvtojson"
import stream from "stream"
interface S3Config {
region: string
@ -167,7 +168,7 @@ class S3Integration implements IntegrationBase {
delete this.config.endpoint
}
this.client = new AWS.S3(this.config)
this.client = new S3(this.config)
}
async testConnection() {
@ -175,7 +176,7 @@ class S3Integration implements IntegrationBase {
connected: false,
}
try {
await this.client.listBuckets().promise()
await this.client.listBuckets()
response.connected = true
} catch (e: any) {
response.error = e.message as string
@ -209,7 +210,7 @@ class S3Integration implements IntegrationBase {
LocationConstraint: query.location,
}
}
return await this.client.createBucket(params).promise()
return await this.client.createBucket(params)
}
async read(query: {
@ -220,37 +221,39 @@ class S3Integration implements IntegrationBase {
maxKeys: number
prefix: string
}) {
const response = await this.client
.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
.promise()
const response = await this.client.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
return response.Contents
}
async readCsv(query: { bucket: string; key: string }) {
const stream = this.client
.getObject({
Bucket: query.bucket,
Key: query.key,
})
.createReadStream()
const response = await this.client.getObject({
Bucket: query.bucket,
Key: query.key,
})
// in Node the body is already a Readable - no web-stream conversion needed
const fileStream = response.Body
if (!fileStream || !(fileStream instanceof stream.Readable)) {
  throw new Error("Unable to retrieve CSV - invalid stream")
}
let csvError = false
return new Promise((resolve, reject) => {
stream.on("error", (err: Error) => {
fileStream.on("error", (err: Error) => {
reject(err)
})
const response = csv()
.fromStream(stream)
.fromStream(fileStream)
.on("error", () => {
csvError = true
})
stream.on("finish", () => {
fileStream.on("end", () => {
resolve(response)
})
}).catch(err => {
@ -263,12 +266,10 @@ class S3Integration implements IntegrationBase {
}
async delete(query: { bucket: string; delete: string }) {
return await this.client
.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
.promise()
return await this.client.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
}
}
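
With the Node stream in hand, `readCsv` above feeds the body straight into csvtojson; an equivalent standalone sketch (`csvFromS3`, the names and the region are illustrative):

import { S3 } from "@aws-sdk/client-s3"
import csv from "csvtojson"
import stream from "stream"

async function csvFromS3(bucket: string, key: string) {
  const s3 = new S3({ region: "eu-west-1" })
  const { Body } = await s3.getObject({ Bucket: bucket, Key: key })
  if (!(Body instanceof stream.Readable)) {
    throw new Error("expected a Node.js readable body")
  }
  // the csvtojson converter is thenable and resolves to an array of rows
  return csv().fromStream(Body)
}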

View File

@ -1,76 +0,0 @@
const response = (body: any, extra?: any) => () => ({
promise: () => body,
...extra,
})
class DocumentClient {
put = jest.fn(response({}))
query = jest.fn(
response({
Items: [],
})
)
scan = jest.fn(
response({
Items: [
{
Name: "test",
},
],
})
)
get = jest.fn(response({}))
update = jest.fn(response({}))
delete = jest.fn(response({}))
}
class S3 {
listObjects = jest.fn(
response({
Contents: [],
})
)
createBucket = jest.fn(
response({
Contents: {},
})
)
deleteObjects = jest.fn(
response({
Contents: {},
})
)
getSignedUrl = jest.fn((operation, params) => {
return `http://example.com/${params.Bucket}/${params.Key}`
})
headBucket = jest.fn(
response({
Contents: {},
})
)
upload = jest.fn(
response({
Contents: {},
})
)
getObject = jest.fn(
response(
{
Body: "",
},
{
createReadStream: jest.fn().mockReturnValue("stream"),
}
)
)
}
module.exports = {
DynamoDB: {
DocumentClient,
},
S3,
config: {
update: jest.fn(),
},
}

View File

@ -1,4 +1,20 @@
jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
jest.mock("@aws-sdk/lib-dynamodb", () => ({
DynamoDBDocument: {
from: jest.fn(() => ({
update: jest.fn(),
put: jest.fn(),
query: jest.fn(() => ({
Items: [],
})),
scan: jest.fn(() => ({
Items: [],
})),
delete: jest.fn(),
get: jest.fn(),
})),
},
}))
jest.mock("@aws-sdk/client-dynamodb")
import { default as DynamoDBIntegration } from "../dynamodb"
class TestConfiguration {
@ -57,11 +73,7 @@ describe("DynamoDB Integration", () => {
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([
{
Name: "test",
},
])
expect(response).toEqual([])
})
it("calls the get method with the correct params", async () => {

View File

@ -1,5 +1,52 @@
jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
import { default as S3Integration } from "../s3"
jest.mock("@aws-sdk/client-s3", () => {
class S3Mock {
response(body: any, extra?: any) {
return () => ({
promise: () => body,
...extra,
})
}
listObjects = jest.fn(
this.response({
Contents: [],
})
)
createBucket = jest.fn(
this.response({
Contents: {},
})
)
deleteObjects = jest.fn(
this.response({
Contents: {},
})
)
headBucket = jest.fn(
this.response({
Contents: {},
})
)
upload = jest.fn(
this.response({
Contents: {},
})
)
getObject = jest.fn(
this.response(
{
Body: "",
},
{
createReadStream: jest.fn().mockReturnValue("stream"),
}
)
)
}
return { S3: S3Mock }
})
class TestConfiguration {
integration: any

View File

@ -430,7 +430,7 @@ export async function handleFileResponse(
size = details.ContentLength
}
}
presignedUrl = objectStore.getPresignedUrl(bucket, key)
presignedUrl = await objectStore.getPresignedUrl(bucket, key)
return {
data: {
size,

View File

@ -18,7 +18,7 @@ export async function fetch(type?: PluginType): Promise<Plugin[]> {
})
)
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
plugins = objectStore.enrichPluginURLs(plugins)
plugins = await objectStore.enrichPluginURLs(plugins)
if (type) {
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
} else {

View File

@ -78,7 +78,7 @@ export const getComponentLibraryManifest = async (library: string) => {
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {
resp = resp.toString("utf8")
resp = resp.toString()
}
return JSON.parse(resp)
}

View File

@ -3,6 +3,7 @@ import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { objectStore } from "@budibase/backend-core"
import stream from "stream"
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
@ -58,7 +59,11 @@ async function getPluginImpl(path: string, plugin: Plugin) {
pluginKey
)
fs.writeFileSync(filename, pluginJs)
if (pluginJs instanceof stream.Readable) {
  // make sure the file is fully written before it is required below
  await stream.promises.pipeline(pluginJs, fs.createWriteStream(filename))
} else {
  fs.writeFileSync(filename, pluginJs)
}
fs.writeFileSync(metadataName, hash)
return require(filename)

View File

@ -359,9 +359,9 @@ export async function coreOutputProcessing(
if (row[property] == null) {
continue
}
const process = (attachment: RowAttachment) => {
const process = async (attachment: RowAttachment) => {
if (!attachment.url && attachment.key) {
attachment.url = objectStore.getAppFileUrl(attachment.key)
attachment.url = await objectStore.getAppFileUrl(attachment.key)
}
return attachment
}
@ -369,11 +369,13 @@ export async function coreOutputProcessing(
row[property] = JSON.parse(row[property])
}
if (Array.isArray(row[property])) {
row[property].forEach((attachment: RowAttachment) => {
process(attachment)
})
await Promise.all(
row[property].map((attachment: RowAttachment) =>
process(attachment)
)
)
} else {
process(row[property])
await process(row[property])
}
}
} else if (

View File

@ -322,27 +322,27 @@ export async function save(
}
}
function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
if (!oidcLogos) {
return
}
oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
(acc: any, key: string) => {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
acc[key] = objectStoreUrl
} else {
acc[key] = oidcLogos.config[key]
}
return acc
},
{}
)
const newConfig: Record<string, string> = {}
const keys = Object.keys(oidcLogos.config || {})
for (const key of keys) {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = await objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
newConfig[key] = objectStoreUrl
} else {
newConfig[key] = oidcLogos.config[key]
}
}
oidcLogos.config = newConfig
}
export async function find(ctx: UserCtx<void, FindConfigResponse>) {
@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
async function handleConfigType(type: ConfigType, config: Config) {
if (type === ConfigType.OIDC_LOGOS) {
enrichOIDCLogos(config)
await enrichOIDCLogos(config)
} else if (type === ConfigType.AI) {
await handleAIConfig(config)
}
@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
const oidcCustomLogos = await configs.getOIDCLogosDoc()
if (oidcCustomLogos) {
enrichOIDCLogos(oidcCustomLogos)
await enrichOIDCLogos(oidcCustomLogos)
}
if (!oidcConfig) {
@ -427,7 +427,7 @@ export async function publicSettings(
// enrich the logo url - empty url means deleted
if (config.logoUrl && config.logoUrl !== "") {
config.logoUrl = objectStore.getGlobalFileUrl(
config.logoUrl = await objectStore.getGlobalFileUrl(
"settings",
"logoUrl",
config.logoUrlEtag
@ -437,7 +437,7 @@ export async function publicSettings(
// enrich the favicon url - empty url means deleted
const faviconUrl =
branding.faviconUrl && branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl(
? await objectStore.getGlobalFileUrl(
"settings",
"faviconUrl",
branding.faviconUrlEtag
@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
ctx.body = {
message: "File has been uploaded and url stored to config.",
url: objectStore.getGlobalFileUrl(type, name, etag),
url: await objectStore.getGlobalFileUrl(type, name, etag),
}
}

yarn.lock: 1107 changes (diff suppressed because it is too large)