Merge branch 'master' of github.com:budibase/budibase into budi-8833-allow-ai-column-to-be-used-in-view-calculations

This commit is contained in:
Sam Rose 2025-01-08 17:03:47 +00:00
commit 1a6d27ae83
No known key found for this signature in database
34 changed files with 514 additions and 1676 deletions

View File

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.2.35",
+  "version": "3.2.37",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

View File

@@ -1,28 +0,0 @@
export class S3 {
headBucket() {
return jest.fn().mockReturnThis()
}
deleteObject() {
return jest.fn().mockReturnThis()
}
deleteObjects() {
return jest.fn().mockReturnThis()
}
createBucket() {
return jest.fn().mockReturnThis()
}
getObject() {
return jest.fn().mockReturnThis()
}
listObject() {
return jest.fn().mockReturnThis()
}
promise() {
return jest.fn().mockReturnThis()
}
catch() {
return jest.fn()
}
}
export const GetObjectCommand = jest.fn(inputs => ({ inputs }))

View File

@@ -1,4 +0,0 @@
export const getSignedUrl = jest.fn((_, cmd) => {
const { inputs } = cmd
return `http://s3.example.com/${inputs?.Bucket}/${inputs?.Key}`
})

View File

@@ -0,0 +1,19 @@
const mockS3 = {
headBucket: jest.fn().mockReturnThis(),
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}
const AWS = {
S3: jest.fn(() => mockS3),
}
export default AWS

View File

@@ -30,9 +30,6 @@
     "test:watch": "jest --watchAll"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "3.709.0",
-    "@aws-sdk/lib-storage": "3.709.0",
-    "@aws-sdk/s3-request-presigner": "3.709.0",
     "@budibase/nano": "10.1.5",
     "@budibase/pouchdb-replication-stream": "1.2.11",
     "@budibase/shared-core": "*",
@@ -74,13 +71,11 @@
   "devDependencies": {
     "@jest/types": "^29.6.3",
     "@shopify/jest-koa-mocks": "5.1.1",
-    "@smithy/types": "4.0.0",
     "@swc/core": "1.3.71",
     "@swc/jest": "0.2.27",
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
     "@types/jest": "29.5.5",
-    "@types/koa": "2.13.4",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.2",
@@ -88,6 +83,7 @@
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
+    "@types/koa": "2.13.4",
     "chance": "1.1.8",
     "ioredis-mock": "8.9.0",
     "jest": "29.7.0",

View File

@@ -154,7 +154,7 @@ const environment = {
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
-  AWS_REGION: process.env.AWS_REGION || "eu-west-1",
+  AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

View File

@@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
  * due to issues with the domain we were unable to continue doing this - keeping
  * incase we are able to switch back to CDN path again in future.
  */
-export async function clientLibraryCDNUrl(appId: string, version: string) {
+export function clientLibraryCDNUrl(appId: string, version: string) {
   let file = clientLibraryPath(appId)
   if (env.CLOUDFRONT_CDN) {
     // append app version to bust the cache
@@ -24,7 +24,7 @@ export async function clientLibraryCDNUrl(appId: string, version: string) {
     // file is public
     return cloudfront.getUrl(file)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
 }
@@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
   return `/api/assets/client?${qs.encode(qsParams)}`
 }
-export async function getAppFileUrl(s3Key: string) {
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
-    return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
   }
 }

View File

@@ -5,11 +5,7 @@ import * as cloudfront from "../cloudfront"
 // URLs
-export const getGlobalFileUrl = async (
-  type: string,
-  name: string,
-  etag?: string
-) => {
+export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
   let file = getGlobalFileS3Key(type, name)
   if (env.CLOUDFRONT_CDN) {
     if (etag) {
@@ -17,7 +13,7 @@ export const getGlobalFileUrl = async (
     }
     return cloudfront.getPresignedUrl(file)
   } else {
-    return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
+    return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
   }
 }

View File

@@ -6,25 +6,23 @@ import { Plugin } from "@budibase/types"
 // URLS
-export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
+export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
   if (!plugins || !plugins.length) {
     return []
   }
-  return await Promise.all(
-    plugins.map(async plugin => {
-      const jsUrl = await getPluginJSUrl(plugin)
-      const iconUrl = await getPluginIconUrl(plugin)
-      return { ...plugin, jsUrl, iconUrl }
-    })
-  )
+  return plugins.map(plugin => {
+    const jsUrl = getPluginJSUrl(plugin)
+    const iconUrl = getPluginIconUrl(plugin)
+    return { ...plugin, jsUrl, iconUrl }
+  })
 }
-async function getPluginJSUrl(plugin: Plugin) {
+function getPluginJSUrl(plugin: Plugin) {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }
-async function getPluginIconUrl(plugin: Plugin) {
+function getPluginIconUrl(plugin: Plugin): string | undefined {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -32,11 +30,11 @@ async function getPluginIconUrl(plugin: Plugin) {
   return getPluginUrl(s3Key)
 }
-async function getPluginUrl(s3Key: string) {
+function getPluginUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
-    return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
+    return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
   }
 }

View File

@@ -93,25 +93,25 @@ describe("app", () => {
       testEnv.multiTenant()
     })
-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
       expect(url).toBe(
         "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
       )
     })
-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
      expect(url).toBe(
        "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
      )
     })
-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const url = await getAppFileUrl()
+      const url = getAppFileUrl()
       // omit rest of signed params
       expect(
         url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
@@ -126,8 +126,8 @@ describe("app", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         expect(url).toBe(
           "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
         )
@@ -136,8 +136,8 @@ describe("app", () => {
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         expect(url).toBe(
           "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
         )
@@ -146,8 +146,8 @@ describe("app", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async () => {
-        const url = await getAppFileUrl()
+      await testEnv.withTenant(() => {
+        const url = getAppFileUrl()
         // omit rest of signed params
         expect(
           url.includes(

View File

@@ -3,7 +3,7 @@ import { testEnv } from "../../../../tests/extra"
 describe("global", () => {
   describe("getGlobalFileUrl", () => {
-    async function getGlobalFileUrl() {
+    function getGlobalFileUrl() {
       return global.getGlobalFileUrl("settings", "logoUrl", "etag")
     }
@@ -12,21 +12,21 @@ describe("global", () => {
       testEnv.singleTenant()
     })
-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       expect(url).toBe("/files/signed/global/settings/logoUrl")
     })
-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
     })
-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const url = await getGlobalFileUrl()
+      const url = getGlobalFileUrl()
       // omit rest of signed params
       expect(
         url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
@@ -41,16 +41,16 @@ describe("global", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
       })
     })
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         expect(url).toBe(
           `http://s3.example.com/global/${tenantId}/settings/logoUrl`
         )
@@ -59,8 +59,8 @@ describe("global", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async tenantId => {
-        const url = await getGlobalFileUrl()
+      await testEnv.withTenant(tenantId => {
+        const url = getGlobalFileUrl()
         // omit rest of signed params
         expect(
           url.includes(

View File

@@ -6,8 +6,8 @@ describe("plugins", () => {
   describe("enrichPluginURLs", () => {
     const plugin = structures.plugins.plugin()
-    async function getEnrichedPluginUrls() {
-      const enriched = (await plugins.enrichPluginURLs([plugin]))[0]
+    function getEnrichedPluginUrls() {
+      const enriched = plugins.enrichPluginURLs([plugin])[0]
       return {
         jsUrl: enriched.jsUrl!,
         iconUrl: enriched.iconUrl!,
@@ -19,9 +19,9 @@ describe("plugins", () => {
       testEnv.singleTenant()
     })
-    it("gets url with embedded minio", async () => {
+    it("gets url with embedded minio", () => {
       testEnv.withMinio()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       expect(urls.jsUrl).toBe(
         `/files/signed/plugins/${plugin.name}/plugin.min.js`
       )
@@ -30,9 +30,9 @@ describe("plugins", () => {
       )
     })
-    it("gets url with custom S3", async () => {
+    it("gets url with custom S3", () => {
       testEnv.withS3()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       expect(urls.jsUrl).toBe(
         `http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
       )
@@ -41,9 +41,9 @@ describe("plugins", () => {
       )
     })
-    it("gets url with cloudfront + s3", async () => {
+    it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
-      const urls = await getEnrichedPluginUrls()
+      const urls = getEnrichedPluginUrls()
       // omit rest of signed params
       expect(
         urls.jsUrl.includes(
@@ -65,8 +65,8 @@ describe("plugins", () => {
     it("gets url with embedded minio", async () => {
       testEnv.withMinio()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         expect(urls.jsUrl).toBe(
           `/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
         )
@@ -78,8 +78,8 @@ describe("plugins", () => {
     it("gets url with custom S3", async () => {
       testEnv.withS3()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         expect(urls.jsUrl).toBe(
           `http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
         )
@@ -91,8 +91,8 @@ describe("plugins", () => {
     it("gets url with cloudfront + s3", async () => {
       testEnv.withCloudfront()
-      await testEnv.withTenant(async tenantId => {
-        const urls = await getEnrichedPluginUrls()
+      await testEnv.withTenant(tenantId => {
+        const urls = getEnrichedPluginUrls()
         // omit rest of signed params
         expect(
           urls.jsUrl.includes(

View File

@@ -1,15 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
-import {
-  HeadObjectCommandOutput,
-  PutObjectCommandInput,
-  S3,
-  S3ClientConfig,
-  GetObjectCommand,
-  _Object as S3Object,
-} from "@aws-sdk/client-s3"
-import { Upload } from "@aws-sdk/lib-storage"
-import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
+import AWS from "aws-sdk"
 import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
@@ -22,8 +13,8 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
 import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
+import { HeadObjectOutput } from "aws-sdk/clients/s3"
 import { ReadableStream } from "stream/web"
-import { NodeJsRuntimeStreamingBlobPayloadOutputTypes } from "@smithy/types"
 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
@@ -93,24 +84,26 @@ export function sanitizeBucket(input: string) {
  * @constructor
  */
 export function ObjectStore(
+  bucket: string,
   opts: { presigning: boolean } = { presigning: false }
 ) {
-  const config: S3ClientConfig = {
-    forcePathStyle: true,
-    credentials: {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-    },
+  const config: AWS.S3.ClientConfiguration = {
+    s3ForcePathStyle: true,
+    signatureVersion: "v4",
+    apiVersion: "2006-03-01",
+    accessKeyId: env.MINIO_ACCESS_KEY,
+    secretAccessKey: env.MINIO_SECRET_KEY,
     region: env.AWS_REGION,
   }
+  if (bucket) {
+    config.params = {
+      Bucket: sanitizeBucket(bucket),
+    }
+  }
   // for AWS Credentials using temporary session token
   if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
-    config.credentials = {
-      accessKeyId: env.MINIO_ACCESS_KEY!,
-      secretAccessKey: env.MINIO_SECRET_KEY!,
-      sessionToken: env.AWS_SESSION_TOKEN,
-    }
+    config.sessionToken = env.AWS_SESSION_TOKEN
   }
   // custom S3 is in use i.e. minio
@@ -120,13 +113,13 @@ export function ObjectStore(
       // Normally a signed url will need to be generated with a specified host in mind.
       // To support dynamic hosts, e.g. some unknown self-hosted installation url,
       // use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
-      config.endpoint = "http://minio-service"
+      config.endpoint = "minio-service"
     } else {
       config.endpoint = env.MINIO_URL
     }
   }
-  return new S3(config)
+  return new AWS.S3(config)
 }
 /**
@@ -139,25 +132,26 @@ export async function createBucketIfNotExists(
 ): Promise<{ created: boolean; exists: boolean }> {
   bucketName = sanitizeBucket(bucketName)
   try {
-    await client.headBucket({
-      Bucket: bucketName,
-    })
+    await client
+      .headBucket({
+        Bucket: bucketName,
+      })
+      .promise()
     return { created: false, exists: true }
   } catch (err: any) {
-    const statusCode = err.statusCode || err.$response?.statusCode
-    const promises: Record<string, Promise<any> | undefined> =
-      STATE.bucketCreationPromises
-    const doesntExist = statusCode === 404,
-      noAccess = statusCode === 403
+    const promises: any = STATE.bucketCreationPromises
+    const doesntExist = err.statusCode === 404,
+      noAccess = err.statusCode === 403
     if (promises[bucketName]) {
       await promises[bucketName]
       return { created: false, exists: true }
     } else if (doesntExist || noAccess) {
       if (doesntExist) {
-        promises[bucketName] = client.createBucket({
-          Bucket: bucketName,
-        })
+        promises[bucketName] = client
+          .createBucket({
+            Bucket: bucketName,
+          })
+          .promise()
         await promises[bucketName]
         delete promises[bucketName]
         return { created: true, exists: false }
@@ -186,26 +180,25 @@ export async function upload({
   const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
   let contentType = type
-  const finalContentType = contentType
-    ? contentType
-    : extension
-    ? CONTENT_TYPE_MAP[extension.toLowerCase()]
-    : CONTENT_TYPE_MAP.txt
-  const config: PutObjectCommandInput = {
+  if (!contentType) {
+    contentType = extension
+      ? CONTENT_TYPE_MAP[extension.toLowerCase()]
+      : CONTENT_TYPE_MAP.txt
+  }
+  const config: any = {
     // windows file paths need to be converted to forward slashes for s3
-    Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filename),
-    Body: fileBytes as stream.Readable | Buffer,
-    ContentType: finalContentType,
+    Body: fileBytes,
+    ContentType: contentType,
   }
   if (metadata && typeof metadata === "object") {
     // remove any nullish keys from the metadata object, as these may be considered invalid
@@ -214,15 +207,10 @@
         delete metadata[key]
       }
     }
-    config.Metadata = metadata as Record<string, string>
+    config.Metadata = metadata
   }
-  const upload = new Upload({
-    client: objectStore,
-    params: config,
-  })
-  return upload.done()
+  return objectStore.upload(config).promise()
 }
 /**
@@ -241,12 +229,12 @@ export async function streamUpload({
     throw new Error("Stream to upload is invalid/undefined")
   }
   const extension = filename.split(".").pop()
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
   if (ttl && bucketCreated.created) {
     let ttlConfig = bucketTTLConfig(bucketName, ttl)
-    await objectStore.putBucketLifecycleConfiguration(ttlConfig)
+    await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
   }
   // Set content type for certain known extensions
@@ -279,15 +267,13 @@ export async function streamUpload({
     ...extra,
   }
-  const upload = new Upload({
-    client: objectStore,
-    params,
-  })
-  const details = await upload.done()
-  const headDetails = await objectStore.headObject({
-    Bucket: bucket,
-    Key: objKey,
-  })
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
+      Bucket: bucket,
+      Key: objKey,
+    })
+    .promise()
   return {
     ...details,
     ContentLength: headDetails.ContentLength,
@@ -298,44 +284,35 @@ export async function streamUpload({
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export async function retrieve(
-  bucketName: string,
-  filepath: string
-): Promise<string | stream.Readable> {
-  const objectStore = ObjectStore()
+export async function retrieve(bucketName: string, filepath: string) {
+  const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(filepath),
   }
-  const response = await objectStore.getObject(params)
-  if (!response.Body) {
-    throw new Error("Unable to retrieve object")
-  }
-  const nodeResponse =
-    response.Body as NodeJsRuntimeStreamingBlobPayloadOutputTypes
+  const response: any = await objectStore.getObject(params).promise()
   // currently these are all strings
   if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
-    return nodeResponse.toString()
+    return response.Body.toString("utf8")
   } else {
-    return nodeResponse
+    return response.Body
   }
 }
-export async function listAllObjects(
-  bucketName: string,
-  path: string
-): Promise<S3Object[]> {
-  const objectStore = ObjectStore()
+export async function listAllObjects(bucketName: string, path: string) {
+  const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
-    return objectStore.listObjectsV2({
-      ...params,
-      Bucket: sanitizeBucket(bucketName),
-      Prefix: sanitizeKey(path),
-    })
+    return objectStore
+      .listObjectsV2({
+        ...params,
+        Bucket: sanitizeBucket(bucketName),
+        Prefix: sanitizeKey(path),
+      })
+      .promise()
   }
   let isTruncated = false,
     token,
-    objects: Object[] = []
+    objects: AWS.S3.Types.Object[] = []
   do {
     let params: ListParams = {}
     if (token) {
@@ -354,19 +331,18 @@ export async function listAllObjects(
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export async function getPresignedUrl(
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds = 3600
 ) {
-  const objectStore = ObjectStore({ presigning: true })
+  const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
     Key: sanitizeKey(key),
+    Expires: durationSeconds,
   }
-  const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
-    expiresIn: durationSeconds,
-  })
+  const url = objectStore.getSignedUrl("getObject", params)
   if (!env.MINIO_ENABLED) {
     // return the full URL to the client
@@ -390,11 +366,7 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
   const outputPath = join(budibaseTempDir(), v4())
-  if (data instanceof stream.Readable) {
-    data.pipe(fs.createWriteStream(outputPath))
-  } else {
-    fs.writeFileSync(outputPath, data)
-  }
+  fs.writeFileSync(outputPath, data)
   return outputPath
 }
@@ -436,17 +408,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
  * Delete a single file.
  */
 export async function deleteFile(bucketName: string, filepath: string) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
     Key: sanitizeKey(filepath),
   }
-  return objectStore.deleteObject(params)
+  return objectStore.deleteObject(params).promise()
 }
 export async function deleteFiles(bucketName: string, filepaths: string[]) {
-  const objectStore = ObjectStore()
+  const objectStore = ObjectStore(bucketName)
   await createBucketIfNotExists(objectStore, bucketName)
   const params = {
     Bucket: bucketName,
@@ -454,7 +426,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
       Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
     },
   }
-  return objectStore.deleteObjects(params)
+  return objectStore.deleteObjects(params).promise()
 }
 /**
@@ -466,13 +438,13 @@ export async function deleteFolder(
 ): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const listParams = {
     Bucket: bucketName,
     Prefix: folder,
   }
-  const existingObjectsResponse = await client.listObjects(listParams)
+  const existingObjectsResponse = await client.listObjects(listParams).promise()
   if (existingObjectsResponse.Contents?.length === 0) {
     return
   }
@@ -487,7 +459,7 @@ export async function deleteFolder(
     deleteParams.Delete.Objects.push({ Key: content.Key })
   })
-  const deleteResponse = await client.deleteObjects(deleteParams)
+  const deleteResponse = await client.deleteObjects(deleteParams).promise()
   // can only empty 1000 items at once
   if (deleteResponse.Deleted?.length === 1000) {
     return deleteFolder(bucketName, folder)
@@ -562,33 +534,29 @@ export async function getReadStream(
 ): Promise<Readable> {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
-  const client = ObjectStore()
+  const client = ObjectStore(bucketName)
   const params = {
     Bucket: bucketName,
     Key: path,
   }
-  const response = await client.getObject(params)
-  if (!response.Body || !(response.Body instanceof stream.Readable)) {
-    throw new Error("Unable to retrieve stream - invalid response")
-  }
-  return response.Body
+  return client.getObject(params).createReadStream()
 }
 export async function getObjectMetadata(
   bucket: string,
   path: string
-): Promise<HeadObjectCommandOutput> {
+): Promise<HeadObjectOutput> {
   bucket = sanitizeBucket(bucket)
   path = sanitizeKey(path)
-  const client = ObjectStore()
+  const client = ObjectStore(bucket)
   const params = {
     Bucket: bucket,
     Key: path,
   }
   try {
-    return await client.headObject(params)
+    return await client.headObject(params).promise()
   } catch (err: any) {
     throw new Error("Unable to retrieve metadata from object")
   }

View File

@@ -2,10 +2,7 @@ import path, { join } from "path"
 import { tmpdir } from "os"
 import fs from "fs"
 import env from "../environment"
-import {
-  LifecycleRule,
-  PutBucketLifecycleConfigurationCommandInput,
-} from "@aws-sdk/client-s3"
+import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
 import * as objectStore from "./objectStore"
 import {
   AutomationAttachment,
@@ -46,8 +43,8 @@ export function budibaseTempDir() {
 export const bucketTTLConfig = (
   bucketName: string,
   days: number
-): PutBucketLifecycleConfigurationCommandInput => {
-  const lifecycleRule: LifecycleRule = {
+): PutBucketLifecycleConfigurationRequest => {
+  const lifecycleRule = {
     ID: `${bucketName}-ExpireAfter${days}days`,
     Prefix: "",
     Status: "Enabled",

View File

@@ -1,138 +0,0 @@
import { derived } from "svelte/store"
import { admin } from "./admin"
import { auth } from "./auth"
import { isEnabled } from "@/helpers/featureFlags"
import { sdk } from "@budibase/shared-core"
import { FeatureFlag } from "@budibase/types"
export const menu = derived([admin, auth], ([$admin, $auth]) => {
const user = $auth?.user
const isAdmin = sdk.users.isAdmin(user)
const cloud = $admin?.cloud
// Determine user sub pages
let userSubPages = [
{
title: "Users",
href: "/builder/portal/users/users",
},
]
userSubPages.push({
title: "Groups",
href: "/builder/portal/users/groups",
})
// Pages that all devs and admins can access
let menu = [
{
title: "Apps",
href: "/builder/portal/apps",
},
]
if (sdk.users.isGlobalBuilder(user)) {
menu.push({
title: "Users",
href: "/builder/portal/users",
subPages: userSubPages,
})
menu.push({
title: "Plugins",
href: "/builder/portal/plugins",
})
}
// Add settings page for admins
if (isAdmin) {
let settingsSubPages = [
{
title: "Auth",
href: "/builder/portal/settings/auth",
},
{
title: "Email",
href: "/builder/portal/settings/email",
},
{
title: "Organisation",
href: "/builder/portal/settings/organisation",
},
{
title: "Branding",
href: "/builder/portal/settings/branding",
},
{
title: "Environment",
href: "/builder/portal/settings/environment",
},
]
if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
settingsSubPages.push({
title: "AI",
href: "/builder/portal/settings/ai",
})
}
if (!cloud) {
settingsSubPages.push({
title: "Version",
href: "/builder/portal/settings/version",
})
settingsSubPages.push({
title: "Diagnostics",
href: "/builder/portal/settings/diagnostics",
})
}
menu.push({
title: "Settings",
href: "/builder/portal/settings",
subPages: [...settingsSubPages].sort((a, b) =>
a.title.localeCompare(b.title)
),
})
}
// Add account page
let accountSubPages = [
{
title: "Usage",
href: "/builder/portal/account/usage",
},
]
if (isAdmin) {
accountSubPages.push({
title: "Audit Logs",
href: "/builder/portal/account/auditLogs",
})
if (!cloud) {
accountSubPages.push({
title: "System Logs",
href: "/builder/portal/account/systemLogs",
})
}
}
if (cloud && user?.accountPortalAccess) {
accountSubPages.push({
title: "Upgrade",
href: $admin?.accountPortalUrl + "/portal/upgrade",
})
} else if (!cloud && isAdmin) {
accountSubPages.push({
title: "Upgrade",
href: "/builder/portal/account/upgrade",
})
}
// add license check here
if (user?.accountPortalAccess && user.account.stripeCustomerId) {
accountSubPages.push({
title: "Billing",
href: $admin?.accountPortalUrl + "/portal/billing",
})
}
menu.push({
title: "Account",
href: "/builder/portal/account",
subPages: accountSubPages,
})
return menu
})

View File

@@ -0,0 +1,149 @@
import { derived, Readable } from "svelte/store"
import { admin } from "./admin"
import { auth } from "./auth"
import { isEnabled } from "@/helpers/featureFlags"
import { sdk } from "@budibase/shared-core"
import { FeatureFlag } from "@budibase/types"
interface MenuItem {
title: string
href: string
subPages?: MenuItem[]
}
export const menu: Readable<MenuItem[]> = derived(
[admin, auth],
([$admin, $auth]) => {
const user = $auth?.user
const isAdmin = user != null && sdk.users.isAdmin(user)
const isGlobalBuilder = user != null && sdk.users.isGlobalBuilder(user)
const cloud = $admin?.cloud
// Determine user sub pages
let userSubPages: MenuItem[] = [
{
title: "Users",
href: "/builder/portal/users/users",
},
]
userSubPages.push({
title: "Groups",
href: "/builder/portal/users/groups",
})
// Pages that all devs and admins can access
let menu: MenuItem[] = [
{
title: "Apps",
href: "/builder/portal/apps",
},
]
if (isGlobalBuilder) {
menu.push({
title: "Users",
href: "/builder/portal/users",
subPages: userSubPages,
})
menu.push({
title: "Plugins",
href: "/builder/portal/plugins",
})
}
// Add settings page for admins
if (isAdmin) {
let settingsSubPages: MenuItem[] = [
{
title: "Auth",
href: "/builder/portal/settings/auth",
},
{
title: "Email",
href: "/builder/portal/settings/email",
},
{
title: "Organisation",
href: "/builder/portal/settings/organisation",
},
{
title: "Branding",
href: "/builder/portal/settings/branding",
},
{
title: "Environment",
href: "/builder/portal/settings/environment",
},
]
if (isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) {
settingsSubPages.push({
title: "AI",
href: "/builder/portal/settings/ai",
})
}
if (!cloud) {
settingsSubPages.push({
title: "Version",
href: "/builder/portal/settings/version",
})
settingsSubPages.push({
title: "Diagnostics",
href: "/builder/portal/settings/diagnostics",
})
}
menu.push({
title: "Settings",
href: "/builder/portal/settings",
subPages: [...settingsSubPages].sort((a, b) =>
a.title.localeCompare(b.title)
),
})
}
// Add account page
let accountSubPages: MenuItem[] = [
{
title: "Usage",
href: "/builder/portal/account/usage",
},
]
if (isAdmin) {
accountSubPages.push({
title: "Audit Logs",
href: "/builder/portal/account/auditLogs",
})
if (!cloud) {
accountSubPages.push({
title: "System Logs",
href: "/builder/portal/account/systemLogs",
})
}
}
if (cloud && user?.accountPortalAccess) {
accountSubPages.push({
title: "Upgrade",
href: $admin?.accountPortalUrl + "/portal/upgrade",
})
} else if (!cloud && isAdmin) {
accountSubPages.push({
title: "Upgrade",
href: "/builder/portal/account/upgrade",
})
}
// add license check here
if (user?.accountPortalAccess && user?.account?.stripeCustomerId) {
accountSubPages.push({
title: "Billing",
href: $admin?.accountPortalUrl + "/portal/billing",
})
}
menu.push({
title: "Account",
href: "/builder/portal/account",
subPages: accountSubPages,
})
return menu
}
)

View File

@@ -3,7 +3,6 @@ import fs from "fs"
 import { join } from "path"
 import { TEMP_DIR, MINIO_DIR } from "./utils"
 import { progressBar } from "../utils"
-import * as stream from "node:stream"
 const {
   ObjectStoreBuckets,
@@ -21,21 +20,15 @@ export async function exportObjects() {
   let fullList: any[] = []
   let errorCount = 0
   for (let bucket of bucketList) {
-    const client = ObjectStore()
+    const client = ObjectStore(bucket)
     try {
-      await client.headBucket({
-        Bucket: bucket,
-      })
+      await client.headBucket().promise()
     } catch (err) {
       errorCount++
       continue
     }
-    const list = await client.listObjectsV2({
-      Bucket: bucket,
-    })
-    fullList = fullList.concat(
-      list.Contents?.map(el => ({ ...el, bucket })) || []
-    )
+    const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
+    fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
   }
   if (errorCount === bucketList.length) {
     throw new Error("Unable to access MinIO/S3 - check environment config.")
@@ -50,13 +43,7 @@ export async function exportObjects() {
       const dirs = possiblePath.slice(0, possiblePath.length - 1)
       fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
     }
-    if (data instanceof stream.Readable) {
-      data.pipe(
-        fs.createWriteStream(join(path, object.bucket, ...possiblePath))
-      )
-    } else {
-      fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
-    }
+    fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
     bar.update(++count)
   }
   bar.stop()
@@ -73,7 +60,7 @@ export async function importObjects() {
   const bar = progressBar(total)
   let count = 0
   for (let bucket of buckets) {
-    const client = ObjectStore()
+    const client = ObjectStore(bucket)
     await createBucketIfNotExists(client, bucket)
     const files = await uploadDirectory(bucket, join(path, bucket), "/")
     count += files.length

View File

@@ -50,10 +50,6 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "10.0.3",
-    "@aws-sdk/client-dynamodb": "3.709.0",
-    "@aws-sdk/client-s3": "3.709.0",
-    "@aws-sdk/lib-dynamodb": "3.709.0",
-    "@aws-sdk/s3-request-presigner": "3.709.0",
    "@azure/msal-node": "^2.5.1",
    "@budibase/backend-core": "*",
    "@budibase/client": "*",
@@ -74,6 +70,7 @@
    "airtable": "0.12.2",
    "arangojs": "7.2.0",
    "archiver": "7.0.1",
+    "aws-sdk": "2.1692.0",
    "bcrypt": "5.1.0",
    "bcryptjs": "2.4.3",
    "bson": "^6.9.0",

View File

@@ -230,7 +230,7 @@ export async function fetchAppPackage(
   const license = await licensing.cache.getCachedLicense()
   // Enrich plugin URLs
-  application.usedPlugins = await objectStore.enrichPluginURLs(
+  application.usedPlugins = objectStore.enrichPluginURLs(
     application.usedPlugins
   )

View File

@@ -18,8 +18,7 @@ import {
   objectStore,
   utils,
 } from "@budibase/backend-core"
-import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
-import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
+import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
@@ -129,9 +128,9 @@ export const uploadFile = async function (
       return {
         size: file.size,
         name: file.name,
-        url: await objectStore.getAppFileUrl(s3Key),
+        url: objectStore.getAppFileUrl(s3Key),
         extension,
-        key: response.Key!,
+        key: response.Key,
       }
     })
   )
@@ -211,11 +210,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
       usedPlugins: plugins,
       favicon:
         branding.faviconUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
+          ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
           : "",
       logo:
         config?.logoUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "logoUrl")
+          ? objectStore.getGlobalFileUrl("settings", "logoUrl")
           : "",
       appMigrating: needMigrations,
       nonce: ctx.state.nonce,
@@ -244,7 +243,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
       metaDescription: branding?.metaDescription || "",
       favicon:
         branding.faviconUrl !== ""
-          ? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
+          ? objectStore.getGlobalFileUrl("settings", "faviconUrl")
           : "",
     })
@@ -335,17 +334,16 @@ export const getSignedUploadURL = async function (
     ctx.throw(400, "bucket and key values are required")
   }
   try {
-    const s3 = new S3({
+    const s3 = new AWS.S3({
       region: awsRegion,
       endpoint: datasource?.config?.endpoint || undefined,
-      credentials: {
-        accessKeyId: datasource?.config?.accessKeyId as string,
-        secretAccessKey: datasource?.config?.secretAccessKey as string,
-      },
+      accessKeyId: datasource?.config?.accessKeyId as string,
+      secretAccessKey: datasource?.config?.secretAccessKey as string,
+      apiVersion: "2006-03-01",
+      signatureVersion: "v4",
     })
     const params = { Bucket: bucket, Key: key }
-    signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
+    signedUrl = s3.getSignedUrl("putObject", params)
     if (datasource?.config?.endpoint) {
       publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
     } else {

View File

@@ -1,10 +1,12 @@
 // Directly mock the AWS SDK
-jest.mock("@aws-sdk/s3-request-presigner", () => ({
-  getSignedUrl: jest.fn(() => {
-    return `http://example.com`
-  }),
+jest.mock("aws-sdk", () => ({
+  S3: jest.fn(() => ({
+    getSignedUrl: jest.fn(
+      (operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
+    ),
+    upload: jest.fn(() => ({ Contents: {} })),
+  })),
 }))
-jest.mock("@aws-sdk/client-s3")
 import { Datasource, SourceName } from "@budibase/types"
 import { setEnv } from "../../../environment"
@@ -75,10 +77,7 @@ describe("/static", () => {
         type: "datasource",
         name: "Test",
         source: SourceName.S3,
-        config: {
-          accessKeyId: "bb",
-          secretAccessKey: "bb",
-        },
+        config: {},
       },
     })
   })
@@ -92,7 +91,7 @@ describe("/static", () => {
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
      .expect(200)
-    expect(res.body.signedUrl).toEqual("http://example.com")
+    expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
     expect(res.body.publicUrl).toEqual(
       `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
     )

View File

@@ -154,12 +154,11 @@ describe("test the create row action", () => {
     expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
     let s3Key = result.steps[1].outputs.row.file_attachment[0].key
-    const client = objectStore.ObjectStore()
-    const objectData = await client.headObject({
-      Bucket: objectStore.ObjectStoreBuckets.APPS,
-      Key: s3Key,
-    })
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
     expect(objectData).toBeDefined()
     expect(objectData.ContentLength).toBeGreaterThan(0)
@@ -230,12 +229,11 @@ describe("test the create row action", () => {
     )
     let s3Key = result.steps[1].outputs.row.single_file_attachment.key
-    const client = objectStore.ObjectStore()
-    const objectData = await client.headObject({
-      Bucket: objectStore.ObjectStoreBuckets.APPS,
-      Key: s3Key,
-    })
+    const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
+    const objectData = await client
+      .headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
+      .promise()
     expect(objectData).toBeDefined()
     expect(objectData.ContentLength).toBeGreaterThan(0)

View File

@@ -7,15 +7,9 @@ import {
   ConnectionInfo,
 } from "@budibase/types"
-import {
-  DynamoDBDocument,
-  PutCommandInput,
-  GetCommandInput,
-  UpdateCommandInput,
-  DeleteCommandInput,
-} from "@aws-sdk/lib-dynamodb"
-import { DynamoDB } from "@aws-sdk/client-dynamodb"
+import AWS from "aws-sdk"
 import { AWS_REGION } from "../constants"
+import { DocumentClient } from "aws-sdk/clients/dynamodb"
 interface DynamoDBConfig {
   region: string
@@ -157,7 +151,7 @@ class DynamoDBIntegration implements IntegrationBase {
       region: config.region || AWS_REGION,
       endpoint: config.endpoint || undefined,
     }
-    this.client = DynamoDBDocument.from(new DynamoDB(this.config))
+    this.client = new AWS.DynamoDB.DocumentClient(this.config)
   }
   async testConnection() {
@@ -165,8 +159,8 @@ class DynamoDBIntegration implements IntegrationBase {
       connected: false,
     }
     try {
-      const scanRes = await new DynamoDB(this.config).listTables()
-      response.connected = !!scanRes.$metadata
+      const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
+      response.connected = !!scanRes.$response
     } catch (e: any) {
       response.error = e.message as string
     }
@@ -175,13 +169,13 @@ class DynamoDBIntegration implements IntegrationBase {
   async create(query: {
     table: string
-    json: Omit<PutCommandInput, "TableName">
+    json: Omit<DocumentClient.PutItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.put(params)
+    return this.client.put(params).promise()
   }
   async read(query: { table: string; json: object; index: null | string }) {
@@ -190,7 +184,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.query(params)
+    const response = await this.client.query(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -203,7 +197,7 @@ class DynamoDBIntegration implements IntegrationBase {
       IndexName: query.index ? query.index : undefined,
       ...query.json,
     }
-    const response = await this.client.scan(params)
+    const response = await this.client.scan(params).promise()
     if (response.Items) {
       return response.Items
     }
@@ -214,40 +208,40 @@ class DynamoDBIntegration implements IntegrationBase {
     const params = {
       TableName: query.table,
     }
-    return new DynamoDB(this.config).describeTable(params)
+    return new AWS.DynamoDB(this.config).describeTable(params).promise()
   }
   async get(query: {
     table: string
-    json: Omit<GetCommandInput, "TableName">
+    json: Omit<DocumentClient.GetItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.get(params)
+    return this.client.get(params).promise()
   }
   async update(query: {
     table: string
-    json: Omit<UpdateCommandInput, "TableName">
+    json: Omit<DocumentClient.UpdateItemInput, "TableName">
   }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.update(params)
+    return this.client.update(params).promise()
   }
   async delete(query: {
     table: string
-    json: Omit<DeleteCommandInput, "TableName">
+    json: Omit<DocumentClient.DeleteItemInput, "TableName">
  }) {
     const params = {
       TableName: query.table,
       ...query.json,
     }
-    return this.client.delete(params)
+    return this.client.delete(params).promise()
   }
 }

View File

@@ -7,9 +7,8 @@ import {
   ConnectionInfo,
 } from "@budibase/types"
-import { S3 } from "@aws-sdk/client-s3"
+import AWS from "aws-sdk"
 import csv from "csvtojson"
-import stream from "stream"
 interface S3Config {
   region: string
@@ -168,7 +167,7 @@ class S3Integration implements IntegrationBase {
       delete this.config.endpoint
     }
-    this.client = new S3(this.config)
+    this.client = new AWS.S3(this.config)
   }
   async testConnection() {
@@ -176,7 +175,7 @@ class S3Integration implements IntegrationBase {
       connected: false,
     }
     try {
-      await this.client.listBuckets()
+      await this.client.listBuckets().promise()
       response.connected = true
     } catch (e: any) {
       response.error = e.message as string
@@ -210,7 +209,7 @@ class S3Integration implements IntegrationBase {
         LocationConstraint: query.location,
       }
     }
-    return await this.client.createBucket(params)
+    return await this.client.createBucket(params).promise()
   }
   async read(query: {
@@ -221,39 +220,37 @@ class S3Integration implements IntegrationBase {
     maxKeys: number
     prefix: string
   }) {
-    const response = await this.client.listObjects({
-      Bucket: query.bucket,
-      Delimiter: query.delimiter,
-      Marker: query.marker,
-      MaxKeys: query.maxKeys,
-      Prefix: query.prefix,
-    })
+    const response = await this.client
+      .listObjects({
+        Bucket: query.bucket,
+        Delimiter: query.delimiter,
+        Marker: query.marker,
+        MaxKeys: query.maxKeys,
+        Prefix: query.prefix,
+      })
+      .promise()
     return response.Contents
   }
   async readCsv(query: { bucket: string; key: string }) {
-    const response = await this.client.getObject({
-      Bucket: query.bucket,
-      Key: query.key,
-    })
-    const fileStream = response.Body?.transformToWebStream()
-    if (!fileStream || !(fileStream instanceof stream.Readable)) {
-      throw new Error("Unable to retrieve CSV - invalid stream")
-    }
+    const stream = this.client
+      .getObject({
+        Bucket: query.bucket,
+        Key: query.key,
+      })
+      .createReadStream()
     let csvError = false
     return new Promise((resolve, reject) => {
-      fileStream.on("error", (err: Error) => {
+      stream.on("error", (err: Error) => {
         reject(err)
       })
       const response = csv()
-        .fromStream(fileStream)
+        .fromStream(stream)
         .on("error", () => {
          csvError = true
        })
-      fileStream.on("finish", () => {
+      stream.on("finish", () => {
        resolve(response)
      })
    }).catch(err => {
@@ -266,10 +263,12 @@ class S3Integration implements IntegrationBase {
   }
   async delete(query: { bucket: string; delete: string }) {
-    return await this.client.deleteObjects({
-      Bucket: query.bucket,
-      Delete: JSON.parse(query.delete),
-    })
+    return await this.client
+      .deleteObjects({
+        Bucket: query.bucket,
+        Delete: JSON.parse(query.delete),
+      })
+      .promise()
   }
 }

View File

@@ -0,0 +1,76 @@
const response = (body: any, extra?: any) => () => ({
promise: () => body,
...extra,
})
class DocumentClient {
put = jest.fn(response({}))
query = jest.fn(
response({
Items: [],
})
)
scan = jest.fn(
response({
Items: [
{
Name: "test",
},
],
})
)
get = jest.fn(response({}))
update = jest.fn(response({}))
delete = jest.fn(response({}))
}
class S3 {
listObjects = jest.fn(
response({
Contents: [],
})
)
createBucket = jest.fn(
response({
Contents: {},
})
)
deleteObjects = jest.fn(
response({
Contents: {},
})
)
getSignedUrl = jest.fn((operation, params) => {
return `http://example.com/${params.Bucket}/${params.Key}`
})
headBucket = jest.fn(
response({
Contents: {},
})
)
upload = jest.fn(
response({
Contents: {},
})
)
getObject = jest.fn(
response(
{
Body: "",
},
{
createReadStream: jest.fn().mockReturnValue("stream"),
}
)
)
}
module.exports = {
DynamoDB: {
DocumentClient,
},
S3,
config: {
update: jest.fn(),
},
}


@@ -1,20 +1,4 @@
-jest.mock("@aws-sdk/lib-dynamodb", () => ({
-  DynamoDBDocument: {
-    from: jest.fn(() => ({
-      update: jest.fn(),
-      put: jest.fn(),
-      query: jest.fn(() => ({
-        Items: [],
-      })),
-      scan: jest.fn(() => ({
-        Items: [],
-      })),
-      delete: jest.fn(),
-      get: jest.fn(),
-    })),
-  },
-}))
-jest.mock("@aws-sdk/client-dynamodb")
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))

import { default as DynamoDBIntegration } from "../dynamodb"

class TestConfiguration {
@@ -73,7 +57,11 @@ describe("DynamoDB Integration", () => {
      TableName: tableName,
      IndexName: indexName,
    })
-    expect(response).toEqual([])
+    expect(response).toEqual([
+      {
+        Name: "test",
+      },
+    ])
  })

  it("calls the get method with the correct params", async () => {


@@ -1,52 +1,5 @@
+jest.mock("aws-sdk", () => require("./aws-sdk.mock"))

import { default as S3Integration } from "../s3"

-jest.mock("@aws-sdk/client-s3", () => {
-  class S3Mock {
-    response(body: any, extra?: any) {
-      return () => ({
-        promise: () => body,
-        ...extra,
-      })
-    }
-    listObjects = jest.fn(
-      this.response({
-        Contents: [],
-      })
-    )
-    createBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    deleteObjects = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    headBucket = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    upload = jest.fn(
-      this.response({
-        Contents: {},
-      })
-    )
-    getObject = jest.fn(
-      this.response(
-        {
-          Body: "",
-        },
-        {
-          createReadStream: jest.fn().mockReturnValue("stream"),
-        }
-      )
-    )
-  }
-  return { S3: S3Mock }
-})

class TestConfiguration {
  integration: any


@@ -430,7 +430,7 @@ export async function handleFileResponse(
        size = details.ContentLength
      }
    }
-    presignedUrl = await objectStore.getPresignedUrl(bucket, key)
+    presignedUrl = objectStore.getPresignedUrl(bucket, key)
    return {
      data: {
        size,


@@ -18,7 +18,7 @@ export async function fetch(type?: PluginType): Promise<Plugin[]> {
    })
  )
  let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
-  plugins = await objectStore.enrichPluginURLs(plugins)
+  plugins = objectStore.enrichPluginURLs(plugins)
  if (type) {
    return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
  } else {


@@ -78,7 +78,7 @@ export const getComponentLibraryManifest = async (library: string) => {
    resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
  }
  if (typeof resp !== "string") {
-    resp = resp.toString()
+    resp = resp.toString("utf8")
  }
  return JSON.parse(resp)
}


@@ -3,7 +3,6 @@ import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { objectStore } from "@budibase/backend-core"
-import stream from "stream"

const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
@@ -59,11 +58,7 @@ async function getPluginImpl(path: string, plugin: Plugin) {
    pluginKey
  )

-  if (pluginJs instanceof stream.Readable) {
-    pluginJs.pipe(fs.createWriteStream(filename))
-  } else {
-    fs.writeFileSync(filename, pluginJs)
-  }
+  fs.writeFileSync(filename, pluginJs)
  fs.writeFileSync(metadataName, hash)

  return require(filename)


@@ -359,9 +359,9 @@ export async function coreOutputProcessing(
          if (row[property] == null) {
            continue
          }
-          const process = async (attachment: RowAttachment) => {
+          const process = (attachment: RowAttachment) => {
            if (!attachment.url && attachment.key) {
-              attachment.url = await objectStore.getAppFileUrl(attachment.key)
+              attachment.url = objectStore.getAppFileUrl(attachment.key)
            }
            return attachment
          }
@@ -369,13 +369,11 @@ export async function coreOutputProcessing(
            row[property] = JSON.parse(row[property])
          }
          if (Array.isArray(row[property])) {
-            await Promise.all(
-              row[property].map((attachment: RowAttachment) =>
-                process(attachment)
-              )
-            )
+            row[property].forEach((attachment: RowAttachment) => {
+              process(attachment)
+            })
          } else {
-            await process(row[property])
+            process(row[property])
          }
        }
      } else if (


@@ -322,27 +322,27 @@ export async function save(
  }
}

-async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
+function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
  if (!oidcLogos) {
    return
  }
-  const newConfig: Record<string, string> = {}
-  const keys = Object.keys(oidcLogos.config || {})
-  for (const key of keys) {
-    if (!key.endsWith("Etag")) {
-      const etag = oidcLogos.config[`${key}Etag`]
-      const objectStoreUrl = await objectStore.getGlobalFileUrl(
-        oidcLogos.type,
-        key,
-        etag
-      )
-      newConfig[key] = objectStoreUrl
-    } else {
-      newConfig[key] = oidcLogos.config[key]
-    }
-  }
-  oidcLogos.config = newConfig
+  oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
+    (acc: any, key: string) => {
+      if (!key.endsWith("Etag")) {
+        const etag = oidcLogos.config[`${key}Etag`]
+        const objectStoreUrl = objectStore.getGlobalFileUrl(
+          oidcLogos.type,
+          key,
+          etag
+        )
+        acc[key] = objectStoreUrl
+      } else {
+        acc[key] = oidcLogos.config[key]
+      }
+      return acc
+    },
+    {}
+  )
}

export async function find(ctx: UserCtx<void, FindConfigResponse>) {
@@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
async function handleConfigType(type: ConfigType, config: Config) {
  if (type === ConfigType.OIDC_LOGOS) {
-    await enrichOIDCLogos(config)
+    enrichOIDCLogos(config)
  } else if (type === ConfigType.AI) {
    await handleAIConfig(config)
  }
@@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
  const oidcCustomLogos = await configs.getOIDCLogosDoc()
  if (oidcCustomLogos) {
-    await enrichOIDCLogos(oidcCustomLogos)
+    enrichOIDCLogos(oidcCustomLogos)
  }

  if (!oidcConfig) {
@@ -427,7 +427,7 @@ export async function publicSettings(
    // enrich the logo url - empty url means deleted
    if (config.logoUrl && config.logoUrl !== "") {
-      config.logoUrl = await objectStore.getGlobalFileUrl(
+      config.logoUrl = objectStore.getGlobalFileUrl(
        "settings",
        "logoUrl",
        config.logoUrlEtag
@@ -437,7 +437,7 @@ export async function publicSettings(
    // enrich the favicon url - empty url means deleted
    const faviconUrl =
      branding.faviconUrl && branding.faviconUrl !== ""
-        ? await objectStore.getGlobalFileUrl(
+        ? objectStore.getGlobalFileUrl(
          "settings",
          "faviconUrl",
          branding.faviconUrlEtag
@@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
  ctx.body = {
    message: "File has been uploaded and url stored to config.",
-    url: await objectStore.getGlobalFileUrl(type, name, etag),
+    url: objectStore.getGlobalFileUrl(type, name, etag),
  }
}

yarn.lock: 1107 changes (file diff suppressed because it is too large)