diff --git a/packages/backend-core/src/objectStore/buckets/app.ts b/packages/backend-core/src/objectStore/buckets/app.ts
index 43bc965c65..be9fddeaa6 100644
--- a/packages/backend-core/src/objectStore/buckets/app.ts
+++ b/packages/backend-core/src/objectStore/buckets/app.ts
@@ -1,50 +1,37 @@
 import env from "../../environment"
 import * as objectStore from "../objectStore"
 import * as cloudfront from "../cloudfront"
-import qs from "querystring"
-import { DEFAULT_TENANT_ID, getTenantId } from "../../context"
-
-export function clientLibraryPath(appId: string) {
-  return `${objectStore.sanitizeKey(appId)}/budibase-client.js`
-}
 
 /**
- * Previously we used to serve the client library directly from Cloudfront, however
- * due to issues with the domain we were unable to continue doing this - keeping
- * incase we are able to switch back to CDN path again in future.
+ * In production the client library is stored in the object store, however in development
+ * we use the symlinked version produced by lerna, located in node modules. We link to this
+ * via a specific endpoint (under /api/assets/client).
+ * @param appId In production we need the appId to look up the correct bucket, as the
+ * version of the client lib may differ between apps.
+ * @param version The version to retrieve.
+ * @return The URL to be inserted into appPackage response or server rendered
+ * app index file.
  */
-export function clientLibraryCDNUrl(appId: string, version: string) {
-  let file = clientLibraryPath(appId)
-  if (env.CLOUDFRONT_CDN) {
-    // append app version to bust the cache
-    if (version) {
-      file += `?v=${version}`
+export const clientLibraryUrl = (appId: string, version: string) => {
+  if (env.isProd()) {
+    let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
+    if (env.CLOUDFRONT_CDN) {
+      // append app version to bust the cache
+      if (version) {
+        file += `?v=${version}`
+      }
+      // don't need to use presigned for client with cloudfront
+      // file is public
+      return cloudfront.getUrl(file)
+    } else {
+      return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
     }
-    // don't need to use presigned for client with cloudfront
-    // file is public
-    return cloudfront.getUrl(file)
   } else {
-    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
+    return `/api/assets/client`
   }
 }
 
-export function clientLibraryUrl(appId: string, version: string) {
-  let tenantId, qsParams: { appId: string; version: string; tenantId?: string }
-  try {
-    tenantId = getTenantId()
-  } finally {
-    qsParams = {
-      appId,
-      version,
-    }
-  }
-  if (tenantId && tenantId !== DEFAULT_TENANT_ID) {
-    qsParams.tenantId = tenantId
-  }
-  return `/api/assets/client?${qs.encode(qsParams)}`
-}
-
-export function getAppFileUrl(s3Key: string) {
+export const getAppFileUrl = (s3Key: string) => {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts
index 6f1b7116ae..f7721afb23 100644
--- a/packages/backend-core/src/objectStore/buckets/plugins.ts
+++ b/packages/backend-core/src/objectStore/buckets/plugins.ts
@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"
 
 // URLS
 
-export function enrichPluginURLs(plugins: Plugin[]) {
+export const enrichPluginURLs = (plugins: Plugin[]) => {
   if (!plugins || !plugins.length) {
     return []
   }
@@ -17,12 +17,12 @@ export function enrichPluginURLs(plugins: Plugin[]) {
   })
 }
 
-function getPluginJSUrl(plugin: Plugin) {
+const getPluginJSUrl = (plugin: Plugin) => {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }
 
-function getPluginIconUrl(plugin: Plugin): string | undefined {
+const getPluginIconUrl = (plugin: Plugin): string | undefined => {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -30,7 +30,7 @@ function getPluginIconUrl(plugin: Plugin): string | undefined {
   return getPluginUrl(s3Key)
 }
 
-function getPluginUrl(s3Key: string) {
+const getPluginUrl = (s3Key: string) => {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
@@ -40,11 +40,11 @@ function getPluginUrl(s3Key: string) {
 
 // S3 KEYS
 
-export function getPluginJSKey(plugin: Plugin) {
+export const getPluginJSKey = (plugin: Plugin) => {
   return getPluginS3Key(plugin, "plugin.min.js")
 }
 
-export function getPluginIconKey(plugin: Plugin) {
+export const getPluginIconKey = (plugin: Plugin) => {
   // stored iconUrl is deprecated - hardcode to icon.svg in this case
   const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
   if (!iconFileName) {
@@ -53,12 +53,12 @@ export function getPluginIconKey(plugin: Plugin) {
   return getPluginS3Key(plugin, iconFileName)
 }
 
-function getPluginS3Key(plugin: Plugin, fileName: string) {
+const getPluginS3Key = (plugin: Plugin, fileName: string) => {
   const s3Key = getPluginS3Dir(plugin.name)
   return `${s3Key}/${fileName}`
 }
 
-export function getPluginS3Dir(pluginName: string) {
+export const getPluginS3Dir = (pluginName: string) => {
   let s3Key = `${pluginName}`
   if (env.MULTI_TENANCY) {
     const tenantId = context.getTenantId()
diff --git a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
index cbbbee6255..aaa07ec9d3 100644
--- a/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
+++ b/packages/backend-core/src/objectStore/buckets/tests/app.spec.ts
@@ -1,4 +1,5 @@
 import * as app from "../app"
+import { getAppFileUrl } from "../app"
 import { testEnv } from "../../../../tests/extra"
 
 describe("app", () => {
@@ -6,15 +7,6 @@ describe("app", () => {
     testEnv.nodeJest()
   })
 
-  function baseCheck(url: string, tenantId?: string) {
-    expect(url).toContain("/api/assets/client")
-    if (tenantId) {
-      expect(url).toContain(`tenantId=${tenantId}`)
-    }
-    expect(url).toContain("appId=app_123")
-    expect(url).toContain("version=2.0.0")
-  }
-
   describe("clientLibraryUrl", () => {
     function getClientUrl() {
       return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
@@ -28,19 +20,31 @@ describe("app", () => {
     it("gets url in dev", () => {
       testEnv.nodeDev()
       const url = getClientUrl()
-      baseCheck(url)
+      expect(url).toBe("/api/assets/client")
+    })
+
+    it("gets url with embedded minio", () => {
+      testEnv.withMinio()
+      const url = getClientUrl()
+      expect(url).toBe(
+        "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
+      )
     })
 
     it("gets url with custom S3", () => {
      testEnv.withS3()
       const url = getClientUrl()
-      baseCheck(url)
+      expect(url).toBe(
+        "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
+      )
     })
 
     it("gets url with cloudfront + s3", () => {
       testEnv.withCloudfront()
       const url = getClientUrl()
-      baseCheck(url)
+      expect(url).toBe(
+        "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
+      )
     })
   })
 
@@ -53,7 +57,7 @@ describe("app", () => {
       testEnv.nodeDev()
       await testEnv.withTenant(tenantId => {
         const url = getClientUrl()
-        baseCheck(url, tenantId)
+        expect(url).toBe("/api/assets/client")
       })
     })
 
@@ -61,7 +65,9 @@ describe("app", () => {
       await testEnv.withTenant(tenantId => {
         testEnv.withMinio()
         const url = getClientUrl()
-        baseCheck(url, tenantId)
+        expect(url).toBe(
+          "/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
+        )
       })
     })
 
@@ -69,7 +75,9 @@ describe("app", () => {
       await testEnv.withTenant(tenantId => {
         testEnv.withS3()
         const url = getClientUrl()
-        baseCheck(url, tenantId)
+        expect(url).toBe(
+          "http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
+        )
       })
     })
 
@@ -77,7 +85,9 @@ describe("app", () => {
       await testEnv.withTenant(tenantId => {
         testEnv.withCloudfront()
         const url = getClientUrl()
-        baseCheck(url, tenantId)
+        expect(url).toBe(
+          "http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
+        )
       })
     })
   })
diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts
index 76d2dd6689..c36a09915e 100644
--- a/packages/backend-core/src/objectStore/objectStore.ts
+++ b/packages/backend-core/src/objectStore/objectStore.ts
@@ -1,6 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
 import AWS from "aws-sdk"
-import stream, { Readable } from "stream"
+import stream from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
 import zlib from "zlib"
@@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) {
  * @return an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
-export function ObjectStore(
+export const ObjectStore = (
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
-) {
+) => {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -104,7 +104,7 @@ export function ObjectStore(
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export async function makeSureBucketExists(client: any, bucketName: string) {
+export const makeSureBucketExists = async (client: any, bucketName: string) => {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -139,13 +139,13 @@ export async function makeSureBucketExists(client: any, bucketName: string) {
  * Uploads the contents of a file given the required parameters, useful when
  * temp files in use (for example file uploaded as an attachment).
  */
-export async function upload({
+export const upload = async ({
   bucket: bucketName,
   filename,
   path,
   type,
   metadata,
-}: UploadParams) {
+}: UploadParams) => {
   const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)
 
@@ -180,12 +180,12 @@
  * Similar to the upload function but can be used to send a file stream
  * through to the object store.
  */
-export async function streamUpload(
+export const streamUpload = async (
   bucketName: string,
   filename: string,
   stream: any,
   extra = {}
-) {
+) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
 
@@ -215,7 +215,7 @@
 * retrieves the contents of a file from the object store, if it is a known content type it
 * will be converted, otherwise it will be returned as a buffer stream.
 */
-export async function retrieve(bucketName: string, filepath: string) {
+export const retrieve = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -230,7 +230,7 @@
   }
 }
 
-export async function listAllObjects(bucketName: string, path: string) {
+export const listAllObjects = async (bucketName: string, path: string) => {
   const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
     return objectStore
@@ -261,11 +261,11 @@
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export function getPresignedUrl(
+export const getPresignedUrl = (
   bucketName: string,
   key: string,
   durationSeconds: number = 3600
-) {
+) => {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -291,7 +291,7 @@
 /**
  * Same as retrieval function but puts to a temporary file.
  */
-export async function retrieveToTmp(bucketName: string, filepath: string) {
+export const retrieveToTmp = async (bucketName: string, filepath: string) => {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
@@ -300,7 +300,7 @@
   return outputPath
 }
 
-export async function retrieveDirectory(bucketName: string, path: string) {
+export const retrieveDirectory = async (bucketName: string, path: string) => {
   let writePath = join(budibaseTempDir(), v4())
   fs.mkdirSync(writePath)
   const objects = await listAllObjects(bucketName, path)
@@ -324,7 +324,7 @@
 /**
  * Delete a single file.
  */
-export async function deleteFile(bucketName: string, filepath: string) {
+export const deleteFile = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -334,7 +334,7 @@
   return objectStore.deleteObject(params).promise()
 }
 
-export async function deleteFiles(bucketName: string, filepaths: string[]) {
+export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -349,10 +349,10 @@
 /**
  * Delete a path, including everything within.
 */
-export async function deleteFolder(
+export const deleteFolder = async (
   bucketName: string,
   folder: string
-): Promise {
+): Promise => {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
   const client = ObjectStore(bucketName)
@@ -383,11 +383,11 @@
   }
 }
 
-export async function uploadDirectory(
+export const uploadDirectory = async (
   bucketName: string,
   localPath: string,
   bucketPath: string
-) {
+) => {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
   const files = fs.readdirSync(localPath, { withFileTypes: true })
@@ -404,11 +404,11 @@
   return files
 }
 
-export async function downloadTarballDirect(
+export const downloadTarballDirect = async (
   url: string,
   path: string,
   headers = {}
-) {
+) => {
   path = sanitizeKey(path)
   const response = await fetch(url, { headers })
   if (!response.ok) {
@@ -418,11 +418,11 @@
   await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
 }
 
-export async function downloadTarball(
+export const downloadTarball = async (
   url: string,
   bucketName: string,
   path: string
-) {
+) => {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)
@@ -438,17 +438,3 @@
   // return the temporary path incase there is a use for it
   return tmpPath
 }
-
-export async function getReadStream(
-  bucketName: string,
-  path: string
-): Promise<Readable> {
-  bucketName = sanitizeBucket(bucketName)
-  path = sanitizeKey(path)
-  const client = ObjectStore(bucketName)
-  const params = {
-    Bucket: bucketName,
-    Key: path,
-  }
-  return client.getObject(params).createReadStream()
-}
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index d2e887dcf8..bbf9dd34f5 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -16,7 +16,7 @@ import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
-import { App, Ctx } from "@budibase/types"
+import { App } from "@budibase/types"
 
 const send = require("koa-send")
 
@@ -39,7 +39,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
   }
 }
 
-export const toggleBetaUiFeature = async function (ctx: Ctx) {
+export const toggleBetaUiFeature = async function (ctx: any) {
   const cookieName = `beta:${ctx.params.feature}`
 
   if (ctx.cookies.get(cookieName)) {
@@ -67,14 +67,16 @@ export const toggleBetaUiFeature = async function (ctx: Ctx) {
   }
 }
 
-export const serveBuilder = async function (ctx: Ctx) {
+export const serveBuilder = async function (ctx: any) {
   const builderPath = join(TOP_LEVEL_PATH, "builder")
   await send(ctx, ctx.file, { root: builderPath })
 }
 
-export const uploadFile = async function (ctx: Ctx) {
-  const file = ctx.request?.files?.file
-  let files = file && Array.isArray(file) ? Array.from(file) : [file]
+export const uploadFile = async function (ctx: any) {
+  let files =
+    ctx.request.files.file.length > 1
+      ? Array.from(ctx.request.files.file)
+      : [ctx.request.files.file]
 
   const uploads = files.map(async (file: any) => {
     const fileExtension = [...file.name.split(".")].pop()
@@ -91,14 +93,14 @@
   ctx.body = await Promise.all(uploads)
 }
 
-export const deleteObjects = async function (ctx: Ctx) {
+export const deleteObjects = async function (ctx: any) {
   ctx.body = await objectStore.deleteFiles(
     ObjectStoreBuckets.APPS,
     ctx.request.body.keys
   )
 }
 
-export const serveApp = async function (ctx: Ctx) {
+export const serveApp = async function (ctx: any) {
   const bbHeaderEmbed =
     ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
 
@@ -179,7 +181,7 @@
   }
 }
 
-export const serveBuilderPreview = async function (ctx: Ctx) {
+export const serveBuilderPreview = async function (ctx: any) {
   const db = context.getAppDB({ skip_setup: true })
   const appInfo = await db.get(DocumentType.APP_METADATA)
 
@@ -195,29 +197,18 @@
   }
 }
 
-export const serveClientLibrary = async function (ctx: Ctx) {
-  const appId = context.getAppId() || (ctx.request.query.appId as string)
+export const serveClientLibrary = async function (ctx: any) {
   let rootPath = join(NODE_MODULES_PATH, "@budibase", "client", "dist")
-  if (!appId) {
-    ctx.throw(400, "No app ID provided - cannot fetch client library.")
-  }
-  if (env.isProd()) {
-    ctx.body = await objectStore.getReadStream(
-      ObjectStoreBuckets.APPS,
-      objectStore.clientLibraryPath(appId!)
-    )
-  } else if (env.isDev()) {
-    // incase running from TS directly
-    const tsPath = join(require.resolve("@budibase/client"), "..")
-    return send(ctx, "budibase-client.js", {
-      root: !fs.existsSync(rootPath) ? tsPath : rootPath,
-    })
-  } else {
-    ctx.throw(500, "Unable to retrieve client library.")
+  // incase running from TS directly
+  if (env.isDev() && !fs.existsSync(rootPath)) {
+    rootPath = join(require.resolve("@budibase/client"), "..")
   }
+  return send(ctx, "budibase-client.js", {
+    root: rootPath,
+  })
 }
 
-export const getSignedUploadURL = async function (ctx: Ctx) {
+export const getSignedUploadURL = async function (ctx: any) {
   // Ensure datasource is valid
   let datasource
   try {
@@ -256,7 +247,7 @@
       const params = { Bucket: bucket, Key: key }
       signedUrl = s3.getSignedUrl("putObject", params)
       publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}`
-    } catch (error: any) {
+    } catch (error) {
      ctx.throw(400, error)
     }
   }
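Note: the snippet below is a rough standalone sketch (not Budibase source) of the URL resolution that the restored clientLibraryUrl in backend-core performs after this change: a fixed /api/assets/client endpoint outside production, a presigned object-store URL in production, and a public CloudFront URL with a cache-busting version query when a CDN is configured. The Env shape and the presign helper are hypothetical stand-ins for Budibase's environment config and objectStore.getPresignedUrl, and no key sanitization is modelled.

// sketch.ts - illustrative only, run with ts-node
interface Env {
  isProd: boolean
  cloudfrontCdn?: string // e.g. "http://cf.example.com"
  appsBucket: string // e.g. "prod-budi-app-assets"
}

// hypothetical stand-in for objectStore.getPresignedUrl
const presign = (bucket: string, key: string) => `/files/signed/${bucket}/${key}`

function clientLibraryUrl(appId: string, version: string, env: Env): string {
  if (!env.isProd) {
    // dev/test: the client bundle is served from node_modules via this endpoint
    return "/api/assets/client"
  }
  const file = `${appId}/budibase-client.js`
  if (env.cloudfrontCdn) {
    // CloudFront serves the file publicly; the version query only busts the cache
    return `${env.cloudfrontCdn}/${file}?v=${version}`
  }
  // otherwise fall back to a presigned object store URL
  return presign(env.appsBucket, file)
}

// example resolutions for the three configurations the updated tests exercise
console.log(clientLibraryUrl("app_123", "2.0.0", { isProd: false, appsBucket: "" }))
// -> /api/assets/client
console.log(
  clientLibraryUrl("app_123", "2.0.0", { isProd: true, appsBucket: "prod-budi-app-assets" })
)
// -> /files/signed/prod-budi-app-assets/app_123/budibase-client.js
console.log(
  clientLibraryUrl("app_123", "2.0.0", {
    isProd: true,
    appsBucket: "prod-budi-app-assets",
    cloudfrontCdn: "http://cf.example.com",
  })
)
// -> http://cf.example.com/app_123/budibase-client.js?v=2.0.0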