Move the client library to be retrieved through the same mechanism we already use in development: served by the server instance itself (as the builder already is). This should help with the CDN issues we have been having, although attachments are still affected.
This commit is contained in:
parent 5506c77b90
commit b53158ac08
@@ -1,37 +1,41 @@
 import env from "../../environment"
 import * as objectStore from "../objectStore"
 import * as cloudfront from "../cloudfront"
+import qs from "querystring"
 
+export function clientLibraryPath(appId: string) {
+  return `${objectStore.sanitizeKey(appId)}/budibase-client.js`
+}
+
 /**
- * In production the client library is stored in the object store, however in development
- * we use the symlinked version produced by lerna, located in node modules. We link to this
- * via a specific endpoint (under /api/assets/client).
- * @param appId In production we need the appId to look up the correct bucket, as the
- * version of the client lib may differ between apps.
- * @param version The version to retrieve.
- * @return The URL to be inserted into appPackage response or server rendered
- * app index file.
+ * Previously we used to serve the client library directly from Cloudfront, however
+ * due to issues with the domain we were unable to continue doing this - keeping
+ * incase we are able to switch back to CDN path again in future.
  */
-export const clientLibraryUrl = (appId: string, version: string) => {
-  if (env.isProd()) {
-    let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
-    if (env.CLOUDFRONT_CDN) {
-      // append app version to bust the cache
-      if (version) {
-        file += `?v=${version}`
-      }
-      // don't need to use presigned for client with cloudfront
-      // file is public
-      return cloudfront.getUrl(file)
-    } else {
-      return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
-    }
+export function clientLibraryCDNUrl(appId: string, version: string) {
+  let file = clientLibraryPath(appId)
+  if (env.CLOUDFRONT_CDN) {
+    // append app version to bust the cache
+    if (version) {
+      file += `?v=${version}`
+    }
+    // don't need to use presigned for client with cloudfront
+    // file is public
+    return cloudfront.getUrl(file)
   } else {
-    return `/api/assets/client`
+    return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
   }
 }
 
-export const getAppFileUrl = (s3Key: string) => {
+export function clientLibraryUrl(appId: string, version: string) {
+  const queryString = qs.encode({
+    appId,
+    version,
+  })
+  return `/api/assets/client?${queryString}`
+}
+
+export function getAppFileUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
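The hunk above splits the old single helper in two: clientLibraryCDNUrl keeps the CloudFront/presigned behaviour, while clientLibraryUrl now always points at the server endpoint. A minimal sketch of the URL the new helper produces, mirroring its qs.encode call with hypothetical values:

import qs from "querystring"

// Mirrors the new clientLibraryUrl above - the appId and version here are examples.
function exampleClientLibraryUrl(appId: string, version: string) {
  const queryString = qs.encode({ appId, version })
  return `/api/assets/client?${queryString}`
}

// "/api/assets/client?appId=app_dev_1234&version=2.3.18" (hypothetical values)
console.log(exampleClientLibraryUrl("app_dev_1234", "2.3.18"))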
@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"
 
 // URLS
 
-export const enrichPluginURLs = (plugins: Plugin[]) => {
+export function enrichPluginURLs(plugins: Plugin[]) {
   if (!plugins || !plugins.length) {
     return []
   }
@@ -17,12 +17,12 @@ export const enrichPluginURLs = (plugins: Plugin[]) => {
   })
 }
 
-const getPluginJSUrl = (plugin: Plugin) => {
+function getPluginJSUrl(plugin: Plugin) {
   const s3Key = getPluginJSKey(plugin)
   return getPluginUrl(s3Key)
 }
 
-const getPluginIconUrl = (plugin: Plugin): string | undefined => {
+function getPluginIconUrl(plugin: Plugin): string | undefined {
   const s3Key = getPluginIconKey(plugin)
   if (!s3Key) {
     return
@@ -30,7 +30,7 @@ const getPluginIconUrl = (plugin: Plugin): string | undefined => {
   return getPluginUrl(s3Key)
 }
 
-const getPluginUrl = (s3Key: string) => {
+function getPluginUrl(s3Key: string) {
   if (env.CLOUDFRONT_CDN) {
     return cloudfront.getPresignedUrl(s3Key)
   } else {
@@ -40,11 +40,11 @@ const getPluginUrl = (s3Key: string) => {
 
 // S3 KEYS
 
-export const getPluginJSKey = (plugin: Plugin) => {
+export function getPluginJSKey(plugin: Plugin) {
   return getPluginS3Key(plugin, "plugin.min.js")
 }
 
-export const getPluginIconKey = (plugin: Plugin) => {
+export function getPluginIconKey(plugin: Plugin) {
   // stored iconUrl is deprecated - hardcode to icon.svg in this case
   const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
   if (!iconFileName) {
@@ -53,12 +53,12 @@ export const getPluginIconKey = (plugin: Plugin) => {
   return getPluginS3Key(plugin, iconFileName)
 }
 
-const getPluginS3Key = (plugin: Plugin, fileName: string) => {
+function getPluginS3Key(plugin: Plugin, fileName: string) {
   const s3Key = getPluginS3Dir(plugin.name)
   return `${s3Key}/${fileName}`
 }
 
-export const getPluginS3Dir = (pluginName: string) => {
+export function getPluginS3Dir(pluginName: string) {
   let s3Key = `${pluginName}`
   if (env.MULTI_TENANCY) {
     const tenantId = context.getTenantId()
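The plugin helpers above only change from arrow functions to declarations; the key layout is unchanged. A rough sketch of the S3 key produced for a plugin bundle when multi-tenancy is disabled (the tenant-prefixed branch is truncated in the hunk, so it is not reproduced), with a hypothetical plugin name:

// Mirrors getPluginS3Dir + getPluginS3Key above, minus the MULTI_TENANCY branch.
function examplePluginJSKey(pluginName: string): string {
  const dir = `${pluginName}` // getPluginS3Dir without the tenant prefix
  return `${dir}/plugin.min.js` // getPluginS3Key(plugin, "plugin.min.js")
}

// "my-chart-plugin/plugin.min.js" (hypothetical plugin name)
console.log(examplePluginJSKey("my-chart-plugin"))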
@@ -1,6 +1,6 @@
 const sanitize = require("sanitize-s3-objectkey")
 import AWS from "aws-sdk"
-import stream from "stream"
+import stream, { Readable } from "stream"
 import fetch from "node-fetch"
 import tar from "tar-fs"
 import zlib from "zlib"
@@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) {
  * @return an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
-export const ObjectStore = (
+export function ObjectStore(
   bucket: string,
   opts: { presigning: boolean } = { presigning: false }
-) => {
+) {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -104,7 +104,7 @@ export const ObjectStore = (
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export const makeSureBucketExists = async (client: any, bucketName: string) => {
+export async function makeSureBucketExists(client: any, bucketName: string) {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
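ObjectStore and makeSureBucketExists only change from arrow functions to declarations here; behaviour is unchanged. A usage sketch, assuming a hypothetical bucket name and that this module is imported as ./objectStore (the real import path is not shown in the diff):

import { ObjectStore, makeSureBucketExists } from "./objectStore" // hypothetical path

async function ensureExampleBucket() {
  // ObjectStore returns a configured aws-sdk S3 client
  const client = ObjectStore("example-apps-bucket")
  // creates the bucket if it does not already exist
  await makeSureBucketExists(client, "example-apps-bucket")
  return client
}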
@@ -139,13 +139,13 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => {
  * Uploads the contents of a file given the required parameters, useful when
  * temp files in use (for example file uploaded as an attachment).
  */
-export const upload = async ({
+export async function upload({
   bucket: bucketName,
   filename,
   path,
   type,
   metadata,
-}: UploadParams) => {
+}: UploadParams) {
   const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)
 
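A sketch of calling upload with the UploadParams fields destructured above; the import path and all values are hypothetical, and whether type/metadata are optional is not visible in this hunk:

import { upload } from "./objectStore" // hypothetical path

async function uploadAttachmentExample() {
  await upload({
    bucket: "example-apps-bucket",       // destructured as bucketName
    filename: "attachments/logo.png",    // key the object is stored under
    path: "/tmp/upload_123.png",         // local temp file, read with fs.readFileSync
    type: "image/png",                   // content type
    metadata: { uploadedBy: "example" }, // example metadata
  })
}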
@@ -180,12 +180,12 @@ export const upload = async ({
  * Similar to the upload function but can be used to send a file stream
  * through to the object store.
  */
-export const streamUpload = async (
+export async function streamUpload(
   bucketName: string,
   filename: string,
   stream: any,
   extra = {}
-) => {
+) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
 
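streamUpload takes an already-open stream rather than a local path; a short sketch with a hypothetical file and bucket:

import fs from "fs"
import { streamUpload } from "./objectStore" // hypothetical path

async function streamUploadExample() {
  const readStream = fs.createReadStream("/tmp/backup.tar.gz")
  // sends the stream through to the object store under the given key
  await streamUpload("example-backups-bucket", "backups/latest.tar.gz", readStream)
}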
@@ -215,7 +215,7 @@ export const streamUpload = async (
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export const retrieve = async (bucketName: string, filepath: string) => {
+export async function retrieve(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -230,7 +230,7 @@ export const retrieve = async (bucketName: string, filepath: string) => {
   }
 }
 
-export const listAllObjects = async (bucketName: string, path: string) => {
+export async function listAllObjects(bucketName: string, path: string) {
   const objectStore = ObjectStore(bucketName)
   const list = (params: ListParams = {}) => {
     return objectStore
@@ -261,11 +261,11 @@ export const listAllObjects = async (bucketName: string, path: string) => {
 /**
  * Generate a presigned url with a default TTL of 1 hour
  */
-export const getPresignedUrl = (
+export function getPresignedUrl(
   bucketName: string,
   key: string,
   durationSeconds: number = 3600
-) => {
+) {
   const objectStore = ObjectStore(bucketName, { presigning: true })
   const params = {
     Bucket: sanitizeBucket(bucketName),
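A sketch of generating presigned URLs with the default one-hour TTL and with an explicit shorter duration; the import path, bucket and key are hypothetical:

import { getPresignedUrl } from "./objectStore" // hypothetical path

// default TTL of 3600 seconds (1 hour)
const oneHourUrl = getPresignedUrl("example-apps-bucket", "app_dev_1234/budibase-client.js")

// explicit 5 minute TTL
const fiveMinuteUrl = getPresignedUrl(
  "example-apps-bucket",
  "app_dev_1234/budibase-client.js",
  300
)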
@@ -291,7 +291,7 @@ export const getPresignedUrl = (
 /**
  * Same as retrieval function but puts to a temporary file.
  */
-export const retrieveToTmp = async (bucketName: string, filepath: string) => {
+export async function retrieveToTmp(bucketName: string, filepath: string) {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
@@ -300,7 +300,7 @@ export const retrieveToTmp = async (bucketName: string, filepath: string) => {
   return outputPath
 }
 
-export const retrieveDirectory = async (bucketName: string, path: string) => {
+export async function retrieveDirectory(bucketName: string, path: string) {
   let writePath = join(budibaseTempDir(), v4())
   fs.mkdirSync(writePath)
   const objects = await listAllObjects(bucketName, path)
@@ -324,7 +324,7 @@ export const retrieveDirectory = async (bucketName: string, path: string) => {
 /**
  * Delete a single file.
  */
-export const deleteFile = async (bucketName: string, filepath: string) => {
+export async function deleteFile(bucketName: string, filepath: string) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -334,7 +334,7 @@ export const deleteFile = async (bucketName: string, filepath: string) => {
   return objectStore.deleteObject(params).promise()
 }
 
-export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
+export async function deleteFiles(bucketName: string, filepaths: string[]) {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -349,10 +349,10 @@ export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
 /**
  * Delete a path, including everything within.
  */
-export const deleteFolder = async (
+export async function deleteFolder(
   bucketName: string,
   folder: string
-): Promise<any> => {
+): Promise<any> {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
   const client = ObjectStore(bucketName)
@@ -383,11 +383,11 @@ export const deleteFolder = async (
   }
 }
 
-export const uploadDirectory = async (
+export async function uploadDirectory(
   bucketName: string,
   localPath: string,
   bucketPath: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
   const files = fs.readdirSync(localPath, { withFileTypes: true })
@@ -404,11 +404,11 @@ export const uploadDirectory = async (
   return files
 }
 
-export const downloadTarballDirect = async (
+export async function downloadTarballDirect(
   url: string,
   path: string,
   headers = {}
-) => {
+) {
   path = sanitizeKey(path)
   const response = await fetch(url, { headers })
   if (!response.ok) {
@@ -418,11 +418,11 @@ export const downloadTarballDirect = async (
   await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
 }
 
-export const downloadTarball = async (
+export async function downloadTarball(
   url: string,
   bucketName: string,
   path: string
-) => {
+) {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)
@@ -438,3 +438,17 @@ export const downloadTarball = async (
   // return the temporary path incase there is a use for it
   return tmpPath
 }
+
+export async function getReadStream(
+  bucketName: string,
+  path: string
+): Promise<Readable> {
+  bucketName = sanitizeBucket(bucketName)
+  path = sanitizeKey(path)
+  const client = ObjectStore(bucketName)
+  const params = {
+    Bucket: bucketName,
+    Key: path,
+  }
+  return client.getObject(params).createReadStream()
+}
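The new getReadStream helper returns the raw S3 object stream; in the static controller below it is assigned straight to ctx.body. A sketch of piping it to a local file instead, with a hypothetical import path, bucket and key:

import fs from "fs"
import { pipeline } from "stream/promises"
import { getReadStream } from "./objectStore" // hypothetical path

async function downloadClientLibraryExample() {
  const readStream = await getReadStream(
    "example-apps-bucket",
    "app_dev_1234/budibase-client.js"
  )
  // pipe the S3 object stream to a local file
  await pipeline(readStream, fs.createWriteStream("/tmp/budibase-client.js"))
}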
@@ -16,7 +16,7 @@ import AWS from "aws-sdk"
 import fs from "fs"
 import sdk from "../../../sdk"
 import * as pro from "@budibase/pro"
-import { App } from "@budibase/types"
+import { App, Ctx } from "@budibase/types"
 
 const send = require("koa-send")
 
@@ -39,7 +39,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
   }
 }
 
-export const toggleBetaUiFeature = async function (ctx: any) {
+export const toggleBetaUiFeature = async function (ctx: Ctx) {
   const cookieName = `beta:${ctx.params.feature}`
 
   if (ctx.cookies.get(cookieName)) {
@@ -67,16 +67,14 @@ export const toggleBetaUiFeature = async function (ctx: any) {
   }
 }
 
-export const serveBuilder = async function (ctx: any) {
+export const serveBuilder = async function (ctx: Ctx) {
   const builderPath = join(TOP_LEVEL_PATH, "builder")
   await send(ctx, ctx.file, { root: builderPath })
 }
 
-export const uploadFile = async function (ctx: any) {
-  let files =
-    ctx.request.files.file.length > 1
-      ? Array.from(ctx.request.files.file)
-      : [ctx.request.files.file]
+export const uploadFile = async function (ctx: Ctx) {
+  const file = ctx.request?.files?.file
+  let files = file && Array.isArray(file) ? Array.from(file) : [file]
 
   const uploads = files.map(async (file: any) => {
     const fileExtension = [...file.name.split(".")].pop()
|
||||||
ctx.body = await Promise.all(uploads)
|
ctx.body = await Promise.all(uploads)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const deleteObjects = async function (ctx: any) {
|
export const deleteObjects = async function (ctx: Ctx) {
|
||||||
ctx.body = await objectStore.deleteFiles(
|
ctx.body = await objectStore.deleteFiles(
|
||||||
ObjectStoreBuckets.APPS,
|
ObjectStoreBuckets.APPS,
|
||||||
ctx.request.body.keys
|
ctx.request.body.keys
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
export const serveApp = async function (ctx: any) {
|
export const serveApp = async function (ctx: Ctx) {
|
||||||
const bbHeaderEmbed =
|
const bbHeaderEmbed =
|
||||||
ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
|
ctx.request.get("x-budibase-embed")?.toLowerCase() === "true"
|
||||||
|
|
||||||
|
@ -181,7 +179,7 @@ export const serveApp = async function (ctx: any) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export const serveBuilderPreview = async function (ctx: any) {
|
export const serveBuilderPreview = async function (ctx: Ctx) {
|
||||||
const db = context.getAppDB({ skip_setup: true })
|
const db = context.getAppDB({ skip_setup: true })
|
||||||
const appInfo = await db.get<App>(DocumentType.APP_METADATA)
|
const appInfo = await db.get<App>(DocumentType.APP_METADATA)
|
||||||
|
|
||||||
|
@@ -197,18 +195,29 @@ export const serveBuilderPreview = async function (ctx: any) {
   }
 }
 
-export const serveClientLibrary = async function (ctx: any) {
+export const serveClientLibrary = async function (ctx: Ctx) {
+  const appId = context.getAppId() || (ctx.request.query.appId as string)
   let rootPath = join(NODE_MODULES_PATH, "@budibase", "client", "dist")
-  // incase running from TS directly
-  if (env.isDev() && !fs.existsSync(rootPath)) {
-    rootPath = join(require.resolve("@budibase/client"), "..")
+  if (!appId) {
+    ctx.throw(400, "No app ID provided - cannot fetch client library.")
+  }
+  if (env.isProd()) {
+    ctx.body = await objectStore.getReadStream(
+      ObjectStoreBuckets.APPS,
+      objectStore.clientLibraryPath(appId!)
+    )
+  } else if (env.isDev()) {
+    // incase running from TS directly
+    const tsPath = join(require.resolve("@budibase/client"), "..")
+    return send(ctx, "budibase-client.js", {
+      root: !fs.existsSync(rootPath) ? tsPath : rootPath,
+    })
+  } else {
+    ctx.throw(500, "Unable to retrieve client library.")
   }
-  return send(ctx, "budibase-client.js", {
-    root: rootPath,
-  })
 }
 
-export const getSignedUploadURL = async function (ctx: any) {
+export const getSignedUploadURL = async function (ctx: Ctx) {
   // Ensure datasource is valid
   let datasource
   try {
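With this change the client library is no longer fetched from a CDN URL: in production serveClientLibrary streams budibase-client.js out of the apps bucket via getReadStream, and in development it still serves the local build. A sketch of what a request from a served app now looks like (host, app ID and version are hypothetical):

async function fetchClientLibraryExample() {
  // URL shape produced by clientLibraryUrl above - values are examples only
  const url =
    "https://budibase.example.com/api/assets/client?appId=app_dev_1234&version=2.3.18"
  const response = await fetch(url)
  return response.text() // the budibase-client.js bundle
}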
@@ -247,7 +256,7 @@ export const getSignedUploadURL = async function (ctx: any) {
     const params = { Bucket: bucket, Key: key }
     signedUrl = s3.getSignedUrl("putObject", params)
     publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}`
-  } catch (error) {
+  } catch (error: any) {
     ctx.throw(400, error)
   }
 }