Merge branch 'master' of github.com:Budibase/budibase into table-settings-updates

Andrew Kingston 2025-02-11 16:19:42 +00:00
commit c21168ed34
No known key found for this signature in database
78 changed files with 2455 additions and 787 deletions

View File

@ -202,6 +202,9 @@ jobs:
- run: yarn --frozen-lockfile
- name: Build client library - necessary for component tests
run: yarn build:client
- name: Set up PostgreSQL 16
if: matrix.datasource == 'postgres'
run: |

View File

@ -41,6 +41,11 @@ server {
}
location ~ ^/api/(system|admin|global)/ {
# Enable buffering for potentially large OIDC configs
proxy_buffering on;
proxy_buffer_size 16k;
proxy_buffers 4 32k;
proxy_pass http://127.0.0.1:4002;
}

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.4.4",
"version": "3.4.6",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@ -67,6 +67,7 @@
"lint:fix:eslint": "eslint --fix --max-warnings=0 packages",
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:eslint && yarn run lint:fix:prettier",
"build:client": "lerna run --stream build --scope @budibase/client",
"build:specs": "lerna run --stream specs",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",

View File

@ -0,0 +1,28 @@
export class S3 {
headBucket() {
return jest.fn().mockReturnThis()
}
deleteObject() {
return jest.fn().mockReturnThis()
}
deleteObjects() {
return jest.fn().mockReturnThis()
}
createBucket() {
return jest.fn().mockReturnThis()
}
getObject() {
return jest.fn().mockReturnThis()
}
listObject() {
return jest.fn().mockReturnThis()
}
promise() {
return jest.fn().mockReturnThis()
}
catch() {
return jest.fn()
}
}
export const GetObjectCommand = jest.fn(inputs => ({ inputs }))

View File

@ -0,0 +1,4 @@
export const getSignedUrl = jest.fn((_, cmd) => {
const { inputs } = cmd
return `http://s3.example.com/${inputs?.Bucket}/${inputs?.Key}`
})
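
The two manual mocks above work together: the mocked GetObjectCommand simply echoes its inputs, and the mocked getSignedUrl reads them back to build a predictable URL. A minimal sketch of a test that leans on this (hypothetical file, assuming the mocks live in a __mocks__ directory adjacent to node_modules so Jest applies them automatically for these scoped packages):

import { S3, GetObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

it("produces deterministic presigned URLs under the manual mocks", async () => {
  const client = new S3({})
  // the mocked command constructor simply wraps its inputs
  const cmd = new GetObjectCommand({ Bucket: "global", Key: "settings/logoUrl" })
  // the mocked presigner rebuilds a fake URL from Bucket and Key instead of signing
  const url = await getSignedUrl(client, cmd, { expiresIn: 3600 })
  expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})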

View File

@ -1,19 +0,0 @@
const mockS3 = {
headBucket: jest.fn().mockReturnThis(),
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}
const AWS = {
S3: jest.fn(() => mockS3),
}
export default AWS

View File

@ -30,6 +30,9 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-storage": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@budibase/nano": "10.1.5",
"@budibase/pouchdb-replication-stream": "1.2.11",
"@budibase/shared-core": "*",
@ -71,11 +74,13 @@
"devDependencies": {
"@jest/types": "^29.6.3",
"@shopify/jest-koa-mocks": "5.1.1",
"@smithy/types": "4.0.0",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/chance": "1.1.3",
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/koa": "2.13.4",
"@types/lodash": "4.14.200",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.2",
@ -83,7 +88,6 @@
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
"@types/koa": "2.13.4",
"chance": "1.1.8",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",

View File

@ -8,6 +8,10 @@ import {
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"
const EXTERNAL_TABLE_ID_REGEX = new RegExp(
`^${DocumentType.DATASOURCE_PLUS}_(.+)__(.+)$`
)
/**
* If creating DB allDocs/query params with only a single top level ID this can be used, this
* is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
@ -64,6 +68,11 @@ export function getQueryIndex(viewName: ViewName) {
return `database/${viewName}`
}
export const isExternalTableId = (id: string): boolean => {
const matches = id.match(EXTERNAL_TABLE_ID_REGEX)
return !!id && matches !== null
}
/**
* Check if a given ID is that of a table.
*/
@ -72,7 +81,7 @@ export const isTableId = (id: string): boolean => {
return (
!!id &&
(id.startsWith(`${DocumentType.TABLE}${SEPARATOR}`) ||
id.startsWith(`${DocumentType.DATASOURCE_PLUS}${SEPARATOR}`))
isExternalTableId(id))
)
}
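
A quick sketch of the IDs the new regex distinguishes, assuming DocumentType.DATASOURCE_PLUS resolves to the usual "datasource_plus" prefix and DocumentType.TABLE to "ta" (import path hypothetical):

import { isExternalTableId, isTableId } from "../docIds"

isExternalTableId("datasource_plus_a1b2c3__customers") // true  - datasource id plus "__<table name>"
isExternalTableId("datasource_plus_a1b2c3")            // false - no "__<table>" suffix
isExternalTableId("ta_employees")                      // false - internal table id
isTableId("ta_employees")                              // true  - internal prefix still accepted
isTableId("datasource_plus_a1b2c3__customers")         // true  - now validated via isExternalTableId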

View File

@ -154,7 +154,7 @@ const environment = {
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
AWS_REGION: process.env.AWS_REGION,
AWS_REGION: process.env.AWS_REGION || "eu-west-1",
MINIO_URL: process.env.MINIO_URL,
MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,

View File

@ -13,7 +13,7 @@ export function clientLibraryPath(appId: string) {
* due to issues with the domain we were unable to continue doing this - keeping
 * in case we are able to switch back to CDN path again in future.
*/
export function clientLibraryCDNUrl(appId: string, version: string) {
export async function clientLibraryCDNUrl(appId: string, version: string) {
let file = clientLibraryPath(appId)
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
@ -24,7 +24,7 @@ export function clientLibraryCDNUrl(appId: string, version: string) {
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
}
@ -44,10 +44,10 @@ export function clientLibraryUrl(appId: string, version: string) {
return `/api/assets/client?${qs.encode(qsParams)}`
}
export function getAppFileUrl(s3Key: string) {
export async function getAppFileUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
}
}

View File

@ -5,7 +5,11 @@ import * as cloudfront from "../cloudfront"
// URLs
export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
export const getGlobalFileUrl = async (
type: string,
name: string,
etag?: string
) => {
let file = getGlobalFileS3Key(type, name)
if (env.CLOUDFRONT_CDN) {
if (etag) {
@ -13,7 +17,7 @@ export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
}
return cloudfront.getPresignedUrl(file)
} else {
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
return await objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
}
}

View File

@ -6,23 +6,25 @@ import { Plugin } from "@budibase/types"
// URLS
export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
export async function enrichPluginURLs(plugins?: Plugin[]): Promise<Plugin[]> {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const jsUrl = getPluginJSUrl(plugin)
const iconUrl = getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
return await Promise.all(
plugins.map(async plugin => {
const jsUrl = await getPluginJSUrl(plugin)
const iconUrl = await getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
)
}
function getPluginJSUrl(plugin: Plugin) {
async function getPluginJSUrl(plugin: Plugin) {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}
function getPluginIconUrl(plugin: Plugin): string | undefined {
async function getPluginIconUrl(plugin: Plugin) {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
@ -30,11 +32,11 @@ function getPluginIconUrl(plugin: Plugin): string | undefined {
return getPluginUrl(s3Key)
}
function getPluginUrl(s3Key: string) {
async function getPluginUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
return await objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
}
}

View File

@ -93,25 +93,25 @@ describe("app", () => {
testEnv.multiTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const url = getAppFileUrl()
const url = await getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const url = getAppFileUrl()
const url = await getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getAppFileUrl()
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
@ -126,8 +126,8 @@ describe("app", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
@ -136,8 +136,8 @@ describe("app", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
@ -146,8 +146,8 @@ describe("app", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(() => {
const url = getAppFileUrl()
await testEnv.withTenant(async () => {
const url = await getAppFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -3,7 +3,7 @@ import { testEnv } from "../../../../tests/extra"
describe("global", () => {
describe("getGlobalFileUrl", () => {
function getGlobalFileUrl() {
async function getGlobalFileUrl() {
return global.getGlobalFileUrl("settings", "logoUrl", "etag")
}
@ -12,21 +12,21 @@ describe("global", () => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
expect(url).toBe("/files/signed/global/settings/logoUrl")
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const url = getGlobalFileUrl()
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
@ -41,16 +41,16 @@ describe("global", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
})
})
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
expect(url).toBe(
`http://s3.example.com/global/${tenantId}/settings/logoUrl`
)
@ -59,8 +59,8 @@ describe("global", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
await testEnv.withTenant(async tenantId => {
const url = await getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes(

View File

@ -6,8 +6,8 @@ describe("plugins", () => {
describe("enrichPluginURLs", () => {
const plugin = structures.plugins.plugin()
function getEnrichedPluginUrls() {
const enriched = plugins.enrichPluginURLs([plugin])[0]
async function getEnrichedPluginUrls() {
const enriched = (await plugins.enrichPluginURLs([plugin]))[0]
return {
jsUrl: enriched.jsUrl!,
iconUrl: enriched.iconUrl!,
@ -19,9 +19,9 @@ describe("plugins", () => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${plugin.name}/plugin.min.js`
)
@ -30,9 +30,9 @@ describe("plugins", () => {
)
})
it("gets url with custom S3", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
)
@ -41,9 +41,9 @@ describe("plugins", () => {
)
})
it("gets url with cloudfront + s3", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
const urls = getEnrichedPluginUrls()
const urls = await getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
@ -65,8 +65,8 @@ describe("plugins", () => {
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
@ -78,8 +78,8 @@ describe("plugins", () => {
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
@ -91,8 +91,8 @@ describe("plugins", () => {
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
await testEnv.withTenant(async tenantId => {
const urls = await getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(

View File

@ -1,6 +1,15 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import {
HeadObjectCommandOutput,
PutObjectCommandInput,
S3,
S3ClientConfig,
GetObjectCommand,
_Object as S3Object,
} from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import stream, { Readable } from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
@ -13,8 +22,8 @@ import { bucketTTLConfig, budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
import fsp from "fs/promises"
import { HeadObjectOutput } from "aws-sdk/clients/s3"
import { ReadableStream } from "stream/web"
import { NodeJsClient } from "@smithy/types"
const streamPipeline = promisify(stream.pipeline)
// use this as a temporary store of buckets that are being created
@ -84,26 +93,24 @@ export function sanitizeBucket(input: string) {
* @constructor
*/
export function ObjectStore(
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) {
const config: AWS.S3.ClientConfiguration = {
s3ForcePathStyle: true,
signatureVersion: "v4",
apiVersion: "2006-03-01",
accessKeyId: env.MINIO_ACCESS_KEY,
secretAccessKey: env.MINIO_SECRET_KEY,
const config: S3ClientConfig = {
forcePathStyle: true,
credentials: {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
},
region: env.AWS_REGION,
}
if (bucket) {
config.params = {
Bucket: sanitizeBucket(bucket),
}
}
// for AWS Credentials using temporary session token
if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
config.sessionToken = env.AWS_SESSION_TOKEN
config.credentials = {
accessKeyId: env.MINIO_ACCESS_KEY!,
secretAccessKey: env.MINIO_SECRET_KEY!,
sessionToken: env.AWS_SESSION_TOKEN,
}
}
// custom S3 is in use i.e. minio
@ -113,13 +120,13 @@ export function ObjectStore(
// Normally a signed url will need to be generated with a specified host in mind.
// To support dynamic hosts, e.g. some unknown self-hosted installation url,
// use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
config.endpoint = "minio-service"
config.endpoint = "http://minio-service"
} else {
config.endpoint = env.MINIO_URL
}
}
return new AWS.S3(config)
return new S3(config) as NodeJsClient<S3>
}
/**
@ -132,26 +139,25 @@ export async function createBucketIfNotExists(
): Promise<{ created: boolean; exists: boolean }> {
bucketName = sanitizeBucket(bucketName)
try {
await client
.headBucket({
Bucket: bucketName,
})
.promise()
await client.headBucket({
Bucket: bucketName,
})
return { created: false, exists: true }
} catch (err: any) {
const promises: any = STATE.bucketCreationPromises
const doesntExist = err.statusCode === 404,
noAccess = err.statusCode === 403
const statusCode = err.statusCode || err.$response?.statusCode
const promises: Record<string, Promise<any> | undefined> =
STATE.bucketCreationPromises
const doesntExist = statusCode === 404,
noAccess = statusCode === 403
if (promises[bucketName]) {
await promises[bucketName]
return { created: false, exists: true }
} else if (doesntExist || noAccess) {
if (doesntExist) {
promises[bucketName] = client
.createBucket({
Bucket: bucketName,
})
.promise()
promises[bucketName] = client.createBucket({
Bucket: bucketName,
})
await promises[bucketName]
delete promises[bucketName]
return { created: true, exists: false }
@ -180,25 +186,26 @@ export async function upload({
const fileBytes = path ? (await fsp.open(path)).createReadStream() : body
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
let contentType = type
if (!contentType) {
contentType = extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
}
const config: any = {
const finalContentType = contentType
? contentType
: extension
? CONTENT_TYPE_MAP[extension.toLowerCase()]
: CONTENT_TYPE_MAP.txt
const config: PutObjectCommandInput = {
// windows file paths need to be converted to forward slashes for s3
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filename),
Body: fileBytes,
ContentType: contentType,
Body: fileBytes as stream.Readable | Buffer,
ContentType: finalContentType,
}
if (metadata && typeof metadata === "object") {
// remove any nullish keys from the metadata object, as these may be considered invalid
@ -207,10 +214,15 @@ export async function upload({
delete metadata[key]
}
}
config.Metadata = metadata
config.Metadata = metadata as Record<string, string>
}
return objectStore.upload(config).promise()
const upload = new Upload({
client: objectStore,
params: config,
})
return upload.done()
}
/**
@ -229,12 +241,12 @@ export async function streamUpload({
throw new Error("Stream to upload is invalid/undefined")
}
const extension = filename.split(".").pop()
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
if (ttl && bucketCreated.created) {
let ttlConfig = bucketTTLConfig(bucketName, ttl)
await objectStore.putBucketLifecycleConfiguration(ttlConfig).promise()
await objectStore.putBucketLifecycleConfiguration(ttlConfig)
}
// Set content type for certain known extensions
@ -267,13 +279,15 @@ export async function streamUpload({
...extra,
}
const details = await objectStore.upload(params).promise()
const headDetails = await objectStore
.headObject({
Bucket: bucket,
Key: objKey,
})
.promise()
const upload = new Upload({
client: objectStore,
params,
})
const details = await upload.done()
const headDetails = await objectStore.headObject({
Bucket: bucket,
Key: objKey,
})
return {
...details,
ContentLength: headDetails.ContentLength,
@ -284,35 +298,46 @@ export async function streamUpload({
* retrieves the contents of a file from the object store, if it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export async function retrieve(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
export async function retrieve(
bucketName: string,
filepath: string
): Promise<string | stream.Readable> {
const objectStore = ObjectStore()
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(filepath),
}
const response: any = await objectStore.getObject(params).promise()
// currently these are all strings
const response = await objectStore.getObject(params)
if (!response.Body) {
throw new Error("Unable to retrieve object")
}
if (STRING_CONTENT_TYPES.includes(response.ContentType)) {
return response.Body.toString("utf8")
return response.Body.transformToString()
} else {
return response.Body
// this typecast is required - for some reason the AWS SDK V3 defines its own "ReadableStream"
// found in the @aws-sdk/types package which is meant to be the Node type, but due to the SDK
// supporting both the browser and Nodejs it is a polyfill which causes a type clash with Node.
const readableStream =
response.Body.transformToWebStream() as ReadableStream
return stream.Readable.fromWeb(readableStream)
}
}
export async function listAllObjects(bucketName: string, path: string) {
const objectStore = ObjectStore(bucketName)
export async function listAllObjects(
bucketName: string,
path: string
): Promise<S3Object[]> {
const objectStore = ObjectStore()
const list = (params: ListParams = {}) => {
return objectStore
.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
.promise()
return objectStore.listObjectsV2({
...params,
Bucket: sanitizeBucket(bucketName),
Prefix: sanitizeKey(path),
})
}
let isTruncated = false,
token,
objects: AWS.S3.Types.Object[] = []
objects: S3Object[] = []
do {
let params: ListParams = {}
if (token) {
@ -331,18 +356,19 @@ export async function listAllObjects(bucketName: string, path: string) {
/**
* Generate a presigned url with a default TTL of 1 hour
*/
export function getPresignedUrl(
export async function getPresignedUrl(
bucketName: string,
key: string,
durationSeconds = 3600
) {
const objectStore = ObjectStore(bucketName, { presigning: true })
const objectStore = ObjectStore({ presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(key),
Expires: durationSeconds,
}
const url = objectStore.getSignedUrl("getObject", params)
const url = await getSignedUrl(objectStore, new GetObjectCommand(params), {
expiresIn: durationSeconds,
})
if (!env.MINIO_ENABLED) {
// return the full URL to the client
@ -366,7 +392,11 @@ export async function retrieveToTmp(bucketName: string, filepath: string) {
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
const outputPath = join(budibaseTempDir(), v4())
fs.writeFileSync(outputPath, data)
if (data instanceof stream.Readable) {
data.pipe(fs.createWriteStream(outputPath))
} else {
fs.writeFileSync(outputPath, data)
}
return outputPath
}
@ -408,17 +438,17 @@ export async function retrieveDirectory(bucketName: string, path: string) {
* Delete a single file.
*/
export async function deleteFile(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
Key: sanitizeKey(filepath),
}
return objectStore.deleteObject(params).promise()
return objectStore.deleteObject(params)
}
export async function deleteFiles(bucketName: string, filepaths: string[]) {
const objectStore = ObjectStore(bucketName)
const objectStore = ObjectStore()
await createBucketIfNotExists(objectStore, bucketName)
const params = {
Bucket: bucketName,
@ -426,7 +456,7 @@ export async function deleteFiles(bucketName: string, filepaths: string[]) {
Objects: filepaths.map((path: any) => ({ Key: sanitizeKey(path) })),
},
}
return objectStore.deleteObjects(params).promise()
return objectStore.deleteObjects(params)
}
/**
@ -438,13 +468,13 @@ export async function deleteFolder(
): Promise<any> {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const listParams = {
Bucket: bucketName,
Prefix: folder,
}
const existingObjectsResponse = await client.listObjects(listParams).promise()
const existingObjectsResponse = await client.listObjects(listParams)
if (existingObjectsResponse.Contents?.length === 0) {
return
}
@ -459,7 +489,7 @@ export async function deleteFolder(
deleteParams.Delete.Objects.push({ Key: content.Key })
})
const deleteResponse = await client.deleteObjects(deleteParams).promise()
const deleteResponse = await client.deleteObjects(deleteParams)
// can only empty 1000 items at once
if (deleteResponse.Deleted?.length === 1000) {
return deleteFolder(bucketName, folder)
@ -534,29 +564,33 @@ export async function getReadStream(
): Promise<Readable> {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const client = ObjectStore(bucketName)
const client = ObjectStore()
const params = {
Bucket: bucketName,
Key: path,
}
return client.getObject(params).createReadStream()
const response = await client.getObject(params)
if (!response.Body || !(response.Body instanceof stream.Readable)) {
throw new Error("Unable to retrieve stream - invalid response")
}
return response.Body
}
export async function getObjectMetadata(
bucket: string,
path: string
): Promise<HeadObjectOutput> {
): Promise<HeadObjectCommandOutput> {
bucket = sanitizeBucket(bucket)
path = sanitizeKey(path)
const client = ObjectStore(bucket)
const client = ObjectStore()
const params = {
Bucket: bucket,
Key: path,
}
try {
return await client.headObject(params).promise()
return await client.headObject(params)
} catch (err: any) {
throw new Error("Unable to retrieve metadata from object")
}
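
Taken together, the migration in this file follows the standard aws-sdk v2 to v3 moves: client calls return plain promises (no .promise()), streaming and multipart uploads go through the Upload helper from @aws-sdk/lib-storage, and presigning is a standalone helper rather than a client method. A minimal standalone sketch of that pattern, using hypothetical MinIO-style credentials rather than the exact Budibase wiring:

import { S3, GetObjectCommand } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

async function example() {
  const client = new S3({
    forcePathStyle: true,
    region: "eu-west-1",
    endpoint: "http://minio-service",
    credentials: { accessKeyId: "minio", secretAccessKey: "minio123" },
  })

  // v3 responses are promises directly - no .promise() chaining
  const head = await client.headObject({ Bucket: "global", Key: "settings/logoUrl" })
  console.log(head.ContentLength)

  // streaming/multipart uploads use the Upload helper instead of client.upload()
  await new Upload({
    client,
    params: { Bucket: "global", Key: "settings/logoUrl", Body: Buffer.from("hello") },
  }).done()

  // presigning is a separate helper rather than client.getSignedUrl()
  return getSignedUrl(
    client,
    new GetObjectCommand({ Bucket: "global", Key: "settings/logoUrl" }),
    { expiresIn: 3600 }
  )
}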

View File

@ -2,7 +2,10 @@ import path, { join } from "path"
import { tmpdir } from "os"
import fs from "fs"
import env from "../environment"
import { PutBucketLifecycleConfigurationRequest } from "aws-sdk/clients/s3"
import {
LifecycleRule,
PutBucketLifecycleConfigurationCommandInput,
} from "@aws-sdk/client-s3"
import * as objectStore from "./objectStore"
import {
AutomationAttachment,
@ -43,8 +46,8 @@ export function budibaseTempDir() {
export const bucketTTLConfig = (
bucketName: string,
days: number
): PutBucketLifecycleConfigurationRequest => {
const lifecycleRule = {
): PutBucketLifecycleConfigurationCommandInput => {
const lifecycleRule: LifecycleRule = {
ID: `${bucketName}-ExpireAfter${days}days`,
Prefix: "",
Status: "Enabled",

View File

@ -199,6 +199,12 @@ class InMemoryQueue implements Partial<Queue> {
return this as unknown as Queue
}
off(event: string, callback: (...args: any[]) => void): Queue {
// @ts-expect-error - this callback can be one of many types
this._emitter.off(event, callback)
return this as unknown as Queue
}
async count() {
return this._messages.length
}

View File

@ -264,7 +264,9 @@ export class UserDB {
const creatorsChange =
(await isCreator(dbUser)) !== (await isCreator(user)) ? 1 : 0
return UserDB.quotas.addUsers(change, creatorsChange, async () => {
await validateUniqueUser(email, tenantId)
if (!opts.isAccountHolder) {
await validateUniqueUser(email, tenantId)
}
let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
// don't allow a user to update its own roles/perms
@ -569,6 +571,7 @@ export class UserDB {
hashPassword: opts?.hashPassword,
requirePassword: opts?.requirePassword,
skipPasswordValidation: opts?.skipPasswordValidation,
isAccountHolder: true,
})
}

View File

@ -1,25 +1,25 @@
<script>
<script lang="ts">
import "@spectrum-css/actionbutton/dist/index-vars.css"
import Tooltip from "../Tooltip/Tooltip.svelte"
import { fade } from "svelte/transition"
import { hexToRGBA } from "../helpers"
export let quiet = false
export let selected = false
export let disabled = false
export let icon = ""
export let size = "M"
export let active = false
export let fullWidth = false
export let noPadding = false
export let tooltip = ""
export let accentColor = null
export let quiet: boolean = false
export let selected: boolean = false
export let disabled: boolean = false
export let icon: string = ""
export let size: "S" | "M" | "L" = "M"
export let active: boolean = false
export let fullWidth: boolean = false
export let noPadding: boolean = false
export let tooltip: string = ""
export let accentColor: string | null = null
let showTooltip = false
$: accentStyle = getAccentStyle(accentColor)
const getAccentStyle = color => {
const getAccentStyle = (color: string | null) => {
if (!color) {
return ""
}

View File

@ -93,7 +93,10 @@ const handleMouseDown = (e: MouseEvent) => {
// Handle iframe clicks by detecting a loss of focus on the main window
const handleBlur = () => {
if (document.activeElement?.tagName === "IFRAME") {
if (
document.activeElement &&
["IFRAME", "BODY"].includes(document.activeElement.tagName)
) {
handleClick(
new MouseEvent("click", { relatedTarget: document.activeElement })
)

View File

@ -1,7 +1,7 @@
<script>
<script lang="ts">
import "@spectrum-css/divider/dist/index-vars.css"
export let size = "M"
export let size: "S" | "M" | "L" = "M"
export let vertical = false
export let noMargin = false

View File

@ -1,18 +1,18 @@
<script>
<script lang="ts">
import "@spectrum-css/link/dist/index-vars.css"
import { createEventDispatcher } from "svelte"
import Tooltip from "../Tooltip/Tooltip.svelte"
export let href = "#"
export let size = "M"
export let quiet = false
export let primary = false
export let secondary = false
export let overBackground = false
export let target = undefined
export let download = undefined
export let disabled = false
export let tooltip = null
export let href: string | null = "#"
export let size: "S" | "M" | "L" = "M"
export let quiet: boolean = false
export let primary: boolean = false
export let secondary: boolean = false
export let overBackground: boolean = false
export let target: string | undefined = undefined
export let download: boolean | undefined = undefined
export let disabled: boolean = false
export let tooltip: string | null = null
const dispatch = createEventDispatcher()

View File

@ -1,15 +1,15 @@
<script>
<script lang="ts">
import Icon from "../Icon/Icon.svelte"
import StatusLight from "../StatusLight/StatusLight.svelte"
export let icon = null
export let iconColor = null
export let title = null
export let subtitle = null
export let url = null
export let hoverable = false
export let showArrow = false
export let selected = false
export let icon: string | undefined = undefined
export let iconColor: string | undefined = undefined
export let title: string | undefined = undefined
export let subtitle: string | undefined = undefined
export let url: string | undefined = undefined
export let hoverable: boolean = false
export let showArrow: boolean = false
export let selected: boolean = false
</script>
<a

View File

@ -1,20 +1,29 @@
<script>
import { ActionButton, List, ListItem, Button } from "@budibase/bbui"
import DetailPopover from "@/components/common/DetailPopover.svelte"
import { screenStore, appStore } from "@/stores/builder"
<script lang="ts">
import { Button } from "@budibase/bbui"
import ScreensPopover from "@/components/common/ScreensPopover.svelte"
import { screenStore } from "@/stores/builder"
import { getContext, createEventDispatcher } from "svelte"
const { datasource } = getContext("grid")
import type { Screen, ScreenUsage } from "@budibase/types"
const dispatch = createEventDispatcher()
let popover
const { datasource }: { datasource: any } = getContext("grid")
let popover: any
$: ds = $datasource
$: resourceId = ds?.type === "table" ? ds.tableId : ds?.id
$: connectedScreens = findConnectedScreens($screenStore.screens, resourceId)
$: screenCount = connectedScreens.length
$: screenUsage = connectedScreens.map(
(screen: Screen): ScreenUsage => ({
url: screen.routing?.route,
_id: screen._id!,
})
)
const findConnectedScreens = (screens, resourceId) => {
const findConnectedScreens = (
screens: Screen[],
resourceId: string
): Screen[] => {
return screens.filter(screen => {
return JSON.stringify(screen).includes(`"${resourceId}"`)
})
@ -26,34 +35,16 @@
}
</script>
<DetailPopover title="Screens" bind:this={popover}>
<svelte:fragment slot="anchor" let:open>
<ActionButton
icon="WebPage"
selected={open || screenCount}
quiet
accentColor="#364800"
>
Screens{screenCount ? `: ${screenCount}` : ""}
</ActionButton>
</svelte:fragment>
{#if !connectedScreens.length}
There aren't any screens connected to this data.
{:else}
The following screens are connected to this data.
<List>
{#each connectedScreens as screen}
<ListItem
title={screen.routing.route}
url={`/builder/app/${$appStore.appId}/design/${screen._id}`}
showArrow
/>
{/each}
</List>
{/if}
<div>
<ScreensPopover
bind:this={popover}
screens={screenUsage}
icon="WebPage"
accentColor="#364800"
showCount
>
<svelte:fragment slot="footer">
<Button secondary icon="WebPage" on:click={generateScreen}>
Generate app screen
</Button>
</div>
</DetailPopover>
</svelte:fragment>
</ScreensPopover>

View File

@ -151,6 +151,8 @@
const screenCount = affectedScreens.length
let message = `Removing ${source?.name} `
let initialLength = message.length
const hasChanged = () => message.length !== initialLength
if (sourceType === SourceType.TABLE) {
const views = "views" in source ? Object.values(source?.views ?? []) : []
message += `will delete its data${
@ -169,10 +171,10 @@
initialLength !== message.length
? ", and break connected screens:"
: "will break connected screens:"
} else {
} else if (hasChanged()) {
message += "."
}
return message.length !== initialLength ? message : ""
return hasChanged() ? message : ""
}
</script>

View File

@ -1,3 +1,26 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 8 8" width="8" height="8">
<circle cx="4" cy="4" r="4" stroke-width="0" fill="currentColor" />
<script lang="ts">
export let color = "currentColor"
export let size: "S" | "M" = "M"
const sizes = {
S: 6,
M: 8,
}
$: sizePx = sizes[size]
</script>
<svg
xmlns="http://www.w3.org/2000/svg"
viewBox={`0 0 ${sizePx} ${sizePx}`}
width={`${sizePx}`}
height={`${sizePx}`}
>
<circle
cx={sizePx / 2}
cy={sizePx / 2}
r={sizePx / 2}
stroke-width="0"
fill={color}
/>
</svg>

Size before: 157 B  |  Size after: 417 B

View File

@ -0,0 +1,58 @@
<script lang="ts">
import {
List,
ListItem,
ActionButton,
PopoverAlignment,
} from "@budibase/bbui"
import DetailPopover from "@/components/common/DetailPopover.svelte"
import { appStore } from "@/stores/builder"
import type { ScreenUsage } from "@budibase/types"
export let screens: ScreenUsage[] = []
export let icon = "DeviceDesktop"
export let accentColor: string | null | undefined = null
export let showCount = false
export let align = PopoverAlignment.Left
let popover: any
export function show() {
popover?.show()
}
export function hide() {
popover?.hide()
}
</script>
<DetailPopover title="Screens" bind:this={popover} {align}>
<svelte:fragment slot="anchor" let:open>
<ActionButton
{icon}
quiet
selected={open || !!(showCount && screens.length)}
{accentColor}
on:click={show}
>
Screens{showCount && screens.length ? `: ${screens.length}` : ""}
</ActionButton>
</svelte:fragment>
{#if !screens.length}
There aren't any screens connected to this data.
{:else}
The following screens are connected to this data.
<List>
{#each screens as screen}
<ListItem
title={screen.url}
url={`/builder/app/${$appStore.appId}/design/${screen._id}`}
showArrow
/>
{/each}
</List>
{/if}
<slot name="footer" />
</DetailPopover>

View File

@ -26,6 +26,7 @@
export let contextAccess = null
let highlightType
let domElement
$: highlightedProp = $builderStore.highlightedSetting
$: allBindings = getAllBindings(
@ -36,10 +37,9 @@
)
$: safeValue = getSafeValue(value, defaultValue, allBindings)
$: replaceBindings = val => readableToRuntimeBinding(allBindings, val)
$: if (value) {
highlightType =
highlightedProp?.key === key ? `highlighted-${highlightedProp?.type}` : ""
}
$: isHighlighted = highlightedProp?.key === key
$: highlightType = isHighlighted ? `highlighted-${highlightedProp?.type}` : ""
$: highlightedProp && isHighlighted && scrollToElement(domElement)
const getAllBindings = (
bindings,
@ -108,9 +108,17 @@
? defaultValue
: enriched
}
function scrollToElement(element) {
element?.scrollIntoView({
behavior: "smooth",
block: "center",
})
}
</script>
<div
bind:this={domElement}
id={`${key}-prop-control-wrap`}
class={`property-control ${highlightType}`}
class:wide={!label || labelHidden || wide === true}

View File

@ -0,0 +1,33 @@
<script lang="ts">
import { onMount } from "svelte"
import { screenStore } from "@/stores/builder"
import ScreensPopover from "@/components/common/ScreensPopover.svelte"
import type { ScreenUsage } from "@budibase/types"
export let sourceId: string
let screens: ScreenUsage[] = []
let popover: any
export function show() {
popover?.show()
}
export function hide() {
popover?.hide()
}
onMount(async () => {
let response = await screenStore.usageInScreens(sourceId)
screens = response?.screens
})
</script>
<ScreensPopover
bind:this={popover}
{screens}
icon="WebPage"
accentColor="#364800"
showCount
/>

View File

@ -23,6 +23,7 @@
import ExtraQueryConfig from "./ExtraQueryConfig.svelte"
import QueryViewerSavePromptModal from "./QueryViewerSavePromptModal.svelte"
import { Utils } from "@budibase/frontend-core"
import ConnectedQueryScreens from "./ConnectedQueryScreens.svelte"
export let query
let queryHash
@ -170,6 +171,7 @@
</Body>
</div>
<div class="controls">
<ConnectedQueryScreens sourceId={query._id} />
<Button disabled={loading} on:click={runQuery} overBackground>
<Icon size="S" name="Play" />
Run query</Button
@ -384,6 +386,8 @@
}
.controls {
display: flex;
align-items: center;
flex-shrink: 0;
}

View File

@ -49,6 +49,7 @@
runtimeToReadableMap,
toBindingsArray,
} from "@/dataBinding"
import ConnectedQueryScreens from "./ConnectedQueryScreens.svelte"
export let queryId
@ -502,9 +503,12 @@
on:change={() => (query.flags.urlName = false)}
on:save={saveQuery}
/>
<div class="access">
<Label>Access</Label>
<AccessLevelSelect {query} {saveId} />
<div class="controls">
<ConnectedQueryScreens sourceId={query._id} />
<div class="access">
<Label>Access</Label>
<AccessLevelSelect {query} {saveId} />
</div>
</div>
</div>
<div class="url-block">
@ -825,6 +829,12 @@
justify-content: space-between;
}
.controls {
display: flex;
align-items: center;
gap: var(--spacing-m);
}
.access {
display: flex;
gap: var(--spacing-m);

View File

@ -3,6 +3,8 @@
import AppPreview from "./AppPreview.svelte"
import { screenStore, appStore } from "@/stores/builder"
import UndoRedoControl from "@/components/common/UndoRedoControl.svelte"
import ScreenErrorsButton from "./ScreenErrorsButton.svelte"
import { Divider } from "@budibase/bbui"
</script>
<div class="app-panel">
@ -15,6 +17,8 @@
{#if $appStore.clientFeatures.devicePreview}
<DevicePreviewSelect />
{/if}
<Divider vertical />
<ScreenErrorsButton />
</div>
</div>
<div class="content">
@ -62,7 +66,7 @@
flex-direction: row;
justify-content: flex-start;
align-items: center;
gap: var(--spacing-xl);
gap: var(--spacing-l);
}
.content {
flex: 1 1 auto;

View File

@ -183,16 +183,6 @@
toggleAddComponent()
} else if (type === "highlight-setting") {
builderStore.highlightSetting(data.setting, "error")
// Also scroll setting into view
const selector = `#${data.setting}-prop-control`
const element = document.querySelector(selector)?.parentElement
if (element) {
element.scrollIntoView({
behavior: "smooth",
block: "center",
})
}
} else if (type === "eject-block") {
const { id, definition } = data
await componentStore.handleEjectBlock(id, definition)

View File

@ -0,0 +1,124 @@
<script lang="ts">
import type { UIComponentError } from "@budibase/types"
import {
builderStore,
componentStore,
screenComponentErrorList,
screenComponentsList,
} from "@/stores/builder"
import {
AbsTooltip,
ActionButton,
Icon,
Link,
Popover,
PopoverAlignment,
TooltipPosition,
} from "@budibase/bbui"
import CircleIndicator from "@/components/common/Icons/CircleIndicator.svelte"
let button: any
let popover: any
$: hasErrors = !!$screenComponentErrorList.length
function getErrorTitle(error: UIComponentError) {
const titleParts = [
$screenComponentsList.find(c => c._id === error.componentId)!
._instanceName,
]
if (error.errorType === "setting" && error.cause === "invalid") {
titleParts.push(error.label)
}
return titleParts.join(" - ")
}
async function onErrorClick(error: UIComponentError) {
componentStore.select(error.componentId)
if (error.errorType === "setting") {
builderStore.highlightSetting(error.key, "error")
}
}
</script>
<div bind:this={button} class="error-button">
<AbsTooltip
text={!hasErrors ? "No errors found!" : ""}
position={TooltipPosition.Top}
>
<ActionButton
quiet
disabled={!hasErrors}
on:click={() => popover.show()}
size="M"
icon="Alert"
/>
{#if hasErrors}
<div class="error-indicator">
<CircleIndicator
size="S"
color="var(--spectrum-global-color-static-red-600)"
/>
</div>
{/if}
</AbsTooltip>
</div>
<Popover
bind:this={popover}
anchor={button}
align={PopoverAlignment.Right}
maxWidth={400}
showPopover={hasErrors}
>
<div class="error-popover">
{#each $screenComponentErrorList as error}
<div class="error">
<Icon
name="Alert"
color="var(--spectrum-global-color-static-red-600)"
size="S"
/>
<div>
<Link overBackground on:click={() => onErrorClick(error)}>
{getErrorTitle(error)}
</Link>:
<!-- eslint-disable-next-line svelte/no-at-html-tags-->
{@html error.message}
</div>
</div>
{/each}
</div>
</Popover>
<style>
.error-button {
position: relative;
}
.error-indicator {
position: absolute;
top: 0;
right: 8px;
}
.error-popover {
display: flex;
flex-direction: column;
}
.error-popover .error {
display: inline-flex;
flex-direction: row;
padding: var(--spacing-m);
gap: var(--spacing-s);
align-items: start;
}
.error-popover .error:not(:last-child) {
border-bottom: 1px solid var(--spectrum-global-color-gray-300);
}
.error-popover .error :global(mark) {
background: unset;
color: unset;
}
.error-popover .error :global(.spectrum-Link) {
display: inline-block;
}
</style>

View File

@ -21,7 +21,7 @@ import {
tables,
componentTreeNodesStore,
builderStore,
screenComponents,
screenComponentsList,
} from "@/stores/builder"
import { buildFormSchema, getSchemaForDatasource } from "@/dataBinding"
import {
@ -450,7 +450,7 @@ export class ComponentStore extends BudiStore<ComponentState> {
}
const componentName = getSequentialName(
get(screenComponents),
get(screenComponentsList),
`New ${definition.friendlyName || definition.name}`,
{
getName: c => c._instanceName,

View File

@ -17,9 +17,9 @@ import { deploymentStore } from "./deployments.js"
import { contextMenuStore } from "./contextMenu.js"
import { snippets } from "./snippets"
import {
screenComponents,
screenComponentsList,
screenComponentErrors,
findComponentsBySettingsType,
screenComponentErrorList,
} from "./screenComponent"
// Backend
@ -72,9 +72,9 @@ export {
snippets,
rowActions,
appPublished,
screenComponents,
screenComponentsList,
screenComponentErrors,
findComponentsBySettingsType,
screenComponentErrorList,
}
export const reset = () => {

View File

@ -4,11 +4,10 @@ import { selectedScreen } from "./screens"
import { viewsV2 } from "./viewsV2"
import {
UIDatasourceType,
Screen,
Component,
UIComponentError,
ScreenProps,
ComponentDefinition,
DependsOnComponentSetting,
} from "@budibase/types"
import { queries } from "./queries"
import { views } from "./views"
@ -21,14 +20,11 @@ import { getSettingsDefinition } from "@budibase/frontend-core"
function reduceBy<TItem extends {}, TKey extends keyof TItem>(
key: TKey,
list: TItem[]
): Record<string, any> {
return list.reduce(
(result, item) => ({
...result,
[item[key] as string]: item,
}),
{}
)
): Record<string, TItem> {
return list.reduce<Record<string, TItem>>((result, item) => {
result[item[key] as string] = item
return result
}, {})
}
const friendlyNameByType: Partial<Record<UIDatasourceType, string>> = {
@ -46,7 +42,18 @@ const validationKeyByType: Record<UIDatasourceType, string | null> = {
jsonarray: "value",
}
export const screenComponentErrors = derived(
export const screenComponentsList = derived(
[selectedScreen],
([$selectedScreen]): Component[] => {
if (!$selectedScreen) {
return []
}
return findAllComponents($selectedScreen.props)
}
)
export const screenComponentErrorList = derived(
[selectedScreen, tables, views, viewsV2, queries, componentStore],
([
$selectedScreen,
@ -55,9 +62,9 @@ export const screenComponentErrors = derived(
$viewsV2,
$queries,
$componentStore,
]): Record<string, UIComponentError[]> => {
]): UIComponentError[] => {
if (!$selectedScreen) {
return {}
return []
}
const datasources = {
@ -69,116 +76,152 @@ export const screenComponentErrors = derived(
const { components: definitions } = $componentStore
const errors = {
...getInvalidDatasources($selectedScreen, datasources, definitions),
...getMissingAncestors($selectedScreen, definitions),
...getMissingRequiredSettings($selectedScreen, definitions),
const errors: UIComponentError[] = []
function checkComponentErrors(component: Component, ancestors: string[]) {
errors.push(...getInvalidDatasources(component, datasources, definitions))
errors.push(...getMissingRequiredSettings(component, definitions))
errors.push(...getMissingAncestors(component, definitions, ancestors))
for (const child of component._children || []) {
checkComponentErrors(child, [...ancestors, component._component])
}
}
checkComponentErrors($selectedScreen?.props, [])
return errors
}
)
function getInvalidDatasources(
screen: Screen,
component: Component,
datasources: Record<string, any>,
definitions: Record<string, ComponentDefinition>
) {
const result: Record<string, UIComponentError[]> = {}
for (const { component, setting } of findComponentsBySettingsType(
screen,
["table", "dataSource"],
definitions
)) {
const componentSettings = component[setting.key]
if (!componentSettings) {
continue
}
const result: UIComponentError[] = []
const { label } = componentSettings
const type = componentSettings.type as UIDatasourceType
const datasourceTypes = ["table", "dataSource"]
const validationKey = validationKeyByType[type]
if (!validationKey) {
continue
}
const possibleSettings = definitions[component._component]?.settings?.filter(
s => datasourceTypes.includes(s.type)
)
if (possibleSettings) {
for (const setting of possibleSettings) {
const componentSettings = component[setting.key]
if (!componentSettings) {
continue
}
const componentBindings = getBindableProperties(screen, component._id)
const { label } = componentSettings
const type = componentSettings.type as UIDatasourceType
const componentDatasources = {
...reduceBy("rowId", bindings.extractRelationships(componentBindings)),
...reduceBy("value", bindings.extractFields(componentBindings)),
...reduceBy("value", bindings.extractJSONArrayFields(componentBindings)),
}
const validationKey = validationKeyByType[type]
if (!validationKey) {
continue
}
const resourceId = componentSettings[validationKey]
if (!{ ...datasources, ...componentDatasources }[resourceId]) {
const friendlyTypeName = friendlyNameByType[type] ?? type
result[component._id!] = [
{
const componentBindings = getBindableProperties(screen, component._id)
const componentDatasources = {
...reduceBy("rowId", bindings.extractRelationships(componentBindings)),
...reduceBy("value", bindings.extractFields(componentBindings)),
...reduceBy(
"value",
bindings.extractJSONArrayFields(componentBindings)
),
}
const resourceId = componentSettings[validationKey]
if (!{ ...datasources, ...componentDatasources }[resourceId]) {
const friendlyTypeName = friendlyNameByType[type] ?? type
result.push({
componentId: component._id!,
key: setting.key,
label: setting.label || setting.key,
message: `The ${friendlyTypeName} named "${label}" could not be found`,
errorType: "setting",
},
]
cause: "invalid",
})
}
}
}
return result
}
function parseDependsOn(dependsOn: DependsOnComponentSetting | undefined): {
key?: string
value?: string
} {
if (dependsOn === undefined) {
return {}
}
if (typeof dependsOn === "string") {
return { key: dependsOn }
}
return { key: dependsOn.setting, value: dependsOn.value }
}
function getMissingRequiredSettings(
screen: Screen,
component: Component,
definitions: Record<string, ComponentDefinition>
) {
const allComponents = findAllComponents(screen.props) as Component[]
const result: UIComponentError[] = []
const result: Record<string, UIComponentError[]> = {}
for (const component of allComponents) {
const definition = definitions[component._component]
const definition = definitions[component._component]
const settings = getSettingsDefinition(definition)
const settings = getSettingsDefinition(definition)
const missingRequiredSettings = settings.filter((setting: any) => {
let empty =
component[setting.key] == null || component[setting.key] === ""
let missing = setting.required && empty
const missingRequiredSettings = settings.filter(setting => {
let empty = component[setting.key] == null || component[setting.key] === ""
let missing = setting.required && empty
// Check if this setting depends on another, as it may not be required
if (setting.dependsOn) {
const dependsOnKey = setting.dependsOn.setting || setting.dependsOn
const dependsOnValue = setting.dependsOn.value
const realDependentValue = component[dependsOnKey]
// Check if this setting depends on another, as it may not be required
if (setting.dependsOn) {
const { key: dependsOnKey, value: dependsOnValue } = parseDependsOn(
setting.dependsOn
)
const realDependentValue =
component[dependsOnKey as keyof typeof component]
const sectionDependsOnKey =
setting.sectionDependsOn?.setting || setting.sectionDependsOn
const sectionDependsOnValue = setting.sectionDependsOn?.value
const sectionRealDependentValue = component[sectionDependsOnKey]
const { key: sectionDependsOnKey, value: sectionDependsOnValue } =
parseDependsOn(setting.sectionDependsOn)
const sectionRealDependentValue =
component[sectionDependsOnKey as keyof typeof component]
if (dependsOnValue == null && realDependentValue == null) {
return false
}
if (dependsOnValue != null && dependsOnValue !== realDependentValue) {
return false
}
if (
sectionDependsOnValue != null &&
sectionDependsOnValue !== sectionRealDependentValue
) {
return false
}
if (dependsOnValue == null && realDependentValue == null) {
return false
}
if (dependsOnValue != null && dependsOnValue !== realDependentValue) {
return false
}
return missing
})
if (
sectionDependsOnValue != null &&
sectionDependsOnValue !== sectionRealDependentValue
) {
return false
}
}
if (missingRequiredSettings?.length) {
result[component._id!] = missingRequiredSettings.map((s: any) => ({
return missing
})
if (missingRequiredSettings?.length) {
result.push(
...missingRequiredSettings.map<UIComponentError>(s => ({
componentId: component._id!,
key: s.key,
label: s.label || s.key,
message: `Add the <mark>${s.label}</mark> setting to start using your component`,
errorType: "setting",
cause: "missing",
}))
}
)
}
return result
@ -186,34 +229,31 @@ function getMissingRequiredSettings(
const BudibasePrefix = "@budibase/standard-components/"
function getMissingAncestors(
screen: Screen,
definitions: Record<string, ComponentDefinition>
) {
const result: Record<string, UIComponentError[]> = {}
component: Component,
definitions: Record<string, ComponentDefinition>,
ancestors: string[]
): UIComponentError[] {
const definition = definitions[component._component]
function checkMissingAncestors(component: Component, ancestors: string[]) {
for (const child of component._children || []) {
checkMissingAncestors(child, [...ancestors, component._component])
if (!definition?.requiredAncestors?.length) {
return []
}
const result: UIComponentError[] = []
const missingAncestors = definition.requiredAncestors.filter(
ancestor => !ancestors.includes(`${BudibasePrefix}${ancestor}`)
)
if (missingAncestors.length) {
const pluralise = (name: string) => {
return name.endsWith("s") ? `${name}'` : `${name}s`
}
const definition = definitions[component._component]
if (!definition?.requiredAncestors?.length) {
return
}
const missingAncestors = definition.requiredAncestors.filter(
ancestor => !ancestors.includes(`${BudibasePrefix}${ancestor}`)
)
if (missingAncestors.length) {
const pluralise = (name: string) => {
return name.endsWith("s") ? `${name}'` : `${name}s`
}
result[component._id!] = missingAncestors.map(ancestor => {
result.push(
...missingAncestors.map<UIComponentError>(ancestor => {
const ancestorDefinition = definitions[`${BudibasePrefix}${ancestor}`]
return {
componentId: component._id!,
message: `${pluralise(definition.name)} need to be inside a
<mark>${ancestorDefinition.name}</mark>`,
errorType: "ancestor-setting",
@ -223,59 +263,19 @@ function getMissingAncestors(
},
}
})
}
}
checkMissingAncestors(screen.props, [])
return result
}
export function findComponentsBySettingsType(
screen: Screen,
type: string | string[],
definitions: Record<string, ComponentDefinition>
) {
const typesArray = Array.isArray(type) ? type : [type]
const result: {
component: Component
setting: {
type: string
key: string
}
}[] = []
function recurseFieldComponentsInChildren(component: ScreenProps) {
if (!component) {
return
}
const definition = definitions[component._component]
const setting = definition?.settings?.find((s: any) =>
typesArray.includes(s.type)
)
if (setting) {
result.push({
component,
setting: { type: setting.type, key: setting.key },
})
}
component._children?.forEach(child => {
recurseFieldComponentsInChildren(child)
})
}
recurseFieldComponentsInChildren(screen?.props)
return result
}
export const screenComponents = derived(
[selectedScreen],
([$selectedScreen]) => {
if (!$selectedScreen) {
return []
}
return findAllComponents($selectedScreen.props) as Component[]
export const screenComponentErrors = derived(
[screenComponentErrorList],
([$list]): Record<string, UIComponentError[]> => {
return $list.reduce<Record<string, UIComponentError[]>>((obj, error) => {
obj[error.componentId] ??= []
obj[error.componentId].push(error)
return obj
}, {})
}
)
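
The parseDependsOn helper introduced above normalises the two shapes a setting's dependsOn can take. A small standalone sketch (the DependsOnComponentSetting union is assumed to be roughly string | { setting, value }):

// standalone copy for illustration - the real helper is module-private above
type DependsOn = string | { setting: string; value?: string } | undefined

function parseDependsOn(dependsOn: DependsOn): { key?: string; value?: string } {
  if (dependsOn === undefined) return {}
  if (typeof dependsOn === "string") return { key: dependsOn }
  return { key: dependsOn.setting, value: dependsOn.value }
}

parseDependsOn("dataSource")                        // { key: "dataSource" }
parseDependsOn({ setting: "type", value: "table" }) // { key: "type", value: "table" }
parseDependsOn(undefined)                           // {}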

View File

@ -3,6 +3,7 @@ import fs from "fs"
import { join } from "path"
import { TEMP_DIR, MINIO_DIR } from "./utils"
import { progressBar } from "../utils"
import * as stream from "node:stream"
const {
ObjectStoreBuckets,
@ -20,15 +21,21 @@ export async function exportObjects() {
let fullList: any[] = []
let errorCount = 0
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
const client = ObjectStore()
try {
await client.headBucket().promise()
await client.headBucket({
Bucket: bucket,
})
} catch (err) {
errorCount++
continue
}
const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
const list = await client.listObjectsV2({
Bucket: bucket,
})
fullList = fullList.concat(
list.Contents?.map(el => ({ ...el, bucket })) || []
)
}
if (errorCount === bucketList.length) {
throw new Error("Unable to access MinIO/S3 - check environment config.")
@ -43,7 +50,13 @@ export async function exportObjects() {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
if (data instanceof stream.Readable) {
data.pipe(
fs.createWriteStream(join(path, object.bucket, ...possiblePath))
)
} else {
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
}
bar.update(++count)
}
bar.stop()
@ -60,7 +73,7 @@ export async function importObjects() {
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
const client = ObjectStore()
await createBucketIfNotExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length

@ -1 +1 @@
Subproject commit 8cbaa80a9cc1152c6cd53722e64da7d824da6e16
Subproject commit eb96d8b2f2029033b0f758078ed30c888e8fb249

View File

@ -50,6 +50,10 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@aws-sdk/client-dynamodb": "3.709.0",
"@aws-sdk/client-s3": "3.709.0",
"@aws-sdk/lib-dynamodb": "3.709.0",
"@aws-sdk/s3-request-presigner": "3.709.0",
"@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "*",
"@budibase/client": "*",
@ -70,7 +74,6 @@
"airtable": "0.12.2",
"arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1692.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bson": "^6.9.0",

View File

@ -6,8 +6,8 @@ import {
} from "../../db/views/staticViews"
import {
backupClientLibrary,
createApp,
deleteApp,
uploadAppFiles,
deleteAppFiles,
revertClientLibrary,
updateClientLibrary,
} from "../../utilities/fileSystem"
@ -228,7 +228,7 @@ export async function fetchAppPackage(
const license = await licensing.cache.getCachedLicense()
// Enrich plugin URLs
application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins = await objectStore.enrichPluginURLs(
application.usedPlugins
)
@ -375,9 +375,8 @@ async function performAppCreate(
const response = await db.put(newApplication, { force: true })
newApplication._rev = response.rev
/* istanbul ignore next */
if (!env.isTest()) {
await createApp(appId)
if (!env.USE_LOCAL_COMPONENT_LIBS) {
await uploadAppFiles(appId)
}
const latestMigrationId = appMigrations.getLatestEnabledMigrationId()
@ -656,7 +655,7 @@ async function destroyApp(ctx: UserCtx) {
await events.app.deleted(app)
if (!env.isTest()) {
await deleteApp(appId)
await deleteAppFiles(appId)
}
await removeAppFromUserRoles(ctx, appId)

View File

@ -18,7 +18,8 @@ import {
objectStore,
utils,
} from "@budibase/backend-core"
import AWS from "aws-sdk"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { PutObjectCommand, S3 } from "@aws-sdk/client-s3"
import fs from "fs"
import sdk from "../../../sdk"
import * as pro from "@budibase/pro"
@ -128,9 +129,9 @@ export const uploadFile = async function (
return {
size: file.size,
name: file.name,
url: objectStore.getAppFileUrl(s3Key),
url: await objectStore.getAppFileUrl(s3Key),
extension,
key: response.Key,
key: response.Key!,
}
})
)
@ -210,11 +211,11 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
usedPlugins: plugins,
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
logo:
config?.logoUrl !== ""
? objectStore.getGlobalFileUrl("settings", "logoUrl")
? await objectStore.getGlobalFileUrl("settings", "logoUrl")
: "",
appMigrating: needMigrations,
nonce: ctx.state.nonce,
@ -243,7 +244,7 @@ export const serveApp = async function (ctx: UserCtx<void, ServeAppResponse>) {
metaDescription: branding?.metaDescription || "",
favicon:
branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl("settings", "faviconUrl")
? await objectStore.getGlobalFileUrl("settings", "faviconUrl")
: "",
})
@ -334,16 +335,17 @@ export const getSignedUploadURL = async function (
ctx.throw(400, "bucket and key values are required")
}
try {
const s3 = new AWS.S3({
const s3 = new S3({
region: awsRegion,
endpoint: datasource?.config?.endpoint || undefined,
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
apiVersion: "2006-03-01",
signatureVersion: "v4",
credentials: {
accessKeyId: datasource?.config?.accessKeyId as string,
secretAccessKey: datasource?.config?.secretAccessKey as string,
},
})
const params = { Bucket: bucket, Key: key }
signedUrl = s3.getSignedUrl("putObject", params)
signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
if (datasource?.config?.endpoint) {
publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
} else {
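
For the presigner itself, v3 splits the flow into building a command and signing it with getSignedUrl from @aws-sdk/s3-request-presigner. A minimal sketch with static credentials and an explicit expiry (the handler above relies on the default expiry, so the expiresIn value here is an assumption for the example only):

import { S3, PutObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

const s3 = new S3({
  region: "eu-west-1",
  credentials: { accessKeyId: "example-key", secretAccessKey: "example-secret" },
})

// Returns a URL that allows a direct PUT of the given key until it expires.
async function presignUpload(bucket: string, key: string): Promise<string> {
  return getSignedUrl(s3, new PutObjectCommand({ Bucket: bucket, Key: key }), {
    expiresIn: 3600, // seconds; illustrative value
  })
}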

View File

@ -1,32 +0,0 @@
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const setup = require("./utilities")
describe("/component", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeAll(async () => {
await config.init()
})
describe("fetch definitions", () => {
it("should be able to fetch definitions", async () => {
const res = await request
.get(`/api/${config.getAppId()}/components/definitions`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body["@budibase/standard-components/container"]).toBeDefined()
})
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({
config,
method: "GET",
url: `/api/${config.getAppId()}/components/definitions`,
})
})
})
})

View File

@ -0,0 +1,62 @@
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as env from "../../../environment"
import * as setup from "./utilities"
describe("/component", () => {
let request = setup.getRequest()
let config = setup.getConfig()
afterAll(setup.afterAll)
beforeAll(async () => {
await config.init()
})
describe("fetch definitions", () => {
it("should be able to fetch definitions locally", async () => {
await env.withEnv(
{
USE_LOCAL_COMPONENT_LIBS: "1",
},
async () => {
const res = await request
.get(`/api/${config.getAppId()}/components/definitions`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(
res.body["@budibase/standard-components/container"]
).toBeDefined()
}
)
})
it("should be able to fetch definitions from object store", async () => {
await env.withEnv(
{
USE_LOCAL_COMPONENT_LIBS: "0",
},
async () => {
// init again to make an app with a real component lib
await config.init()
const res = await request
.get(`/api/${config.getAppId()}/components/definitions`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(
res.body["@budibase/standard-components/container"]
).toBeDefined()
}
)
})
it("should apply authorization to endpoint", async () => {
await checkBuilderEndpoint({
config,
method: "GET",
url: `/api/${config.getAppId()}/components/definitions`,
})
})
})
})

View File

@ -182,7 +182,7 @@ if (descriptions.length) {
},
})
await config.api.application.publish(config.getAppId())
await config.api.application.publish()
const prodQuery = await config.api.query.getProd(query._id!)
expect(prodQuery._id).toEqual(query._id)

View File

@ -8,6 +8,7 @@ import {
SourceType,
UsageInScreensResponse,
} from "@budibase/types"
import { basicDatasourcePlus } from "../../../tests/utilities/structures"
const {
basicScreen,
@ -17,7 +18,6 @@ const {
basicTable,
viewV2,
basicQuery,
basicDatasource,
} = setup.structures
describe("/screens", () => {
@ -225,7 +225,7 @@ describe("/screens", () => {
it("should find datasource/query usage", async () => {
const datasource = await config.api.datasource.create(
basicDatasource().datasource
basicDatasourcePlus().datasource
)
const query = await config.api.query.save(basicQuery(datasource._id!))
const screen = await config.api.screen.save(

View File

@ -1,12 +1,10 @@
// Directly mock the AWS SDK
jest.mock("aws-sdk", () => ({
S3: jest.fn(() => ({
getSignedUrl: jest.fn(
(operation, params) => `http://example.com/${params.Bucket}/${params.Key}`
),
upload: jest.fn(() => ({ Contents: {} })),
})),
jest.mock("@aws-sdk/s3-request-presigner", () => ({
getSignedUrl: jest.fn(() => {
return `http://example.com`
}),
}))
jest.mock("@aws-sdk/client-s3")
import { Datasource, SourceName } from "@budibase/types"
import { setEnv } from "../../../environment"
@ -77,7 +75,10 @@ describe("/static", () => {
type: "datasource",
name: "Test",
source: SourceName.S3,
config: {},
config: {
accessKeyId: "bb",
secretAccessKey: "bb",
},
},
})
})
@ -91,7 +92,7 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
expect(res.body.signedUrl).toEqual("http://example.com")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)

View File

@ -146,11 +146,12 @@ describe("test the create row action", () => {
expect(result.steps[1].outputs.row.file_attachment[0]).toHaveProperty("key")
let s3Key = result.steps[1].outputs.row.file_attachment[0].key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)
@ -217,11 +218,12 @@ describe("test the create row action", () => {
)
let s3Key = result.steps[1].outputs.row.single_file_attachment.key
const client = objectStore.ObjectStore(objectStore.ObjectStoreBuckets.APPS)
const client = objectStore.ObjectStore()
const objectData = await client
.headObject({ Bucket: objectStore.ObjectStoreBuckets.APPS, Key: s3Key })
.promise()
const objectData = await client.headObject({
Bucket: objectStore.ObjectStoreBuckets.APPS,
Key: s3Key,
})
expect(objectData).toBeDefined()
expect(objectData.ContentLength).toBeGreaterThan(0)

View File

@ -6,6 +6,7 @@ import {
} from "../../../integrations/tests/utils"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
@ -41,39 +42,34 @@ if (descriptions.length) {
})
it("should be able to execute a query", async () => {
let res = await setup.runStep(
config,
setup.actions.EXECUTE_QUERY.stepId,
{
query: { queryId: query._id },
}
)
expect(res.response).toEqual([{ a: "string", b: 1 }])
expect(res.success).toEqual(true)
const { steps } = await createAutomationBuilder(config)
.onAppAction()
.executeQuery({ query: { queryId: query._id! } })
.test({ fields: {} })
expect(steps[0].outputs.response).toEqual([{ a: "string", b: 1 }])
expect(steps[0].outputs.success).toEqual(true)
})
it("should handle a null query value", async () => {
let res = await setup.runStep(
config,
setup.actions.EXECUTE_QUERY.stepId,
{
query: null,
}
)
expect(res.response.message).toEqual("Invalid inputs")
expect(res.success).toEqual(false)
const { steps } = await createAutomationBuilder(config)
.onAppAction()
// @ts-expect-error - intentionally passing null
.executeQuery({ query: { queryId: null } })
.test({ fields: {} })
expect(steps[0].outputs.response).toStartWith("Error:")
expect(steps[0].outputs.success).toEqual(false)
})
it("should handle an error executing a query", async () => {
let res = await setup.runStep(
config,
setup.actions.EXECUTE_QUERY.stepId,
{
query: { queryId: "wrong_id" },
}
)
expect(res.response).toBeDefined()
expect(res.success).toEqual(false)
const { steps } = await createAutomationBuilder(config)
.onAppAction()
.executeQuery({ query: { queryId: "wrong_id" } })
.test({ fields: {} })
expect(steps[0].outputs.response).toStartWith("Error:")
expect(steps[0].outputs.success).toEqual(false)
})
}
)

View File

@ -19,7 +19,7 @@ function generateResponse(to: string, from: string) {
}
}
import * as setup from "../utilities"
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
describe("test the outgoing webhook action", () => {
const config = new TestConfiguration()
@ -60,13 +60,14 @@ describe("test the outgoing webhook action", () => {
...invite,
}
let resp = generateResponse(inputs.to, inputs.from)
const res = await setup.runStep(
config,
setup.actions.SEND_EMAIL_SMTP.stepId,
inputs
)
expect(res.response).toEqual(resp)
expect(res.success).toEqual(true)
const { steps } = await createAutomationBuilder(config)
.onAppAction()
.sendSmtpEmail(inputs)
.test({ fields: {} })
expect(steps[0].outputs.response).toEqual(resp)
expect(steps[0].outputs.success).toEqual(true)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledTimes(1)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledWith({
to: "user1@example.com",
@ -75,7 +76,11 @@ describe("test the outgoing webhook action", () => {
contents: "testing",
cc: "cc",
bcc: "bcc",
invite,
invite: {
...invite,
startTime: invite.startTime.toISOString(),
endTime: invite.endTime.toISOString(),
},
automation: true,
attachments: [
{ url: "attachment1", filename: "attachment1.txt" },

View File

@ -1,7 +1,6 @@
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { getQueue } from "../.."
import { Job } from "bull"
import { captureAutomationResults } from "../utilities"
describe("cron trigger", () => {
const config = new TestConfiguration()
@ -15,28 +14,19 @@ describe("cron trigger", () => {
})
it("should queue a Bull cron job", async () => {
const queue = getQueue()
expect(await queue.getCompletedCount()).toEqual(0)
const jobPromise = new Promise<Job>(resolve => {
queue.on("completed", async job => {
resolve(job)
})
})
await createAutomationBuilder(config)
const { automation } = await createAutomationBuilder(config)
.onCron({ cron: "* * * * *" })
.serverLog({
text: "Hello, world!",
})
.save()
await config.api.application.publish(config.getAppId())
const jobs = await captureAutomationResults(automation, () =>
config.api.application.publish()
)
expect(jobs).toHaveLength(1)
expect(await queue.getCompletedCount()).toEqual(1)
const job = await jobPromise
const repeat = job.opts?.repeat
const repeat = jobs[0].opts?.repeat
if (!repeat || !("cron" in repeat)) {
throw new Error("Expected cron repeat")
}

View File

@ -0,0 +1,60 @@
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { captureAutomationResults } from "../utilities"
import { Automation, Table } from "@budibase/types"
import { basicTable } from "../../../tests/utilities/structures"
describe("row deleted trigger", () => {
const config = new TestConfiguration()
let table: Table
let automation: Automation
beforeAll(async () => {
await config.init()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowDeleted({ tableId: table._id! })
.serverLog({
text: "Row was deleted",
})
.save()
.then(({ automation }) => automation)
await config.api.application.publish()
})
afterAll(() => {
config.end()
})
it("should trigger when a row is deleted", async () => {
const jobs = await captureAutomationResults(automation, async () => {
await config.withProdApp(async () => {
const row = await config.api.row.save(table._id!, { name: "foo" })
await config.api.row.delete(table._id!, { _id: row._id! })
})
})
expect(jobs).toHaveLength(1)
expect(jobs[0].data.event).toEqual(
expect.objectContaining({
tableId: table._id!,
row: expect.objectContaining({ name: "foo" }),
})
)
})
it("should not trigger when a row is deleted in a different table", async () => {
const otherTable = await config.api.table.save(basicTable())
await config.api.application.publish()
const jobs = await captureAutomationResults(automation, async () => {
await config.withProdApp(async () => {
const row = await config.api.row.save(otherTable._id!, { name: "bar" })
await config.api.row.delete(otherTable._id!, { _id: row._id! })
})
})
expect(jobs).toHaveLength(0)
})
})

View File

@ -0,0 +1,54 @@
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { Automation, Table } from "@budibase/types"
import { basicTable } from "../../../tests/utilities/structures"
import { captureAutomationResults } from "../utilities"
describe("row saved trigger", () => {
const config = new TestConfiguration()
let table: Table
let automation: Automation
beforeAll(async () => {
await config.init()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowSaved({ tableId: table._id! })
.serverLog({ text: "Row created!" })
.save()
.then(({ automation }) => automation)
await config.api.application.publish()
})
afterAll(() => {
config.end()
})
it("should queue a Bull job when a row is created", async () => {
const results = await captureAutomationResults(automation, () =>
config.withProdApp(() => config.api.row.save(table._id!, { name: "foo" }))
)
expect(results).toHaveLength(1)
expect(results[0].data.event).toEqual(
expect.objectContaining({
tableId: table._id!,
row: expect.objectContaining({ name: "foo" }),
})
)
})
it("should not fire for rows created in other tables", async () => {
const otherTable = await config.api.table.save(basicTable())
await config.api.application.publish()
const results = await captureAutomationResults(automation, () =>
config.withProdApp(() =>
config.api.row.save(otherTable._id!, { name: "foo" })
)
)
expect(results).toBeEmpty()
})
})

View File

@ -0,0 +1,61 @@
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { Automation, Table } from "@budibase/types"
import { basicTable } from "../../../tests/utilities/structures"
import { captureAutomationResults } from "../utilities"
describe("row updated trigger", () => {
const config = new TestConfiguration()
let table: Table
let automation: Automation
beforeAll(async () => {
await config.init()
table = await config.api.table.save(basicTable())
automation = await createAutomationBuilder(config)
.onRowUpdated({ tableId: table._id! })
.serverLog({ text: "Row updated!" })
.save()
.then(({ automation }) => automation)
await config.api.application.publish()
})
afterAll(() => {
config.end()
})
it("should queue a Bull job when a row is updated", async () => {
const results = await captureAutomationResults(automation, async () => {
await config.withProdApp(async () => {
const row = await config.api.row.save(table._id!, { name: "foo" })
await config.api.row.save(table._id!, { _id: row._id!, name: "bar" })
})
})
expect(results).toHaveLength(1)
expect(results[0].data.event).toEqual(
expect.objectContaining({
tableId: table._id!,
row: expect.objectContaining({ name: "bar" }),
})
)
})
it("should not fire for rows updated in other tables", async () => {
const otherTable = await config.api.table.save(basicTable())
await config.api.application.publish()
const results = await captureAutomationResults(automation, async () => {
await config.withProdApp(async () => {
const row = await config.api.row.save(otherTable._id!, { name: "foo" })
await config.api.row.save(otherTable._id!, {
_id: row._id!,
name: "bar",
})
})
})
expect(results).toBeEmpty()
})
})

View File

@ -1,10 +1,11 @@
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import { context } from "@budibase/backend-core"
import { BUILTIN_ACTION_DEFINITIONS, getAction } from "../../actions"
import emitter from "../../../events/index"
import { BUILTIN_ACTION_DEFINITIONS } from "../../actions"
import env from "../../../environment"
import { AutomationActionStepId, Datasource } from "@budibase/types"
import { Automation, AutomationData, Datasource } from "@budibase/types"
import { Knex } from "knex"
import { getQueue } from "../.."
import { Job } from "bull"
import { helpers } from "@budibase/shared-core"
let config: TestConfiguration
@ -33,34 +34,54 @@ export async function runInProd(fn: any) {
}
}
export async function runStep(
config: TestConfiguration,
stepId: string,
inputs: any,
stepContext?: any
) {
async function run() {
let step = await getAction(stepId as AutomationActionStepId)
expect(step).toBeDefined()
if (!step) {
throw new Error("No step found")
/**
* Capture all automation runs that occur during the execution of a function.
* This function will wait for all messages to be processed before returning.
*/
export async function captureAllAutomationResults(
f: () => Promise<unknown>
): Promise<Job<AutomationData>[]> {
const runs: Job<AutomationData>[] = []
const queue = getQueue()
let messagesReceived = 0
const completedListener = async (job: Job<AutomationData>) => {
runs.push(job)
messagesReceived--
}
const messageListener = async () => {
messagesReceived++
}
queue.on("message", messageListener)
queue.on("completed", completedListener)
try {
await f()
// Queue messages tend to be sent asynchronously in API handlers, so there's
// no guarantee that awaiting this function will have queued anything yet.
// We wait here to make sure we're queued _after_ any existing async work.
await helpers.wait(100)
} finally {
// eslint-disable-next-line no-unmodified-loop-condition
while (messagesReceived > 0) {
await helpers.wait(50)
}
return step({
context: stepContext || {},
inputs,
appId: config ? config.getAppId() : "",
// don't really need an API key, mocked out usage quota, not being tested here
apiKey,
emitter,
})
}
if (config.appId) {
return context.doInContext(config?.appId, async () => {
return run()
})
} else {
return run()
queue.off("completed", completedListener)
queue.off("message", messageListener)
}
return runs
}
export async function captureAutomationResults(
automation: Automation | string,
f: () => Promise<unknown>
) {
const results = await captureAllAutomationResults(f)
return results.filter(
r =>
r.data.automation._id ===
(typeof automation === "string" ? automation : automation._id)
)
}
export async function saveTestQuery(

View File

@ -29,6 +29,7 @@ const DEFAULTS = {
PLUGINS_DIR: "/plugins",
FORKED_PROCESS_NAME: "main",
JS_RUNNER_MEMORY_LIMIT: 64,
USE_LOCAL_COMPONENT_LIBS: coreEnv.isDev() || coreEnv.isTest(),
}
const QUERY_THREAD_TIMEOUT =
@ -113,6 +114,8 @@ const environment = {
DEFAULTS.JS_RUNNER_MEMORY_LIMIT,
LOG_JS_ERRORS: process.env.LOG_JS_ERRORS,
DISABLE_USER_SYNC: process.env.DISABLE_USER_SYNC,
USE_LOCAL_COMPONENT_LIBS:
process.env.USE_LOCAL_COMPONENT_LIBS || DEFAULTS.USE_LOCAL_COMPONENT_LIBS,
// old
CLIENT_ID: process.env.CLIENT_ID,
_set(key: string, value: any) {

View File

@ -7,9 +7,15 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import {
DynamoDBDocument,
PutCommandInput,
GetCommandInput,
UpdateCommandInput,
DeleteCommandInput,
} from "@aws-sdk/lib-dynamodb"
import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { AWS_REGION } from "../constants"
import { DocumentClient } from "aws-sdk/clients/dynamodb"
interface DynamoDBConfig {
region: string
@ -151,7 +157,7 @@ class DynamoDBIntegration implements IntegrationBase {
region: config.region || AWS_REGION,
endpoint: config.endpoint || undefined,
}
this.client = new AWS.DynamoDB.DocumentClient(this.config)
this.client = DynamoDBDocument.from(new DynamoDB(this.config))
}
async testConnection() {
@ -159,8 +165,8 @@ class DynamoDBIntegration implements IntegrationBase {
connected: false,
}
try {
const scanRes = await new AWS.DynamoDB(this.config).listTables().promise()
response.connected = !!scanRes.$response
const scanRes = await new DynamoDB(this.config).listTables()
response.connected = !!scanRes.$metadata
} catch (e: any) {
response.error = e.message as string
}
@ -169,13 +175,13 @@ class DynamoDBIntegration implements IntegrationBase {
async create(query: {
table: string
json: Omit<DocumentClient.PutItemInput, "TableName">
json: Omit<PutCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.put(params).promise()
return this.client.put(params)
}
async read(query: { table: string; json: object; index: null | string }) {
@ -184,7 +190,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.query(params).promise()
const response = await this.client.query(params)
if (response.Items) {
return response.Items
}
@ -197,7 +203,7 @@ class DynamoDBIntegration implements IntegrationBase {
IndexName: query.index ? query.index : undefined,
...query.json,
}
const response = await this.client.scan(params).promise()
const response = await this.client.scan(params)
if (response.Items) {
return response.Items
}
@ -208,40 +214,40 @@ class DynamoDBIntegration implements IntegrationBase {
const params = {
TableName: query.table,
}
return new AWS.DynamoDB(this.config).describeTable(params).promise()
return new DynamoDB(this.config).describeTable(params)
}
async get(query: {
table: string
json: Omit<DocumentClient.GetItemInput, "TableName">
json: Omit<GetCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.get(params).promise()
return this.client.get(params)
}
async update(query: {
table: string
json: Omit<DocumentClient.UpdateItemInput, "TableName">
json: Omit<UpdateCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.update(params).promise()
return this.client.update(params)
}
async delete(query: {
table: string
json: Omit<DocumentClient.DeleteItemInput, "TableName">
json: Omit<DeleteCommandInput, "TableName">
}) {
const params = {
TableName: query.table,
...query.json,
}
return this.client.delete(params).promise()
return this.client.delete(params)
}
}
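
The v3 document client is obtained by wrapping the low-level client with DynamoDBDocument.from, and every operation returns a promise directly, with no .promise() call. A minimal sketch of the pattern, using an illustrative table name and region:

import { DynamoDB } from "@aws-sdk/client-dynamodb"
import { DynamoDBDocument } from "@aws-sdk/lib-dynamodb"

const client = DynamoDBDocument.from(new DynamoDB({ region: "us-east-1" }))

async function example() {
  // The document client marshals plain JS objects to DynamoDB attribute types.
  await client.put({
    TableName: "ExampleTable",
    Item: { id: "1", name: "test" },
  })
  const res = await client.get({ TableName: "ExampleTable", Key: { id: "1" } })
  return res.Item
}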

View File

@ -7,8 +7,9 @@ import {
ConnectionInfo,
} from "@budibase/types"
import AWS from "aws-sdk"
import { S3 } from "@aws-sdk/client-s3"
import csv from "csvtojson"
import stream from "stream"
interface S3Config {
region: string
@ -167,7 +168,7 @@ class S3Integration implements IntegrationBase {
delete this.config.endpoint
}
this.client = new AWS.S3(this.config)
this.client = new S3(this.config)
}
async testConnection() {
@ -175,7 +176,7 @@ class S3Integration implements IntegrationBase {
connected: false,
}
try {
await this.client.listBuckets().promise()
await this.client.listBuckets()
response.connected = true
} catch (e: any) {
response.error = e.message as string
@ -209,7 +210,7 @@ class S3Integration implements IntegrationBase {
LocationConstraint: query.location,
}
}
return await this.client.createBucket(params).promise()
return await this.client.createBucket(params)
}
async read(query: {
@ -220,37 +221,39 @@ class S3Integration implements IntegrationBase {
maxKeys: number
prefix: string
}) {
const response = await this.client
.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
.promise()
const response = await this.client.listObjects({
Bucket: query.bucket,
Delimiter: query.delimiter,
Marker: query.marker,
MaxKeys: query.maxKeys,
Prefix: query.prefix,
})
return response.Contents
}
async readCsv(query: { bucket: string; key: string }) {
const stream = this.client
.getObject({
Bucket: query.bucket,
Key: query.key,
})
.createReadStream()
const response = await this.client.getObject({
Bucket: query.bucket,
Key: query.key,
})
const fileStream = response.Body?.transformToWebStream()
if (!fileStream || !(fileStream instanceof stream.Readable)) {
throw new Error("Unable to retrieve CSV - invalid stream")
}
let csvError = false
return new Promise((resolve, reject) => {
stream.on("error", (err: Error) => {
fileStream.on("error", (err: Error) => {
reject(err)
})
const response = csv()
.fromStream(stream)
.fromStream(fileStream)
.on("error", () => {
csvError = true
})
stream.on("finish", () => {
fileStream.on("finish", () => {
resolve(response)
})
}).catch(err => {
@ -263,12 +266,10 @@ class S3Integration implements IntegrationBase {
}
async delete(query: { bucket: string; delete: string }) {
return await this.client
.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
.promise()
return await this.client.deleteObjects({
Bucket: query.bucket,
Delete: JSON.parse(query.delete),
})
}
}
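
In a Node runtime the v3 getObject response exposes Body as a Node Readable (with extra transformTo* helpers), so it can be handed to csvtojson as a stream. A minimal sketch, assuming an already-configured v3 S3 client and illustrative bucket/key values:

import { Readable } from "stream"
import { S3 } from "@aws-sdk/client-s3"
import csv from "csvtojson"

const client = new S3({ region: "eu-west-1" })

async function readCsvRows(bucket: string, key: string) {
  const { Body } = await client.getObject({ Bucket: bucket, Key: key })
  if (!(Body instanceof Readable)) {
    throw new Error("Unable to retrieve CSV - expected a readable stream")
  }
  // csvtojson consumes the object stream and resolves with the parsed rows.
  return csv().fromStream(Body)
}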

View File

@ -1,76 +0,0 @@
const response = (body: any, extra?: any) => () => ({
promise: () => body,
...extra,
})
class DocumentClient {
put = jest.fn(response({}))
query = jest.fn(
response({
Items: [],
})
)
scan = jest.fn(
response({
Items: [
{
Name: "test",
},
],
})
)
get = jest.fn(response({}))
update = jest.fn(response({}))
delete = jest.fn(response({}))
}
class S3 {
listObjects = jest.fn(
response({
Contents: [],
})
)
createBucket = jest.fn(
response({
Contents: {},
})
)
deleteObjects = jest.fn(
response({
Contents: {},
})
)
getSignedUrl = jest.fn((operation, params) => {
return `http://example.com/${params.Bucket}/${params.Key}`
})
headBucket = jest.fn(
response({
Contents: {},
})
)
upload = jest.fn(
response({
Contents: {},
})
)
getObject = jest.fn(
response(
{
Body: "",
},
{
createReadStream: jest.fn().mockReturnValue("stream"),
}
)
)
}
module.exports = {
DynamoDB: {
DocumentClient,
},
S3,
config: {
update: jest.fn(),
},
}

View File

@ -1,4 +1,20 @@
jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
jest.mock("@aws-sdk/lib-dynamodb", () => ({
DynamoDBDocument: {
from: jest.fn(() => ({
update: jest.fn(),
put: jest.fn(),
query: jest.fn(() => ({
Items: [],
})),
scan: jest.fn(() => ({
Items: [],
})),
delete: jest.fn(),
get: jest.fn(),
})),
},
}))
jest.mock("@aws-sdk/client-dynamodb")
import { default as DynamoDBIntegration } from "../dynamodb"
class TestConfiguration {
@ -57,11 +73,7 @@ describe("DynamoDB Integration", () => {
TableName: tableName,
IndexName: indexName,
})
expect(response).toEqual([
{
Name: "test",
},
])
expect(response).toEqual([])
})
it("calls the get method with the correct params", async () => {

View File

@ -1,5 +1,52 @@
jest.mock("aws-sdk", () => require("./aws-sdk.mock"))
import { default as S3Integration } from "../s3"
jest.mock("@aws-sdk/client-s3", () => {
class S3Mock {
response(body: any, extra?: any) {
return () => ({
promise: () => body,
...extra,
})
}
listObjects = jest.fn(
this.response({
Contents: [],
})
)
createBucket = jest.fn(
this.response({
Contents: {},
})
)
deleteObjects = jest.fn(
this.response({
Contents: {},
})
)
headBucket = jest.fn(
this.response({
Contents: {},
})
)
upload = jest.fn(
this.response({
Contents: {},
})
)
getObject = jest.fn(
this.response(
{
Body: "",
},
{
createReadStream: jest.fn().mockReturnValue("stream"),
}
)
)
}
return { S3: S3Mock }
})
class TestConfiguration {
integration: any

View File

@ -430,7 +430,7 @@ export async function handleFileResponse(
size = details.ContentLength
}
}
presignedUrl = objectStore.getPresignedUrl(bucket, key)
presignedUrl = await objectStore.getPresignedUrl(bucket, key)
return {
data: {
size,

View File

@ -18,7 +18,7 @@ export async function fetch(type?: PluginType): Promise<Plugin[]> {
})
)
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
plugins = objectStore.enrichPluginURLs(plugins)
plugins = await objectStore.enrichPluginURLs(plugins)
if (type) {
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
} else {

View File

@ -34,9 +34,12 @@ export class ApplicationAPI extends TestAPI {
}
publish = async (
appId: string,
appId?: string,
expectations?: Expectations
): Promise<PublishResponse> => {
if (!appId) {
appId = this.config.getAppId()
}
return await this._post<PublishResponse>(
`/api/applications/${appId}/publish`,
{

View File

@ -492,6 +492,15 @@ export function basicDatasource(): { datasource: Datasource } {
}
}
export function basicDatasourcePlus(): { datasource: Datasource } {
return {
datasource: {
...basicDatasource().datasource,
plus: true,
},
}
}
export function basicQuery(datasourceId: string): Query {
return {
datasourceId,

View File

@ -16,7 +16,7 @@ export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
* @param appId The ID of the app which is being created.
* @return once promise completes app resources should be ready in object store.
*/
export const createApp = async (appId: string) => {
export const uploadAppFiles = async (appId: string) => {
await updateClientLibrary(appId)
}
@ -25,7 +25,7 @@ export const createApp = async (appId: string) => {
* @param appId The ID of the app which is being deleted.
* @return once promise completes the app resources will be removed from object store.
*/
export const deleteApp = async (appId: string) => {
export const deleteAppFiles = async (appId: string) => {
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
}
@ -36,11 +36,11 @@ export const getComponentLibraryManifest = async (library: string) => {
const appId = context.getAppId()
const filename = "manifest.json"
if (env.isDev() || env.isTest()) {
if (env.USE_LOCAL_COMPONENT_LIBS) {
const db = context.getAppDB()
const app = await db.get<App>(DocumentType.APP_METADATA)
if (shouldServeLocally(app.version) || env.isTest()) {
if (shouldServeLocally(app.version) || env.USE_LOCAL_COMPONENT_LIBS) {
const paths = [
join(TOP_LEVEL_PATH, "packages/client", filename),
join(process.cwd(), "client", filename),
@ -78,7 +78,7 @@ export const getComponentLibraryManifest = async (library: string) => {
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {
resp = resp.toString("utf8")
resp = resp.toString()
}
return JSON.parse(resp)
}

View File

@ -3,6 +3,7 @@ import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { objectStore } from "@budibase/backend-core"
import stream from "stream"
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
@ -58,7 +59,11 @@ async function getPluginImpl(path: string, plugin: Plugin) {
pluginKey
)
fs.writeFileSync(filename, pluginJs)
if (pluginJs instanceof stream.Readable) {
pluginJs.pipe(fs.createWriteStream(filename))
} else {
fs.writeFileSync(filename, pluginJs)
}
fs.writeFileSync(metadataName, hash)
return require(filename)

View File

@ -359,9 +359,9 @@ export async function coreOutputProcessing(
if (row[property] == null) {
continue
}
const process = (attachment: RowAttachment) => {
const process = async (attachment: RowAttachment) => {
if (!attachment.url && attachment.key) {
attachment.url = objectStore.getAppFileUrl(attachment.key)
attachment.url = await objectStore.getAppFileUrl(attachment.key)
}
return attachment
}
@ -369,11 +369,13 @@ export async function coreOutputProcessing(
row[property] = JSON.parse(row[property])
}
if (Array.isArray(row[property])) {
row[property].forEach((attachment: RowAttachment) => {
process(attachment)
})
await Promise.all(
row[property].map((attachment: RowAttachment) =>
process(attachment)
)
)
} else {
process(row[property])
await process(row[property])
}
}
} else if (
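
The switch from forEach to Promise.all above matters because forEach discards the promises returned by an async callback, so processing could continue before any attachment URL had been generated. A minimal illustration of the difference, using hypothetical stand-ins for the attachment enrichment:

// Hypothetical stand-ins; not part of the output processing module.
type Attachment = { key: string; url?: string }
const lookupUrl = async (key: string) => `https://example.com/${key}`

async function demo(attachments: Attachment[]) {
  // Fire-and-forget: forEach ignores the promises returned by async callbacks,
  // so execution can continue before any URL has been set.
  attachments.forEach(async a => {
    a.url = await lookupUrl(a.key)
  })

  // Deterministic: every attachment is enriched before the function returns.
  await Promise.all(
    attachments.map(async a => {
      a.url = await lookupUrl(a.key)
    })
  )
}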

View File

@ -4,4 +4,5 @@ export interface SaveUserOpts {
currentUserId?: string
skipPasswordValidation?: boolean
allowChangingEmail?: boolean
isAccountHolder?: boolean
}

View File

@ -1,10 +1,13 @@
interface BaseUIComponentError {
componentId: string
message: string
}
interface UISettingComponentError extends BaseUIComponentError {
errorType: "setting"
key: string
label: string
cause: "missing" | "invalid"
}
interface UIAncestorComponentError extends BaseUIComponentError {

View File

@ -15,22 +15,29 @@ export interface ComponentDefinition {
illegalChildren: string[]
}
export type DependsOnComponentSetting =
| string
| {
setting: string
value: string
}
export interface ComponentSetting {
key: string
type: string
label?: string
section?: string
name?: string
required?: boolean
defaultValue?: any
selectAllFields?: boolean
resetOn?: string | string[]
settings?: ComponentSetting[]
nested?: boolean
isolated?: boolean
dependsOn?:
| string
| {
setting: string
value: string
}
dependsOn?: DependsOnComponentSetting
sectionDependsOn?: DependsOnComponentSetting
contextAccess?: {
global: boolean
self: boolean
}
}
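
DependsOnComponentSetting accepts either a bare setting key or an object that pins a specific value. A hypothetical settings array illustrating both forms (the keys, labels and values are invented for the example):

const exampleSettings: ComponentSetting[] = [
  { key: "showLabel", type: "boolean", label: "Show label" },
  {
    key: "labelText",
    type: "text",
    label: "Label text",
    // Shorthand form: depends on another setting by key alone.
    dependsOn: "showLabel",
  },
  {
    key: "customColour",
    type: "color",
    label: "Custom colour",
    // Object form: depends on a specific value of another setting.
    sectionDependsOn: { setting: "theme", value: "custom" },
  },
]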

View File

@ -322,27 +322,27 @@ export async function save(
}
}
function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
async function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
if (!oidcLogos) {
return
}
oidcLogos.config = Object.keys(oidcLogos.config || {}).reduce(
(acc: any, key: string) => {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
acc[key] = objectStoreUrl
} else {
acc[key] = oidcLogos.config[key]
}
return acc
},
{}
)
const newConfig: Record<string, string> = {}
const keys = Object.keys(oidcLogos.config || {})
for (const key of keys) {
if (!key.endsWith("Etag")) {
const etag = oidcLogos.config[`${key}Etag`]
const objectStoreUrl = await objectStore.getGlobalFileUrl(
oidcLogos.type,
key,
etag
)
newConfig[key] = objectStoreUrl
} else {
newConfig[key] = oidcLogos.config[key]
}
}
oidcLogos.config = newConfig
}
export async function find(ctx: UserCtx<void, FindConfigResponse>) {
@ -370,7 +370,7 @@ export async function find(ctx: UserCtx<void, FindConfigResponse>) {
async function handleConfigType(type: ConfigType, config: Config) {
if (type === ConfigType.OIDC_LOGOS) {
enrichOIDCLogos(config)
await enrichOIDCLogos(config)
} else if (type === ConfigType.AI) {
await handleAIConfig(config)
}
@ -396,7 +396,7 @@ export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
const oidcCustomLogos = await configs.getOIDCLogosDoc()
if (oidcCustomLogos) {
enrichOIDCLogos(oidcCustomLogos)
await enrichOIDCLogos(oidcCustomLogos)
}
if (!oidcConfig) {
@ -427,7 +427,7 @@ export async function publicSettings(
// enrich the logo url - empty url means deleted
if (config.logoUrl && config.logoUrl !== "") {
config.logoUrl = objectStore.getGlobalFileUrl(
config.logoUrl = await objectStore.getGlobalFileUrl(
"settings",
"logoUrl",
config.logoUrlEtag
@ -437,7 +437,7 @@ export async function publicSettings(
// enrich the favicon url - empty url means deleted
const faviconUrl =
branding.faviconUrl && branding.faviconUrl !== ""
? objectStore.getGlobalFileUrl(
? await objectStore.getGlobalFileUrl(
"settings",
"faviconUrl",
branding.faviconUrlEtag
@ -522,7 +522,7 @@ export async function upload(ctx: UserCtx<void, UploadConfigFileResponse>) {
ctx.body = {
message: "File has been uploaded and url stored to config.",
url: objectStore.getGlobalFileUrl(type, name, etag),
url: await objectStore.getGlobalFileUrl(type, name, etag),
}
}

View File

@ -1,4 +1,5 @@
import { TestConfiguration } from "../../../../tests"
import { withEnv } from "../../../../environment"
jest.unmock("node-fetch")
@ -32,7 +33,7 @@ describe("/api/system/environment", () => {
})
it("returns the expected environment for self hosters", async () => {
await config.withEnv({ SELF_HOSTED: true }, async () => {
await withEnv({ SELF_HOSTED: true }, async () => {
const env = await config.api.environment.getEnvironment()
expect(env.body).toEqual({
cloud: false,

View File

@ -1,6 +1,7 @@
import { env as coreEnv } from "@budibase/backend-core"
import { ServiceType } from "@budibase/types"
import { join } from "path"
import cloneDeep from "lodash/cloneDeep"
coreEnv._set("SERVICE_TYPE", ServiceType.WORKER)
@ -92,6 +93,32 @@ if (!environment.APPS_URL) {
: "http://app-service:4002"
}
export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
const oldEnv = cloneDeep(environment)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {
environment._set(key, newEnvVars[key])
}
return () => {
for (const [key, value] of Object.entries(oldEnv)) {
environment._set(key, value)
}
}
}
export function withEnv<T>(envVars: Partial<typeof environment>, f: () => T) {
const cleanup = setEnv(envVars)
const result = f()
if (result instanceof Promise) {
return result.finally(cleanup)
} else {
cleanup()
return result
}
}
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable

View File

@ -35,7 +35,6 @@ import {
} from "@budibase/types"
import API from "./api"
import jwt, { Secret } from "jsonwebtoken"
import cloneDeep from "lodash/fp/cloneDeep"
class TestConfiguration {
server: any
@ -247,34 +246,6 @@ class TestConfiguration {
return { message: "Admin user only endpoint.", status: 403 }
}
async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
let cleanup = this.setEnv(newEnvVars)
try {
await f()
} finally {
cleanup()
}
}
/*
* Sets the environment variables to the given values and returns a function
* that can be called to reset the environment variables to their original values.
*/
setEnv(newEnvVars: Partial<typeof env>): () => void {
const oldEnv = cloneDeep(env)
let key: keyof typeof newEnvVars
for (key in newEnvVars) {
env._set(key, newEnvVars[key])
}
return () => {
for (const [key, value] of Object.entries(oldEnv)) {
env._set(key, value)
}
}
}
// USERS
async createDefaultUser() {

yarn.lock (1076): file diff suppressed because it is too large