File authentication using presigned URLs (#8883)
* file auth wip
* Private plugin bucket wip
* Add etag to logo request, fix circular dependency
* Resolve cyclic dependency
* Ensure browser doesn't cache custom component
* Global file fixes
* File fixes / remove permaUrl
* Add ctx local storage
* Enable presigned url to work from any host
* Test fix
* Self hosting w/ S3 + other fixes
* Fix for serving dev app in multi tenant
* Fix backup restore and import attachment url processing, never store attachment urls
* Test fixes
* Unit tests WIP
* Replace file path with base64 encoded private key
* Multi tenancy tests for files
* Environment files fixes
* Post-merge build and test fixes
* Add debug conditional to all jest logging and revert/fix prod minio proxy_pass
* Re-add default '/' minio route :(
* Rename iconFile -> iconFileName
* Add cloudfront vars to k8s templates
* Remove public bucket policy
* Remove duplicated test
parent e873fa828e
commit 786039055e
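For context before the diff: the change stops serving files from public buckets and instead hands out short-lived signed URLs, proxied through a fixed `/files/signed/` route so the signature survives unknown self-hosted hostnames. The following is a minimal sketch of that MinIO/S3 path only; the endpoint and bucket names here are placeholders, not values taken from this commit.

import AWS from "aws-sdk"

// Sketch: sign a GET for a private object, then expose it through the
// /files/signed/ proxy route so the Host header stays fixed.
const s3 = new AWS.S3({
  endpoint: "minio-service", // assumed internal MinIO host, matching the nginx Host header
  s3ForcePathStyle: true,
  signatureVersion: "v4",
  accessKeyId: process.env.MINIO_ACCESS_KEY,
  secretAccessKey: process.env.MINIO_SECRET_KEY,
})

export function signedFileUrl(bucket: string, key: string, ttlSeconds = 3600) {
  const url = s3.getSignedUrl("getObject", {
    Bucket: bucket,
    Key: key,
    Expires: ttlSeconds,
  })
  // Return only the path and query: nginx proxies /files/signed/ to MinIO
  // with a predefined Host header, so the signed URL works from any install URL.
  const { pathname, search } = new URL(url)
  return `/files/signed${pathname}${search}`
}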
@ -67,6 +67,8 @@ spec:
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@ -77,13 +79,19 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: objectStoreSecret
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}
@ -68,6 +68,8 @@ spec:
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@ -80,11 +82,17 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}

@ -167,6 +167,7 @@ services:
resources: {}

objectStore:
# Set to false if using another object store such as S3
minio: true
browser: true
port: 9000
@ -182,6 +183,13 @@ services:
## set, choosing the default provisioner.
storageClass: ""
resources: {}
cloudfront:
# Set the url of a distribution to enable cloudfront
cdn: ""
# ID of public key stored in cloudfront
publicKeyId: ""
# Base64 encoded private key for the above public key
privateKey64: ""

# Override values in couchDB subchart
couchdb:

@ -186,6 +186,26 @@ http {
proxy_pass http://minio-service:9000;
}

location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;

# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;

proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;

proxy_pass http://minio-service:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}

client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

@ -208,6 +208,26 @@ http {
proxy_pass http://$minio:9000;
}

location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;

# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;

proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;

proxy_pass http://$minio:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}

client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

@ -98,14 +98,36 @@ server {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;

proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;

proxy_pass http://127.0.0.1:9000;
}

location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;

# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;

proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;

proxy_pass http://127.0.0.1:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}

client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

@ -25,6 +25,7 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:backend": "lerna run build --ignore @budibase/client --ignore @budibase/bbui --ignore @budibase/builder --ignore @budibase/cli",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",

@ -3,7 +3,10 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
listObjects: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}

@ -23,6 +23,7 @@
"@budibase/types": "2.1.46-alpha.13",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",

@ -2,7 +2,7 @@
// store an app ID to pretend there is a context
import env from "../environment"
import Context from "./Context"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import * as conversions from "../db/conversions"
import { getDB } from "../db/db"
import {
DocumentType,
@ -181,6 +181,14 @@ export function getAppId(): string | undefined {
}
}

export const getProdAppId = () => {
const appId = getAppId()
if (!appId) {
throw new Error("Could not get appId")
}
return conversions.getProdAppID(appId)
}

export function updateTenantId(tenantId?: string) {
let context: ContextMap = updateContext({
tenantId,
@ -229,7 +237,7 @@ export function getProdAppDB(opts?: any): Database {
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(getProdAppID(appId), opts)
return getDB(conversions.getProdAppID(appId), opts)
}

/**
@ -241,5 +249,5 @@ export function getDevAppDB(opts?: any): Database {
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(getDevelopmentAppID(appId), opts)
return getDB(conversions.getDevelopmentAppID(appId), opts)
}

@ -14,7 +14,7 @@ import { doWithDB, allDbs, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events"
import { App, Database, ConfigType } from "@budibase/types"
import { App, Database, ConfigType, isSettingsConfig } from "@budibase/types"

/**
* Generates a new app ID.
@ -489,18 +489,12 @@ export const getScopedFullConfig = async function (

// custom logic for settings doc
if (type === ConfigType.SETTINGS) {
if (scopedConfig && scopedConfig.doc) {
// overrides affected by environment variables
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
} else {
if (!scopedConfig || !scopedConfig.doc) {
// defaults
scopedConfig = {
doc: {
_id: generateConfigID({ type, user, workspace }),
type: ConfigType.SETTINGS,
config: {
platformUrl: await getPlatformUrl({ tenantAware: true }),
analyticsEnabled: await events.analytics.enabled(),
@ -508,6 +502,16 @@ export const getScopedFullConfig = async function (
},
}
}

// will always be true - use assertion function to get type access
if (isSettingsConfig(scopedConfig.doc)) {
// overrides affected by environment
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
}
}

return scopedConfig && scopedConfig.doc

@ -25,7 +25,6 @@ const DefaultBucketName = {
APPS: "prod-budi-app-assets",
TEMPLATES: "templates",
GLOBAL: "global",
CLOUD: "prod-budi-tenant-uploads",
PLUGINS: "plugins",
}

@ -33,6 +32,9 @@ const environment = {
isTest,
isJest,
isDev,
isProd: () => {
return !isDev()
},
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
@ -47,6 +49,7 @@ const environment = {
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_REGION: process.env.AWS_REGION,
MINIO_URL: process.env.MINIO_URL,
MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
@ -59,6 +62,9 @@ const environment = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,
CLOUDFRONT_PRIVATE_KEY_64: process.env.CLOUDFRONT_PRIVATE_KEY_64,
CLOUDFRONT_PUBLIC_KEY_ID: process.env.CLOUDFRONT_PUBLIC_KEY_ID,
BACKUPS_BUCKET_NAME:
process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,
@ -66,8 +72,6 @@ const environment = {
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,
GLOBAL_BUCKET_NAME:
process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || DefaultBucketName.CLOUD,
PLUGIN_BUCKET_NAME:
process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
USE_COUCH: process.env.USE_COUCH || true,
@ -91,6 +95,11 @@ for (let [key, value] of Object.entries(environment)) {
// @ts-ignore
environment[key] = 0
}
// handle the edge case of "false" to disable an environment variable
if (value === "false") {
// @ts-ignore
environment[key] = 0
}
}

export = environment

@ -0,0 +1,40 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"

/**
* In production the client library is stored in the object store, however in development
* we use the symlinked version produced by lerna, located in node modules. We link to this
* via a specific endpoint (under /api/assets/client).
* @param {string} appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param {string} version The version to retrieve.
* @return {string} The URL to be inserted into appPackage response or server rendered
* app index file.
*/
export const clientLibraryUrl = (appId: string, version: string) => {
if (env.isProd()) {
let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
if (version) {
file += `?v=${version}`
}
// don't need to use presigned for client with cloudfront
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
} else {
return `/api/assets/client`
}
}

export const getAppFileUrl = (s3Key: string) => {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
}
}

@ -0,0 +1,29 @@
import env from "../../environment"
import * as tenancy from "../../tenancy"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"

// URLs

export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
let file = getGlobalFileS3Key(type, name)
if (env.CLOUDFRONT_CDN) {
if (etag) {
file = `${file}?etag=${etag}`
}
return cloudfront.getPresignedUrl(file)
} else {
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
}
}

// KEYS

export const getGlobalFileS3Key = (type: string, name: string) => {
let file = `${type}/${name}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
file = `${tenantId}/${file}`
}
return file
}

@ -0,0 +1,3 @@
export * from "./app"
export * from "./global"
export * from "./plugins"

@ -0,0 +1,71 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as tenancy from "../../tenancy"
import * as cloudfront from "../cloudfront"
import { Plugin } from "@budibase/types"

// URLS

export const enrichPluginURLs = (plugins: Plugin[]) => {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const jsUrl = getPluginJSUrl(plugin)
const iconUrl = getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
}

const getPluginJSUrl = (plugin: Plugin) => {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}

const getPluginIconUrl = (plugin: Plugin): string | undefined => {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
}
return getPluginUrl(s3Key)
}

const getPluginUrl = (s3Key: string) => {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
}
}

// S3 KEYS

export const getPluginJSKey = (plugin: Plugin) => {
return getPluginS3Key(plugin, "plugin.min.js")
}

export const getPluginIconKey = (plugin: Plugin) => {
// stored iconUrl is deprecated - hardcode to icon.svg in this case
const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
if (!iconFileName) {
return
}
return getPluginS3Key(plugin, iconFileName)
}

const getPluginS3Key = (plugin: Plugin, fileName: string) => {
const s3Key = getPluginS3Dir(plugin.name)
return `${s3Key}/${fileName}`
}

export const getPluginS3Dir = (pluginName: string) => {
let s3Key = `${pluginName}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
s3Key = `${tenantId}/${s3Key}`
}
if (env.CLOUDFRONT_CDN) {
s3Key = `plugins/${s3Key}`
}
return s3Key
}

@ -0,0 +1,171 @@
import * as app from "../app"
import { getAppFileUrl } from "../app"
import { testEnv } from "../../../../tests"

describe("app", () => {
beforeEach(() => {
testEnv.nodeJest()
})

describe("clientLibraryUrl", () => {
function getClientUrl() {
return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
}

describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})

it("gets url in dev", () => {
testEnv.nodeDev()
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
})

it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})

it("gets url with custom S3", () => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})

it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
})
})

describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})

it("gets url in dev", async () => {
testEnv.nodeDev()
await testEnv.withTenant(tenantId => {
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
})
})

it("gets url with embedded minio", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
})

it("gets url with custom S3", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
})

it("gets url with cloudfront + s3", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
})
})
})
})

describe("getAppFileUrl", () => {
function getAppFileUrl() {
return app.getAppFileUrl("app_123/attachments/image.jpeg")
}

describe("single tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})

it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})

it("gets url with custom S3", () => {
testEnv.withS3()
const url = getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})

it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getAppFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
).toBe(true)
})
})

describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})

it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
})

it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
})

it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
// omit rest of signed params
expect(
url.includes(
"http://cf.example.com/app_123/attachments/image.jpeg?"
)
).toBe(true)
})
})
})
})
})

@ -0,0 +1,74 @@
import * as global from "../global"
import { testEnv } from "../../../../tests"

describe("global", () => {
describe("getGlobalFileUrl", () => {
function getGlobalFileUrl() {
return global.getGlobalFileUrl("settings", "logoUrl", "etag")
}

describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})

it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getGlobalFileUrl()
expect(url).toBe("/files/signed/global/settings/logoUrl")
})

it("gets url with custom S3", () => {
testEnv.withS3()
const url = getGlobalFileUrl()
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})

it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
).toBe(true)
})
})

describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})

it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
})
})

it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
expect(url).toBe(
`http://s3.example.com/global/${tenantId}/settings/logoUrl`
)
})
})

it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes(
`http://cf.example.com/${tenantId}/settings/logoUrl?etag=etag&`
)
).toBe(true)
})
})
})
})
})

@ -0,0 +1,110 @@
import * as plugins from "../plugins"
import { structures, testEnv } from "../../../../tests"

describe("plugins", () => {
describe("enrichPluginURLs", () => {
const plugin = structures.plugins.plugin()

function getEnrichedPluginUrls() {
const enriched = plugins.enrichPluginURLs([plugin])[0]
return {
jsUrl: enriched.jsUrl!,
iconUrl: enriched.iconUrl!,
}
}

describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})

it("gets url with embedded minio", () => {
testEnv.withMinio()
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`/files/signed/plugins/${plugin.name}/icon.svg`
)
})

it("gets url with custom S3", () => {
testEnv.withS3()
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/icon.svg`
)
})

it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const urls = getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
`http://cf.example.com/plugins/${plugin.name}/plugin.min.js?`
)
).toBe(true)
expect(
urls.iconUrl.includes(
`http://cf.example.com/plugins/${plugin.name}/icon.svg?`
)
).toBe(true)
})
})

describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})

it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/icon.svg`
)
})
})

it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/icon.svg`
)
})
})

it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
`http://cf.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js?`
)
).toBe(true)
expect(
urls.iconUrl.includes(
`http://cf.example.com/plugins/${tenantId}/${plugin.name}/icon.svg?`
)
).toBe(true)
})
})
})
})
})

@ -0,0 +1,41 @@
import env from "../environment"
const cfsign = require("aws-cloudfront-sign")

let PRIVATE_KEY: string | undefined

function getPrivateKey() {
  if (!env.CLOUDFRONT_PRIVATE_KEY_64) {
    throw new Error("CLOUDFRONT_PRIVATE_KEY_64 is not set")
  }

  if (PRIVATE_KEY) {
    return PRIVATE_KEY
  }

  PRIVATE_KEY = Buffer.from(env.CLOUDFRONT_PRIVATE_KEY_64, "base64").toString(
    "utf-8"
  )

  return PRIVATE_KEY
}

const getCloudfrontSignParams = () => {
  return {
    keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
    privateKeyString: getPrivateKey(),
    expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
  }
}

export const getPresignedUrl = (s3Key: string) => {
  const url = getUrl(s3Key)
  return cfsign.getSignedUrl(url, getCloudfrontSignParams())
}

export const getUrl = (s3Key: string) => {
  let prefix = "/"
  if (s3Key.startsWith("/")) {
    prefix = ""
  }
  return `${env.CLOUDFRONT_CDN}${prefix}${s3Key}`
}

@ -1,2 +1,3 @@
export * from "./objectStore"
export * from "./utils"
export * from "./buckets"

@ -8,7 +8,7 @@ import { promisify } from "util"
import { join } from "path"
import fs from "fs"
import env from "../environment"
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
import { budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"

@ -26,7 +26,7 @@ type UploadParams = {
bucket: string
filename: string
path: string
type?: string
type?: string | null
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
@ -41,6 +41,7 @@ const CONTENT_TYPE_MAP: any = {
json: "application/json",
gz: "application/gzip",
}

const STRING_CONTENT_TYPES = [
CONTENT_TYPE_MAP.html,
CONTENT_TYPE_MAP.css,
@ -58,35 +59,17 @@ export function sanitizeBucket(input: string) {
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
}

function publicPolicy(bucketName: string) {
return {
Version: "2012-10-17",
Statement: [
{
Effect: "Allow",
Principal: {
AWS: ["*"],
},
Action: "s3:GetObject",
Resource: [`arn:aws:s3:::${bucketName}/*`],
},
],
}
}

const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]

/**
* Gets a connection to the object store using the S3 SDK.
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @param {object} opts configuration for the object store.
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (bucket: string) => {
export const ObjectStore = (
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) => {
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
@ -100,9 +83,20 @@ export const ObjectStore = (bucket: string) => {
Bucket: sanitizeBucket(bucket),
}
}

// custom S3 is in use i.e. minio
if (env.MINIO_URL) {
if (opts.presigning && !env.MINIO_ENABLED) {
// IMPORTANT: Signed urls will inspect the host header of the request.
// Normally a signed url will need to be generated with a specified host in mind.
// To support dynamic hosts, e.g. some unknown self-hosted installation url,
// use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
config.endpoint = "minio-service"
} else {
config.endpoint = env.MINIO_URL
}
}

return new AWS.S3(config)
}

@ -135,16 +129,6 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => {
await promises[bucketName]
delete promises[bucketName]
}
// public buckets are quite hidden in the system, make sure
// no bucket is set accidentally
if (PUBLIC_BUCKETS.includes(bucketName)) {
await client
.putBucketPolicy({
Bucket: bucketName,
Policy: JSON.stringify(publicPolicy(bucketName)),
})
.promise()
}
} else {
throw new Error("Unable to write to object store bucket.")
}
@ -274,6 +258,36 @@ export const listAllObjects = async (bucketName: string, path: string) => {
return objects
}

/**
* Generate a presigned url with a default TTL of 1 hour
*/
export const getPresignedUrl = (
bucketName: string,
key: string,
durationSeconds: number = 3600
) => {
const objectStore = ObjectStore(bucketName, { presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(key),
Expires: durationSeconds,
}
const url = objectStore.getSignedUrl("getObject", params)

if (!env.MINIO_ENABLED) {
// return the full URL to the client
return url
} else {
// return the path only to the client
// use the presigned url route to ensure the static
// hostname will be used in the request
const signedUrl = new URL(url)
const path = signedUrl.pathname
const query = signedUrl.search
return `/files/signed${path}${query}`
}
}

/**
* Same as retrieval function but puts to a temporary file.
*/
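To illustrate how the helper above is meant to be consumed, here is a hedged sketch (the bucket and key are example values taken from the tests in this commit, not an authoritative API reference):

import { objectStore } from "@budibase/backend-core"

// With embedded MinIO the helper returns a relative path served by nginx, e.g.
//   /files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg
// With an external S3 endpoint it returns the full signed URL instead.
const url = objectStore.getPresignedUrl(
  "prod-budi-app-assets",
  "app_123/attachments/image.jpeg"
)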

@ -14,7 +14,6 @@ export const ObjectStoreBuckets = {
APPS: env.APPS_BUCKET_NAME,
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}

@ -1,4 +1,4 @@
import { doWithDB, queryPlatformView, getGlobalDBName } from "../db"
import { doWithDB, getGlobalDBName } from "../db"
import {
DEFAULT_TENANT_ID,
getTenantId,
@ -8,11 +8,10 @@ import {
import env from "../environment"
import {
BBContext,
PlatformUser,
TenantResolutionStrategy,
GetTenantIdOptions,
} from "@budibase/types"
import { Header, StaticDatabases, ViewName } from "../constants"
import { Header, StaticDatabases } from "../constants"

const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@ -111,27 +110,7 @@ export async function lookupTenantId(userId: string) {
})
}

// lookup, could be email or userId, either will return a doc
export async function getTenantUser(
identifier: string
): Promise<PlatformUser | undefined> {
// use the view here and allow to find anyone regardless of casing
// Use lowercase to ensure email login is case-insensitive
const users = await queryPlatformView<PlatformUser>(
ViewName.PLATFORM_USERS_LOWERCASE,
{
keys: [identifier.toLowerCase()],
include_docs: true,
}
)
if (Array.isArray(users)) {
return users[0]
} else {
return users
}
}

export function isUserInAppTenant(appId: string, user?: any) {
export const isUserInAppTenant = (appId: string, user?: any) => {
let userTenantId
if (user) {
userTenantId = user.tenantId || DEFAULT_TENANT_ID

@ -1,8 +1,8 @@
const { structures } = require("../../tests")
const utils = require("../utils")
const events = require("../events")
const { DEFAULT_TENANT_ID } = require("../constants")
const { doInTenant } = require("../context")
import { structures } from "../../../tests"
import * as utils from "../../utils"
import * as events from "../../events"
import { DEFAULT_TENANT_ID } from "../../constants"
import { doInTenant } from "../../context"

describe("utils", () => {
describe("platformLogout", () => {

@ -1,6 +1,13 @@
import { getAllApps, queryGlobalView } from "../db"
import { options } from "../middleware/passport/jwt"
import { Header, Cookie, MAX_VALID_DATE } from "../constants"
import {
Header,
Cookie,
MAX_VALID_DATE,
DocumentType,
SEPARATOR,
ViewName,
} from "../constants"
import env from "../environment"
import * as userCache from "../cache/user"
import { getSessionsForUser, invalidateSessions } from "../security/sessions"
@ -8,12 +15,11 @@ import * as events from "../events"
import * as tenancy from "../tenancy"
import {
App,
BBContext,
Ctx,
PlatformLogoutOpts,
TenantResolutionStrategy,
} from "@budibase/types"
import { SetOption } from "cookies"
import { DocumentType, SEPARATOR, ViewName } from "../constants"
const jwt = require("jsonwebtoken")

const APP_PREFIX = DocumentType.APP + SEPARATOR
@ -25,7 +31,7 @@ function confirmAppId(possibleAppId: string | undefined) {
: undefined
}

async function resolveAppUrl(ctx: BBContext) {
async function resolveAppUrl(ctx: Ctx) {
const appUrl = ctx.path.split("/")[2]
let possibleAppUrl = `/${appUrl.toLowerCase()}`

@ -50,7 +56,7 @@ async function resolveAppUrl(ctx: BBContext) {
return app && app.appId ? app.appId : undefined
}

export function isServingApp(ctx: BBContext) {
export function isServingApp(ctx: Ctx) {
// dev app
if (ctx.path.startsWith(`/${APP_PREFIX}`)) {
return true
@ -67,7 +73,7 @@ export function isServingApp(ctx: BBContext) {
* @param {object} ctx The main request body to look through.
* @returns {string|undefined} If an appId was found it will be returned.
*/
export async function getAppIdFromCtx(ctx: BBContext) {
export async function getAppIdFromCtx(ctx: Ctx) {
// look in headers
const options = [ctx.headers[Header.APP_ID]]
let appId
@ -83,12 +89,16 @@ export async function getAppIdFromCtx(ctx: BBContext) {
appId = confirmAppId(ctx.request.body.appId)
}

// look in the url - dev app
let appPath =
ctx.request.headers.referrer ||
ctx.path.split("/").filter(subPath => subPath.startsWith(APP_PREFIX))
if (!appId && appPath.length) {
appId = confirmAppId(appPath[0])
// look in the path
const pathId = parseAppIdFromUrl(ctx.path)
if (!appId && pathId) {
appId = confirmAppId(pathId)
}

// look in the referer
const refererId = parseAppIdFromUrl(ctx.request.headers.referer)
if (!appId && refererId) {
appId = confirmAppId(refererId)
}

// look in the url - prod app
@ -99,6 +109,13 @@ export async function getAppIdFromCtx(ctx: BBContext) {
return appId
}

function parseAppIdFromUrl(url?: string) {
if (!url) {
return
}
return url.split("/").find(subPath => subPath.startsWith(APP_PREFIX))
}

/**
* opens the contents of the specified encrypted JWT.
* @return {object} the contents of the token.
@ -115,7 +132,7 @@ export function openJwt(token: string) {
* @param {object} ctx The request which is to be manipulated.
* @param {string} name The name of the cookie to get.
*/
export function getCookie(ctx: BBContext, name: string) {
export function getCookie(ctx: Ctx, name: string) {
const cookie = ctx.cookies.get(name)

if (!cookie) {
@ -133,7 +150,7 @@ export function getCookie(ctx: BBContext, name: string) {
* @param {object} opts options like whether to sign.
*/
export function setCookie(
ctx: BBContext,
ctx: Ctx,
value: any,
name = "builder",
opts = { sign: true }
@ -159,7 +176,7 @@ export function setCookie(
/**
* Utility function, simply calls setCookie with an empty string for value
*/
export function clearCookie(ctx: BBContext, name: string) {
export function clearCookie(ctx: Ctx, name: string) {
setCookie(ctx, null, name)
}

@ -169,7 +186,7 @@ export function clearCookie(ctx: BBContext, name: string) {
* @param {object} ctx The koa context object to be tested.
* @return {boolean} returns true if the call is from the client lib (a built app rather than the builder).
*/
export function isClient(ctx: BBContext) {
export function isClient(ctx: Ctx) {
return ctx.headers[Header.TYPE] === "client"
}

@ -17,7 +17,9 @@ env._set("MINIO_URL", "http://localhost")
env._set("MINIO_ACCESS_KEY", "test")
env._set("MINIO_SECRET_KEY", "test")

if (!process.env.DEBUG) {
  global.console.log = jest.fn() // console.log are ignored in tests
}

if (!process.env.CI) {
// set a longer timeout in dev for debugging

@ -1,6 +1,7 @@
export * as mocks from "./mocks"
export * as structures from "./structures"
export { generator } from "./structures"
export * as testEnv from "./testEnv"

import * as dbConfig from "./db"
dbConfig.init()

@ -1,6 +1,6 @@
import "./posthog"
import "./events"
export * as accounts from "./accounts"
export * as date from "./date"
export * as licenses from "./licenses"
export { default as fetch } from "./fetch"
import "./posthog"
import "./events"

@ -6,3 +6,4 @@ export const generator = new Chance()
export * as koa from "./koa"
export * as accounts from "./accounts"
export * as licenses from "./licenses"
export * as plugins from "./plugins"

@ -0,0 +1,19 @@
import { generator } from "."
import { Plugin, PluginSource, PluginType } from "@budibase/types"

export function plugin(): Plugin {
  return {
    description: generator.word(),
    name: generator.word(),
    version: "1.0.0",
    source: PluginSource.FILE,
    package: {
      name: generator.word,
    },
    hash: generator.hash(),
    schema: {
      type: PluginType.DATASOURCE,
    },
    iconFileName: "icon.svg",
  }
}

@ -0,0 +1,87 @@
import env from "../../src/environment"
import * as tenancy from "../../src/tenancy"
import { newid } from "../../src/utils"

// TENANCY

export async function withTenant(task: (tenantId: string) => any) {
  const tenantId = newid()
  return tenancy.doInTenant(tenantId, async () => {
    await task(tenantId)
  })
}

export function singleTenant() {
  env._set("MULTI_TENANCY", 0)
}

export function multiTenant() {
  env._set("MULTI_TENANCY", 1)
}

// NODE

export function nodeDev() {
  env._set("NODE_ENV", "dev")
}

export function nodeJest() {
  env._set("NODE_ENV", "jest")
}

// FILES

export function withS3() {
  env._set("NODE_ENV", "production")
  env._set("MINIO_ENABLED", 0)
  env._set("MINIO_URL", "http://s3.example.com")
  env._set("CLOUDFRONT_CDN", undefined)
}

const CLOUDFRONT_TEST_KEY =
  "-----BEGIN RSA PRIVATE KEY-----\n" +
  "MIIEpAIBAAKCAQEAqXRsir/0Qba1xEnybUs7d7QEAE02GRc+4H7HD5l5VnAxkV1m\n" +
  "tNTXTmoYkaIhLdebV1EwQs3T9knxoyd4cVcrDkDfDLZErfYWJsuE3/QYNknnZs4/\n" +
  "Ai0cg+v9ZX3gcizvpYg9GQI3INM0uRG8lJwGP7FQ/kknhA2yVFVCSxX6kkNtOUh5\n" +
  "dKSG7m6IwswcSwD++Z/94vsFkoZIGY0e1CD/drFJ6+1TFY2YgbDKT5wDFLJ9vHFx\n" +
  "/5o4POwn3gz/ru2Db9jbRdfEAqRdy46nRKQgBGUmupAgSK1+BJEzafexp8RmCGb0\n" +
  "WUffxOtj8/jNCeCF0JBgVHAe3crOQ8ySrtoaHQIDAQABAoIBAA+ipW07/u6dTDI7\n" +
  "XHoHKgqGeqQIe8he47dVG0ruL0rxeTFfe92NkfwzP+cYHZWcQkIRRLG1Six8cCZM\n" +
  "uwlCML/U7n++xaGDhlG4D5+WZzGDKi3LM/cgcHQfrzbRIYeHa+lLI9AN60ZFFqVI\n" +
  "5KyVpOH1m3KLD3FYzi6H22EQOxmJpqWlt2uArny5LxlPJKmmGSFjvneb4N2ZAKGQ\n" +
  "QfClJGz9tRjceWUUdJrpqmTmBQIosKmLPq8PEviUNAVG+6m4r8jiRbf8OKkAm+3L\n" +
  "LVIsN8HfYB9jEuERYPnbuXdX0kDEkg0xEyTH5YbNZvfm5ptCU9Xn+Jz1trF+wCHD\n" +
  "2RlxdQUCgYEA3U0nCf6NTmmeMCsAX6gvaPuM0iUfUfS3b3G57I6u46lLGNLsfJw6\n" +
  "MTpVc164lKYQK9czw/ijKzb8e3mcyzbPorVkajMjUCNWGrMK+vFbOGmqQkhUi30U\n" +
  "IJuuTktMd+21D/SpLlev4MLria23vUIKEqNenYpV6wkGLt/mKtISaPMCgYEAxAYx\n" +
  "j+xJLTK9eN+rpekwjYE78hD9VoBkBnr/NBiGV302AsJRuq2+L4zcBnAsH+SidFim\n" +
  "cwqoj3jeVT8ZQFXlK3fGVaEJsCXd6GWk8ZIWUTn9JZwi2KcCvCU/YiHfx8c7y7Gl\n" +
  "SiPXUPsvvkcw6RRh2u4J5tHLIqJe3W58ENoBNK8CgYEApxTBDMKrXTBQxn0w4wfQ\n" +
  "A6soPuDYLMBeXj226eswD6KZmDxnYA1zwgcQzPIO2ewm+XKZGrR2PQJezbqbrrHL\n" +
  "QkVBcwz49GA5eh8Dg0MGZCki6rhBXK8qqxPfHi2rpkBKG6nUsbBykXeY7XHC75kU\n" +
  "kc3WeYsgIzvE908EMAA69hECgYEAinbpiYVZh1DBH+G26MIYZswz4OB5YyHcBevZ\n" +
  "2x27v48VmMtUWe4iWopAXVfdA0ZILrD0Gm0b9gRl4IdqudQyxgqcEZ5oLoIBBwjN\n" +
  "g0oy83tnwqpQvwLx3p7c79+HqCGmrlK0s/MvQ+e6qMi21t1r5e6hFed5euSA6B8E\n" +
  "Cg9ELMcCgYB9bGwlNAE+iuzMIhKev1s7h3TzqKtGw37TtHXvxcTQs3uawJQksQ2s\n" +
  "K0Zy1Ta7vybbwAA5m+LxoMT04WUdJO7Cr8/3rBMrbKKO3H7IgC3G+nXnOBdshzn5\n" +
  "ifMbhZslFThC/osD5ZV7snXZgTWyPexaINJhHmdrAWpmW1h+UFoiMw==\n" +
  "-----END RSA PRIVATE KEY-----\n"

const CLOUDFRONT_TEST_KEY_64 = Buffer.from(
  CLOUDFRONT_TEST_KEY,
  "utf-8"
).toString("base64")

export function withCloudfront() {
  withS3()
  env._set("CLOUDFRONT_CDN", "http://cf.example.com")
  env._set("CLOUDFRONT_PUBLIC_KEY_ID", "keypair_123")
  env._set("CLOUDFRONT_PRIVATE_KEY_64", CLOUDFRONT_TEST_KEY_64)
}

export function withMinio() {
  env._set("NODE_ENV", "production")
  env._set("MINIO_ENABLED", 1)
  env._set("MINIO_URL", "http://minio.example.com")
  env._set("CLOUDFRONT_CDN", undefined)
}

@ -8,6 +8,10 @@
}
},
"references": [
{ "path": "../types" },
{ "path": "../types" }
],
"exclude": [
"node_modules",
"dist"
]
}

@ -1526,6 +1526,13 @@ asynckit@^0.4.0:
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==

aws-cloudfront-sign@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/aws-cloudfront-sign/-/aws-cloudfront-sign-2.2.0.tgz#3910f5a6d0d90fec07f2b4ef8ab07f3eefb5625d"
integrity sha512-qG+rwZMP3KRTPPbVmWY8DlrT56AkA4iVOeo23vkdK2EXeW/brJFN2haSNKzVz+oYhFMEIzVVloeAcrEzuRkuVQ==
dependencies:
lodash "^3.6.0"

aws-sdk@2.1030.0:
version "2.1030.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82"
@ -3827,6 +3834,11 @@ lodash@4.17.21, lodash@^4.17.21:
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==

lodash@^3.6.0:
version "3.10.1"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6"
integrity sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==

lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"

@ -1,18 +1,15 @@
<script>
import { getIcon } from "./icons"
import CustomSVG from "components/common/CustomSVG.svelte"
import { admin } from "stores/portal"

export let integrationType
export let schema
export let size = "18"

$: objectStoreUrl = $admin.cloud ? "https://cdn.budi.live" : ""
$: pluginsUrl = `${objectStoreUrl}/plugins`
$: iconInfo = getIcon(integrationType, schema)

async function getSvgFromUrl(info) {
const url = `${pluginsUrl}/${info.url}`
const url = `${info.url}`
const resp = await fetch(url, {
headers: {
["pragma"]: "no-cache",

@ -92,8 +92,8 @@

<svelte:head>
{#if $builderStore.usedPlugins?.length}
{#each $builderStore.usedPlugins as plugin (plugin.hash)}
<script src={`${plugin.jsUrl}?r=${plugin.hash || ""}`}></script>
{#each $builderStore.usedPlugins as plugin}
<script src={`${plugin.jsUrl}`}></script>
{/each}
{/if}
</svelte:head>

@ -51,6 +51,25 @@ module AwsMock {
Contents: {},
})
)

// @ts-ignore
this.getSignedUrl = (operation, params) => {
return `http://test.com/${params.Bucket}/${params.Key}`
}

// @ts-ignore
this.headBucket = jest.fn(
response({
Contents: {},
})
)

// @ts-ignore
this.upload = jest.fn(
response({
Contents: {},
})
)
}

aws.DynamoDB = { DocumentClient }

@ -130,13 +130,15 @@
"@types/ioredis": "4.28.10",
"@types/jest": "27.5.1",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.11",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/oracledb": "5.2.2",
"@types/pouchdb": "6.4.0",
"@types/redis": "4.0.11",
"@types/server-destroy": "1.0.1",
"@types/tar": "6.1.3",
"@typescript-eslint/parser": "5.45.0",
"apidoc": "0.50.4",
"babel-jest": "27.5.1",

@ -23,21 +23,18 @@ import {
errors,
events,
migrations,
objectStore,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import {
clientLibraryPath,
stringToReadStream,
isQsTrue,
} from "../../utilities"
import { stringToReadStream, isQsTrue } from "../../utilities"
import { getLocksById } from "../../utilities/redis"
import {
updateClientLibrary,
backupClientLibrary,
revertClientLibrary,
} from "../../utilities/fileSystem/clientLibrary"
} from "../../utilities/fileSystem"
import { cleanupAutomations } from "../../automations/utils"
import { checkAppMetadata } from "../../automations/logging"
import { getUniqueRows } from "../../utilities/usageQuota/rows"
@ -49,9 +46,9 @@ import {
MigrationType,
BBContext,
Database,
UserCtx,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import { enrichPluginURLs } from "../../utilities/plugins"
import sdk from "../../sdk"

// utility function, need to do away with this
@ -204,27 +201,34 @@ export async function fetchAppDefinition(ctx: BBContext) {
}
}

export async function fetchAppPackage(ctx: BBContext) {
export async function fetchAppPackage(ctx: UserCtx) {
const db = context.getAppDB()
let application = await db.get(DocumentType.APP_METADATA)
const layouts = await getLayouts()
let screens = await getScreens()

// Enrich plugin URLs
application.usedPlugins = enrichPluginURLs(application.usedPlugins)
application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins
)

// Only filter screens if the user is not a builder
if (!(ctx.user?.builder && ctx.user.builder.global)) {
if (!(ctx.user.builder && ctx.user.builder.global)) {
const userRoleId = getUserRoleId(ctx)
const accessController = new roles.AccessController()
screens = await accessController.checkScreensAccess(screens, userRoleId)
}

const clientLibPath = objectStore.clientLibraryUrl(
ctx.params.appId,
application.version
)

ctx.body = {
application,
screens,
layouts,
clientLibPath: clientLibraryPath(ctx.params.appId, application.version),
clientLibPath,
}
}

@ -370,7 +374,7 @@ async function appPostCreate(ctx: BBContext, app: App) {
if (err.code && err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
// this import resulted in row usage exceeding the quota
// delete the app
// skip pre- and post-steps as no rows have been added to quotas yet
// skip pre and post-steps as no rows have been added to quotas yet
ctx.params.appId = app.appId
await destroyApp(ctx)
}

@ -3,7 +3,7 @@ import { InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users"
import { roles, context } from "@budibase/backend-core"
import { groups } from "@budibase/pro"
import { ContextUser, User, Row } from "@budibase/types"
import { ContextUser, User, Row, UserCtx } from "@budibase/types"

const PUBLIC_ROLE = roles.BUILTIN_ROLE_IDS.PUBLIC

@ -16,7 +16,7 @@ const addSessionAttributesToUser = (ctx: any) => {
}
}

export async function fetchSelf(ctx: any) {
export async function fetchSelf(ctx: UserCtx) {
let userId = ctx.user.userId || ctx.user._id
/* istanbul ignore next */
if (!userId || !ctx.isAuthenticated) {
@ -5,8 +5,8 @@ import { stringToReadStream } from "../../utilities"
|
|||
import { getDocParams, DocumentType, isDevAppID } from "../../db/utils"
|
||||
import { create } from "./application"
|
||||
import { join } from "path"
|
||||
import { App, BBContext, Database } from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
import { App, Ctx, Database } from "@budibase/types"
|
||||
|
||||
async function createApp(appName: string, appDirectory: string) {
|
||||
const ctx = {
|
||||
|
@ -35,7 +35,7 @@ async function getAllDocType(db: Database, docType: string) {
|
|||
return response.rows.map(row => row.doc)
|
||||
}
|
||||
|
||||
export async function exportApps(ctx: BBContext) {
|
||||
export async function exportApps(ctx: Ctx) {
|
||||
if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
|
||||
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
|
||||
}
|
||||
|
@ -65,13 +65,13 @@ async function checkHasBeenImported() {
|
|||
return apps.length !== 0
|
||||
}
|
||||
|
||||
export async function hasBeenImported(ctx: BBContext) {
|
||||
export async function hasBeenImported(ctx: Ctx) {
|
||||
ctx.body = {
|
||||
imported: await checkHasBeenImported(),
|
||||
}
|
||||
}
|
||||
|
||||
export async function importApps(ctx: BBContext) {
|
||||
export async function importApps(ctx: Ctx) {
|
||||
if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
|
||||
ctx.throw(400, "Importing only allowed in self hosted environments.")
|
||||
}
|
||||
|
@ -82,12 +82,16 @@ export async function importApps(ctx: BBContext) {
|
|||
"Import file is required and environment must be fresh to import apps."
|
||||
)
|
||||
}
|
||||
if (ctx.request.files.importFile.type !== "application/gzip") {
|
||||
const file = ctx.request.files.importFile
|
||||
if (Array.isArray(file)) {
|
||||
ctx.throw(400, "Single file is required")
|
||||
}
|
||||
if (file.type !== "application/gzip") {
|
||||
ctx.throw(400, "Import file must be a gzipped tarball.")
|
||||
}
|
||||
|
||||
// initially get all the app databases out of the tarball
|
||||
const tmpPath = sdk.backups.untarFile(ctx.request.files.importFile)
|
||||
const tmpPath = sdk.backups.untarFile(file)
|
||||
const globalDbImport = sdk.backups.getGlobalDBFile(tmpPath)
|
||||
const appNames = sdk.backups.getListOfAppsInMulti(tmpPath)
|
||||
|
||||
|
|
|
@ -1,10 +1,14 @@
|
|||
import { npmUpload, urlUpload, githubUpload, fileUpload } from "./uploaders"
|
||||
import { plugins as pluginCore, tenancy } from "@budibase/backend-core"
|
||||
import { PluginType, FileType, PluginSource } from "@budibase/types"
|
||||
import {
|
||||
plugins as pluginCore,
|
||||
db as dbCore,
|
||||
tenancy,
|
||||
objectStore,
|
||||
} from "@budibase/backend-core"
|
||||
import { PluginType, FileType, PluginSource, Plugin } from "@budibase/types"
|
||||
import env from "../../../environment"
|
||||
import { ClientAppSocket } from "../../../websocket"
|
||||
import { db as dbCore } from "@budibase/backend-core"
|
||||
import { plugins } from "@budibase/pro"
|
||||
import { sdk as pro } from "@budibase/pro"
|
||||
|
||||
export async function getPlugins(type?: PluginType) {
|
||||
const db = tenancy.getGlobalDB()
|
||||
|
@ -13,9 +17,10 @@ export async function getPlugins(type?: PluginType) {
|
|||
include_docs: true,
|
||||
})
|
||||
)
|
||||
const plugins = response.rows.map((row: any) => row.doc)
|
||||
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
|
||||
plugins = objectStore.enrichPluginURLs(plugins)
|
||||
if (type) {
|
||||
return plugins.filter((plugin: any) => plugin.schema?.type === type)
|
||||
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
|
||||
} else {
|
||||
return plugins
|
||||
}
|
||||
|
@ -84,7 +89,7 @@ export async function create(ctx: any) {
|
|||
)
|
||||
}
|
||||
|
||||
const doc = await plugins.storePlugin(metadata, directory, source)
|
||||
const doc = await pro.plugins.storePlugin(metadata, directory, source)
|
||||
|
||||
ClientAppSocket.emit("plugins-update", { name, hash: doc.hash })
|
||||
ctx.body = {
|
||||
|
@ -107,7 +112,7 @@ export async function destroy(ctx: any) {
|
|||
const { pluginId } = ctx.params
|
||||
|
||||
try {
|
||||
await plugins.deletePlugin(pluginId)
|
||||
await pro.plugins.deletePlugin(pluginId)
|
||||
|
||||
ctx.body = { message: `Plugin ${ctx.params.pluginId} deleted.` }
|
||||
} catch (err: any) {
|
||||
|
@ -127,7 +132,7 @@ export async function processUploadedPlugin(
|
|||
throw new Error("Only component plugins are supported outside of self-host")
|
||||
}
|
||||
|
||||
const doc = await plugins.storePlugin(metadata, directory, source)
|
||||
const doc = await pro.plugins.storePlugin(metadata, directory, source)
|
||||
ClientAppSocket.emit("plugin-update", { name: doc.name, hash: doc.hash })
|
||||
return doc
|
||||
}
|
||||
|
|
|
@@ -21,7 +21,6 @@ import { getDatasourceAndQuery } from "./utils"
import { FieldTypes, RelationshipTypes } from "../../../constants"
import { breakExternalTableId, isSQL } from "../../../integrations/utils"
import { processObjectSync } from "@budibase/string-templates"
// @ts-ignore
import { cloneDeep } from "lodash/fp"
import { processFormulas, processDates } from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core"

@ -13,7 +13,7 @@ import {
|
|||
cleanupAttachments,
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import { FieldTypes } from "../../../constants"
|
||||
import { validate as rowValidate, findRow } from "./utils"
|
||||
import * as utils from "./utils"
|
||||
import { fullSearch, paginatedSearch } from "./internalSearch"
|
||||
import { getGlobalUsersFromMetadata } from "../../../utilities/global"
|
||||
import * as inMemoryViews from "../../../db/inMemoryView"
|
||||
|
@ -30,7 +30,8 @@ import { finaliseRow, updateRelatedFormula } from "./staticFormula"
|
|||
import * as exporters from "../view/exporters"
|
||||
import { apiFileReturn } from "../../../utilities/fileSystem"
|
||||
import {
|
||||
BBContext,
|
||||
Ctx,
|
||||
UserCtx,
|
||||
Database,
|
||||
LinkDocumentValue,
|
||||
Row,
|
||||
|
@ -69,7 +70,7 @@ async function getView(db: Database, viewName: string) {
|
|||
return viewInfo
|
||||
}
|
||||
|
||||
async function getRawTableData(ctx: BBContext, db: Database, tableId: string) {
|
||||
async function getRawTableData(ctx: Ctx, db: Database, tableId: string) {
|
||||
let rows
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
await userController.fetchMetadata(ctx)
|
||||
|
@ -85,7 +86,7 @@ async function getRawTableData(ctx: BBContext, db: Database, tableId: string) {
|
|||
return rows as Row[]
|
||||
}
|
||||
|
||||
export async function patch(ctx: BBContext) {
|
||||
export async function patch(ctx: UserCtx) {
|
||||
const db = context.getAppDB()
|
||||
const inputs = ctx.request.body
|
||||
const tableId = inputs.tableId
|
||||
|
@ -95,7 +96,7 @@ export async function patch(ctx: BBContext) {
|
|||
let dbTable = await db.get(tableId)
|
||||
oldRow = await outputProcessing(
|
||||
dbTable,
|
||||
await findRow(ctx, tableId, inputs._id)
|
||||
await utils.findRow(ctx, tableId, inputs._id)
|
||||
)
|
||||
} catch (err) {
|
||||
if (isUserTable) {
|
||||
|
@ -117,8 +118,8 @@ export async function patch(ctx: BBContext) {
|
|||
}
|
||||
|
||||
// this returns the table and row incase they have been updated
|
||||
let { table, row } = inputProcessing(ctx.user!, dbTable, combinedRow)
|
||||
const validateResult = await rowValidate({
|
||||
let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow)
|
||||
const validateResult = await utils.validate({
|
||||
row,
|
||||
table,
|
||||
})
|
||||
|
@ -150,7 +151,7 @@ export async function patch(ctx: BBContext) {
|
|||
})
|
||||
}
|
||||
|
||||
export async function save(ctx: BBContext) {
|
||||
export async function save(ctx: UserCtx) {
|
||||
const db = context.getAppDB()
|
||||
let inputs = ctx.request.body
|
||||
inputs.tableId = ctx.params.tableId
|
||||
|
@ -161,8 +162,8 @@ export async function save(ctx: BBContext) {
|
|||
|
||||
// this returns the table and row incase they have been updated
|
||||
const dbTable = await db.get(inputs.tableId)
|
||||
let { table, row } = inputProcessing(ctx.user!, dbTable, inputs)
|
||||
const validateResult = await rowValidate({
|
||||
let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
|
||||
const validateResult = await utils.validate({
|
||||
row,
|
||||
table,
|
||||
})
|
||||
|
@ -185,7 +186,7 @@ export async function save(ctx: BBContext) {
|
|||
})
|
||||
}
|
||||
|
||||
export async function fetchView(ctx: BBContext) {
|
||||
export async function fetchView(ctx: Ctx) {
|
||||
const viewName = ctx.params.viewName
|
||||
|
||||
// if this is a table view being looked for just transfer to that
|
||||
|
@ -252,7 +253,7 @@ export async function fetchView(ctx: BBContext) {
|
|||
return rows
|
||||
}
|
||||
|
||||
export async function fetch(ctx: BBContext) {
|
||||
export async function fetch(ctx: Ctx) {
|
||||
const db = context.getAppDB()
|
||||
|
||||
const tableId = ctx.params.tableId
|
||||
|
@ -261,15 +262,15 @@ export async function fetch(ctx: BBContext) {
|
|||
return outputProcessing(table, rows)
|
||||
}
|
||||
|
||||
export async function find(ctx: BBContext) {
|
||||
export async function find(ctx: Ctx) {
|
||||
const db = dbCore.getDB(ctx.appId)
|
||||
const table = await db.get(ctx.params.tableId)
|
||||
let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
|
||||
let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
|
||||
row = await outputProcessing(table, row)
|
||||
return row
|
||||
}
|
||||
|
||||
export async function destroy(ctx: BBContext) {
|
||||
export async function destroy(ctx: Ctx) {
|
||||
const db = context.getAppDB()
|
||||
const { _id } = ctx.request.body
|
||||
let row = await db.get(_id)
|
||||
|
@ -305,7 +306,7 @@ export async function destroy(ctx: BBContext) {
|
|||
return { response, row }
|
||||
}
|
||||
|
||||
export async function bulkDestroy(ctx: BBContext) {
|
||||
export async function bulkDestroy(ctx: Ctx) {
|
||||
const db = context.getAppDB()
|
||||
const tableId = ctx.params.tableId
|
||||
const table = await db.get(tableId)
|
||||
|
@ -344,7 +345,7 @@ export async function bulkDestroy(ctx: BBContext) {
|
|||
return { response: { ok: true }, rows: processedRows }
|
||||
}
|
||||
|
||||
export async function search(ctx: BBContext) {
|
||||
export async function search(ctx: Ctx) {
|
||||
// Fetch the whole table when running in cypress, as search doesn't work
|
||||
if (!env.COUCH_DB_URL && env.isCypress()) {
|
||||
return { rows: await fetch(ctx) }
|
||||
|
@ -376,14 +377,14 @@ export async function search(ctx: BBContext) {
|
|||
return response
|
||||
}
|
||||
|
||||
export async function validate(ctx: BBContext) {
|
||||
return rowValidate({
|
||||
export async function validate(ctx: Ctx) {
|
||||
return utils.validate({
|
||||
tableId: ctx.params.tableId,
|
||||
row: ctx.request.body,
|
||||
})
|
||||
}
|
||||
|
||||
export async function exportRows(ctx: BBContext) {
|
||||
export async function exportRows(ctx: Ctx) {
|
||||
const db = context.getAppDB()
|
||||
const table = await db.get(ctx.params.tableId)
|
||||
const rowIds = ctx.request.body.rows
|
||||
|
@ -421,14 +422,14 @@ export async function exportRows(ctx: BBContext) {
|
|||
return apiFileReturn(exporter(headers, rows))
|
||||
}
|
||||
|
||||
export async function fetchEnrichedRow(ctx: BBContext) {
|
||||
export async function fetchEnrichedRow(ctx: Ctx) {
|
||||
const db = context.getAppDB()
|
||||
const tableId = ctx.params.tableId
|
||||
const rowId = ctx.params.rowId
|
||||
// need table to work out where links go in row
|
||||
let [table, row] = await Promise.all([
|
||||
db.get(tableId),
|
||||
findRow(ctx, tableId, rowId),
|
||||
utils.findRow(ctx, tableId, rowId),
|
||||
])
|
||||
// get the link docs
|
||||
const linkVals = (await linkRows.getLinkDocuments({
|
||||
|
|
|
@@ -16,7 +16,10 @@ const { cloneDeep } = require("lodash/fp")
* updated.
* NOTE: this will only for affect static formulas.
*/
export async function updateRelatedFormula(table: Table, enrichedRows: Row[]) {
export async function updateRelatedFormula(
table: Table,
enrichedRows: Row[] | Row
) {
const db = context.getAppDB()
// no formula to update, we're done
if (!table.relatedFormula) {
@@ -155,7 +158,7 @@ export async function finaliseRow(
enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false })
// this updates the related formulas in other rows based on the relations to this row
if (updateFormula) {
await exports.updateRelatedFormula(table, enrichedRow)
await updateRelatedFormula(table, enrichedRow)
}
return { row: enrichedRow, table }
}

@@ -7,6 +7,7 @@ import { BBContext, Row, Table } from "@budibase/types"
export { removeKeyNumbering } from "../../../integrations/base/utils"
const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")
import { Ctx } from "@budibase/types"

validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) {
@@ -25,7 +26,7 @@ export async function getDatasourceAndQuery(json: any) {
return makeExternalQuery(datasource, json)
}

export async function findRow(ctx: BBContext, tableId: string, rowId: string) {
export async function findRow(ctx: Ctx, tableId: string, rowId: string) {
const db = context.getAppDB()
let row
// TODO remove special user case in future

@@ -1,11 +1,9 @@
import { enrichPluginURLs } from "../../../utilities/plugins"

require("svelte/register")

const send = require("koa-send")
const { resolve, join } = require("../../../utilities/centralPath")
const uuid = require("uuid")
const { ObjectStoreBuckets } = require("../../../constants")
import { ObjectStoreBuckets } from "../../../constants"
const { processString } = require("@budibase/string-templates")
const {
loadHandlebarsFile,
@@ -13,8 +11,6 @@ const {
TOP_LEVEL_PATH,
} = require("../../../utilities/fileSystem")
const env = require("../../../environment")
const { clientLibraryPath } = require("../../../utilities")
const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentType } = require("../../../db/utils")
const { context, objectStore, utils } = require("@budibase/backend-core")
const AWS = require("aws-sdk")
@@ -33,7 +29,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
return {
size: file.size,
name: file.name,
url: attachmentsRelativeURL(response.Key),
url: objectStore.getAppFileUrl(s3Key),
extension: [...file.name.split(".")].pop(),
key: response.Key,
}
@@ -85,7 +81,7 @@ export const uploadFile = async function (ctx: any) {

return prepareUpload({
file,
s3Key: `${ctx.appId}/attachments/${processedFileName}`,
s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`,
bucket: ObjectStoreBuckets.APPS,
})
})
@@ -107,14 +103,14 @@ export const serveApp = async function (ctx: any) {

if (!env.isJest()) {
const App = require("./templates/BudibaseApp.svelte").default
const plugins = enrichPluginURLs(appInfo.usedPlugins)
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
const { head, html, css } = App.render({
metaImage:
"https://res.cloudinary.com/daog6scxm/image/upload/v1666109324/meta-images/budibase-meta-image_uukc1m.png",
title: appInfo.name,
production: env.isProd(),
appId,
clientLibPath: clientLibraryPath(appId, appInfo.version, ctx),
clientLibPath: objectStore.clientLibraryUrl(appId, appInfo.version),
usedPlugins: plugins,
})

@@ -139,7 +135,7 @@ export const serveBuilderPreview = async function (ctx: any) {
let appId = context.getAppId()
const previewHbs = loadHandlebarsFile(`${__dirname}/templates/preview.hbs`)
ctx.body = await processString(previewHbs, {
clientLibPath: clientLibraryPath(appId, appInfo.version, ctx),
clientLibPath: objectStore.clientLibraryUrl(appId, appInfo.version),
})
} else {
// just return the app info for jest to assert on

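With presigned file access, an upload result stores only the object key and derives its URL per request through objectStore.getAppFileUrl, rather than persisting a bucket URL. A rough, illustrative sketch of that shape (only getAppFileUrl is taken from the change above; the helper and types around it are assumed):

import { objectStore } from "@budibase/backend-core"

interface UploadedFile {
  size: number
  name: string
}

// illustrative helper: the attachment metadata keeps the key, the URL is generated on read
function describeUpload(file: UploadedFile, s3Key: string) {
  return {
    size: file.size,
    name: file.name,
    key: s3Key,
    url: objectStore.getAppFileUrl(s3Key),
  }
}
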
@@ -26,13 +26,14 @@ import cloudRoutes from "./cloud"
import migrationRoutes from "./migrations"
import pluginRoutes from "./plugin"
import Router from "@koa/router"
import { api } from "@budibase/pro"
import { api as pro } from "@budibase/pro"

export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"

const appBackupRoutes = api.appBackups
const scheduleRoutes = api.schedules
const appBackupRoutes = pro.appBackups
const scheduleRoutes = pro.schedules

export const mainRoutes: Router[] = [
appBackupRoutes,
backupRoutes,

@@ -10,6 +10,7 @@ const {
StaticQuotaName,
MonthlyQuotaName,
} = require("@budibase/types")
const { structures } = require("@budibase/backend-core/tests");

describe("/rows", () => {
let request = setup.getRequest()
@@ -494,12 +495,13 @@ describe("/rows", () => {
describe("attachments", () => {
it("should allow enriching attachment rows", async () => {
const table = await config.createAttachmentTable()
const attachmentId = `${structures.uuid()}.csv`
const row = await config.createRow({
name: "test",
description: "test",
attachment: [
{
key: `${config.getAppId()}/attachments/test/thing.csv`,
key: `${config.getAppId()}/attachments/${attachmentId}`,
},
],
tableId: table._id,
@@ -509,7 +511,7 @@ describe("/rows", () => {
context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row])
expect(enriched[0].attachment[0].url).toBe(
`/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv`
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
)
})
})

@@ -1,20 +1,5 @@
jest.mock("node-fetch")
jest.mock("aws-sdk", () => ({
config: {
update: jest.fn(),
},
DynamoDB: {
DocumentClient: jest.fn(),
},
S3: jest.fn(() => ({
getSignedUrl: jest.fn(() => {
return "my-url"
}),
})),
}))

const setup = require("./utilities")
const { events, constants } = require("@budibase/backend-core")
const { constants } = require("@budibase/backend-core")

describe("/static", () => {
let request = setup.getRequest()
@@ -102,7 +87,7 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("my-url")
expect(res.body.signedUrl).toEqual("http://test.com/foo/bar")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)

@@ -112,12 +112,17 @@ if (isDev() && module.exports.DISABLE_THREADING == null) {
}

// clean up any environment variable edge cases
for (let [key, value] of Object.entries(module.exports)) {
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable
if (value === "0") {
// @ts-ignore
environment[key] = 0
}
// handle the edge case of "false" to disable an environment variable
if (value === "false") {
// @ts-ignore
environment[key] = 0
}
}

export = environment

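The loop above normalises string flags such as the new MINIO_ENABLED variable so that "0" or "false" behaves as a falsy value at runtime. A minimal standalone sketch of the same coercion (variable name chosen for illustration):

// sketch: string "false"/"0" flags are coerced to a falsy 0
const environment: Record<string, any> = { MINIO_ENABLED: "false" }
for (let [key, value] of Object.entries(environment)) {
  if (value === "0" || value === "false") {
    environment[key] = 0
  }
}
console.log(!!environment.MINIO_ENABLED) // false
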
@@ -98,11 +98,7 @@ export async function getIntegration(integration: string) {
for (let plugin of plugins) {
if (plugin.name === integration) {
// need to use commonJS require due to its dynamic runtime nature
const retrieved: any = await getDatasourcePlugin(
plugin.name,
plugin.jsUrl,
plugin.schema?.hash
)
const retrieved: any = await getDatasourcePlugin(plugin)
if (retrieved.integration) {
return retrieved.integration
} else {

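getDatasourcePlugin now receives the whole Plugin document, so the cached bundle on disk can be checked against the plugin's schema hash instead of being keyed by a URL. A simplified sketch of that cache check (file layout assumed; mirrors the plugin utility further down in this diff):

import fs from "fs"
import { join } from "path"

// sketch: a cached plugin file is only reused while its recorded hash still matches
function isPluginCacheValid(dir: string, name: string, hash?: string): boolean {
  const file = join(dir, name)
  const metadataFile = `${file}.bbmetadata`
  if (!fs.existsSync(file) || !fs.existsSync(metadataFile)) {
    return false
  }
  return fs.readFileSync(metadataFile, "utf8") === hash
}
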
@@ -5,6 +5,7 @@ import {
isGoogleConfig,
isOIDCConfig,
isSettingsConfig,
ConfigType,
} from "@budibase/types"
import env from "./../../../../environment"

@@ -31,15 +32,15 @@ export const backfill = async (
await events.email.SMTPCreated(timestamp)
}
if (isGoogleConfig(config)) {
await events.auth.SSOCreated("google", timestamp)
await events.auth.SSOCreated(ConfigType.GOOGLE, timestamp)
if (config.config.activated) {
await events.auth.SSOActivated("google", timestamp)
await events.auth.SSOActivated(ConfigType.GOOGLE, timestamp)
}
}
if (isOIDCConfig(config)) {
await events.auth.SSOCreated("oidc", timestamp)
await events.auth.SSOCreated(ConfigType.OIDC, timestamp)
if (config.config.configs[0].activated) {
await events.auth.SSOActivated("oidc", timestamp)
await events.auth.SSOActivated(ConfigType.OIDC, timestamp)
}
}
if (isSettingsConfig(config)) {

@@ -55,12 +55,8 @@ async function updateAttachmentColumns(prodAppId: string, db: Database) {
continue
}
row[column] = row[column].map((attachment: RowAttachment) => {
// URL looks like: /prod-budi-app-assets/appId/attachments/file.csv
const urlParts = attachment.url.split("/")
// drop the first empty element
urlParts.shift()
// get the prefix
const prefix = urlParts.shift()
// Key looks like: appId/attachments/file.csv
const urlParts = attachment.key.split("/")
// remove the app ID
urlParts.shift()
// add new app ID
@@ -69,7 +65,7 @@ async function updateAttachmentColumns(prodAppId: string, db: Database) {
return {
...attachment,
key,
url: `/${prefix}/${key}`,
url: "", // calculated on retrieval using key
}
})
}

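The migration above rewrites each attachment key under the production app ID and blanks the stored URL, since URLs are now calculated on retrieval from the key. A condensed sketch of that rewrite (types simplified for illustration):

// sketch: move an attachment key to a new app ID and drop the persisted URL
function migrateAttachment(
  attachment: { key: string; url?: string },
  prodAppId: string
) {
  // key looks like: appId/attachments/file.csv
  const parts = attachment.key.split("/")
  parts.shift() // remove the old app ID
  const key = [prodAppId, ...parts].join("/")
  return { ...attachment, key, url: "" }
}
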
@@ -19,7 +19,10 @@ import { mocks } from "@budibase/backend-core/tests"
const tk = require("timekeeper")
tk.freeze(mocks.date.MOCK_DATE)

if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
global.console.warn = jest.fn() // console.warn are ignored in tests
}

if (!process.env.CI) {
// set a longer timeout in dev for debugging

@ -0,0 +1,86 @@
|
|||
import { budibaseTempDir } from "../budibaseDir"
|
||||
import fs from "fs"
|
||||
import { join } from "path"
|
||||
import { ObjectStoreBuckets } from "../../constants"
|
||||
import { updateClientLibrary } from "./clientLibrary"
|
||||
import env from "../../environment"
|
||||
import { objectStore, context } from "@budibase/backend-core"
|
||||
import { TOP_LEVEL_PATH } from "./filesystem"
|
||||
|
||||
export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
|
||||
|
||||
/**
|
||||
* Uploads the latest client library to the object store.
|
||||
* @param {string} appId The ID of the app which is being created.
|
||||
* @return {Promise<void>} once promise completes app resources should be ready in object store.
|
||||
*/
|
||||
export const createApp = async (appId: string) => {
|
||||
await updateClientLibrary(appId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all of the assets created for an app in the object store.
|
||||
* @param {string} appId The ID of the app which is being deleted.
|
||||
* @return {Promise<void>} once promise completes the app resources will be removed from object store.
|
||||
*/
|
||||
export const deleteApp = async (appId: string) => {
|
||||
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves component libraries from object store (or tmp symlink if in local)
|
||||
*/
|
||||
export const getComponentLibraryManifest = async (library: string) => {
|
||||
const appId = context.getAppId()
|
||||
const filename = "manifest.json"
|
||||
/* istanbul ignore next */
|
||||
// when testing in cypress and so on we need to get the package
|
||||
// as the environment may not be fully fleshed out for dev or prod
|
||||
if (env.isTest()) {
|
||||
library = library.replace("standard-components", "client")
|
||||
const lib = library.split("/")[1]
|
||||
const path = require.resolve(library).split(lib)[0]
|
||||
return require(join(path, lib, filename))
|
||||
} else if (env.isDev()) {
|
||||
const path = join(NODE_MODULES_PATH, "@budibase", "client", filename)
|
||||
// always load from new so that updates are refreshed
|
||||
delete require.cache[require.resolve(path)]
|
||||
return require(path)
|
||||
}
|
||||
|
||||
if (!appId) {
|
||||
throw new Error("No app ID found - cannot get component libraries")
|
||||
}
|
||||
|
||||
let resp
|
||||
let path
|
||||
try {
|
||||
// Try to load the manifest from the new file location
|
||||
path = join(appId, filename)
|
||||
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
|
||||
error
|
||||
)
|
||||
// Fallback to loading it from the old location for old apps
|
||||
path = join(appId, "node_modules", library, "package", filename)
|
||||
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
|
||||
}
|
||||
if (typeof resp !== "string") {
|
||||
resp = resp.toString("utf8")
|
||||
}
|
||||
return JSON.parse(resp)
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of app IDs makes sure file system is cleared of any of their temp info.
|
||||
*/
|
||||
export const cleanup = (appIds: string[]) => {
|
||||
for (let appId of appIds) {
|
||||
const path = join(budibaseTempDir(), appId)
|
||||
if (fs.existsSync(path)) {
|
||||
fs.rmdirSync(path, { recursive: true })
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -4,7 +4,7 @@ import fs from "fs"
import { objectStore } from "@budibase/backend-core"
import { resolve } from "../centralPath"
import env from "../../environment"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
import { TOP_LEVEL_PATH } from "./filesystem"

/**
* Client library paths in the object store:

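Elsewhere in this change, objectStore.clientLibraryUrl(appId, version) replaces the old clientLibraryPath helper; as in the removed utility further down, the app version doubles as a cache-busting query parameter. An illustrative sketch of that idea only, not the real implementation:

// sketch: append the app version so browsers re-fetch the client bundle after an upgrade
function withClientVersion(url: string, version?: string): string {
  return version ? `${url}?v=${version}` : url
}
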
@ -0,0 +1,170 @@
|
|||
import { PathLike } from "fs"
|
||||
const { budibaseTempDir } = require("../budibaseDir")
|
||||
const fs = require("fs")
|
||||
const { join } = require("path")
|
||||
const uuid = require("uuid/v4")
|
||||
const env = require("../../environment")
|
||||
import tar from "tar"
|
||||
|
||||
export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
|
||||
|
||||
/**
|
||||
* Upon first startup of instance there may not be everything we need in tmp directory, set it up.
|
||||
*/
|
||||
export const init = () => {
|
||||
const tempDir = budibaseTempDir()
|
||||
if (!fs.existsSync(tempDir)) {
|
||||
// some test cases fire this quickly enough that
|
||||
// synchronous cases can end up here at the same time
|
||||
try {
|
||||
fs.mkdirSync(tempDir)
|
||||
} catch (err: any) {
|
||||
if (!err || err.code !== "EEXIST") {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
const clientLibPath = join(budibaseTempDir(), "budibase-client.js")
|
||||
if (env.isTest() && !fs.existsSync(clientLibPath)) {
|
||||
fs.copyFileSync(require.resolve("@budibase/client"), clientLibPath)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the system is currently in development mode and if it is makes sure
|
||||
* everything required to function is ready.
|
||||
*/
|
||||
export const checkDevelopmentEnvironment = () => {
|
||||
if (!env.isDev() || env.isTest()) {
|
||||
return
|
||||
}
|
||||
if (!fs.existsSync(budibaseTempDir())) {
|
||||
fs.mkdirSync(budibaseTempDir())
|
||||
}
|
||||
let error
|
||||
if (!fs.existsSync(join(process.cwd(), ".env"))) {
|
||||
error = "Must run via yarn once to generate environment."
|
||||
}
|
||||
if (error) {
|
||||
console.error(error)
|
||||
process.exit(-1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to retrieve a handlebars file from the system which will be used as a template.
|
||||
* This is allowable as the template handlebars files should be static and identical across
|
||||
* the cluster.
|
||||
* @param {string} path The path to the handlebars file which is to be loaded.
|
||||
* @returns {string} The loaded handlebars file as a string - loaded as utf8.
|
||||
*/
|
||||
export const loadHandlebarsFile = (path: PathLike) => {
|
||||
return fs.readFileSync(path, "utf8")
|
||||
}
|
||||
|
||||
/**
|
||||
* When return a file from the API need to write the file to the system temporarily so we
|
||||
* can create a read stream to send.
|
||||
* @param {string} contents the contents of the file which is to be returned from the API.
|
||||
* @return {Object} the read stream which can be put into the koa context body.
|
||||
*/
|
||||
export const apiFileReturn = (contents: any) => {
|
||||
const path = join(budibaseTempDir(), uuid())
|
||||
fs.writeFileSync(path, contents)
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
export const streamFile = (path: string) => {
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the provided contents to a temporary file, which can be used briefly.
|
||||
* @param {string} fileContents contents which will be written to a temp file.
|
||||
* @return {string} the path to the temp file.
|
||||
*/
|
||||
export const storeTempFile = (fileContents: any) => {
|
||||
const path = join(budibaseTempDir(), uuid())
|
||||
fs.writeFileSync(path, fileContents)
|
||||
return path
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for getting a file read stream - a simple in memory buffered read
|
||||
* stream doesn't work for pouchdb.
|
||||
*/
|
||||
export const stringToFileStream = (contents: any) => {
|
||||
const path = storeTempFile(contents)
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temp file and returns it from the API.
|
||||
* @param {string} fileContents the contents to be returned in file.
|
||||
*/
|
||||
export const sendTempFile = (fileContents: any) => {
|
||||
const path = storeTempFile(fileContents)
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* All file reads come through here just to make sure all of them make sense
|
||||
* allows a centralised location to check logic is all good.
|
||||
*/
|
||||
export const readFileSync = (filepath: PathLike, options = "utf8") => {
|
||||
return fs.readFileSync(filepath, options)
|
||||
}
|
||||
|
||||
export const createTempFolder = (item: any) => {
|
||||
const path = join(budibaseTempDir(), item)
|
||||
try {
|
||||
// remove old tmp directories automatically - don't combine
|
||||
if (fs.existsSync(path)) {
|
||||
fs.rmSync(path, { recursive: true, force: true })
|
||||
}
|
||||
fs.mkdirSync(path)
|
||||
} catch (err: any) {
|
||||
throw new Error(`Path cannot be created: ${err.message}`)
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
export const extractTarball = async (fromFilePath: string, toPath: string) => {
|
||||
await tar.extract({
|
||||
file: fromFilePath,
|
||||
C: toPath,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Find for a file recursively from start path applying filter, return first match
|
||||
*/
|
||||
export const findFileRec = (startPath: PathLike, filter: string): any => {
|
||||
if (!fs.existsSync(startPath)) {
|
||||
return
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(startPath)
|
||||
for (let i = 0, len = files.length; i < len; i++) {
|
||||
const filename = join(startPath, files[i])
|
||||
const stat = fs.lstatSync(filename)
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
return findFileRec(filename, filter)
|
||||
} else if (filename.endsWith(filter)) {
|
||||
return filename
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a folder which is not empty from the file system
|
||||
*/
|
||||
export const deleteFolderFileSystem = (path: PathLike) => {
|
||||
if (!fs.existsSync(path)) {
|
||||
return
|
||||
}
|
||||
|
||||
fs.rmSync(path, { recursive: true, force: true })
|
||||
}
|
|
@ -1,348 +1,5 @@
|
|||
import { budibaseTempDir } from "../budibaseDir"
|
||||
import fs from "fs"
|
||||
import { join } from "path"
|
||||
import { context, objectStore } from "@budibase/backend-core"
|
||||
import { ObjectStoreBuckets } from "../../constants"
|
||||
import { updateClientLibrary } from "./clientLibrary"
|
||||
import { checkSlashesInUrl } from "../"
|
||||
import env from "../../environment"
|
||||
import fetch from "node-fetch"
|
||||
const uuid = require("uuid/v4")
|
||||
const tar = require("tar")
|
||||
|
||||
export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
|
||||
export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
|
||||
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
|
||||
|
||||
/**
|
||||
* The single stack system (Cloud and Builder) should not make use of the file system where possible,
|
||||
* this file handles all of the file access for the system with the intention of limiting it all to one
|
||||
* place. Keeping all of this logic in one place means that when we need to do file system access (like
|
||||
* downloading a package or opening a temporary file) in can be done in way that we can confirm it shouldn't
|
||||
* be done through an object store instead.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Upon first startup of instance there may not be everything we need in tmp directory, set it up.
|
||||
*/
|
||||
export function init() {
|
||||
const tempDir = budibaseTempDir()
|
||||
if (!fs.existsSync(tempDir)) {
|
||||
// some test cases fire this quickly enough that
|
||||
// synchronous cases can end up here at the same time
|
||||
try {
|
||||
fs.mkdirSync(tempDir)
|
||||
} catch (err: any) {
|
||||
if (!err || err.code !== "EEXIST") {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
}
|
||||
const clientLibPath = join(budibaseTempDir(), "budibase-client.js")
|
||||
if (env.isTest() && !fs.existsSync(clientLibPath)) {
|
||||
fs.copyFileSync(require.resolve("@budibase/client"), clientLibPath)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the system is currently in development mode and if it is makes sure
|
||||
* everything required to function is ready.
|
||||
*/
|
||||
exports.checkDevelopmentEnvironment = () => {
|
||||
if (!env.isDev() || env.isTest()) {
|
||||
return
|
||||
}
|
||||
if (!fs.existsSync(budibaseTempDir())) {
|
||||
fs.mkdirSync(budibaseTempDir())
|
||||
}
|
||||
let error
|
||||
if (!fs.existsSync(join(process.cwd(), ".env"))) {
|
||||
error = "Must run via yarn once to generate environment."
|
||||
}
|
||||
if (error) {
|
||||
console.error(error)
|
||||
process.exit(-1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to retrieve a handlebars file from the system which will be used as a template.
|
||||
* This is allowable as the template handlebars files should be static and identical across
|
||||
* the cluster.
|
||||
* @param {string} path The path to the handlebars file which is to be loaded.
|
||||
* @returns {string} The loaded handlebars file as a string - loaded as utf8.
|
||||
*/
|
||||
export function loadHandlebarsFile(path: string) {
|
||||
return fs.readFileSync(path, "utf8")
|
||||
}
|
||||
|
||||
/**
|
||||
* When return a file from the API need to write the file to the system temporarily so we
|
||||
* can create a read stream to send.
|
||||
* @param {string} contents the contents of the file which is to be returned from the API.
|
||||
* @param {string} encoding the encoding of the file to return (utf8 default)
|
||||
* @return {Object} the read stream which can be put into the koa context body.
|
||||
*/
|
||||
export function apiFileReturn(
|
||||
contents: string,
|
||||
encoding: BufferEncoding = "utf8"
|
||||
) {
|
||||
const path = join(budibaseTempDir(), uuid())
|
||||
fs.writeFileSync(path, contents, { encoding })
|
||||
return fs.createReadStream(path, { encoding })
|
||||
}
|
||||
|
||||
export function streamFile(path: string) {
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the provided contents to a temporary file, which can be used briefly.
|
||||
* @param {string} fileContents contents which will be written to a temp file.
|
||||
* @return {string} the path to the temp file.
|
||||
*/
|
||||
export function storeTempFile(fileContents: string) {
|
||||
const path = join(budibaseTempDir(), uuid())
|
||||
fs.writeFileSync(path, fileContents)
|
||||
return path
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function for getting a file read stream - a simple in memory buffered read
|
||||
* stream doesn't work for pouchdb.
|
||||
*/
|
||||
export function stringToFileStream(contents: string) {
|
||||
const path = exports.storeTempFile(contents)
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a temp file and returns it from the API.
|
||||
* @param {string} fileContents the contents to be returned in file.
|
||||
*/
|
||||
export function sendTempFile(fileContents: string) {
|
||||
const path = exports.storeTempFile(fileContents)
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Uploads the latest client library to the object store.
|
||||
* @param {string} appId The ID of the app which is being created.
|
||||
* @return {Promise<void>} once promise completes app resources should be ready in object store.
|
||||
*/
|
||||
export async function createApp(appId: string) {
|
||||
await updateClientLibrary(appId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes all of the assets created for an app in the object store.
|
||||
* @param {string} appId The ID of the app which is being deleted.
|
||||
* @return {Promise<void>} once promise completes the app resources will be removed from object store.
|
||||
*/
|
||||
export async function deleteApp(appId: string) {
|
||||
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a template and pipes it to minio as well as making it available temporarily.
|
||||
* @param {string} type The type of template which is to be retrieved.
|
||||
* @param name
|
||||
* @return {Promise<*>}
|
||||
*/
|
||||
export async function downloadTemplate(type: string, name: string) {
|
||||
const DEFAULT_TEMPLATES_BUCKET =
|
||||
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
|
||||
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
|
||||
return objectStore.downloadTarball(
|
||||
templateUrl,
|
||||
ObjectStoreBuckets.TEMPLATES,
|
||||
type
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves component libraries from object store (or tmp symlink if in local)
|
||||
*/
|
||||
export async function getComponentLibraryManifest(library: string) {
|
||||
const appId = context.getAppId()
|
||||
const filename = "manifest.json"
|
||||
/* istanbul ignore next */
|
||||
// when testing in cypress and so on we need to get the package
|
||||
// as the environment may not be fully fleshed out for dev or prod
|
||||
if (env.isTest()) {
|
||||
library = library.replace("standard-components", "client")
|
||||
const lib = library.split("/")[1]
|
||||
const path = require.resolve(library).split(lib)[0]
|
||||
return require(join(path, lib, filename))
|
||||
} else if (env.isDev()) {
|
||||
const path = join(NODE_MODULES_PATH, "@budibase", "client", filename)
|
||||
// always load from new so that updates are refreshed
|
||||
delete require.cache[require.resolve(path)]
|
||||
return require(path)
|
||||
}
|
||||
|
||||
if (!appId) {
|
||||
throw new Error("No app ID found - cannot get component libraries")
|
||||
}
|
||||
|
||||
let resp
|
||||
let path
|
||||
try {
|
||||
// Try to load the manifest from the new file location
|
||||
path = join(appId, filename)
|
||||
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
|
||||
error
|
||||
)
|
||||
// Fallback to loading it from the old location for old apps
|
||||
path = join(appId, "node_modules", library, "package", filename)
|
||||
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
|
||||
}
|
||||
if (typeof resp !== "string") {
|
||||
resp = resp.toString("utf8")
|
||||
}
|
||||
return JSON.parse(resp)
|
||||
}
|
||||
|
||||
/**
|
||||
* All file reads come through here just to make sure all of them make sense
|
||||
* allows a centralised location to check logic is all good.
|
||||
*/
|
||||
export function readFileSync(
|
||||
filepath: string,
|
||||
options: BufferEncoding = "utf8"
|
||||
) {
|
||||
return fs.readFileSync(filepath, { encoding: options })
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a set of app IDs makes sure file system is cleared of any of their temp info.
|
||||
*/
|
||||
export function cleanup(appIds: string[]) {
|
||||
for (let appId of appIds) {
|
||||
const path = join(budibaseTempDir(), appId)
|
||||
if (fs.existsSync(path)) {
|
||||
fs.rmdirSync(path, { recursive: true })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function createTempFolder(item: string) {
|
||||
const path = join(budibaseTempDir(), item)
|
||||
try {
|
||||
// remove old tmp directories automatically - don't combine
|
||||
if (fs.existsSync(path)) {
|
||||
fs.rmSync(path, { recursive: true, force: true })
|
||||
}
|
||||
fs.mkdirSync(path)
|
||||
} catch (err: any) {
|
||||
throw new Error(`Path cannot be created: ${err.message}`)
|
||||
}
|
||||
|
||||
return path
|
||||
}
|
||||
|
||||
export async function extractTarball(fromFilePath: string, toPath: string) {
|
||||
await tar.extract({
|
||||
file: fromFilePath,
|
||||
C: toPath,
|
||||
})
|
||||
}
|
||||
|
||||
export async function getPluginMetadata(path: string) {
|
||||
let metadata: { schema?: any; package?: any } = {}
|
||||
try {
|
||||
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
|
||||
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
|
||||
|
||||
metadata.schema = JSON.parse(schema)
|
||||
metadata.package = JSON.parse(pkg)
|
||||
|
||||
if (
|
||||
!metadata.package.name ||
|
||||
!metadata.package.version ||
|
||||
!metadata.package.description
|
||||
) {
|
||||
throw new Error(
|
||||
"package.json is missing one of 'name', 'version' or 'description'."
|
||||
)
|
||||
}
|
||||
} catch (err: any) {
|
||||
throw new Error(
|
||||
`Unable to process schema.json/package.json in plugin. ${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
return { metadata, directory: path }
|
||||
}
|
||||
|
||||
export async function getDatasourcePlugin(
|
||||
name: string,
|
||||
url: string,
|
||||
hash: string
|
||||
) {
|
||||
if (!fs.existsSync(DATASOURCE_PATH)) {
|
||||
fs.mkdirSync(DATASOURCE_PATH)
|
||||
}
|
||||
const filename = join(DATASOURCE_PATH, name)
|
||||
const metadataName = `${filename}.bbmetadata`
|
||||
if (fs.existsSync(filename)) {
|
||||
const currentHash = fs.readFileSync(metadataName, "utf8")
|
||||
// if hash is the same return the file, otherwise remove it and re-download
|
||||
if (currentHash === hash) {
|
||||
return require(filename)
|
||||
} else {
|
||||
console.log(`Updating plugin: ${name}`)
|
||||
delete require.cache[require.resolve(filename)]
|
||||
fs.unlinkSync(filename)
|
||||
}
|
||||
}
|
||||
const fullUrl = checkSlashesInUrl(
|
||||
`${env.MINIO_URL}/${ObjectStoreBuckets.PLUGINS}/${url}`
|
||||
)
|
||||
const response = await fetch(fullUrl)
|
||||
if (response.status === 200) {
|
||||
const content = await response.text()
|
||||
fs.writeFileSync(filename, content)
|
||||
fs.writeFileSync(metadataName, hash)
|
||||
return require(filename)
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unable to retrieve plugin - reason: ${await response.text()}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find for a file recursively from start path applying filter, return first match
|
||||
*/
|
||||
export function findFileRec(startPath: string, filter: any) {
|
||||
if (!fs.existsSync(startPath)) {
|
||||
return
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(startPath)
|
||||
for (let i = 0, len = files.length; i < len; i++) {
|
||||
const filename = join(startPath, files[i])
|
||||
const stat = fs.lstatSync(filename)
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
return exports.findFileRec(filename, filter)
|
||||
} else if (filename.endsWith(filter)) {
|
||||
return filename
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a folder which is not empty from the file system
|
||||
*/
|
||||
export function deleteFolderFileSystem(path: string) {
|
||||
if (!fs.existsSync(path)) {
|
||||
return
|
||||
}
|
||||
|
||||
fs.rmSync(path, { recursive: true, force: true })
|
||||
}
|
||||
export * from "./app"
|
||||
export * from "./clientLibrary"
|
||||
export * from "./filesystem"
|
||||
export * from "./plugin"
|
||||
export * from "./template"
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
import { Plugin } from "@budibase/types"
|
||||
|
||||
const { budibaseTempDir } = require("../budibaseDir")
|
||||
const fs = require("fs")
|
||||
const { join } = require("path")
|
||||
import { objectStore } from "@budibase/backend-core"
|
||||
|
||||
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
|
||||
|
||||
export const getPluginMetadata = async (path: string) => {
|
||||
let metadata: any = {}
|
||||
try {
|
||||
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
|
||||
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
|
||||
|
||||
metadata.schema = JSON.parse(schema)
|
||||
metadata.package = JSON.parse(pkg)
|
||||
|
||||
if (
|
||||
!metadata.package.name ||
|
||||
!metadata.package.version ||
|
||||
!metadata.package.description
|
||||
) {
|
||||
throw new Error(
|
||||
"package.json is missing one of 'name', 'version' or 'description'."
|
||||
)
|
||||
}
|
||||
} catch (err: any) {
|
||||
throw new Error(
|
||||
`Unable to process schema.json/package.json in plugin. ${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
return { metadata, directory: path }
|
||||
}
|
||||
|
||||
export const getDatasourcePlugin = async (plugin: Plugin) => {
|
||||
const hash = plugin.schema?.hash
|
||||
if (!fs.existsSync(DATASOURCE_PATH)) {
|
||||
fs.mkdirSync(DATASOURCE_PATH)
|
||||
}
|
||||
const filename = join(DATASOURCE_PATH, plugin.name)
|
||||
const metadataName = `${filename}.bbmetadata`
|
||||
if (fs.existsSync(filename)) {
|
||||
const currentHash = fs.readFileSync(metadataName, "utf8")
|
||||
// if hash is the same return the file, otherwise remove it and re-download
|
||||
if (currentHash === hash) {
|
||||
return require(filename)
|
||||
} else {
|
||||
console.log(`Updating plugin: ${plugin.name}`)
|
||||
delete require.cache[require.resolve(filename)]
|
||||
fs.unlinkSync(filename)
|
||||
}
|
||||
}
|
||||
const pluginKey = objectStore.getPluginJSKey(plugin)
|
||||
const pluginJs = await objectStore.retrieve(
|
||||
objectStore.ObjectStoreBuckets.PLUGINS,
|
||||
pluginKey
|
||||
)
|
||||
|
||||
fs.writeFileSync(filename, pluginJs)
|
||||
fs.writeFileSync(metadataName, hash)
|
||||
|
||||
return require(filename)
|
||||
}
|
|
@ -0,0 +1,36 @@
|
|||
const fs = require("fs")
|
||||
const { join } = require("path")
|
||||
import { ObjectStoreBuckets } from "../../constants"
|
||||
import { objectStore } from "@budibase/backend-core"
|
||||
|
||||
/**
|
||||
* This function manages temporary template files which are stored by Koa.
|
||||
* @param {Object} template The template object retrieved from the Koa context object.
|
||||
* @returns {Object} Returns an fs read stream which can be loaded into the database.
|
||||
*/
|
||||
export const getTemplateStream = async (template: any) => {
|
||||
if (template.file) {
|
||||
return fs.createReadStream(template.file.path)
|
||||
} else {
|
||||
const [type, name] = template.key.split("/")
|
||||
const tmpPath = await downloadTemplate(type, name)
|
||||
return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves a template and pipes it to minio as well as making it available temporarily.
|
||||
* @param {string} type The type of template which is to be retrieved.
|
||||
* @param name
|
||||
* @return {Promise<*>}
|
||||
*/
|
||||
export const downloadTemplate = async (type: string, name: string) => {
|
||||
const DEFAULT_TEMPLATES_BUCKET =
|
||||
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
|
||||
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
|
||||
return objectStore.downloadTarball(
|
||||
templateUrl,
|
||||
ObjectStoreBuckets.TEMPLATES,
|
||||
type
|
||||
)
|
||||
}
|
|
@ -1,6 +1,5 @@
|
|||
import env from "../environment"
|
||||
import { OBJ_STORE_DIRECTORY } from "../constants"
|
||||
import { objectStore, context } from "@budibase/backend-core"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { generateMetadataID } from "../db/utils"
|
||||
import { Document } from "@budibase/types"
|
||||
import stream from "stream"
|
||||
|
@ -32,49 +31,6 @@ export function checkSlashesInUrl(url: string) {
|
|||
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the address of the object store, depending on whether self hosted or in cloud.
|
||||
* @return {string} The base URL of the object store (MinIO or S3).
|
||||
*/
|
||||
export function objectStoreUrl() {
|
||||
if (env.SELF_HOSTED || env.MINIO_URL) {
|
||||
// can use a relative url for this as all goes through the proxy (this is hosted in minio)
|
||||
return OBJ_STORE_DIRECTORY
|
||||
} else {
|
||||
return env.CDN_URL
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* In production the client library is stored in the object store, however in development
|
||||
* we use the symlinked version produced by lerna, located in node modules. We link to this
|
||||
* via a specific endpoint (under /api/assets/client).
|
||||
* @param {string} appId In production we need the appId to look up the correct bucket, as the
|
||||
* version of the client lib may differ between apps.
|
||||
* @param {string} version The version to retrieve.
|
||||
* @return {string} The URL to be inserted into appPackage response or server rendered
|
||||
* app index file.
|
||||
*/
|
||||
export function clientLibraryPath(appId: string, version: string) {
|
||||
if (env.isProd()) {
|
||||
let url = `${objectStoreUrl()}/${objectStore.sanitizeKey(
|
||||
appId
|
||||
)}/budibase-client.js`
|
||||
|
||||
// append app version to bust the cache
|
||||
if (version) {
|
||||
url += `?v=${version}`
|
||||
}
|
||||
return url
|
||||
} else {
|
||||
return `/api/assets/client`
|
||||
}
|
||||
}
|
||||
|
||||
export function attachmentsRelativeURL(attachmentKey: string) {
|
||||
return checkSlashesInUrl(`${objectStoreUrl()}/${attachmentKey}`)
|
||||
}
|
||||
|
||||
export async function updateEntityMetadata(
|
||||
type: string,
|
||||
entityId: string,
|
||||
|
|
|
@@ -1,22 +0,0 @@
import env from "../environment"
import { plugins as ProPlugins } from "@budibase/pro"
import { objectStore } from "@budibase/backend-core"
import { Plugin } from "@budibase/types"

export function enrichPluginURLs(plugins: Plugin[]) {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const cloud = !env.SELF_HOSTED
const bucket = objectStore.ObjectStoreBuckets.PLUGINS
const jsFileName = "plugin.min.js"

// In self host we need to prefix the path, as the bucket name is not part
// of the bucket path. In cloud, it's already part of the bucket path.
let jsUrl = cloud ? `${env.CDN_URL}/` : `/${bucket}/`
jsUrl += ProPlugins.getBucketPath(plugin.name)
jsUrl += jsFileName
return { ...plugin, jsUrl }
})
}

@@ -1,12 +1,11 @@
import * as linkRows from "../../db/linkedRows"
import { FieldTypes, AutoFieldSubTypes } from "../../constants"
import { attachmentsRelativeURL } from "../index"
import { processFormulas, fixAutoColumnSubType } from "./utils"
import { ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import { Row, Table, ContextUser } from "@budibase/types"
import { Row, RowAttachment, Table, ContextUser } from "@budibase/types"
const { cloneDeep } = require("lodash/fp")
export * from "./utils"

@@ -35,7 +34,7 @@ function getRemovedAttachmentKeys(
return oldKeys
}
const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
return oldKeys.filter((key: any) => newKeys.indexOf(key) === -1)
return oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
}

/**
@@ -105,7 +104,7 @@ export function processAutoColumn(
* @param {object} type The type fo coerce to
* @returns {object} The coerced value
*/
export function coerce(row: any, type: any) {
export function coerce(row: any, type: string) {
// no coercion specified for type, skip it
if (!TYPE_TRANSFORM_MAP[type]) {
return row
@@ -158,6 +157,16 @@ export function inputProcessing(
else {
clonedRow[key] = coerce(value, field.type)
}

// remove any attachment urls, they are generated on read
if (field.type === FieldTypes.ATTACHMENT) {
const attachments = clonedRow[key]
if (attachments?.length) {
attachments.forEach((attachment: RowAttachment) => {
delete attachment.url
})
}
}
}

if (!clonedRow._id || !clonedRow._rev) {
@@ -194,15 +203,15 @@ export async function outputProcessing(
// process formulas
enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]

// update the attachments URL depending on hosting
// set the attachments URLs
for (let [property, column] of Object.entries(table.schema)) {
if (column.type === FieldTypes.ATTACHMENT) {
for (let row of enriched) {
if (row[property] == null || !Array.isArray(row[property])) {
continue
}
row[property].forEach((attachment: any) => {
attachment.url = attachmentsRelativeURL(attachment.key)
row[property].forEach((attachment: RowAttachment) => {
attachment.url = objectStore.getAppFileUrl(attachment.key)
})
}
}
@@ -265,6 +274,6 @@ export async function cleanupAttachments(
}
}
if (files.length > 0) {
return objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
}
}

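Taken together, the write path strips any attachment url before a row is saved and the read path regenerates it from the key, so rows never persist bucket URLs. A small sketch of that round trip (simplified types; the URL helper is passed in rather than imported):

type AttachmentSketch = { key: string; url?: string }

// mirror of inputProcessing: urls are generated on read, never stored
function beforeSave(attachments: AttachmentSketch[]): AttachmentSketch[] {
  return attachments.map(({ url, ...rest }) => rest)
}

// mirror of outputProcessing: derive a fresh (signed) URL from the stored key
function afterRead(
  attachments: AttachmentSketch[],
  toUrl: (key: string) => string
): AttachmentSketch[] {
  return attachments.map(a => ({ ...a, url: toUrl(a.key) }))
}
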
@@ -1,23 +0,0 @@
import { enrichPluginURLs } from "../plugins"
const env = require("../../environment")
jest.mock("../../environment")

describe("plugins utility", () => {
let pluginsArray: any = [
{
name: "test-plugin",
},
]
it("enriches the plugins url self-hosted", async () => {
let result = enrichPluginURLs(pluginsArray)
expect(result[0].jsUrl).toEqual("/plugins/test-plugin/plugin.min.js")
})

it("enriches the plugins url cloud", async () => {
env.SELF_HOSTED = 0
let result = enrichPluginURLs(pluginsArray)
expect(result[0].jsUrl).toEqual(
"https://cdn.budi.live/test-plugin/plugin.min.js"
)
})
})

@@ -3125,10 +3125,10 @@
"@types/koa-compose" "*"
"@types/node" "*"

"@types/koa__router@8.0.11":
version "8.0.11"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.11.tgz#d7b37e6db934fc072ea1baa2ab92bc8ac4564f3e"
integrity sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==
"@types/koa__router@8.0.8":
version "8.0.8"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.8.tgz#b1e0e9a512498777d3366bbdf0e853df27ec831c"
integrity sha512-9pGCaDtzCsj4HJ8HmGuqzk8+s57sPj4njWd08GG5o92n5Xp9io2snc40CPpXFhoKcZ8OKhuu6ht4gNou9e1C2w==
dependencies:
"@types/koa" "*"

@@ -3400,6 +3400,13 @@
"@types/mime" "^1"
"@types/node" "*"

"@types/server-destroy@1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/server-destroy/-/server-destroy-1.0.1.tgz#6010a89e2df4f2c15a265fe73c70fd3641486530"
integrity sha512-77QGr7waZbE0Y0uF+G+uH3H3SmhyA78Jf2r5r7QSrpg0U3kSXduWpGjzP9PvPLR/KCy+kHjjpnugRHsYTnHopg==
dependencies:
"@types/node" "*"

"@types/stack-utils@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e"

@@ -3418,6 +3425,14 @@
"@types/cookiejar" "*"
"@types/node" "*"

"@types/tar@6.1.3":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
dependencies:
"@types/node" "*"
minipass "^3.3.5"

"@types/tough-cookie@*", "@types/tough-cookie@^4.0.2":
version "4.0.2"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"

@@ -10535,6 +10550,13 @@ minipass@^3.0.0:
dependencies:
yallist "^4.0.0"

minipass@^3.3.5:
version "3.3.6"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
dependencies:
yallist "^4.0.0"

minizlib@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@@ -15,6 +15,7 @@
"devDependencies": {
"@types/json5": "^2.2.0",
"@types/koa": "2.13.4",
"koa-body": "4.2.0",
"@types/node": "14.18.20",
"@types/pouchdb": "6.4.0",
"nano": "10.1.0",
@@ -19,9 +19,10 @@ export enum FieldType {
export interface RowAttachment {
size: number
name: string
url: string
extension: string
key: string
// Populated on read
url?: string
}

export interface Row extends Document {
@@ -2,6 +2,7 @@ import { Document } from "../document"

export interface Config extends Document {
type: ConfigType
config: any
}

export interface SMTPConfig extends Config {

@@ -17,9 +18,12 @@ export interface SMTPConfig extends Config {
export interface SettingsConfig extends Config {
config: {
company: string
logoUrl: string
// Populated on read
logoUrl?: string
logoUrlEtag?: boolean
platformUrl: string
uniqueTenantId?: string
analyticsEnabled?: boolean
}
}

@@ -57,18 +61,15 @@ export interface OIDCConfig extends Config {
}
}

export type NestedConfig =
| SMTPConfig
| SettingsConfig
| GoogleConfig
| OIDCConfig

export const isSettingsConfig = (config: Config): config is SettingsConfig =>
config.type === ConfigType.SETTINGS

export const isSMTPConfig = (config: Config): config is SMTPConfig =>
config.type === ConfigType.SMTP

export const isGoogleConfig = (config: Config): config is GoogleConfig =>
config.type === ConfigType.GOOGLE

export const isOIDCConfig = (config: Config): config is OIDCConfig =>
config.type === ConfigType.OIDC
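The NestedConfig union above is replaced by runtime type guards, so callers can narrow a plain Config without casting. A minimal sketch (not part of the commit) of how such guards narrow a union; the ConfigType values and config shapes here are simplified placeholders, not the full Budibase definitions.

enum ConfigType { SETTINGS = "settings", SMTP = "smtp" }

interface Config { type: ConfigType; config: any }
interface SettingsConfig extends Config { config: { company: string } }
interface SMTPConfig extends Config { config: { host: string } }

const isSettingsConfig = (c: Config): c is SettingsConfig =>
  c.type === ConfigType.SETTINGS
const isSMTPConfig = (c: Config): c is SMTPConfig =>
  c.type === ConfigType.SMTP

function describeConfig(c: Config): string {
  if (isSettingsConfig(c)) {
    return `settings for ${c.config.company}` // narrowed, no "as SettingsConfig" cast needed
  } else if (isSMTPConfig(c)) {
    return `smtp via ${c.config.host}`
  }
  return "unknown config"
}

console.log(describeConfig({ type: ConfigType.SETTINGS, config: { company: "Acme" } }))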
@@ -20,8 +20,6 @@ export interface Plugin extends Document {
description: string
name: string
version: string
jsUrl?: string
iconUrl?: string
source: PluginSource
package: { [key: string]: any }
hash: string

@@ -29,6 +27,11 @@ export interface Plugin extends Document {
type: PluginType
[key: string]: any
}
iconFileName?: string
// Populated on read
jsUrl?: string
// Populated on read
iconUrl?: string
}

export const PLUGIN_TYPE_ARR = Object.values(PluginType)
@@ -1,7 +1,8 @@
import { BaseEvent } from "./event"
import { ConfigType } from "../../documents"

export type LoginSource = "local" | "google" | "oidc" | "google-internal"
export type SSOType = "oidc" | "google"
export type SSOType = ConfigType.OIDC | ConfigType.GOOGLE

export interface LoginEvent extends BaseEvent {
userId: string
@@ -1,6 +1,7 @@
import { Context, Request } from "koa"
import { User, Role, UserRoles, Account } from "../documents"
import { FeatureFlag, License } from "../sdk"
import { Files } from "formidable"

export interface ContextUser extends Omit<User, "roles"> {
globalId?: string

@@ -15,12 +16,34 @@ export interface ContextUser extends Omit<User, "roles"> {
account?: Account
}

export interface BBRequest extends Request {
body: any
files?: any
/**
* Add support for koa-body in context.
*/
export interface BBRequest<RequestBody> extends Request {
body: RequestBody
files?: Files
}

export interface BBContext extends Context {
request: BBRequest
/**
* Basic context with no user.
*/
export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
request: BBRequest<RequestBody>
body: ResponseBody
}

/**
* Authenticated context.
*/
export interface UserCtx<RequestBody = any, ResponseBody = any>
extends Ctx<RequestBody, ResponseBody> {
user: ContextUser
}

/**
* Deprecated: Use UserCtx / Ctx appropriately
* Authenticated context.
*/
export interface BBContext extends Ctx {
user?: ContextUser
}
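BBContext is split into a generic Ctx<RequestBody, ResponseBody> plus an authenticated UserCtx, so handlers can declare their request and response bodies. The sketch below (not part of the commit) shows what the generics buy a handler; the Request/Ctx interfaces are reduced stand-ins for the koa-based ones above, and SaveConfigRequest/SaveConfigResponse are invented example types.

// Stand-ins for koa's Request/Context, reduced to what the example needs.
interface Request<B> { body: B }
interface Ctx<RequestBody = any, ResponseBody = any> {
  request: Request<RequestBody>
  body: ResponseBody
}
interface UserCtx<RequestBody = any, ResponseBody = any>
  extends Ctx<RequestBody, ResponseBody> {
  user: { _id: string }
}

// Hypothetical request/response body types for a config save endpoint.
interface SaveConfigRequest { type: string; config: Record<string, any> }
interface SaveConfigResponse { _id: string; _rev: string }

// The generics give typed access to ctx.request.body and constrain ctx.body.
async function save(ctx: UserCtx<SaveConfigRequest, SaveConfigResponse>) {
  const { type } = ctx.request.body               // typed as SaveConfigRequest
  ctx.body = { _id: `config_${type}`, _rev: "1" } // must match SaveConfigResponse
}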
@@ -65,6 +65,13 @@
"@types/qs" "*"
"@types/serve-static" "*"

"@types/formidable@^1.0.31":
version "1.2.5"
resolved "https://registry.yarnpkg.com/@types/formidable/-/formidable-1.2.5.tgz#561d026e5f09179e5c8ef7b31e8f4652e11abe4c"
integrity sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==
dependencies:
"@types/node" "*"

"@types/http-assert@*":
version "1.5.3"
resolved "https://registry.yarnpkg.com/@types/http-assert/-/http-assert-1.5.3.tgz#ef8e3d1a8d46c387f04ab0f2e8ab8cb0c5078661"

@@ -345,6 +352,11 @@ brace-expansion@^1.1.7:
balanced-match "^1.0.0"
concat-map "0.0.1"

bytes@3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==

call-bind@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"

@@ -353,6 +365,16 @@ call-bind@^1.0.0:
function-bind "^1.1.1"
get-intrinsic "^1.0.2"

co-body@^5.1.1:
version "5.2.0"
resolved "https://registry.yarnpkg.com/co-body/-/co-body-5.2.0.tgz#5a0a658c46029131e0e3a306f67647302f71c124"
integrity sha512-sX/LQ7LqUhgyaxzbe7IqwPeTr2yfpfUIQ/dgpKo6ZI4y4lpQA0YxAomWIY+7I7rHWcG02PG+OuPREzMW/5tszQ==
dependencies:
inflation "^2.0.0"
qs "^6.4.0"
raw-body "^2.2.0"
type-is "^1.6.14"

combined-stream@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"

@@ -377,6 +399,11 @@ delayed-stream@~1.0.0:
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==

depd@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==

follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"

@@ -391,6 +418,11 @@ form-data@^4.0.0:
combined-stream "^1.0.8"
mime-types "^2.1.12"

formidable@^1.1.1:
version "1.2.6"
resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168"
integrity sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==

fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"

@@ -441,6 +473,29 @@ http-cookie-agent@^4.0.2:
dependencies:
agent-base "^6.0.2"

http-errors@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
dependencies:
depd "2.0.0"
inherits "2.0.4"
setprototypeof "1.2.0"
statuses "2.0.1"
toidentifier "1.0.1"

iconv-lite@0.4.24:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
dependencies:
safer-buffer ">= 2.1.2 < 3"

inflation@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/inflation/-/inflation-2.0.0.tgz#8b417e47c28f925a45133d914ca1fd389107f30f"
integrity sha512-m3xv4hJYR2oXw4o4Y5l6P5P16WYmazYof+el6Al3f+YlggGj6qT9kImBAnzDelRALnP5d3h4jGBPKzYCizjZZw==

inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"

@@ -449,7 +504,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"

inherits@2:
inherits@2, inherits@2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==

@@ -459,12 +514,26 @@ json5@*:
resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c"
integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==

koa-body@4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/koa-body/-/koa-body-4.2.0.tgz#37229208b820761aca5822d14c5fc55cee31b26f"
integrity sha512-wdGu7b9amk4Fnk/ytH8GuWwfs4fsB5iNkY8kZPpgQVb04QZSv85T0M8reb+cJmvLE8cjPYvBzRikD3s6qz8OoA==
dependencies:
"@types/formidable" "^1.0.31"
co-body "^5.1.1"
formidable "^1.1.1"

media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==

mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==

mime-types@^2.1.12:
mime-types@^2.1.12, mime-types@~2.1.24:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==

@@ -532,7 +601,7 @@ punycode@^2.1.1:
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==

qs@^6.11.0:
qs@^6.11.0, qs@^6.4.0:
version "6.11.0"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==

@@ -544,6 +613,16 @@ querystringify@^2.1.1:
resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6"
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==

raw-body@^2.2.0:
version "2.5.1"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857"
integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
dependencies:
bytes "3.1.2"
http-errors "2.0.0"
iconv-lite "0.4.24"
unpipe "1.0.0"

requires-port@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"

@@ -556,6 +635,16 @@ rimraf@3.0.2:
dependencies:
glob "^7.1.3"

"safer-buffer@>= 2.1.2 < 3":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==

setprototypeof@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==

side-channel@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"

@@ -565,6 +654,16 @@ side-channel@^1.0.4:
get-intrinsic "^1.0.2"
object-inspect "^1.9.0"

statuses@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==

toidentifier@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==

tough-cookie@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874"

@@ -575,6 +674,14 @@ tough-cookie@^4.1.2:
universalify "^0.2.0"
url-parse "^1.5.3"

type-is@^1.6.14:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
dependencies:
media-typer "0.3.0"
mime-types "~2.1.24"

typescript@4.7.3:
version "4.7.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.3.tgz#8364b502d5257b540f9de4c40be84c98e23a129d"

@@ -585,6 +692,11 @@ universalify@^0.2.0:
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0"
integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==

unpipe@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==

url-parse@^1.5.3:
version "1.5.10"
resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1"
@@ -73,12 +73,13 @@
"@types/jest": "26.0.23",
"@types/jsonwebtoken": "8.5.1",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.11",
"@types/koa__router": "8.0.8",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/pouchdb": "6.4.0",
"@types/uuid": "8.3.4",
"@typescript-eslint/parser": "5.45.0",
"@types/server-destroy": "1.0.1",
"copyfiles": "2.4.1",
"eslint": "6.8.0",
"jest": "28.1.1",
@@ -7,6 +7,7 @@ import {
objectStore,
tenancy,
db as dbCore,
env as coreEnv,
} from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users"
import {

@@ -17,57 +18,50 @@ import {
GoogleConfig,
OIDCConfig,
SettingsConfig,
BBContext,
isGoogleConfig,
isOIDCConfig,
isSettingsConfig,
isSMTPConfig,
Ctx,
UserCtx,
} from "@budibase/types"

const getEventFns = async (db: Database, config: ConfigDoc) => {
const fns = []
const type = config.type

let existing
if (config._id) {
existing = await db.get(config._id)
}

const ssoType = type as SSOType
if (!existing) {
switch (config.type) {
case ConfigType.SMTP: {
if (isSMTPConfig(config)) {
fns.push(events.email.SMTPCreated)
break
} else if (isGoogleConfig(config)) {
fns.push(() => events.auth.SSOCreated(ConfigType.GOOGLE))
if (config.config.activated) {
fns.push(() => events.auth.SSOActivated(ConfigType.GOOGLE))
}
case ConfigType.GOOGLE: {
const googleCfg = config as GoogleConfig
fns.push(() => events.auth.SSOCreated(ssoType))
if (googleCfg.config.activated) {
fns.push(() => events.auth.SSOActivated(ssoType))
} else if (isOIDCConfig(config)) {
fns.push(() => events.auth.SSOCreated(ConfigType.OIDC))
if (config.config.configs[0].activated) {
fns.push(() => events.auth.SSOActivated(ConfigType.OIDC))
}
break
}
case ConfigType.OIDC: {
const oidcCfg = config as OIDCConfig
fns.push(() => events.auth.SSOCreated(ssoType))
if (oidcCfg.config.configs[0].activated) {
fns.push(() => events.auth.SSOActivated(ssoType))
}
break
}
case ConfigType.SETTINGS: {
} else if (isSettingsConfig(config)) {
// company
const settingsCfg = config as SettingsConfig
const company = settingsCfg.config.company
const company = config.config.company
if (company && company !== "Budibase") {
fns.push(events.org.nameUpdated)
}

// logo
const logoUrl = settingsCfg.config.logoUrl
const logoUrl = config.config.logoUrl
if (logoUrl) {
fns.push(events.org.logoUpdated)
}

// platform url
const platformUrl = settingsCfg.config.platformUrl
const platformUrl = config.config.platformUrl
if (
platformUrl &&
platformUrl !== "http://localhost:10000" &&

@@ -75,60 +69,48 @@ const getEventFns = async (db: Database, config: ConfigDoc) => {
) {
fns.push(events.org.platformURLUpdated)
}
break
}
}
} else {
switch (config.type) {
case ConfigType.SMTP: {
if (isSMTPConfig(config)) {
fns.push(events.email.SMTPUpdated)
break
} else if (isGoogleConfig(config)) {
fns.push(() => events.auth.SSOUpdated(ConfigType.GOOGLE))
if (!existing.config.activated && config.config.activated) {
fns.push(() => events.auth.SSOActivated(ConfigType.GOOGLE))
} else if (existing.config.activated && !config.config.activated) {
fns.push(() => events.auth.SSODeactivated(ConfigType.GOOGLE))
}
case ConfigType.GOOGLE: {
const googleCfg = config as GoogleConfig
fns.push(() => events.auth.SSOUpdated(ssoType))
if (!existing.config.activated && googleCfg.config.activated) {
fns.push(() => events.auth.SSOActivated(ssoType))
} else if (existing.config.activated && !googleCfg.config.activated) {
fns.push(() => events.auth.SSODeactivated(ssoType))
}
break
}
case ConfigType.OIDC: {
const oidcCfg = config as OIDCConfig
fns.push(() => events.auth.SSOUpdated(ssoType))
} else if (isOIDCConfig(config)) {
fns.push(() => events.auth.SSOUpdated(ConfigType.OIDC))
if (
!existing.config.configs[0].activated &&
oidcCfg.config.configs[0].activated
config.config.configs[0].activated
) {
fns.push(() => events.auth.SSOActivated(ssoType))
fns.push(() => events.auth.SSOActivated(ConfigType.OIDC))
} else if (
existing.config.configs[0].activated &&
!oidcCfg.config.configs[0].activated
!config.config.configs[0].activated
) {
fns.push(() => events.auth.SSODeactivated(ssoType))
fns.push(() => events.auth.SSODeactivated(ConfigType.OIDC))
}
break
}
case ConfigType.SETTINGS: {
} else if (isSettingsConfig(config)) {
// company
const settingsCfg = config as SettingsConfig
const existingCompany = existing.config.company
const company = settingsCfg.config.company
const company = config.config.company
if (company && company !== "Budibase" && existingCompany !== company) {
fns.push(events.org.nameUpdated)
}

// logo
const existingLogoUrl = existing.config.logoUrl
const logoUrl = settingsCfg.config.logoUrl
const logoUrl = config.config.logoUrl
if (logoUrl && existingLogoUrl !== logoUrl) {
fns.push(events.org.logoUpdated)
}

// platform url
const existingPlatformUrl = existing.config.platformUrl
const platformUrl = settingsCfg.config.platformUrl
const platformUrl = config.config.platformUrl
if (
platformUrl &&
platformUrl !== "http://localhost:10000" &&

@@ -137,15 +119,13 @@ const getEventFns = async (db: Database, config: ConfigDoc) => {
) {
fns.push(events.org.platformURLUpdated)
}
break
}
}
}

return fns
}

export async function save(ctx: BBContext) {
export async function save(ctx: UserCtx) {
const db = tenancy.getGlobalDB()
const { type, workspace, user, config } = ctx.request.body
let eventFns = await getEventFns(db, ctx.request.body)

@@ -187,7 +167,7 @@ export async function save(ctx: BBContext) {
}
}

export async function fetch(ctx: BBContext) {
export async function fetch(ctx: UserCtx) {
const db = tenancy.getGlobalDB()
const response = await db.allDocs(
dbCore.getConfigParams(

@@ -204,7 +184,7 @@ export async function fetch(ctx: BBContext) {
* Gets the most granular config for a particular configuration type.
* The hierarchy is type -> workspace -> user.
*/
export async function find(ctx: BBContext) {
export async function find(ctx: UserCtx) {
const db = tenancy.getGlobalDB()

const { userId, workspaceId } = ctx.query

@@ -237,18 +217,18 @@ export async function find(ctx: BBContext) {
}
}

export async function publicOidc(ctx: BBContext) {
export async function publicOidc(ctx: Ctx) {
const db = tenancy.getGlobalDB()
try {
// Find the config with the most granular scope based on context
const oidcConfig = await dbCore.getScopedFullConfig(db, {
const oidcConfig: OIDCConfig = await dbCore.getScopedFullConfig(db, {
type: ConfigType.OIDC,
})

if (!oidcConfig) {
ctx.body = {}
} else {
ctx.body = oidcConfig.config.configs.map((config: any) => ({
ctx.body = oidcConfig.config.configs.map(config => ({
logo: config.logo,
name: config.name,
uuid: config.uuid,

@@ -259,7 +239,7 @@ export async function publicOidc(ctx: BBContext) {
}
}

export async function publicSettings(ctx: BBContext) {
export async function publicSettings(ctx: Ctx) {
const db = tenancy.getGlobalDB()

try {

@@ -285,6 +265,16 @@ export async function publicSettings(ctx: BBContext) {
config = publicConfig
}

// enrich the logo url
// empty url means deleted
if (config.config.logoUrl !== "") {
config.config.logoUrl = objectStore.getGlobalFileUrl(
"settings",
"logoUrl",
config.config.logoUrlEtag
)
}

// google button flag
if (googleConfig && googleConfig.config) {
// activated by default for configs pre-activated flag

@@ -311,28 +301,17 @@ export async function publicSettings(ctx: BBContext) {
}
}

export async function upload(ctx: BBContext) {
if (ctx.request.files == null || ctx.request.files.file.length > 1) {
export async function upload(ctx: UserCtx) {
if (ctx.request.files == null || Array.isArray(ctx.request.files.file)) {
ctx.throw(400, "One file must be uploaded.")
}
const file = ctx.request.files.file
const { type, name } = ctx.params

let bucket
if (env.SELF_HOSTED) {
bucket = objectStore.ObjectStoreBuckets.GLOBAL
} else {
bucket = objectStore.ObjectStoreBuckets.GLOBAL_CLOUD
}
let bucket = coreEnv.GLOBAL_BUCKET_NAME
const key = objectStore.getGlobalFileS3Key(type, name)

let key
if (env.MULTI_TENANCY) {
key = `${tenancy.getTenantId()}/${type}/${name}`
} else {
key = `${type}/${name}`
}

await objectStore.upload({
const result = await objectStore.upload({
bucket,
filename: key,
path: file.path,

@@ -349,24 +328,26 @@ export async function upload(ctx: BBContext) {
config: {},
}
}
let url
if (env.SELF_HOSTED) {
url = `/${bucket}/${key}`
} else {
url = `${env.CDN_URL}/${key}`

// save the Etag for cache bursting
const etag = result.ETag
if (etag) {
cfgStructure.config[`${name}Etag`] = etag.replace(/"/g, "")
}

cfgStructure.config[`${name}`] = url
// write back to db with url updated
// save the file key
cfgStructure.config[`${name}`] = key

// write back to db
await db.put(cfgStructure)

ctx.body = {
message: "File has been uploaded and url stored to config.",
url,
url: objectStore.getGlobalFileUrl(type, name, etag),
}
}

export async function destroy(ctx: BBContext) {
export async function destroy(ctx: UserCtx) {
const db = tenancy.getGlobalDB()
const { id, rev } = ctx.params
try {

@@ -378,7 +359,7 @@ export async function destroy(ctx: BBContext) {
}
}

export async function configChecklist(ctx: BBContext) {
export async function configChecklist(ctx: Ctx) {
const db = tenancy.getGlobalDB()
const tenantId = tenancy.getTenantId()
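After the upload/publicSettings changes above, only the object key and an Etag are persisted in the settings config; the public URL is rebuilt on every read via objectStore.getGlobalFileUrl(type, name, etag). The sketch below is not part of the commit: the getGlobalFileUrl body and the URL shape are assumptions for illustration, only its call signature comes from the diff.

// Hypothetical shape of a stored settings config after an upload.
const settings = {
  config: {
    logoUrl: "settings/logoUrl", // object key, never a full URL
    logoUrlEtag: "3f2a6c",       // saved from the upload response for cache busting
  },
}

// Illustrative stand-in for objectStore.getGlobalFileUrl(type, name, etag):
// serve through a stable route and append the Etag as a cache-busting query param.
function getGlobalFileUrl(type: string, name: string, etag?: string): string {
  const base = `/global/${type}/${name}`
  return etag ? `${base}?etag=${etag}` : base
}

if (settings.config.logoUrl !== "") {
  // Enriched on read only; the generated URL is never written back to the config.
  console.log(getGlobalFileUrl("settings", "logoUrl", settings.config.logoUrlEtag))
}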
@@ -178,7 +178,7 @@ export const find = async (ctx: any) => {

export const tenantUserLookup = async (ctx: any) => {
const id = ctx.params.id
const user = await tenancy.getTenantUser(id)
const user = await sdk.users.getPlatformUser(id)
if (user) {
ctx.body = user
} else {
@@ -10,4 +10,4 @@ router
.delete("/api/global/license/info", controller.deleteInfo)
.get("/api/global/license/usage", controller.getQuotaUsage)

export = router
export default router
@@ -91,6 +91,11 @@ for (let [key, value] of Object.entries(environment)) {
// @ts-ignore
environment[key] = 0
}
// handle the edge case of "false" to disable an environment variable
if (value === "false") {
// @ts-ignore
environment[key] = 0
}
}

export = environment
@@ -24,7 +24,7 @@ import * as redis from "./utilities/redis"
const Sentry = require("@sentry/node")
const koaSession = require("koa-session")
const logger = require("koa-pino-logger")
const destroyable = require("server-destroy")
import destroyable from "server-destroy"

// this will setup http and https proxies form env variables
bootstrap()

@@ -79,7 +79,6 @@ server.on("close", async () => {

const shutdown = () => {
server.close()
// @ts-ignore
server.destroy()
}
@@ -25,6 +25,7 @@ import {
InviteUsersRequest,
InviteUsersResponse,
MigrationType,
PlatformUser,
PlatformUserByEmail,
RowResponse,
SearchUsersRequest,

@@ -153,10 +154,26 @@ const buildUser = async (
return fullUser
}

// lookup, could be email or userId, either will return a doc
export const getPlatformUser = async (
identifier: string
): Promise<PlatformUser | null> => {
// use the view here and allow to find anyone regardless of casing
// Use lowercase to ensure email login is case insensitive
const response = dbUtils.queryPlatformView(
ViewName.PLATFORM_USERS_LOWERCASE,
{
keys: [identifier.toLowerCase()],
include_docs: true,
}
) as Promise<PlatformUser>
return response
}

const validateUniqueUser = async (email: string, tenantId: string) => {
// check budibase users in other tenants
if (env.MULTI_TENANCY) {
const tenantUser = await tenancy.getTenantUser(email)
const tenantUser = await getPlatformUser(email)
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
throw `Unavailable`
}
@@ -22,7 +22,9 @@ mocks.fetch.enable()
const tk = require("timekeeper")
tk.freeze(mocks.date.MOCK_DATE)

if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
}

if (!process.env.CI) {
// set a longer timeout in dev for debugging
@@ -1,4 +1,4 @@
exports.mock = () => {
export function mock() {
// mock the email system
const sendMailMock = jest.fn()
const nodemailer = require("nodemailer")

@@ -1,4 +1,4 @@
const email = require("./email")
import * as email from "./email"
import { mocks } from "@budibase/backend-core/tests"

export = {
@@ -1,5 +1,5 @@
const { Config } = require("../../constants")
const { utils } = require("@budibase/backend-core")
import { Config } from "../../constants"
import { utils } from "@budibase/backend-core"

export function oidc(conf?: any) {
return {
@@ -1325,10 +1325,10 @@
"@types/koa-compose" "*"
"@types/node" "*"

"@types/koa__router@8.0.11":
version "8.0.11"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.11.tgz#d7b37e6db934fc072ea1baa2ab92bc8ac4564f3e"
integrity sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==
"@types/koa__router@8.0.8":
version "8.0.8"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.8.tgz#b1e0e9a512498777d3366bbdf0e853df27ec831c"
integrity sha512-9pGCaDtzCsj4HJ8HmGuqzk8+s57sPj4njWd08GG5o92n5Xp9io2snc40CPpXFhoKcZ8OKhuu6ht4gNou9e1C2w==
dependencies:
"@types/koa" "*"

@@ -1536,6 +1536,13 @@
"@types/mime" "^1"
"@types/node" "*"

"@types/server-destroy@1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/server-destroy/-/server-destroy-1.0.1.tgz#6010a89e2df4f2c15a265fe73c70fd3641486530"
integrity sha512-77QGr7waZbE0Y0uF+G+uH3H3SmhyA78Jf2r5r7QSrpg0U3kSXduWpGjzP9PvPLR/KCy+kHjjpnugRHsYTnHopg==
dependencies:
"@types/node" "*"

"@types/stack-utils@^2.0.0":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c"