Merge branch 'develop' of github.com:Budibase/budibase into feature/test-image

mike12345567 2022-12-15 15:22:01 +00:00
commit d8e702567b
111 changed files with 2074 additions and 1089 deletions

View File

@ -67,6 +67,8 @@ spec:
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio | quote }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@ -77,13 +79,19 @@ spec:
secretKeyRef:
name: {{ template "budibase.fullname" . }}
key: objectStoreSecret
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}

View File

@ -68,6 +68,8 @@ spec:
- name: AWS_REGION
value: {{ .Values.services.objectStore.region }}
{{ end }}
- name: MINIO_ENABLED
value: {{ .Values.services.objectStore.minio | quote }}
- name: MINIO_ACCESS_KEY
valueFrom:
secretKeyRef:
@ -80,11 +82,17 @@ spec:
key: objectStoreSecret
- name: MINIO_URL
value: {{ .Values.services.objectStore.url }}
- name: CLOUDFRONT_CDN
value: {{ .Values.services.objectStore.cloudfront.cdn | quote }}
- name: CLOUDFRONT_PUBLIC_KEY_ID
value: {{ .Values.services.objectStore.cloudfront.publicKeyId | quote }}
- name: CLOUDFRONT_PRIVATE_KEY_64
value: {{ .Values.services.objectStore.cloudfront.privateKey64 | quote }}
- name: PLUGIN_BUCKET_NAME
value: {{ .Values.services.objectStore.pluginBucketName | quote }}
- name: APPS_BUCKET_NAME
value: {{ .Values.services.objectStore.appsBucketName | quote }}
- name: GLOBAL_CLOUD_BUCKET_NAME
- name: GLOBAL_BUCKET_NAME
value: {{ .Values.services.objectStore.globalBucketName | quote }}
- name: BACKUPS_BUCKET_NAME
value: {{ .Values.services.objectStore.backupsBucketName | quote }}

View File

@ -167,6 +167,7 @@ services:
resources: {}
objectStore:
# Set to false if using another object store such as S3
minio: true
browser: true
port: 9000
@ -182,6 +183,13 @@ services:
## set, choosing the default provisioner.
storageClass: ""
resources: {}
cloudfront:
# Set the url of a distribution to enable cloudfront
cdn: ""
# ID of public key stored in cloudfront
publicKeyId: ""
# Base64 encoded private key for the above public key
privateKey64: ""
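
The privateKey64 value is expected to be the PEM private key encoded as base64; the backend decodes it with Buffer.from(value, "base64").toString("utf-8") before signing (see the new cloudfront module in backend-core further down). A minimal Node.js sketch of producing that value, assuming a hypothetical key file name:

import { readFileSync } from "fs"

// Hypothetical path to the PEM key that pairs with the public key uploaded to
// CloudFront (publicKeyId above).
const pem = readFileSync("cloudfront_private_key.pem", "utf-8")

// Base64 encode the whole PEM block; this string becomes privateKey64.
const privateKey64 = Buffer.from(pem, "utf-8").toString("base64")
console.log(privateKey64)
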
# Override values in couchDB subchart
couchdb:

View File

@ -186,6 +186,26 @@ http {
proxy_pass http://dev-service:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://minio-service:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

View File

@ -208,6 +208,26 @@ http {
proxy_pass http://$minio:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://$minio:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

View File

@ -95,15 +95,37 @@ server {
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;

View File

@ -1,5 +1,5 @@
{
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -25,6 +25,7 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"build:backend": "lerna run build --ignore @budibase/client --ignore @budibase/bbui --ignore @budibase/builder --ignore @budibase/cli",
"build:sdk": "lerna run build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",

View File

@ -3,7 +3,10 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
listObjects: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`
}),
promise: jest.fn().mockReturnThis(),
catch: jest.fn(),
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,9 +20,11 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "2.1.46-alpha.6",
"@budibase/nano": "10.1.1",
"@budibase/types": "2.2.4-alpha.2",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
"bcryptjs": "2.4.3",
@ -35,7 +37,6 @@
"koa-passport": "4.1.4",
"lodash": "4.17.21",
"lodash.isarguments": "3.1.0",
"nano": "^10.1.0",
"node-fetch": "2.6.7",
"passport-google-oauth": "2.0.0",
"passport-jwt": "4.0.0",

View File

@ -2,7 +2,7 @@
// store an app ID to pretend there is a context
import env from "../environment"
import Context from "./Context"
import { getDevelopmentAppID, getProdAppID } from "../db/conversions"
import * as conversions from "../db/conversions"
import { getDB } from "../db/db"
import {
DocumentType,
@ -181,6 +181,14 @@ export function getAppId(): string | undefined {
}
}
export const getProdAppId = () => {
const appId = getAppId()
if (!appId) {
throw new Error("Could not get appId")
}
return conversions.getProdAppID(appId)
}
export function updateTenantId(tenantId?: string) {
let context: ContextMap = updateContext({
tenantId,
@ -229,7 +237,7 @@ export function getProdAppDB(opts?: any): Database {
if (!appId) {
throw new Error("Unable to retrieve prod DB - no app ID.")
}
return getDB(getProdAppID(appId), opts)
return getDB(conversions.getProdAppID(appId), opts)
}
/**
@ -241,5 +249,5 @@ export function getDevAppDB(opts?: any): Database {
if (!appId) {
throw new Error("Unable to retrieve dev DB - no app ID.")
}
return getDB(getDevelopmentAppID(appId), opts)
return getDB(conversions.getDevelopmentAppID(appId), opts)
}

View File

@ -1,4 +1,4 @@
import Nano from "nano"
import Nano from "@budibase/nano"
import {
AllDocsResponse,
AnyDocument,

View File

@ -14,7 +14,7 @@ import { doWithDB, allDbs, directCouchAllDbs } from "./db"
import { getAppMetadata } from "../cache/appMetadata"
import { isDevApp, isDevAppID, getProdAppID } from "./conversions"
import * as events from "../events"
import { App, Database, ConfigType } from "@budibase/types"
import { App, Database, ConfigType, isSettingsConfig } from "@budibase/types"
/**
* Generates a new app ID.
@ -489,18 +489,12 @@ export const getScopedFullConfig = async function (
// custom logic for settings doc
if (type === ConfigType.SETTINGS) {
if (scopedConfig && scopedConfig.doc) {
// overrides affected by environment variables
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
} else {
if (!scopedConfig || !scopedConfig.doc) {
// defaults
scopedConfig = {
doc: {
_id: generateConfigID({ type, user, workspace }),
type: ConfigType.SETTINGS,
config: {
platformUrl: await getPlatformUrl({ tenantAware: true }),
analyticsEnabled: await events.analytics.enabled(),
@ -508,6 +502,16 @@ export const getScopedFullConfig = async function (
},
}
}
// will always be true - use assertion function to get type access
if (isSettingsConfig(scopedConfig.doc)) {
// overrides affected by environment
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
}
}
return scopedConfig && scopedConfig.doc

View File

@ -25,7 +25,6 @@ const DefaultBucketName = {
APPS: "prod-budi-app-assets",
TEMPLATES: "templates",
GLOBAL: "global",
CLOUD: "prod-budi-tenant-uploads",
PLUGINS: "plugins",
}
@ -33,6 +32,9 @@ const environment = {
isTest,
isJest,
isDev,
isProd: () => {
return !isDev()
},
JS_BCRYPT: process.env.JS_BCRYPT,
JWT_SECRET: process.env.JWT_SECRET,
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
@ -47,6 +49,7 @@ const environment = {
MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
AWS_REGION: process.env.AWS_REGION,
MINIO_URL: process.env.MINIO_URL,
MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL:
@ -59,6 +62,9 @@ const environment = {
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,
CLOUDFRONT_PRIVATE_KEY_64: process.env.CLOUDFRONT_PRIVATE_KEY_64,
CLOUDFRONT_PUBLIC_KEY_ID: process.env.CLOUDFRONT_PUBLIC_KEY_ID,
BACKUPS_BUCKET_NAME:
process.env.BACKUPS_BUCKET_NAME || DefaultBucketName.BACKUPS,
APPS_BUCKET_NAME: process.env.APPS_BUCKET_NAME || DefaultBucketName.APPS,
@ -66,12 +72,9 @@ const environment = {
process.env.TEMPLATES_BUCKET_NAME || DefaultBucketName.TEMPLATES,
GLOBAL_BUCKET_NAME:
process.env.GLOBAL_BUCKET_NAME || DefaultBucketName.GLOBAL,
GLOBAL_CLOUD_BUCKET_NAME:
process.env.GLOBAL_CLOUD_BUCKET_NAME || DefaultBucketName.CLOUD,
PLUGIN_BUCKET_NAME:
process.env.PLUGIN_BUCKET_NAME || DefaultBucketName.PLUGINS,
USE_COUCH: process.env.USE_COUCH || true,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
SERVICE: process.env.SERVICE || "budibase",
LOG_LEVEL: process.env.LOG_LEVEL,
@ -92,6 +95,11 @@ for (let [key, value] of Object.entries(environment)) {
// @ts-ignore
environment[key] = 0
}
// handle the edge case of "false" to disable an environment variable
if (value === "false") {
// @ts-ignore
environment[key] = 0
}
}
export = environment
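
The loop above normalises flag-style variables: the existing branch turns "0" into the number 0 and the new branch does the same for "false", so plain truthiness checks such as if (env.MINIO_ENABLED) treat both spellings as disabled. An illustrative standalone sketch of that coercion (not the module itself):

// Illustrative only: mirrors the coercion applied to the environment object above.
function coerceFlag(value: string | number | undefined) {
  if (value === "0" || value === "false") {
    return 0
  }
  return value
}

coerceFlag("false") // => 0, read as disabled
coerceFlag("0")     // => 0, read as disabled
coerceFlag("1")     // => "1", truthy, read as enabled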

View File

@ -0,0 +1,40 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"
/**
* In production the client library is stored in the object store; in development
* we use the symlinked version produced by lerna, located in node_modules. We link to this
* via a specific endpoint (under /api/assets/client).
* @param {string} appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param {string} version The version to retrieve.
* @return {string} The URL to be inserted into appPackage response or server rendered
* app index file.
*/
export const clientLibraryUrl = (appId: string, version: string) => {
if (env.isProd()) {
let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
if (version) {
file += `?v=${version}`
}
// don't need to use presigned for client with cloudfront
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
} else {
return `/api/assets/client`
}
}
export const getAppFileUrl = (s3Key: string) => {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, s3Key)
}
}
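
A rough sketch of the URL shapes this module produces, matching the unit tests added further down (the hostnames are test fixtures, not defaults, and signed query strings are abbreviated):

import { clientLibraryUrl, getAppFileUrl } from "./app"

// Development: the client library is served from the symlinked package.
clientLibraryUrl("app_123", "2.0.0")
// => "/api/assets/client"

// Production app files: a presigned object store URL, or a signed CloudFront URL
// when CLOUDFRONT_CDN is set.
getAppFileUrl("app_123/attachments/image.jpeg")
// embedded MinIO => "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg?..."
// external S3    => "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg?..."
// CloudFront     => "http://cf.example.com/app_123/attachments/image.jpeg?..." (signed)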

View File

@ -0,0 +1,29 @@
import env from "../../environment"
import * as tenancy from "../../tenancy"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"
// URLs
export const getGlobalFileUrl = (type: string, name: string, etag?: string) => {
let file = getGlobalFileS3Key(type, name)
if (env.CLOUDFRONT_CDN) {
if (etag) {
file = `${file}?etag=${etag}`
}
return cloudfront.getPresignedUrl(file)
} else {
return objectStore.getPresignedUrl(env.GLOBAL_BUCKET_NAME, file)
}
}
// KEYS
export const getGlobalFileS3Key = (type: string, name: string) => {
let file = `${type}/${name}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
file = `${tenantId}/${file}`
}
return file
}

View File

@ -0,0 +1,3 @@
export * from "./app"
export * from "./global"
export * from "./plugins"

View File

@ -0,0 +1,71 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as tenancy from "../../tenancy"
import * as cloudfront from "../cloudfront"
import { Plugin } from "@budibase/types"
// URLS
export const enrichPluginURLs = (plugins: Plugin[]) => {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const jsUrl = getPluginJSUrl(plugin)
const iconUrl = getPluginIconUrl(plugin)
return { ...plugin, jsUrl, iconUrl }
})
}
const getPluginJSUrl = (plugin: Plugin) => {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}
const getPluginIconUrl = (plugin: Plugin): string | undefined => {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
}
return getPluginUrl(s3Key)
}
const getPluginUrl = (s3Key: string) => {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
return objectStore.getPresignedUrl(env.PLUGIN_BUCKET_NAME, s3Key)
}
}
// S3 KEYS
export const getPluginJSKey = (plugin: Plugin) => {
return getPluginS3Key(plugin, "plugin.min.js")
}
export const getPluginIconKey = (plugin: Plugin) => {
// stored iconUrl is deprecated - hardcode to icon.svg in this case
const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
if (!iconFileName) {
return
}
return getPluginS3Key(plugin, iconFileName)
}
const getPluginS3Key = (plugin: Plugin, fileName: string) => {
const s3Key = getPluginS3Dir(plugin.name)
return `${s3Key}/${fileName}`
}
export const getPluginS3Dir = (pluginName: string) => {
let s3Key = `${pluginName}`
if (env.MULTI_TENANCY) {
const tenantId = tenancy.getTenantId()
s3Key = `${tenantId}/${s3Key}`
}
if (env.CLOUDFRONT_CDN) {
s3Key = `plugins/${s3Key}`
}
return s3Key
}

View File

@ -0,0 +1,171 @@
import * as app from "../app"
import { getAppFileUrl } from "../app"
import { testEnv } from "../../../../tests"
describe("app", () => {
beforeEach(() => {
testEnv.nodeJest()
})
describe("clientLibraryUrl", () => {
function getClientUrl() {
return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
}
describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})
it("gets url in dev", () => {
testEnv.nodeDev()
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
})
it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
it("gets url with custom S3", () => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
})
})
describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})
it("gets url in dev", async () => {
testEnv.nodeDev()
await testEnv.withTenant(tenantId => {
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
})
})
it("gets url with embedded minio", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
})
it("gets url with custom S3", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
})
})
it("gets url with cloudfront + s3", async () => {
await testEnv.withTenant(tenantId => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
})
})
})
})
describe("getAppFileUrl", () => {
function getAppFileUrl() {
return app.getAppFileUrl("app_123/attachments/image.jpeg")
}
describe("single tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})
it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with custom S3", () => {
testEnv.withS3()
const url = getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getAppFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/app_123/attachments/image.jpeg?")
).toBe(true)
})
})
describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
})
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg"
)
})
})
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getAppFileUrl()
// omit rest of signed params
expect(
url.includes(
"http://cf.example.com/app_123/attachments/image.jpeg?"
)
).toBe(true)
})
})
})
})
})

View File

@ -0,0 +1,74 @@
import * as global from "../global"
import { testEnv } from "../../../../tests"
describe("global", () => {
describe("getGlobalFileUrl", () => {
function getGlobalFileUrl() {
return global.getGlobalFileUrl("settings", "logoUrl", "etag")
}
describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getGlobalFileUrl()
expect(url).toBe("/files/signed/global/settings/logoUrl")
})
it("gets url with custom S3", () => {
testEnv.withS3()
const url = getGlobalFileUrl()
expect(url).toBe("http://s3.example.com/global/settings/logoUrl")
})
it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes("http://cf.example.com/settings/logoUrl?etag=etag&")
).toBe(true)
})
})
describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
expect(url).toBe(`/files/signed/global/${tenantId}/settings/logoUrl`)
})
})
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
expect(url).toBe(
`http://s3.example.com/global/${tenantId}/settings/logoUrl`
)
})
})
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const url = getGlobalFileUrl()
// omit rest of signed params
expect(
url.includes(
`http://cf.example.com/${tenantId}/settings/logoUrl?etag=etag&`
)
).toBe(true)
})
})
})
})
})

View File

@ -0,0 +1,110 @@
import * as plugins from "../plugins"
import { structures, testEnv } from "../../../../tests"
describe("plugins", () => {
describe("enrichPluginURLs", () => {
const plugin = structures.plugins.plugin()
function getEnrichedPluginUrls() {
const enriched = plugins.enrichPluginURLs([plugin])[0]
return {
jsUrl: enriched.jsUrl!,
iconUrl: enriched.iconUrl!,
}
}
describe("single tenant", () => {
beforeAll(() => {
testEnv.singleTenant()
})
it("gets url with embedded minio", () => {
testEnv.withMinio()
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`/files/signed/plugins/${plugin.name}/icon.svg`
)
})
it("gets url with custom S3", () => {
testEnv.withS3()
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`http://s3.example.com/plugins/${plugin.name}/icon.svg`
)
})
it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const urls = getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
`http://cf.example.com/plugins/${plugin.name}/plugin.min.js?`
)
).toBe(true)
expect(
urls.iconUrl.includes(
`http://cf.example.com/plugins/${plugin.name}/icon.svg?`
)
).toBe(true)
})
})
describe("multi tenant", () => {
beforeAll(() => {
testEnv.multiTenant()
})
it("gets url with embedded minio", async () => {
testEnv.withMinio()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`/files/signed/plugins/${tenantId}/${plugin.name}/icon.svg`
)
})
})
it("gets url with custom S3", async () => {
testEnv.withS3()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
expect(urls.jsUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js`
)
expect(urls.iconUrl).toBe(
`http://s3.example.com/plugins/${tenantId}/${plugin.name}/icon.svg`
)
})
})
it("gets url with cloudfront + s3", async () => {
testEnv.withCloudfront()
await testEnv.withTenant(tenantId => {
const urls = getEnrichedPluginUrls()
// omit rest of signed params
expect(
urls.jsUrl.includes(
`http://cf.example.com/plugins/${tenantId}/${plugin.name}/plugin.min.js?`
)
).toBe(true)
expect(
urls.iconUrl.includes(
`http://cf.example.com/plugins/${tenantId}/${plugin.name}/icon.svg?`
)
).toBe(true)
})
})
})
})
})

View File

@ -0,0 +1,41 @@
import env from "../environment"
const cfsign = require("aws-cloudfront-sign")
let PRIVATE_KEY: string | undefined
function getPrivateKey() {
if (!env.CLOUDFRONT_PRIVATE_KEY_64) {
throw new Error("CLOUDFRONT_PRIVATE_KEY_64 is not set")
}
if (PRIVATE_KEY) {
return PRIVATE_KEY
}
PRIVATE_KEY = Buffer.from(env.CLOUDFRONT_PRIVATE_KEY_64, "base64").toString(
"utf-8"
)
return PRIVATE_KEY
}
const getCloudfrontSignParams = () => {
return {
keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
privateKeyString: getPrivateKey(),
expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
}
}
export const getPresignedUrl = (s3Key: string) => {
const url = getUrl(s3Key)
return cfsign.getSignedUrl(url, getCloudfrontSignParams())
}
export const getUrl = (s3Key: string) => {
let prefix = "/"
if (s3Key.startsWith("/")) {
prefix = ""
}
return `${env.CLOUDFRONT_CDN}${prefix}${s3Key}`
}
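
A minimal usage sketch, assuming CLOUDFRONT_CDN, CLOUDFRONT_PUBLIC_KEY_ID and CLOUDFRONT_PRIVATE_KEY_64 are set as in the test environment below; the exact signature parameters come from aws-cloudfront-sign and are abbreviated here:

import * as cloudfront from "./cloudfront"

// Unsigned CDN URL, used for public assets such as the client library.
cloudfront.getUrl("app_123/budibase-client.js")
// => "http://cf.example.com/app_123/budibase-client.js"

// Signed URL for private files, valid for one hour from generation.
cloudfront.getPresignedUrl("app_123/attachments/image.jpeg")
// => "http://cf.example.com/app_123/attachments/image.jpeg?<signature params>"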

View File

@ -1,2 +1,3 @@
export * from "./objectStore"
export * from "./utils"
export * from "./buckets"

View File

@ -8,7 +8,7 @@ import { promisify } from "util"
import { join } from "path"
import fs from "fs"
import env from "../environment"
import { budibaseTempDir, ObjectStoreBuckets } from "./utils"
import { budibaseTempDir } from "./utils"
import { v4 } from "uuid"
import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
@ -26,7 +26,7 @@ type UploadParams = {
bucket: string
filename: string
path: string
type?: string
type?: string | null
// can be undefined, we will remove it
metadata?: {
[key: string]: string | undefined
@ -41,6 +41,7 @@ const CONTENT_TYPE_MAP: any = {
json: "application/json",
gz: "application/gzip",
}
const STRING_CONTENT_TYPES = [
CONTENT_TYPE_MAP.html,
CONTENT_TYPE_MAP.css,
@ -58,35 +59,17 @@ export function sanitizeBucket(input: string) {
return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
}
function publicPolicy(bucketName: string) {
return {
Version: "2012-10-17",
Statement: [
{
Effect: "Allow",
Principal: {
AWS: ["*"],
},
Action: "s3:GetObject",
Resource: [`arn:aws:s3:::${bucketName}/*`],
},
],
}
}
const PUBLIC_BUCKETS = [
ObjectStoreBuckets.APPS,
ObjectStoreBuckets.GLOBAL,
ObjectStoreBuckets.PLUGINS,
]
/**
* Gets a connection to the object store using the S3 SDK.
* @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.
* @param {object} opts configuration for the object store.
* @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (bucket: string) => {
export const ObjectStore = (
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) => {
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
@ -100,9 +83,20 @@ export const ObjectStore = (bucket: string) => {
Bucket: sanitizeBucket(bucket),
}
}
// custom S3 is in use i.e. minio
if (env.MINIO_URL) {
config.endpoint = env.MINIO_URL
if (opts.presigning && !env.MINIO_ENABLED) {
// IMPORTANT: Signed urls will inspect the host header of the request.
// Normally a signed url will need to be generated with a specified host in mind.
// To support dynamic hosts, e.g. some unknown self-hosted installation url,
// use a predefined host. The host 'minio-service' is also forwarded to minio requests via nginx
config.endpoint = "minio-service"
} else {
config.endpoint = env.MINIO_URL
}
}
return new AWS.S3(config)
}
@ -135,16 +129,6 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => {
await promises[bucketName]
delete promises[bucketName]
}
// public buckets are quite hidden in the system, make sure
// no bucket is set accidentally
if (PUBLIC_BUCKETS.includes(bucketName)) {
await client
.putBucketPolicy({
Bucket: bucketName,
Policy: JSON.stringify(publicPolicy(bucketName)),
})
.promise()
}
} else {
throw new Error("Unable to write to object store bucket.")
}
@ -274,6 +258,36 @@ export const listAllObjects = async (bucketName: string, path: string) => {
return objects
}
/**
* Generate a presigned url with a default TTL of 1 hour
*/
export const getPresignedUrl = (
bucketName: string,
key: string,
durationSeconds: number = 3600
) => {
const objectStore = ObjectStore(bucketName, { presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
Key: sanitizeKey(key),
Expires: durationSeconds,
}
const url = objectStore.getSignedUrl("getObject", params)
if (!env.MINIO_ENABLED) {
// return the full URL to the client
return url
} else {
// return the path only to the client
// use the presigned url route to ensure the static
// hostname will be used in the request
const signedUrl = new URL(url)
const path = signedUrl.pathname
const query = signedUrl.search
return `/files/signed${path}${query}`
}
}
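
With MinIO enabled, getPresignedUrl returns a relative path so the new nginx /files/signed/ location (earlier in this diff) can proxy the request to minio-service with a fixed Host header, keeping the v4 signature valid for any client hostname. A rough sketch of the two shapes (bucket and key values are illustrative, query strings abbreviated):

import { getPresignedUrl } from "./objectStore"

getPresignedUrl("prod-budi-app-assets", "app_123/attachments/image.jpeg")
// MINIO_ENABLED set  => "/files/signed/prod-budi-app-assets/app_123/attachments/image.jpeg?X-Amz-..."
//   nginx strips /files/signed/ and proxies to http://minio-service:9000
// external S3 in use => the full provider URL is returned directly, e.g.
//   "http://s3.example.com/prod-budi-app-assets/app_123/attachments/image.jpeg?X-Amz-..."
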
/**
* Same as retrieval function but puts to a temporary file.
*/

View File

@ -14,7 +14,6 @@ export const ObjectStoreBuckets = {
APPS: env.APPS_BUCKET_NAME,
TEMPLATES: env.TEMPLATES_BUCKET_NAME,
GLOBAL: env.GLOBAL_BUCKET_NAME,
GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
PLUGINS: env.PLUGIN_BUCKET_NAME,
}

View File

@ -1,4 +1,4 @@
import { doWithDB, queryPlatformView, getGlobalDBName } from "../db"
import { doWithDB, getGlobalDBName } from "../db"
import {
DEFAULT_TENANT_ID,
getTenantId,
@ -8,11 +8,10 @@ import {
import env from "../environment"
import {
BBContext,
PlatformUser,
TenantResolutionStrategy,
GetTenantIdOptions,
} from "@budibase/types"
import { Header, StaticDatabases, ViewName } from "../constants"
import { Header, StaticDatabases } from "../constants"
const TENANT_DOC = StaticDatabases.PLATFORM_INFO.docs.tenants
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@ -111,27 +110,7 @@ export async function lookupTenantId(userId: string) {
})
}
// lookup, could be email or userId, either will return a doc
export async function getTenantUser(
identifier: string
): Promise<PlatformUser | undefined> {
// use the view here and allow to find anyone regardless of casing
// Use lowercase to ensure email login is case-insensitive
const users = await queryPlatformView<PlatformUser>(
ViewName.PLATFORM_USERS_LOWERCASE,
{
keys: [identifier.toLowerCase()],
include_docs: true,
}
)
if (Array.isArray(users)) {
return users[0]
} else {
return users
}
}
export function isUserInAppTenant(appId: string, user?: any) {
export const isUserInAppTenant = (appId: string, user?: any) => {
let userTenantId
if (user) {
userTenantId = user.tenantId || DEFAULT_TENANT_ID

View File

@ -1,8 +1,8 @@
const { structures } = require("../../tests")
const utils = require("../utils")
const events = require("../events")
const { DEFAULT_TENANT_ID } = require("../constants")
const { doInTenant } = require("../context")
import { structures } from "../../../tests"
import * as utils from "../../utils"
import * as events from "../../events"
import { DEFAULT_TENANT_ID } from "../../constants"
import { doInTenant } from "../../context"
describe("utils", () => {
describe("platformLogout", () => {
@ -14,4 +14,4 @@ describe("utils", () => {
})
})
})
})
})

View File

@ -1,6 +1,13 @@
import { getAllApps, queryGlobalView } from "../db"
import { options } from "../middleware/passport/jwt"
import { Header, Cookie, MAX_VALID_DATE } from "../constants"
import {
Header,
Cookie,
MAX_VALID_DATE,
DocumentType,
SEPARATOR,
ViewName,
} from "../constants"
import env from "../environment"
import * as userCache from "../cache/user"
import { getSessionsForUser, invalidateSessions } from "../security/sessions"
@ -8,12 +15,11 @@ import * as events from "../events"
import * as tenancy from "../tenancy"
import {
App,
BBContext,
Ctx,
PlatformLogoutOpts,
TenantResolutionStrategy,
} from "@budibase/types"
import { SetOption } from "cookies"
import { DocumentType, SEPARATOR, ViewName } from "../constants"
const jwt = require("jsonwebtoken")
const APP_PREFIX = DocumentType.APP + SEPARATOR
@ -25,7 +31,7 @@ function confirmAppId(possibleAppId: string | undefined) {
: undefined
}
async function resolveAppUrl(ctx: BBContext) {
async function resolveAppUrl(ctx: Ctx) {
const appUrl = ctx.path.split("/")[2]
let possibleAppUrl = `/${appUrl.toLowerCase()}`
@ -50,7 +56,7 @@ async function resolveAppUrl(ctx: BBContext) {
return app && app.appId ? app.appId : undefined
}
export function isServingApp(ctx: BBContext) {
export function isServingApp(ctx: Ctx) {
// dev app
if (ctx.path.startsWith(`/${APP_PREFIX}`)) {
return true
@ -67,7 +73,7 @@ export function isServingApp(ctx: BBContext) {
* @param {object} ctx The main request body to look through.
* @returns {string|undefined} If an appId was found it will be returned.
*/
export async function getAppIdFromCtx(ctx: BBContext) {
export async function getAppIdFromCtx(ctx: Ctx) {
// look in headers
const options = [ctx.headers[Header.APP_ID]]
let appId
@ -83,12 +89,16 @@ export async function getAppIdFromCtx(ctx: BBContext) {
appId = confirmAppId(ctx.request.body.appId)
}
// look in the url - dev app
let appPath =
ctx.request.headers.referrer ||
ctx.path.split("/").filter(subPath => subPath.startsWith(APP_PREFIX))
if (!appId && appPath.length) {
appId = confirmAppId(appPath[0])
// look in the path
const pathId = parseAppIdFromUrl(ctx.path)
if (!appId && pathId) {
appId = confirmAppId(pathId)
}
// look in the referer
const refererId = parseAppIdFromUrl(ctx.request.headers.referer)
if (!appId && refererId) {
appId = confirmAppId(refererId)
}
// look in the url - prod app
@ -99,6 +109,13 @@ export async function getAppIdFromCtx(ctx: BBContext) {
return appId
}
function parseAppIdFromUrl(url?: string) {
if (!url) {
return
}
return url.split("/").find(subPath => subPath.startsWith(APP_PREFIX))
}
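
The new parseAppIdFromUrl helper simply returns the first path segment carrying the app prefix; a self-contained sketch of the behaviour (example URLs are illustrative):

const APP_PREFIX = "app_" // DocumentType.APP + SEPARATOR

function parseAppIdFromUrl(url?: string) {
  if (!url) {
    return
  }
  return url.split("/").find(subPath => subPath.startsWith(APP_PREFIX))
}

parseAppIdFromUrl("/app_dev_0123456789/home")            // => "app_dev_0123456789"
parseAppIdFromUrl("https://host/app_0123456789/#/table") // => "app_0123456789"
parseAppIdFromUrl("/builder/portal")                     // => undefined
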
/**
* opens the contents of the specified encrypted JWT.
* @return {object} the contents of the token.
@ -115,7 +132,7 @@ export function openJwt(token: string) {
* @param {object} ctx The request which is to be manipulated.
* @param {string} name The name of the cookie to get.
*/
export function getCookie(ctx: BBContext, name: string) {
export function getCookie(ctx: Ctx, name: string) {
const cookie = ctx.cookies.get(name)
if (!cookie) {
@ -133,7 +150,7 @@ export function getCookie(ctx: BBContext, name: string) {
* @param {object} opts options like whether to sign.
*/
export function setCookie(
ctx: BBContext,
ctx: Ctx,
value: any,
name = "builder",
opts = { sign: true }
@ -159,7 +176,7 @@ export function setCookie(
/**
* Utility function, simply calls setCookie with an empty string for value
*/
export function clearCookie(ctx: BBContext, name: string) {
export function clearCookie(ctx: Ctx, name: string) {
setCookie(ctx, null, name)
}
@ -169,7 +186,7 @@ export function clearCookie(ctx: BBContext, name: string) {
* @param {object} ctx The koa context object to be tested.
* @return {boolean} returns true if the call is from the client lib (a built app rather than the builder).
*/
export function isClient(ctx: BBContext) {
export function isClient(ctx: Ctx) {
return ctx.headers[Header.TYPE] === "client"
}

View File

@ -17,7 +17,9 @@ env._set("MINIO_URL", "http://localhost")
env._set("MINIO_ACCESS_KEY", "test")
env._set("MINIO_SECRET_KEY", "test")
global.console.log = jest.fn() // console.log are ignored in tests
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log are ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging

View File

@ -1,6 +1,7 @@
export * as mocks from "./mocks"
export * as structures from "./structures"
export { generator } from "./structures"
export * as testEnv from "./testEnv"
import * as dbConfig from "./db"
dbConfig.init()

View File

@ -1,6 +1,6 @@
import "./posthog"
import "./events"
export * as accounts from "./accounts"
export * as date from "./date"
export * as licenses from "./licenses"
export { default as fetch } from "./fetch"
import "./posthog"
import "./events"

View File

@ -2,14 +2,14 @@ import { Feature, License, Quotas } from "@budibase/types"
import _ from "lodash"
let CLOUD_FREE_LICENSE: License
let TEST_LICENSE: License
let UNLIMITED_LICENSE: License
let getCachedLicense: any
// init for the packages other than pro
export function init(proPkg: any) {
initInternal({
CLOUD_FREE_LICENSE: proPkg.constants.licenses.CLOUD_FREE_LICENSE,
TEST_LICENSE: proPkg.constants.licenses.DEVELOPER_FREE_LICENSE,
UNLIMITED_LICENSE: proPkg.constants.licenses.UNLIMITED_LICENSE,
getCachedLicense: proPkg.licensing.cache.getCachedLicense,
})
}
@ -17,11 +17,11 @@ export function init(proPkg: any) {
// init for the pro package
export function initInternal(opts: {
CLOUD_FREE_LICENSE: License
TEST_LICENSE: License
UNLIMITED_LICENSE: License
getCachedLicense: any
}) {
CLOUD_FREE_LICENSE = opts.CLOUD_FREE_LICENSE
TEST_LICENSE = opts.TEST_LICENSE
UNLIMITED_LICENSE = opts.UNLIMITED_LICENSE
getCachedLicense = opts.getCachedLicense
}
@ -48,7 +48,7 @@ export const useLicense = (license: License, opts?: UseLicenseOpts) => {
}
export const useUnlimited = (opts?: UseLicenseOpts) => {
return useLicense(TEST_LICENSE, opts)
return useLicense(UNLIMITED_LICENSE, opts)
}
export const useCloudFree = () => {
@ -58,7 +58,7 @@ export const useCloudFree = () => {
// FEATURES
const useFeature = (feature: Feature) => {
const license = _.cloneDeep(TEST_LICENSE)
const license = _.cloneDeep(UNLIMITED_LICENSE)
const opts: UseLicenseOpts = {
features: [feature],
}
@ -77,7 +77,7 @@ export const useGroups = () => {
// QUOTAS
export const setAutomationLogsQuota = (value: number) => {
const license = _.cloneDeep(TEST_LICENSE)
const license = _.cloneDeep(UNLIMITED_LICENSE)
license.quotas.constant.automationLogRetentionDays.value = value
return useLicense(license)
}

View File

@ -6,3 +6,4 @@ export const generator = new Chance()
export * as koa from "./koa"
export * as accounts from "./accounts"
export * as licenses from "./licenses"
export * as plugins from "./plugins"

View File

@ -0,0 +1,19 @@
import { generator } from "."
import { Plugin, PluginSource, PluginType } from "@budibase/types"
export function plugin(): Plugin {
return {
description: generator.word(),
name: generator.word(),
version: "1.0.0",
source: PluginSource.FILE,
package: {
name: generator.word,
},
hash: generator.hash(),
schema: {
type: PluginType.DATASOURCE,
},
iconFileName: "icon.svg",
}
}

View File

@ -0,0 +1,87 @@
import env from "../../src/environment"
import * as tenancy from "../../src/tenancy"
import { newid } from "../../src/utils"
// TENANCY
export async function withTenant(task: (tenantId: string) => any) {
const tenantId = newid()
return tenancy.doInTenant(tenantId, async () => {
await task(tenantId)
})
}
export function singleTenant() {
env._set("MULTI_TENANCY", 0)
}
export function multiTenant() {
env._set("MULTI_TENANCY", 1)
}
// NODE
export function nodeDev() {
env._set("NODE_ENV", "dev")
}
export function nodeJest() {
env._set("NODE_ENV", "jest")
}
// FILES
export function withS3() {
env._set("NODE_ENV", "production")
env._set("MINIO_ENABLED", 0)
env._set("MINIO_URL", "http://s3.example.com")
env._set("CLOUDFRONT_CDN", undefined)
}
const CLOUDFRONT_TEST_KEY =
"-----BEGIN RSA PRIVATE KEY-----\n" +
"MIIEpAIBAAKCAQEAqXRsir/0Qba1xEnybUs7d7QEAE02GRc+4H7HD5l5VnAxkV1m\n" +
"tNTXTmoYkaIhLdebV1EwQs3T9knxoyd4cVcrDkDfDLZErfYWJsuE3/QYNknnZs4/\n" +
"Ai0cg+v9ZX3gcizvpYg9GQI3INM0uRG8lJwGP7FQ/kknhA2yVFVCSxX6kkNtOUh5\n" +
"dKSG7m6IwswcSwD++Z/94vsFkoZIGY0e1CD/drFJ6+1TFY2YgbDKT5wDFLJ9vHFx\n" +
"/5o4POwn3gz/ru2Db9jbRdfEAqRdy46nRKQgBGUmupAgSK1+BJEzafexp8RmCGb0\n" +
"WUffxOtj8/jNCeCF0JBgVHAe3crOQ8ySrtoaHQIDAQABAoIBAA+ipW07/u6dTDI7\n" +
"XHoHKgqGeqQIe8he47dVG0ruL0rxeTFfe92NkfwzP+cYHZWcQkIRRLG1Six8cCZM\n" +
"uwlCML/U7n++xaGDhlG4D5+WZzGDKi3LM/cgcHQfrzbRIYeHa+lLI9AN60ZFFqVI\n" +
"5KyVpOH1m3KLD3FYzi6H22EQOxmJpqWlt2uArny5LxlPJKmmGSFjvneb4N2ZAKGQ\n" +
"QfClJGz9tRjceWUUdJrpqmTmBQIosKmLPq8PEviUNAVG+6m4r8jiRbf8OKkAm+3L\n" +
"LVIsN8HfYB9jEuERYPnbuXdX0kDEkg0xEyTH5YbNZvfm5ptCU9Xn+Jz1trF+wCHD\n" +
"2RlxdQUCgYEA3U0nCf6NTmmeMCsAX6gvaPuM0iUfUfS3b3G57I6u46lLGNLsfJw6\n" +
"MTpVc164lKYQK9czw/ijKzb8e3mcyzbPorVkajMjUCNWGrMK+vFbOGmqQkhUi30U\n" +
"IJuuTktMd+21D/SpLlev4MLria23vUIKEqNenYpV6wkGLt/mKtISaPMCgYEAxAYx\n" +
"j+xJLTK9eN+rpekwjYE78hD9VoBkBnr/NBiGV302AsJRuq2+L4zcBnAsH+SidFim\n" +
"cwqoj3jeVT8ZQFXlK3fGVaEJsCXd6GWk8ZIWUTn9JZwi2KcCvCU/YiHfx8c7y7Gl\n" +
"SiPXUPsvvkcw6RRh2u4J5tHLIqJe3W58ENoBNK8CgYEApxTBDMKrXTBQxn0w4wfQ\n" +
"A6soPuDYLMBeXj226eswD6KZmDxnYA1zwgcQzPIO2ewm+XKZGrR2PQJezbqbrrHL\n" +
"QkVBcwz49GA5eh8Dg0MGZCki6rhBXK8qqxPfHi2rpkBKG6nUsbBykXeY7XHC75kU\n" +
"kc3WeYsgIzvE908EMAA69hECgYEAinbpiYVZh1DBH+G26MIYZswz4OB5YyHcBevZ\n" +
"2x27v48VmMtUWe4iWopAXVfdA0ZILrD0Gm0b9gRl4IdqudQyxgqcEZ5oLoIBBwjN\n" +
"g0oy83tnwqpQvwLx3p7c79+HqCGmrlK0s/MvQ+e6qMi21t1r5e6hFed5euSA6B8E\n" +
"Cg9ELMcCgYB9bGwlNAE+iuzMIhKev1s7h3TzqKtGw37TtHXvxcTQs3uawJQksQ2s\n" +
"K0Zy1Ta7vybbwAA5m+LxoMT04WUdJO7Cr8/3rBMrbKKO3H7IgC3G+nXnOBdshzn5\n" +
"ifMbhZslFThC/osD5ZV7snXZgTWyPexaINJhHmdrAWpmW1h+UFoiMw==\n" +
"-----END RSA PRIVATE KEY-----\n"
const CLOUDFRONT_TEST_KEY_64 = Buffer.from(
CLOUDFRONT_TEST_KEY,
"utf-8"
).toString("base64")
export function withCloudfront() {
withS3()
env._set("CLOUDFRONT_CDN", "http://cf.example.com")
env._set("CLOUDFRONT_PUBLIC_KEY_ID", "keypair_123")
env._set("CLOUDFRONT_PRIVATE_KEY_64", CLOUDFRONT_TEST_KEY_64)
}
export function withMinio() {
env._set("NODE_ENV", "production")
env._set("MINIO_ENABLED", 1)
env._set("MINIO_URL", "http://minio.example.com")
env._set("CLOUDFRONT_CDN", undefined)
}

View File

@ -8,6 +8,10 @@
}
},
"references": [
{ "path": "../types" },
{ "path": "../types" }
],
"exclude": [
"node_modules",
"dist"
]
}

View File

@ -470,6 +470,18 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/nano@10.1.1":
version "10.1.1"
resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038"
integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA==
dependencies:
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
"@cspotcode/source-map-support@^0.8.0":
version "0.8.1"
resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
@ -1526,6 +1538,13 @@ asynckit@^0.4.0:
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
aws-cloudfront-sign@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/aws-cloudfront-sign/-/aws-cloudfront-sign-2.2.0.tgz#3910f5a6d0d90fec07f2b4ef8ab07f3eefb5625d"
integrity sha512-qG+rwZMP3KRTPPbVmWY8DlrT56AkA4iVOeo23vkdK2EXeW/brJFN2haSNKzVz+oYhFMEIzVVloeAcrEzuRkuVQ==
dependencies:
lodash "^3.6.0"
aws-sdk@2.1030.0:
version "2.1030.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82"
@ -3827,6 +3846,11 @@ lodash@4.17.21, lodash@^4.17.21:
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
lodash@^3.6.0:
version "3.10.1"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6"
integrity sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==
lowercase-keys@^1.0.0, lowercase-keys@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-1.0.1.tgz#6f9e30b47084d971a7c820ff15a6c5167b74c26f"
@ -4022,18 +4046,6 @@ msgpackr@^1.5.2:
optionalDependencies:
msgpackr-extract "^2.1.2"
nano@^10.1.0:
version "10.1.0"
resolved "https://registry.yarnpkg.com/nano/-/nano-10.1.0.tgz#afdd5a7440e62f09a8e23f41fcea328d27383922"
integrity sha512-COeN2TpLcHuSN44QLnPmfZCoCsKAg8/aelPOVqqm/2/MvRHDEA11/Kld5C4sLzDlWlhFZ3SO2WGJGevCsvcEzQ==
dependencies:
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
napi-macros@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "1.2.1",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/string-templates": "2.2.4-alpha.2",
"@spectrum-css/actionbutton": "1.0.1",
"@spectrum-css/actiongroup": "1.0.1",
"@spectrum-css/avatar": "3.0.2",

View File

@ -15,6 +15,7 @@
export let value = []
export let id = null
export let disabled = false
export let compact = false
export let fileSizeLimit = BYTES_IN_MB * 20
export let processFiles = null
export let deleteAttachments = null
@ -239,70 +240,72 @@
bind:this={fileInput}
on:change={handleFile}
/>
<svg
class="spectrum-IllustratedMessage-illustration"
width="125"
height="60"
viewBox="0 0 199 97.7"
>
<defs>
<style>
.cls-1,
.cls-2 {
fill: none;
stroke-linecap: round;
stroke-linejoin: round;
}
.cls-1 {
stroke-width: 3px;
}
.cls-2 {
stroke-width: 2px;
}
</style>
</defs>
<path
class="cls-1"
d="M110.53,85.66,100.26,95.89a1.09,1.09,0,0,1-1.52,0L88.47,85.66"
/>
<line class="cls-1" x1="99.5" y1="95.5" x2="99.5" y2="58.5" />
<path class="cls-1" d="M105.5,73.5h19a2,2,0,0,0,2-2v-43" />
<path
class="cls-1"
d="M126.5,22.5h-19a2,2,0,0,1-2-2V1.5h-31a2,2,0,0,0-2,2v68a2,2,0,0,0,2,2h19"
/>
<line class="cls-1" x1="105.5" y1="1.5" x2="126.5" y2="22.5" />
<path
class="cls-2"
d="M47.93,50.49a5,5,0,1,0-4.83-5A4.93,4.93,0,0,0,47.93,50.49Z"
/>
<path
class="cls-2"
d="M36.6,65.93,42.05,60A2.06,2.06,0,0,1,45,60l12.68,13.2"
/>
<path
class="cls-2"
d="M3.14,73.23,22.42,53.76a1.65,1.65,0,0,1,2.38,0l19.05,19.7"
/>
<path
class="cls-1"
d="M139.5,36.5H196A1.49,1.49,0,0,1,197.5,38V72A1.49,1.49,0,0,1,196,73.5H141A1.49,1.49,0,0,1,139.5,72V32A1.49,1.49,0,0,1,141,30.5H154a2.43,2.43,0,0,1,1.67.66l6,5.66"
/>
<rect
class="cls-1"
x="1.5"
y="34.5"
width="58"
height="39"
rx="2"
ry="2"
/>
</svg>
<h2
class="spectrum-Heading spectrum-Heading--sizeL spectrum-Heading--light spectrum-IllustratedMessage-heading"
>
Drag and drop your file
</h2>
{#if !compact}
<svg
class="spectrum-IllustratedMessage-illustration"
width="125"
height="60"
viewBox="0 0 199 97.7"
>
<defs>
<style>
.cls-1,
.cls-2 {
fill: none;
stroke-linecap: round;
stroke-linejoin: round;
}
.cls-1 {
stroke-width: 3px;
}
.cls-2 {
stroke-width: 2px;
}
</style>
</defs>
<path
class="cls-1"
d="M110.53,85.66,100.26,95.89a1.09,1.09,0,0,1-1.52,0L88.47,85.66"
/>
<line class="cls-1" x1="99.5" y1="95.5" x2="99.5" y2="58.5" />
<path class="cls-1" d="M105.5,73.5h19a2,2,0,0,0,2-2v-43" />
<path
class="cls-1"
d="M126.5,22.5h-19a2,2,0,0,1-2-2V1.5h-31a2,2,0,0,0-2,2v68a2,2,0,0,0,2,2h19"
/>
<line class="cls-1" x1="105.5" y1="1.5" x2="126.5" y2="22.5" />
<path
class="cls-2"
d="M47.93,50.49a5,5,0,1,0-4.83-5A4.93,4.93,0,0,0,47.93,50.49Z"
/>
<path
class="cls-2"
d="M36.6,65.93,42.05,60A2.06,2.06,0,0,1,45,60l12.68,13.2"
/>
<path
class="cls-2"
d="M3.14,73.23,22.42,53.76a1.65,1.65,0,0,1,2.38,0l19.05,19.7"
/>
<path
class="cls-1"
d="M139.5,36.5H196A1.49,1.49,0,0,1,197.5,38V72A1.49,1.49,0,0,1,196,73.5H141A1.49,1.49,0,0,1,139.5,72V32A1.49,1.49,0,0,1,141,30.5H154a2.43,2.43,0,0,1,1.67.66l6,5.66"
/>
<rect
class="cls-1"
x="1.5"
y="34.5"
width="58"
height="39"
rx="2"
ry="2"
/>
</svg>
<h2
class="spectrum-Heading spectrum-Heading--sizeL spectrum-Heading--light spectrum-IllustratedMessage-heading"
>
Drag and drop your file
</h2>
{/if}
{#if !disabled}
<p
class="spectrum-Body spectrum-Body--sizeS spectrum-IllustratedMessage-description"
@ -310,8 +313,10 @@
<label for={fieldId} class="spectrum-Link">
Select a file to upload
</label>
<br />
from your computer
{#if !compact}
<br />
from your computer
{/if}
</p>
{#if fileTags.length}
<Tags>

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -71,10 +71,10 @@
}
},
"dependencies": {
"@budibase/bbui": "2.1.46-alpha.6",
"@budibase/client": "2.1.46-alpha.6",
"@budibase/frontend-core": "2.1.46-alpha.6",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/bbui": "2.2.4-alpha.2",
"@budibase/client": "2.2.4-alpha.2",
"@budibase/frontend-core": "2.2.4-alpha.2",
"@budibase/string-templates": "2.2.4-alpha.2",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@ -232,6 +232,7 @@
{filters}
{bindings}
{schemaFields}
datasource={{ type: "table", tableId }}
panel={AutomationBindingPanel}
fillWidth
on:change={e => (tempFilters = e.detail)}

View File

@ -190,6 +190,7 @@
{filters}
on:change={onFilter}
disabled={!hasCols}
tableId={id}
/>
{/key}
</div>

View File

@ -6,6 +6,7 @@
export let schema
export let filters
export let disabled = false
export let tableId
const dispatch = createEventDispatcher()
@ -37,6 +38,7 @@
allowBindings={false}
{filters}
{schemaFields}
datasource={{ type: "table", tableId }}
on:change={e => (tempValue = e.detail)}
/>
</div>

View File

@ -1,18 +1,15 @@
<script>
import { getIcon } from "./icons"
import CustomSVG from "components/common/CustomSVG.svelte"
import { admin } from "stores/portal"
export let integrationType
export let schema
export let size = "18"
$: objectStoreUrl = $admin.cloud ? "https://cdn.budi.live" : ""
$: pluginsUrl = `${objectStoreUrl}/plugins`
$: iconInfo = getIcon(integrationType, schema)
async function getSvgFromUrl(info) {
const url = `${pluginsUrl}/${info.url}`
const url = `${info.url}`
const resp = await fetch(url, {
headers: {
["pragma"]: "no-cache",

View File

@ -25,7 +25,7 @@
export let panel = ClientBindingPanel
export let allowBindings = true
export let fillWidth = false
export let tableId
export let datasource
const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants
@ -41,11 +41,7 @@
$: parseFilters(filters)
$: dispatch("change", enrichFilters(rawFilters, matchAny))
$: enrichedSchemaFields = getFields(
schemaFields || [],
{ allowLinks: true },
tableId
)
$: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true })
$: fieldOptions = enrichedSchemaFields.map(field => field.name) || []
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
@ -119,7 +115,11 @@
const santizeOperator = filter => {
// Ensure a valid operator is selected
const operators = getValidOperatorsForType(filter.type).map(x => x.value)
const operators = getValidOperatorsForType(
filter.type,
filter.field,
datasource
).map(x => x.value)
if (!operators.includes(filter.operator)) {
filter.operator = operators[0] ?? OperatorOptions.Equals.value
}
@ -201,7 +201,11 @@
/>
<Select
disabled={!filter.field}
options={getValidOperatorsForType(filter.type)}
options={getValidOperatorsForType(
filter.type,
filter.field,
datasource
)}
bind:value={filter.operator}
on:change={() => onOperatorChange(filter)}
placeholder={null}

View File

@ -17,8 +17,8 @@
let drawer
$: tempValue = value
$: dataSource = getDatasourceForProvider($currentAsset, componentInstance)
$: schema = getSchemaForDatasource($currentAsset, dataSource)?.schema
$: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: schema = getSchemaForDatasource($currentAsset, datasource)?.schema
$: schemaFields = Object.values(schema || {})
async function saveFilter() {
@ -36,7 +36,7 @@
filters={value}
{bindings}
{schemaFields}
tableId={dataSource.tableId}
{datasource}
on:change={e => (tempValue = e.detail)}
/>
</Drawer>

View File

@ -16,11 +16,7 @@ export function getTableFields(linkField) {
}))
}
export function getFields(
fields,
{ allowLinks } = { allowLinks: true },
tableId
) {
export function getFields(fields, { allowLinks } = { allowLinks: true }) {
let filteredFields = fields.filter(
field => !BannedSearchTypes.includes(field.type)
)
@ -34,9 +30,5 @@ export function getFields(
const staticFormulaFields = fields.filter(
field => field.type === "formula" && field.formulaType === "static"
)
const table = get(tables).list.find(table => table._id === tableId)
if (table?.type === "external" && table?.sql) {
filteredFields = filteredFields.filter(field => field.name !== "_id")
}
return filteredFields.concat(staticFormulaFields)
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@ -26,9 +26,9 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "2.1.46-alpha.6",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/types": "2.1.46-alpha.6",
"@budibase/backend-core": "2.2.4-alpha.2",
"@budibase/string-templates": "2.2.4-alpha.2",
"@budibase/types": "2.2.4-alpha.2",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",

View File

@ -3440,6 +3440,12 @@
}
]
},
{
"type": "boolean",
"label": "Compact",
"key": "compact",
"defaultValue": false
},
{
"type": "boolean",
"label": "Disabled",
@ -3785,7 +3791,6 @@
"defaultValue": false,
"info": "Row selection is only compatible with internal or SQL tables"
},
{
"section": true,
"name": "On Row Click",
@ -5298,4 +5303,4 @@
"suffix": "repeater"
}
}
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "2.1.46-alpha.6",
"@budibase/frontend-core": "2.1.46-alpha.6",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/bbui": "2.2.4-alpha.2",
"@budibase/frontend-core": "2.2.4-alpha.2",
"@budibase/string-templates": "2.2.4-alpha.2",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View File

@ -92,8 +92,8 @@
<svelte:head>
{#if $builderStore.usedPlugins?.length}
{#each $builderStore.usedPlugins as plugin (plugin.hash)}
<script src={`${plugin.jsUrl}?r=${plugin.hash || ""}`}></script>
{#each $builderStore.usedPlugins as plugin}
<script src={`${plugin.jsUrl}`}></script>
{/each}
{/if}
</svelte:head>

View File

@ -275,7 +275,6 @@
justify-content: center;
align-items: stretch;
z-index: 1;
border-top: 1px solid var(--spectrum-global-color-gray-300);
overflow: hidden;
position: relative;
}
@ -316,6 +315,12 @@
top: 0;
left: 0;
}
.layout--top .nav-wrapper {
border-bottom: 1px solid var(--spectrum-global-color-gray-300);
}
.layout--left .nav-wrapper {
border-right: 1px solid var(--spectrum-global-color-gray-300);
}
.nav {
display: flex;
@ -390,10 +395,6 @@
align-items: stretch;
flex: 1 1 auto;
z-index: 1;
border-top: 1px solid var(--spectrum-global-color-gray-300);
}
.layout--none .main-wrapper {
border-top: none;
}
.main {
display: flex;
@ -487,7 +488,7 @@
}
/* Desktop nav overrides */
.desktop.layout--left {
.desktop.layout--left .layout-body {
flex-direction: row;
overflow: hidden;
}
@ -523,6 +524,8 @@
top: 0;
left: 0;
box-shadow: 0 0 8px -1px rgba(0, 0, 0, 0.075);
border-bottom: 1px solid var(--spectrum-global-color-gray-300);
border-right: none;
}
/* Show close button in drawer */

View File

@ -21,6 +21,7 @@
schema
$: dataProviderId = dataProvider?.id
$: datasource = dataProvider?.datasource
$: addExtension = getAction(
dataProviderId,
ActionTypes.AddDataProviderQueryExtension
@ -29,7 +30,7 @@
dataProviderId,
ActionTypes.RemoveDataProviderQueryExtension
)
$: fetchSchema(dataProvider || {})
$: fetchSchema(datasource)
$: schemaFields = getSchemaFields(schema, allowedFields)
// Add query extension to data provider
@ -42,8 +43,7 @@
}
}
async function fetchSchema(dataProvider) {
const datasource = dataProvider?.datasource
async function fetchSchema(datasource) {
if (datasource) {
schema = await fetchDatasourceSchema(datasource, {
enrichRelationships: true,
@ -102,7 +102,7 @@
<Modal bind:this={modal}>
<ModalContent title="Edit filters" size="XL" onConfirm={updateQuery}>
<FilterModal bind:filters={tmpFilters} {schemaFields} />
<FilterModal bind:filters={tmpFilters} {schemaFields} {datasource} />
</ModalContent>
</Modal>
{/if}

View File

@ -15,6 +15,7 @@
export let schemaFields
export let filters = []
export let datasource
const context = getContext("context")
const BannedTypes = ["link", "attachment", "json"]
@ -59,7 +60,9 @@
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(
expression.type
expression.type,
expression.field,
datasource
).map(x => x.value)
if (!validOperators.includes(expression.operator)) {
expression.operator =
@ -118,7 +121,11 @@
/>
<Select
disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType(filter.type)}
options={LuceneUtils.getValidOperatorsForType(
filter.type,
filter.field,
datasource
)}
bind:value={filter.operator}
on:change={e => onOperatorChange(filter, e.detail)}
placeholder={null}

View File

@ -6,6 +6,7 @@
export let field
export let label
export let disabled = false
export let compact = false
export let validation
export let extensions
export let onChange
@ -89,6 +90,7 @@
{handleTooManyFiles}
{maximum}
{extensions}
{compact}
/>
{/if}
</div>
@ -96,6 +98,6 @@
<style>
.minHeightWrapper {
min-height: 220px;
min-height: 80px;
}
</style>

View File

@ -1,12 +1,12 @@
{
"name": "@budibase/frontend-core",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "2.1.46-alpha.6",
"@budibase/bbui": "2.2.4-alpha.2",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View File

@ -7,7 +7,7 @@ const HBS_REGEX = /{{([^{].*?)}}/g
* Returns the valid operator options for a certain data type
* @param type the data type
* @param field the name of the field being filtered, used to special-case _id
* @param datasource the datasource of the table being filtered, used to decide which operators apply
*/
export const getValidOperatorsForType = type => {
export const getValidOperatorsForType = (type, field, datasource) => {
const Op = OperatorOptions
const stringOps = [
Op.Equals,
@ -27,24 +27,37 @@ export const getValidOperatorsForType = type => {
Op.NotEmpty,
Op.In,
]
let ops = []
if (type === "string") {
return stringOps
ops = stringOps
} else if (type === "number") {
return numOps
ops = numOps
} else if (type === "options") {
return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
} else if (type === "array") {
return [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny]
ops = [Op.Contains, Op.NotContains, Op.Empty, Op.NotEmpty, Op.ContainsAny]
} else if (type === "boolean") {
return [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]
ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty]
} else if (type === "longform") {
return stringOps
ops = stringOps
} else if (type === "datetime") {
return numOps
ops = numOps
} else if (type === "formula") {
return stringOps.concat([Op.MoreThan, Op.LessThan])
ops = stringOps.concat([Op.MoreThan, Op.LessThan])
}
return []
// Filter out "like" for internal tables
const externalTable = datasource?.tableId?.includes("datasource_plus")
if (datasource?.type === "table" && !externalTable) {
ops = ops.filter(x => x !== Op.Like)
}
// Only allow equal/not equal for _id in SQL tables
if (field === "_id" && externalTable) {
ops = [Op.Equals, Op.NotEquals]
}
return ops
}
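
As a rough usage sketch of the new signature (the import path and table IDs below are illustrative, not taken from this change), the extra arguments narrow the operator list:

import { getValidOperatorsForType } from "./lucene"

// Internal table: the fuzzy "like" operator is filtered out of the string operators
const internalOps = getValidOperatorsForType("string", "name", {
  type: "table",
  tableId: "ta_users",
})

// SQL table (datasource plus): filtering on _id only allows equal / not equal
const externalOps = getValidOperatorsForType("string", "_id", {
  type: "table",
  tableId: "datasource_plus_postgres__people",
})
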
/**

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/sdk",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase Public API SDK",
"author": "Budibase",
"license": "MPL-2.0",

View File

@ -51,6 +51,25 @@ module AwsMock {
Contents: {},
})
)
// @ts-ignore
this.getSignedUrl = (operation, params) => {
return `http://test.com/${params.Bucket}/${params.Key}`
}
// @ts-ignore
this.headBucket = jest.fn(
response({
Contents: {},
})
)
// @ts-ignore
this.upload = jest.fn(
response({
Contents: {},
})
)
}
aws.DynamoDB = { DocumentClient }

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -43,11 +43,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "2.1.46-alpha.6",
"@budibase/client": "2.1.46-alpha.6",
"@budibase/pro": "2.1.46-alpha.6",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/types": "2.1.46-alpha.6",
"@budibase/backend-core": "2.2.4-alpha.2",
"@budibase/client": "2.2.4-alpha.2",
"@budibase/pro": "2.2.4-alpha.2",
"@budibase/string-templates": "2.2.4-alpha.2",
"@budibase/types": "2.2.4-alpha.2",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -130,13 +130,15 @@
"@types/ioredis": "4.28.10",
"@types/jest": "27.5.1",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.11",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/oracledb": "5.2.2",
"@types/pouchdb": "6.4.0",
"@types/redis": "4.0.11",
"@types/server-destroy": "1.0.1",
"@types/tar": "6.1.3",
"@typescript-eslint/parser": "5.45.0",
"apidoc": "0.50.4",
"babel-jest": "27.5.1",

View File

@ -1,6 +1,6 @@
const Resource = require("./utils/Resource")
const { object } = require("./utils")
const { BaseQueryVerbs } = require("../../src/constants")
const { BaseQueryVerbs } = require("../../dist/constants")
const query = {
_id: "query_datasource_plus_4d8be0c506b9465daf4bf84d890fdab6_454854487c574d45bc4029b1e153219e",

View File

@ -2,7 +2,7 @@ const {
FieldTypes,
RelationshipTypes,
FormulaTypes,
} = require("../../src/constants")
} = require("../../dist/constants")
const { object } = require("./utils")
const Resource = require("./utils/Resource")

View File

@ -23,21 +23,18 @@ import {
errors,
events,
migrations,
objectStore,
} from "@budibase/backend-core"
import { USERS_TABLE_SCHEMA } from "../../constants"
import { buildDefaultDocs } from "../../db/defaultData/datasource_bb_default"
import { removeAppFromUserRoles } from "../../utilities/workerRequests"
import {
clientLibraryPath,
stringToReadStream,
isQsTrue,
} from "../../utilities"
import { stringToReadStream, isQsTrue } from "../../utilities"
import { getLocksById } from "../../utilities/redis"
import {
updateClientLibrary,
backupClientLibrary,
revertClientLibrary,
} from "../../utilities/fileSystem/clientLibrary"
} from "../../utilities/fileSystem"
import { cleanupAutomations } from "../../automations/utils"
import { checkAppMetadata } from "../../automations/logging"
import { getUniqueRows } from "../../utilities/usageQuota/rows"
@ -49,9 +46,9 @@ import {
MigrationType,
BBContext,
Database,
UserCtx,
} from "@budibase/types"
import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
import { enrichPluginURLs } from "../../utilities/plugins"
import sdk from "../../sdk"
// utility function, need to do away with this
@ -204,27 +201,34 @@ export async function fetchAppDefinition(ctx: BBContext) {
}
}
export async function fetchAppPackage(ctx: BBContext) {
export async function fetchAppPackage(ctx: UserCtx) {
const db = context.getAppDB()
let application = await db.get(DocumentType.APP_METADATA)
const layouts = await getLayouts()
let screens = await getScreens()
// Enrich plugin URLs
application.usedPlugins = enrichPluginURLs(application.usedPlugins)
application.usedPlugins = objectStore.enrichPluginURLs(
application.usedPlugins
)
// Only filter screens if the user is not a builder
if (!(ctx.user?.builder && ctx.user.builder.global)) {
if (!(ctx.user.builder && ctx.user.builder.global)) {
const userRoleId = getUserRoleId(ctx)
const accessController = new roles.AccessController()
screens = await accessController.checkScreensAccess(screens, userRoleId)
}
const clientLibPath = objectStore.clientLibraryUrl(
ctx.params.appId,
application.version
)
ctx.body = {
application,
screens,
layouts,
clientLibPath: clientLibraryPath(ctx.params.appId, application.version),
clientLibPath,
}
}
@ -370,7 +374,7 @@ async function appPostCreate(ctx: BBContext, app: App) {
if (err.code && err.code === errors.codes.USAGE_LIMIT_EXCEEDED) {
// this import resulted in row usage exceeding the quota
// delete the app
// skip pre- and post-steps as no rows have been added to quotas yet
// skip pre and post-steps as no rows have been added to quotas yet
ctx.params.appId = app.appId
await destroyApp(ctx)
}

View File

@ -3,7 +3,7 @@ import { InternalTables } from "../../db/utils"
import { getFullUser } from "../../utilities/users"
import { roles, context } from "@budibase/backend-core"
import { groups } from "@budibase/pro"
import { ContextUser, User, Row } from "@budibase/types"
import { ContextUser, User, Row, UserCtx } from "@budibase/types"
const PUBLIC_ROLE = roles.BUILTIN_ROLE_IDS.PUBLIC
@ -16,7 +16,7 @@ const addSessionAttributesToUser = (ctx: any) => {
}
}
export async function fetchSelf(ctx: any) {
export async function fetchSelf(ctx: UserCtx) {
let userId = ctx.user.userId || ctx.user._id
/* istanbul ignore next */
if (!userId || !ctx.isAuthenticated) {

View File

@ -5,8 +5,8 @@ import { stringToReadStream } from "../../utilities"
import { getDocParams, DocumentType, isDevAppID } from "../../db/utils"
import { create } from "./application"
import { join } from "path"
import { App, BBContext, Database } from "@budibase/types"
import sdk from "../../sdk"
import { App, Ctx, Database } from "@budibase/types"
async function createApp(appName: string, appDirectory: string) {
const ctx = {
@ -35,7 +35,7 @@ async function getAllDocType(db: Database, docType: string) {
return response.rows.map(row => row.doc)
}
export async function exportApps(ctx: BBContext) {
export async function exportApps(ctx: Ctx) {
if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
}
@ -65,13 +65,13 @@ async function checkHasBeenImported() {
return apps.length !== 0
}
export async function hasBeenImported(ctx: BBContext) {
export async function hasBeenImported(ctx: Ctx) {
ctx.body = {
imported: await checkHasBeenImported(),
}
}
export async function importApps(ctx: BBContext) {
export async function importApps(ctx: Ctx) {
if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
ctx.throw(400, "Importing only allowed in self hosted environments.")
}
@ -82,12 +82,16 @@ export async function importApps(ctx: BBContext) {
"Import file is required and environment must be fresh to import apps."
)
}
if (ctx.request.files.importFile.type !== "application/gzip") {
const file = ctx.request.files.importFile
if (Array.isArray(file)) {
ctx.throw(400, "Single file is required")
}
if (file.type !== "application/gzip") {
ctx.throw(400, "Import file must be a gzipped tarball.")
}
// initially get all the app databases out of the tarball
const tmpPath = sdk.backups.untarFile(ctx.request.files.importFile)
const tmpPath = sdk.backups.untarFile(file)
const globalDbImport = sdk.backups.getGlobalDBFile(tmpPath)
const appNames = sdk.backups.getListOfAppsInMulti(tmpPath)

View File

@ -1,10 +1,14 @@
import { npmUpload, urlUpload, githubUpload, fileUpload } from "./uploaders"
import { plugins as pluginCore, tenancy } from "@budibase/backend-core"
import { PluginType, FileType, PluginSource } from "@budibase/types"
import {
plugins as pluginCore,
db as dbCore,
tenancy,
objectStore,
} from "@budibase/backend-core"
import { PluginType, FileType, PluginSource, Plugin } from "@budibase/types"
import env from "../../../environment"
import { ClientAppSocket } from "../../../websocket"
import { db as dbCore } from "@budibase/backend-core"
import { plugins } from "@budibase/pro"
import { sdk as pro } from "@budibase/pro"
export async function getPlugins(type?: PluginType) {
const db = tenancy.getGlobalDB()
@ -13,9 +17,10 @@ export async function getPlugins(type?: PluginType) {
include_docs: true,
})
)
const plugins = response.rows.map((row: any) => row.doc)
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
plugins = objectStore.enrichPluginURLs(plugins)
if (type) {
return plugins.filter((plugin: any) => plugin.schema?.type === type)
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
} else {
return plugins
}
@ -84,7 +89,7 @@ export async function create(ctx: any) {
)
}
const doc = await plugins.storePlugin(metadata, directory, source)
const doc = await pro.plugins.storePlugin(metadata, directory, source)
ClientAppSocket.emit("plugins-update", { name, hash: doc.hash })
ctx.body = {
@ -107,7 +112,7 @@ export async function destroy(ctx: any) {
const { pluginId } = ctx.params
try {
await plugins.deletePlugin(pluginId)
await pro.plugins.deletePlugin(pluginId)
ctx.body = { message: `Plugin ${ctx.params.pluginId} deleted.` }
} catch (err: any) {
@ -127,7 +132,7 @@ export async function processUploadedPlugin(
throw new Error("Only component plugins are supported outside of self-host")
}
const doc = await plugins.storePlugin(metadata, directory, source)
const doc = await pro.plugins.storePlugin(metadata, directory, source)
ClientAppSocket.emit("plugin-update", { name: doc.name, hash: doc.hash })
return doc
}

View File

@ -21,10 +21,10 @@ import { getDatasourceAndQuery } from "./utils"
import { FieldTypes, RelationshipTypes } from "../../../constants"
import { breakExternalTableId, isSQL } from "../../../integrations/utils"
import { processObjectSync } from "@budibase/string-templates"
// @ts-ignore
import { cloneDeep } from "lodash/fp"
import { processFormulas, processDates } from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core"
import { removeKeyNumbering } from "./utils"
export interface ManyRelationship {
tableId?: string
@ -55,15 +55,21 @@ function buildFilters(
let idCopy: undefined | string | any[] = cloneDeep(id)
if (filters) {
// need to map over the filters and make sure the _id field isn't present
for (let filter of Object.values(filters)) {
if (filter._id && primary) {
const parts = breakRowIdField(filter._id)
for (let field of primary) {
filter[field] = parts.shift()
let prefix = 1
for (let operator of Object.values(filters)) {
for (let field of Object.keys(operator || {})) {
if (removeKeyNumbering(field) === "_id") {
if (primary) {
const parts = breakRowIdField(operator[field])
for (let field of primary) {
operator[`${prefix}:${field}`] = parts.shift()
}
prefix++
}
// make sure this field doesn't exist on any filter
delete operator[field]
}
}
// make sure this field doesn't exist on any filter
delete filter._id
}
}
// there is no id, just use the user provided filters
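
The numbered-key handling above can be hard to follow in diff form. Below is a hedged, self-contained sketch of the intent, with simplified stand-ins for removeKeyNumbering and breakRowIdField (the real helpers live elsewhere in the server code, so their exact behaviour here is assumed):

// simplified stand-ins for the real helpers
const removeKeyNumbering = (key: string) => key.replace(/^\d+:/, "")
const breakRowIdField = (id: string): any[] => JSON.parse(decodeURIComponent(id)) // "%5B42%5D" -> [42]

const primary = ["id"]
const filters: Record<string, Record<string, any>> = {
  equal: { _id: "%5B42%5D" },
}

let prefix = 1
for (const operator of Object.values(filters)) {
  for (const field of Object.keys(operator)) {
    if (removeKeyNumbering(field) === "_id") {
      const parts = breakRowIdField(operator[field])
      for (const pk of primary) {
        operator[`${prefix}:${pk}`] = parts.shift()
      }
      prefix++
      delete operator[field]
    }
  }
}
// filters.equal is now { "1:id": 42 } - the _id filter has been rewritten against the
// table's primary key, with a numeric prefix so repeated keys stay unique.
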

View File

@ -13,7 +13,7 @@ import {
cleanupAttachments,
} from "../../../utilities/rowProcessor"
import { FieldTypes } from "../../../constants"
import { validate as rowValidate, findRow } from "./utils"
import * as utils from "./utils"
import { fullSearch, paginatedSearch } from "./internalSearch"
import { getGlobalUsersFromMetadata } from "../../../utilities/global"
import * as inMemoryViews from "../../../db/inMemoryView"
@ -30,7 +30,8 @@ import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import * as exporters from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem"
import {
BBContext,
Ctx,
UserCtx,
Database,
LinkDocumentValue,
Row,
@ -69,7 +70,7 @@ async function getView(db: Database, viewName: string) {
return viewInfo
}
async function getRawTableData(ctx: BBContext, db: Database, tableId: string) {
async function getRawTableData(ctx: Ctx, db: Database, tableId: string) {
let rows
if (tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
@ -85,7 +86,7 @@ async function getRawTableData(ctx: BBContext, db: Database, tableId: string) {
return rows as Row[]
}
export async function patch(ctx: BBContext) {
export async function patch(ctx: UserCtx) {
const db = context.getAppDB()
const inputs = ctx.request.body
const tableId = inputs.tableId
@ -95,7 +96,7 @@ export async function patch(ctx: BBContext) {
let dbTable = await db.get(tableId)
oldRow = await outputProcessing(
dbTable,
await findRow(ctx, tableId, inputs._id)
await utils.findRow(ctx, tableId, inputs._id)
)
} catch (err) {
if (isUserTable) {
@ -117,8 +118,8 @@ export async function patch(ctx: BBContext) {
}
// this returns the table and row in case they have been updated
let { table, row } = inputProcessing(ctx.user!, dbTable, combinedRow)
const validateResult = await rowValidate({
let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow)
const validateResult = await utils.validate({
row,
table,
})
@ -150,7 +151,7 @@ export async function patch(ctx: BBContext) {
})
}
export async function save(ctx: BBContext) {
export async function save(ctx: UserCtx) {
const db = context.getAppDB()
let inputs = ctx.request.body
inputs.tableId = ctx.params.tableId
@ -161,8 +162,8 @@ export async function save(ctx: BBContext) {
// this returns the table and row in case they have been updated
const dbTable = await db.get(inputs.tableId)
let { table, row } = inputProcessing(ctx.user!, dbTable, inputs)
const validateResult = await rowValidate({
let { table, row } = inputProcessing(ctx.user, dbTable, inputs)
const validateResult = await utils.validate({
row,
table,
})
@ -185,7 +186,7 @@ export async function save(ctx: BBContext) {
})
}
export async function fetchView(ctx: BBContext) {
export async function fetchView(ctx: Ctx) {
const viewName = ctx.params.viewName
// if this is a table view being looked for just transfer to that
@ -252,7 +253,7 @@ export async function fetchView(ctx: BBContext) {
return rows
}
export async function fetch(ctx: BBContext) {
export async function fetch(ctx: Ctx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
@ -261,15 +262,15 @@ export async function fetch(ctx: BBContext) {
return outputProcessing(table, rows)
}
export async function find(ctx: BBContext) {
export async function find(ctx: Ctx) {
const db = dbCore.getDB(ctx.appId)
const table = await db.get(ctx.params.tableId)
let row = await findRow(ctx, ctx.params.tableId, ctx.params.rowId)
let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
row = await outputProcessing(table, row)
return row
}
export async function destroy(ctx: BBContext) {
export async function destroy(ctx: Ctx) {
const db = context.getAppDB()
const { _id } = ctx.request.body
let row = await db.get(_id)
@ -305,7 +306,7 @@ export async function destroy(ctx: BBContext) {
return { response, row }
}
export async function bulkDestroy(ctx: BBContext) {
export async function bulkDestroy(ctx: Ctx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const table = await db.get(tableId)
@ -344,7 +345,7 @@ export async function bulkDestroy(ctx: BBContext) {
return { response: { ok: true }, rows: processedRows }
}
export async function search(ctx: BBContext) {
export async function search(ctx: Ctx) {
// Fetch the whole table when running in cypress, as search doesn't work
if (!env.COUCH_DB_URL && env.isCypress()) {
return { rows: await fetch(ctx) }
@ -376,14 +377,14 @@ export async function search(ctx: BBContext) {
return response
}
export async function validate(ctx: BBContext) {
return rowValidate({
export async function validate(ctx: Ctx) {
return utils.validate({
tableId: ctx.params.tableId,
row: ctx.request.body,
})
}
export async function exportRows(ctx: BBContext) {
export async function exportRows(ctx: Ctx) {
const db = context.getAppDB()
const table = await db.get(ctx.params.tableId)
const rowIds = ctx.request.body.rows
@ -421,14 +422,14 @@ export async function exportRows(ctx: BBContext) {
return apiFileReturn(exporter(headers, rows))
}
export async function fetchEnrichedRow(ctx: BBContext) {
export async function fetchEnrichedRow(ctx: Ctx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const rowId = ctx.params.rowId
// need table to work out where links go in row
let [table, row] = await Promise.all([
db.get(tableId),
findRow(ctx, tableId, rowId),
utils.findRow(ctx, tableId, rowId),
])
// get the link docs
const linkVals = (await linkRows.getLinkDocuments({

View File

@ -16,7 +16,10 @@ const { cloneDeep } = require("lodash/fp")
* updated.
* NOTE: this will only affect static formulas.
*/
export async function updateRelatedFormula(table: Table, enrichedRows: Row[]) {
export async function updateRelatedFormula(
table: Table,
enrichedRows: Row[] | Row
) {
const db = context.getAppDB()
// no formula to update, we're done
if (!table.relatedFormula) {
@ -155,7 +158,7 @@ export async function finaliseRow(
enrichedRow = await processFormulas(table, enrichedRow, { dynamic: false })
// this updates the related formulas in other rows based on the relations to this row
if (updateFormula) {
await exports.updateRelatedFormula(table, enrichedRow)
await updateRelatedFormula(table, enrichedRow)
}
return { row: enrichedRow, table }
}

View File

@ -7,6 +7,7 @@ import { BBContext, Row, Table } from "@budibase/types"
export { removeKeyNumbering } from "../../../integrations/base/utils"
const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")
import { Ctx } from "@budibase/types"
validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) {
@ -25,7 +26,7 @@ export async function getDatasourceAndQuery(json: any) {
return makeExternalQuery(datasource, json)
}
export async function findRow(ctx: BBContext, tableId: string, rowId: string) {
export async function findRow(ctx: Ctx, tableId: string, rowId: string) {
const db = context.getAppDB()
let row
// TODO remove special user case in future

View File

@ -1,11 +1,9 @@
import { enrichPluginURLs } from "../../../utilities/plugins"
require("svelte/register")
const send = require("koa-send")
const { resolve, join } = require("../../../utilities/centralPath")
const uuid = require("uuid")
const { ObjectStoreBuckets } = require("../../../constants")
import { ObjectStoreBuckets } from "../../../constants"
const { processString } = require("@budibase/string-templates")
const {
loadHandlebarsFile,
@ -13,8 +11,6 @@ const {
TOP_LEVEL_PATH,
} = require("../../../utilities/fileSystem")
const env = require("../../../environment")
const { clientLibraryPath } = require("../../../utilities")
const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentType } = require("../../../db/utils")
const { context, objectStore, utils } = require("@budibase/backend-core")
const AWS = require("aws-sdk")
@ -33,7 +29,7 @@ async function prepareUpload({ s3Key, bucket, metadata, file }: any) {
return {
size: file.size,
name: file.name,
url: attachmentsRelativeURL(response.Key),
url: objectStore.getAppFileUrl(s3Key),
extension: [...file.name.split(".")].pop(),
key: response.Key,
}
@ -85,7 +81,7 @@ export const uploadFile = async function (ctx: any) {
return prepareUpload({
file,
s3Key: `${ctx.appId}/attachments/${processedFileName}`,
s3Key: `${context.getProdAppId()}/attachments/${processedFileName}`,
bucket: ObjectStoreBuckets.APPS,
})
})
@ -107,14 +103,14 @@ export const serveApp = async function (ctx: any) {
if (!env.isJest()) {
const App = require("./templates/BudibaseApp.svelte").default
const plugins = enrichPluginURLs(appInfo.usedPlugins)
const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins)
const { head, html, css } = App.render({
metaImage:
"https://res.cloudinary.com/daog6scxm/image/upload/v1666109324/meta-images/budibase-meta-image_uukc1m.png",
title: appInfo.name,
production: env.isProd(),
appId,
clientLibPath: clientLibraryPath(appId, appInfo.version, ctx),
clientLibPath: objectStore.clientLibraryUrl(appId, appInfo.version),
usedPlugins: plugins,
})
@ -139,7 +135,7 @@ export const serveBuilderPreview = async function (ctx: any) {
let appId = context.getAppId()
const previewHbs = loadHandlebarsFile(`${__dirname}/templates/preview.hbs`)
ctx.body = await processString(previewHbs, {
clientLibPath: clientLibraryPath(appId, appInfo.version, ctx),
clientLibPath: objectStore.clientLibraryUrl(appId, appInfo.version),
})
} else {
// just return the app info for jest to assert on

View File

@ -26,13 +26,14 @@ import cloudRoutes from "./cloud"
import migrationRoutes from "./migrations"
import pluginRoutes from "./plugin"
import Router from "@koa/router"
import { api } from "@budibase/pro"
import { api as pro } from "@budibase/pro"
export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"
const appBackupRoutes = api.appBackups
const scheduleRoutes = api.schedules
const appBackupRoutes = pro.appBackups
const scheduleRoutes = pro.schedules
export const mainRoutes: Router[] = [
appBackupRoutes,
backupRoutes,

View File

@ -10,6 +10,7 @@ const {
StaticQuotaName,
MonthlyQuotaName,
} = require("@budibase/types")
const { structures } = require("@budibase/backend-core/tests")
describe("/rows", () => {
let request = setup.getRequest()
@ -494,12 +495,13 @@ describe("/rows", () => {
describe("attachments", () => {
it("should allow enriching attachment rows", async () => {
const table = await config.createAttachmentTable()
const attachmentId = `${structures.uuid()}.csv`
const row = await config.createRow({
name: "test",
description: "test",
attachment: [
{
key: `${config.getAppId()}/attachments/test/thing.csv`,
key: `${config.getAppId()}/attachments/${attachmentId}`,
},
],
tableId: table._id,
@ -509,7 +511,7 @@ describe("/rows", () => {
context.doInAppContext(config.getAppId(), async () => {
const enriched = await outputProcessing(table, [row])
expect(enriched[0].attachment[0].url).toBe(
`/prod-budi-app-assets/${config.getAppId()}/attachments/test/thing.csv`
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
)
})
})

View File

@ -1,20 +1,5 @@
jest.mock("node-fetch")
jest.mock("aws-sdk", () => ({
config: {
update: jest.fn(),
},
DynamoDB: {
DocumentClient: jest.fn(),
},
S3: jest.fn(() => ({
getSignedUrl: jest.fn(() => {
return "my-url"
}),
})),
}))
const setup = require("./utilities")
const { events, constants } = require("@budibase/backend-core")
const { constants } = require("@budibase/backend-core")
describe("/static", () => {
let request = setup.getRequest()
@ -102,7 +87,7 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("my-url")
expect(res.body.signedUrl).toEqual("http://test.com/foo/bar")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)
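
For context, the new expectation follows from the shared aws-sdk mock extended earlier in this commit, which builds signed URLs from the request parameters; the bucket "foo" and key "bar" (and the operation name) are inferred from the expected value rather than visible in this hunk:

// mirrors the mocked S3 getSignedUrl above
const getSignedUrl = (operation: string, params: { Bucket: string; Key: string }) =>
  `http://test.com/${params.Bucket}/${params.Key}`

getSignedUrl("getObject", { Bucket: "foo", Key: "bar" })
// -> "http://test.com/foo/bar"
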

View File

@ -79,7 +79,6 @@ const environment = {
// flags
ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
DISABLE_THREADING: process.env.DISABLE_THREADING,
DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
DISABLE_AUTOMATION_LOGS: process.env.DISABLE_AUTOMATION_LOGS,
MULTI_TENANCY: process.env.MULTI_TENANCY,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
@ -113,12 +112,17 @@ if (isDev() && module.exports.DISABLE_THREADING == null) {
}
// clean up any environment variable edge cases
for (let [key, value] of Object.entries(module.exports)) {
for (let [key, value] of Object.entries(environment)) {
// handle the edge case of "0" to disable an environment variable
if (value === "0") {
// @ts-ignore
environment[key] = 0
}
// handle the edge case of "false" to disable an environment variable
if (value === "false") {
// @ts-ignore
environment[key] = 0
}
}
export = environment
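
A minimal illustration of why the extra "false" case matters (the variable choice is illustrative): process.env values are always strings, so a flag explicitly set to "false" is still truthy until it is coerced.

process.env.MULTI_TENANCY = "false"

if (process.env.MULTI_TENANCY) {
  // this branch runs: "false" is a non-empty string
}

// After the cleanup loop above, environment.MULTI_TENANCY === 0,
// so checks like `if (environment.MULTI_TENANCY)` are correctly falsy.
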

View File

@ -98,11 +98,7 @@ export async function getIntegration(integration: string) {
for (let plugin of plugins) {
if (plugin.name === integration) {
// need to use commonJS require due to its dynamic runtime nature
const retrieved: any = await getDatasourcePlugin(
plugin.name,
plugin.jsUrl,
plugin.schema?.hash
)
const retrieved: any = await getDatasourcePlugin(plugin)
if (retrieved.integration) {
return retrieved.integration
} else {

View File

@ -5,6 +5,7 @@ import {
isGoogleConfig,
isOIDCConfig,
isSettingsConfig,
ConfigType,
} from "@budibase/types"
import env from "./../../../../environment"
@ -31,15 +32,15 @@ export const backfill = async (
await events.email.SMTPCreated(timestamp)
}
if (isGoogleConfig(config)) {
await events.auth.SSOCreated("google", timestamp)
await events.auth.SSOCreated(ConfigType.GOOGLE, timestamp)
if (config.config.activated) {
await events.auth.SSOActivated("google", timestamp)
await events.auth.SSOActivated(ConfigType.GOOGLE, timestamp)
}
}
if (isOIDCConfig(config)) {
await events.auth.SSOCreated("oidc", timestamp)
await events.auth.SSOCreated(ConfigType.OIDC, timestamp)
if (config.config.configs[0].activated) {
await events.auth.SSOActivated("oidc", timestamp)
await events.auth.SSOActivated(ConfigType.OIDC, timestamp)
}
}
if (isSettingsConfig(config)) {

View File

@ -55,12 +55,8 @@ async function updateAttachmentColumns(prodAppId: string, db: Database) {
continue
}
row[column] = row[column].map((attachment: RowAttachment) => {
// URL looks like: /prod-budi-app-assets/appId/attachments/file.csv
const urlParts = attachment.url.split("/")
// drop the first empty element
urlParts.shift()
// get the prefix
const prefix = urlParts.shift()
// Key looks like: appId/attachments/file.csv
const urlParts = attachment.key.split("/")
// remove the app ID
urlParts.shift()
// add new app ID
@ -69,7 +65,7 @@ async function updateAttachmentColumns(prodAppId: string, db: Database) {
return {
...attachment,
key,
url: `/${prefix}/${key}`,
url: "", // calculated on retrieval using key
}
})
}

View File

@ -19,7 +19,10 @@ import { mocks } from "@budibase/backend-core/tests"
const tk = require("timekeeper")
tk.freeze(mocks.date.MOCK_DATE)
global.console.log = jest.fn() // console.log is ignored in tests
if (!process.env.DEBUG) {
global.console.log = jest.fn() // console.log is ignored in tests
global.console.warn = jest.fn() // console.warn is ignored in tests
}
if (!process.env.CI) {
// set a longer timeout in dev for debugging

View File

@ -0,0 +1,86 @@
import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { ObjectStoreBuckets } from "../../constants"
import { updateClientLibrary } from "./clientLibrary"
import env from "../../environment"
import { objectStore, context } from "@budibase/backend-core"
import { TOP_LEVEL_PATH } from "./filesystem"
export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
/**
* Uploads the latest client library to the object store.
* @param {string} appId The ID of the app which is being created.
* @return {Promise<void>} once promise completes app resources should be ready in object store.
*/
export const createApp = async (appId: string) => {
await updateClientLibrary(appId)
}
/**
* Removes all of the assets created for an app in the object store.
* @param {string} appId The ID of the app which is being deleted.
* @return {Promise<void>} once promise completes the app resources will be removed from object store.
*/
export const deleteApp = async (appId: string) => {
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
}
/**
* Retrieves component libraries from object store (or tmp symlink if in local)
*/
export const getComponentLibraryManifest = async (library: string) => {
const appId = context.getAppId()
const filename = "manifest.json"
/* istanbul ignore next */
// when testing in cypress and so on we need to get the package
// as the environment may not be fully fleshed out for dev or prod
if (env.isTest()) {
library = library.replace("standard-components", "client")
const lib = library.split("/")[1]
const path = require.resolve(library).split(lib)[0]
return require(join(path, lib, filename))
} else if (env.isDev()) {
const path = join(NODE_MODULES_PATH, "@budibase", "client", filename)
// always load from new so that updates are refreshed
delete require.cache[require.resolve(path)]
return require(path)
}
if (!appId) {
throw new Error("No app ID found - cannot get component libraries")
}
let resp
let path
try {
// Try to load the manifest from the new file location
path = join(appId, filename)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
} catch (error) {
console.error(
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
error
)
// Fallback to loading it from the old location for old apps
path = join(appId, "node_modules", library, "package", filename)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {
resp = resp.toString("utf8")
}
return JSON.parse(resp)
}
/**
* Given a set of app IDs makes sure file system is cleared of any of their temp info.
*/
export const cleanup = (appIds: string[]) => {
for (let appId of appIds) {
const path = join(budibaseTempDir(), appId)
if (fs.existsSync(path)) {
fs.rmdirSync(path, { recursive: true })
}
}
}

View File

@ -4,7 +4,7 @@ import fs from "fs"
import { objectStore } from "@budibase/backend-core"
import { resolve } from "../centralPath"
import env from "../../environment"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
import { TOP_LEVEL_PATH } from "./filesystem"
/**
* Client library paths in the object store:

View File

@ -0,0 +1,170 @@
import { PathLike } from "fs"
const { budibaseTempDir } = require("../budibaseDir")
const fs = require("fs")
const { join } = require("path")
const uuid = require("uuid/v4")
const env = require("../../environment")
import tar from "tar"
export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
/**
* Upon first startup of an instance the tmp directory may not contain everything we need, so set it up.
*/
export const init = () => {
const tempDir = budibaseTempDir()
if (!fs.existsSync(tempDir)) {
// some test cases fire this quickly enough that
// synchronous cases can end up here at the same time
try {
fs.mkdirSync(tempDir)
} catch (err: any) {
if (!err || err.code !== "EEXIST") {
throw err
}
}
}
const clientLibPath = join(budibaseTempDir(), "budibase-client.js")
if (env.isTest() && !fs.existsSync(clientLibPath)) {
fs.copyFileSync(require.resolve("@budibase/client"), clientLibPath)
}
}
/**
* Checks if the system is currently in development mode and, if it is, makes sure
* everything required to function is ready.
*/
export const checkDevelopmentEnvironment = () => {
if (!env.isDev() || env.isTest()) {
return
}
if (!fs.existsSync(budibaseTempDir())) {
fs.mkdirSync(budibaseTempDir())
}
let error
if (!fs.existsSync(join(process.cwd(), ".env"))) {
error = "Must run via yarn once to generate environment."
}
if (error) {
console.error(error)
process.exit(-1)
}
}
/**
* Used to retrieve a handlebars file from the system which will be used as a template.
* This is allowable as the template handlebars files should be static and identical across
* the cluster.
* @param {string} path The path to the handlebars file which is to be loaded.
* @returns {string} The loaded handlebars file as a string - loaded as utf8.
*/
export const loadHandlebarsFile = (path: PathLike) => {
return fs.readFileSync(path, "utf8")
}
/**
* When returning a file from the API we need to write the file to the system temporarily so we
* can create a read stream to send.
* @param {string} contents the contents of the file which is to be returned from the API.
* @return {Object} the read stream which can be put into the koa context body.
*/
export const apiFileReturn = (contents: any) => {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, contents)
return fs.createReadStream(path)
}
export const streamFile = (path: string) => {
return fs.createReadStream(path)
}
/**
* Writes the provided contents to a temporary file, which can be used briefly.
* @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file.
*/
export const storeTempFile = (fileContents: any) => {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents)
return path
}
/**
* Utility function for getting a file read stream - a simple in memory buffered read
* stream doesn't work for pouchdb.
*/
export const stringToFileStream = (contents: any) => {
const path = storeTempFile(contents)
return fs.createReadStream(path)
}
/**
* Creates a temp file and returns it from the API.
* @param {string} fileContents the contents to be returned in file.
*/
export const sendTempFile = (fileContents: any) => {
const path = storeTempFile(fileContents)
return fs.createReadStream(path)
}
/**
* All file reads come through here just to make sure all of them make sense -
* this gives a centralised location to check that the logic is all good.
*/
export const readFileSync = (filepath: PathLike, options = "utf8") => {
return fs.readFileSync(filepath, options)
}
export const createTempFolder = (item: any) => {
const path = join(budibaseTempDir(), item)
try {
// remove old tmp directories automatically - don't combine
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
} catch (err: any) {
throw new Error(`Path cannot be created: ${err.message}`)
}
return path
}
export const extractTarball = async (fromFilePath: string, toPath: string) => {
await tar.extract({
file: fromFilePath,
C: toPath,
})
}
/**
* Searches for a file recursively from the start path, applying the filter, and returns the first match
*/
export const findFileRec = (startPath: PathLike, filter: string): any => {
if (!fs.existsSync(startPath)) {
return
}
const files = fs.readdirSync(startPath)
for (let i = 0, len = files.length; i < len; i++) {
const filename = join(startPath, files[i])
const stat = fs.lstatSync(filename)
if (stat.isDirectory()) {
// keep checking sibling entries if the subdirectory contains no match
const found = findFileRec(filename, filter)
if (found) {
return found
}
} else if (filename.endsWith(filter)) {
return filename
}
}
}
/**
* Remove a folder which is not empty from the file system
*/
export const deleteFolderFileSystem = (path: PathLike) => {
if (!fs.existsSync(path)) {
return
}
fs.rmSync(path, { recursive: true, force: true })
}

View File

@ -1,348 +1,5 @@
import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
import { context, objectStore } from "@budibase/backend-core"
import { ObjectStoreBuckets } from "../../constants"
import { updateClientLibrary } from "./clientLibrary"
import { checkSlashesInUrl } from "../"
import env from "../../environment"
import fetch from "node-fetch"
const uuid = require("uuid/v4")
const tar = require("tar")
export const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
export const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
/**
* The single stack system (Cloud and Builder) should not make use of the file system where possible,
* this file handles all of the file access for the system with the intention of limiting it all to one
* place. Keeping all of this logic in one place means that when we need to do file system access (like
* downloading a package or opening a temporary file) it can be done in a way that we can confirm it shouldn't
* be done through an object store instead.
*/
/**
* Upon first startup of instance there may not be everything we need in tmp directory, set it up.
*/
export function init() {
const tempDir = budibaseTempDir()
if (!fs.existsSync(tempDir)) {
// some test cases fire this quickly enough that
// synchronous cases can end up here at the same time
try {
fs.mkdirSync(tempDir)
} catch (err: any) {
if (!err || err.code !== "EEXIST") {
throw err
}
}
}
const clientLibPath = join(budibaseTempDir(), "budibase-client.js")
if (env.isTest() && !fs.existsSync(clientLibPath)) {
fs.copyFileSync(require.resolve("@budibase/client"), clientLibPath)
}
}
/**
* Checks if the system is currently in development mode and if it is makes sure
* everything required to function is ready.
*/
exports.checkDevelopmentEnvironment = () => {
if (!env.isDev() || env.isTest()) {
return
}
if (!fs.existsSync(budibaseTempDir())) {
fs.mkdirSync(budibaseTempDir())
}
let error
if (!fs.existsSync(join(process.cwd(), ".env"))) {
error = "Must run via yarn once to generate environment."
}
if (error) {
console.error(error)
process.exit(-1)
}
}
/**
* Used to retrieve a handlebars file from the system which will be used as a template.
* This is allowable as the template handlebars files should be static and identical across
* the cluster.
* @param {string} path The path to the handlebars file which is to be loaded.
* @returns {string} The loaded handlebars file as a string - loaded as utf8.
*/
export function loadHandlebarsFile(path: string) {
return fs.readFileSync(path, "utf8")
}
/**
* When return a file from the API need to write the file to the system temporarily so we
* can create a read stream to send.
* @param {string} contents the contents of the file which is to be returned from the API.
* @param {string} encoding the encoding of the file to return (utf8 default)
* @return {Object} the read stream which can be put into the koa context body.
*/
export function apiFileReturn(
contents: string,
encoding: BufferEncoding = "utf8"
) {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, contents, { encoding })
return fs.createReadStream(path, { encoding })
}
export function streamFile(path: string) {
return fs.createReadStream(path)
}
/**
* Writes the provided contents to a temporary file, which can be used briefly.
* @param {string} fileContents contents which will be written to a temp file.
* @return {string} the path to the temp file.
*/
export function storeTempFile(fileContents: string) {
const path = join(budibaseTempDir(), uuid())
fs.writeFileSync(path, fileContents)
return path
}
/**
* Utility function for getting a file read stream - a simple in memory buffered read
* stream doesn't work for pouchdb.
*/
export function stringToFileStream(contents: string) {
const path = exports.storeTempFile(contents)
return fs.createReadStream(path)
}
/**
* Creates a temp file and returns it from the API.
* @param {string} fileContents the contents to be returned in file.
*/
export function sendTempFile(fileContents: string) {
const path = exports.storeTempFile(fileContents)
return fs.createReadStream(path)
}
/**
* Uploads the latest client library to the object store.
* @param {string} appId The ID of the app which is being created.
* @return {Promise<void>} once promise completes app resources should be ready in object store.
*/
export async function createApp(appId: string) {
await updateClientLibrary(appId)
}
/**
* Removes all of the assets created for an app in the object store.
* @param {string} appId The ID of the app which is being deleted.
* @return {Promise<void>} once promise completes the app resources will be removed from object store.
*/
export async function deleteApp(appId: string) {
await objectStore.deleteFolder(ObjectStoreBuckets.APPS, `${appId}/`)
}
/**
* Retrieves a template and pipes it to minio as well as making it available temporarily.
* @param {string} type The type of template which is to be retrieved.
* @param name
* @return {Promise<*>}
*/
export async function downloadTemplate(type: string, name: string) {
const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
return objectStore.downloadTarball(
templateUrl,
ObjectStoreBuckets.TEMPLATES,
type
)
}
/**
* Retrieves component libraries from object store (or tmp symlink if in local)
*/
export async function getComponentLibraryManifest(library: string) {
const appId = context.getAppId()
const filename = "manifest.json"
/* istanbul ignore next */
// when testing in cypress and so on we need to get the package
// as the environment may not be fully fleshed out for dev or prod
if (env.isTest()) {
library = library.replace("standard-components", "client")
const lib = library.split("/")[1]
const path = require.resolve(library).split(lib)[0]
return require(join(path, lib, filename))
} else if (env.isDev()) {
const path = join(NODE_MODULES_PATH, "@budibase", "client", filename)
// always load from new so that updates are refreshed
delete require.cache[require.resolve(path)]
return require(path)
}
if (!appId) {
throw new Error("No app ID found - cannot get component libraries")
}
let resp
let path
try {
// Try to load the manifest from the new file location
path = join(appId, filename)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
} catch (error) {
console.error(
`component-manifest-objectstore=failed appId=${appId} path=${path}`,
error
)
// Fallback to loading it from the old location for old apps
path = join(appId, "node_modules", library, "package", filename)
resp = await objectStore.retrieve(ObjectStoreBuckets.APPS, path)
}
if (typeof resp !== "string") {
resp = resp.toString("utf8")
}
return JSON.parse(resp)
}
/**
* All file reads come through here just to make sure all of them make sense
* allows a centralised location to check logic is all good.
*/
export function readFileSync(
filepath: string,
options: BufferEncoding = "utf8"
) {
return fs.readFileSync(filepath, { encoding: options })
}
/**
* Given a set of app IDs makes sure file system is cleared of any of their temp info.
*/
export function cleanup(appIds: string[]) {
for (let appId of appIds) {
const path = join(budibaseTempDir(), appId)
if (fs.existsSync(path)) {
fs.rmdirSync(path, { recursive: true })
}
}
}
export function createTempFolder(item: string) {
const path = join(budibaseTempDir(), item)
try {
// remove old tmp directories automatically - don't combine
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
} catch (err: any) {
throw new Error(`Path cannot be created: ${err.message}`)
}
return path
}
export async function extractTarball(fromFilePath: string, toPath: string) {
await tar.extract({
file: fromFilePath,
C: toPath,
})
}
export async function getPluginMetadata(path: string) {
let metadata: { schema?: any; package?: any } = {}
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
metadata.schema = JSON.parse(schema)
metadata.package = JSON.parse(pkg)
if (
!metadata.package.name ||
!metadata.package.version ||
!metadata.package.description
) {
throw new Error(
"package.json is missing one of 'name', 'version' or 'description'."
)
}
} catch (err: any) {
throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}`
)
}
return { metadata, directory: path }
}
export async function getDatasourcePlugin(
name: string,
url: string,
hash: string
) {
if (!fs.existsSync(DATASOURCE_PATH)) {
fs.mkdirSync(DATASOURCE_PATH)
}
const filename = join(DATASOURCE_PATH, name)
const metadataName = `${filename}.bbmetadata`
if (fs.existsSync(filename)) {
const currentHash = fs.readFileSync(metadataName, "utf8")
// if hash is the same return the file, otherwise remove it and re-download
if (currentHash === hash) {
return require(filename)
} else {
console.log(`Updating plugin: ${name}`)
delete require.cache[require.resolve(filename)]
fs.unlinkSync(filename)
}
}
const fullUrl = checkSlashesInUrl(
`${env.MINIO_URL}/${ObjectStoreBuckets.PLUGINS}/${url}`
)
const response = await fetch(fullUrl)
if (response.status === 200) {
const content = await response.text()
fs.writeFileSync(filename, content)
fs.writeFileSync(metadataName, hash)
return require(filename)
} else {
throw new Error(
`Unable to retrieve plugin - reason: ${await response.text()}`
)
}
}
/**
* Find for a file recursively from start path applying filter, return first match
*/
export function findFileRec(startPath: string, filter: any) {
if (!fs.existsSync(startPath)) {
return
}
const files = fs.readdirSync(startPath)
for (let i = 0, len = files.length; i < len; i++) {
const filename = join(startPath, files[i])
const stat = fs.lstatSync(filename)
if (stat.isDirectory()) {
return exports.findFileRec(filename, filter)
} else if (filename.endsWith(filter)) {
return filename
}
}
}
/**
* Remove a folder which is not empty from the file system
*/
export function deleteFolderFileSystem(path: string) {
if (!fs.existsSync(path)) {
return
}
fs.rmSync(path, { recursive: true, force: true })
}
export * from "./app"
export * from "./clientLibrary"
export * from "./filesystem"
export * from "./plugin"
export * from "./template"

View File

@ -0,0 +1,65 @@
import { Plugin } from "@budibase/types"
const { budibaseTempDir } = require("../budibaseDir")
const fs = require("fs")
const { join } = require("path")
import { objectStore } from "@budibase/backend-core"
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
export const getPluginMetadata = async (path: string) => {
let metadata: any = {}
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
metadata.schema = JSON.parse(schema)
metadata.package = JSON.parse(pkg)
if (
!metadata.package.name ||
!metadata.package.version ||
!metadata.package.description
) {
throw new Error(
"package.json is missing one of 'name', 'version' or 'description'."
)
}
} catch (err: any) {
throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}`
)
}
return { metadata, directory: path }
}
export const getDatasourcePlugin = async (plugin: Plugin) => {
const hash = plugin.schema?.hash
if (!fs.existsSync(DATASOURCE_PATH)) {
fs.mkdirSync(DATASOURCE_PATH)
}
const filename = join(DATASOURCE_PATH, plugin.name)
const metadataName = `${filename}.bbmetadata`
if (fs.existsSync(filename)) {
const currentHash = fs.readFileSync(metadataName, "utf8")
// if hash is the same return the file, otherwise remove it and re-download
if (currentHash === hash) {
return require(filename)
} else {
console.log(`Updating plugin: ${plugin.name}`)
delete require.cache[require.resolve(filename)]
fs.unlinkSync(filename)
}
}
const pluginKey = objectStore.getPluginJSKey(plugin)
const pluginJs = await objectStore.retrieve(
objectStore.ObjectStoreBuckets.PLUGINS,
pluginKey
)
fs.writeFileSync(filename, pluginJs)
fs.writeFileSync(metadataName, hash)
return require(filename)
}
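
A hedged usage sketch (the wrapper function and its name are illustrative): datasource plugins are now read straight from the plugin bucket by key and cached on disk until their schema hash changes, mirroring how getIntegration consumes this elsewhere in the commit.

import { Plugin } from "@budibase/types"
import { getDatasourcePlugin } from "./plugin"

// assumes `plugin` is a datasource plugin document fetched from the global DB
export async function loadIntegration(plugin: Plugin) {
  const mod: any = await getDatasourcePlugin(plugin)
  // repeated calls reuse budibaseTempDir()/datasource/<name> until plugin.schema.hash changes
  return mod.integration
}
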

View File

@ -0,0 +1,36 @@
const fs = require("fs")
const { join } = require("path")
import { ObjectStoreBuckets } from "../../constants"
import { objectStore } from "@budibase/backend-core"
/**
* This function manages temporary template files which are stored by Koa.
* @param {Object} template The template object retrieved from the Koa context object.
* @returns {Object} Returns an fs read stream which can be loaded into the database.
*/
export const getTemplateStream = async (template: any) => {
if (template.file) {
return fs.createReadStream(template.file.path)
} else {
const [type, name] = template.key.split("/")
const tmpPath = await downloadTemplate(type, name)
return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
}
}
/**
* Retrieves a template and pipes it to minio as well as making it available temporarily.
* @param {string} type The type of template which is to be retrieved.
* @param name
* @return {Promise<*>}
*/
export const downloadTemplate = async (type: string, name: string) => {
const DEFAULT_TEMPLATES_BUCKET =
"prod-budi-templates.s3-eu-west-1.amazonaws.com"
const templateUrl = `https://${DEFAULT_TEMPLATES_BUCKET}/templates/${type}/${name}.tar.gz`
return objectStore.downloadTarball(
templateUrl,
ObjectStoreBuckets.TEMPLATES,
type
)
}
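
A hedged usage sketch ("app/expense-tracker" is an illustrative template key, not a real template):

import { getTemplateStream } from "./template"

async function importTemplate() {
  // key-based templates are downloaded from the templates bucket, piped into the
  // object store, and the db dump inside the tarball is streamed back for import
  const stream = await getTemplateStream({ key: "app/expense-tracker" })
  return stream
}
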

View File

@ -1,6 +1,5 @@
import env from "../environment"
import { OBJ_STORE_DIRECTORY } from "../constants"
import { objectStore, context } from "@budibase/backend-core"
import { context } from "@budibase/backend-core"
import { generateMetadataID } from "../db/utils"
import { Document } from "@budibase/types"
import stream from "stream"
@ -32,49 +31,6 @@ export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}
/**
* Gets the address of the object store, depending on whether self hosted or in cloud.
* @return {string} The base URL of the object store (MinIO or S3).
*/
export function objectStoreUrl() {
if (env.SELF_HOSTED || env.MINIO_URL) {
// can use a relative url for this as all goes through the proxy (this is hosted in minio)
return OBJ_STORE_DIRECTORY
} else {
return env.CDN_URL
}
}
/**
* In production the client library is stored in the object store, however in development
* we use the symlinked version produced by lerna, located in node modules. We link to this
* via a specific endpoint (under /api/assets/client).
* @param {string} appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param {string} version The version to retrieve.
* @return {string} The URL to be inserted into appPackage response or server rendered
* app index file.
*/
export function clientLibraryPath(appId: string, version: string) {
if (env.isProd()) {
let url = `${objectStoreUrl()}/${objectStore.sanitizeKey(
appId
)}/budibase-client.js`
// append app version to bust the cache
if (version) {
url += `?v=${version}`
}
return url
} else {
return `/api/assets/client`
}
}
export function attachmentsRelativeURL(attachmentKey: string) {
return checkSlashesInUrl(`${objectStoreUrl()}/${attachmentKey}`)
}
export async function updateEntityMetadata(
type: string,
entityId: string,

View File

@ -1,22 +0,0 @@
import env from "../environment"
import { plugins as ProPlugins } from "@budibase/pro"
import { objectStore } from "@budibase/backend-core"
import { Plugin } from "@budibase/types"
export function enrichPluginURLs(plugins: Plugin[]) {
if (!plugins || !plugins.length) {
return []
}
return plugins.map(plugin => {
const cloud = !env.SELF_HOSTED
const bucket = objectStore.ObjectStoreBuckets.PLUGINS
const jsFileName = "plugin.min.js"
// In self host we need to prefix the path, as the bucket name is not part
// of the bucket path. In cloud, it's already part of the bucket path.
let jsUrl = cloud ? `${env.CDN_URL}/` : `/${bucket}/`
jsUrl += ProPlugins.getBucketPath(plugin.name)
jsUrl += jsFileName
return { ...plugin, jsUrl }
})
}

View File

@ -1,12 +1,11 @@
import * as linkRows from "../../db/linkedRows"
import { FieldTypes, AutoFieldSubTypes } from "../../constants"
import { attachmentsRelativeURL } from "../index"
import { processFormulas, fixAutoColumnSubType } from "./utils"
import { ObjectStoreBuckets } from "../../constants"
import { context, db as dbCore, objectStore } from "@budibase/backend-core"
import { InternalTables } from "../../db/utils"
import { TYPE_TRANSFORM_MAP } from "./map"
import { Row, Table, ContextUser } from "@budibase/types"
import { Row, RowAttachment, Table, ContextUser } from "@budibase/types"
const { cloneDeep } = require("lodash/fp")
export * from "./utils"
@ -35,7 +34,7 @@ function getRemovedAttachmentKeys(
return oldKeys
}
const newKeys = row[attachmentKey].map((attachment: any) => attachment.key)
return oldKeys.filter((key: any) => newKeys.indexOf(key) === -1)
return oldKeys.filter((key: string) => newKeys.indexOf(key) === -1)
}
/**
@ -105,7 +104,7 @@ export function processAutoColumn(
* @param {object} type The type fo coerce to
* @returns {object} The coerced value
*/
export function coerce(row: any, type: any) {
export function coerce(row: any, type: string) {
// no coercion specified for type, skip it
if (!TYPE_TRANSFORM_MAP[type]) {
return row
@ -158,6 +157,16 @@ export function inputProcessing(
else {
clonedRow[key] = coerce(value, field.type)
}
// remove any attachment urls, they are generated on read
if (field.type === FieldTypes.ATTACHMENT) {
const attachments = clonedRow[key]
if (attachments?.length) {
attachments.forEach((attachment: RowAttachment) => {
delete attachment.url
})
}
}
}
if (!clonedRow._id || !clonedRow._rev) {
@ -194,15 +203,15 @@ export async function outputProcessing(
// process formulas
enriched = processFormulas(table, enriched, { dynamic: true }) as Row[]
// update the attachments URL depending on hosting
// set the attachment URLs
for (let [property, column] of Object.entries(table.schema)) {
if (column.type === FieldTypes.ATTACHMENT) {
for (let row of enriched) {
if (row[property] == null || !Array.isArray(row[property])) {
continue
}
row[property].forEach((attachment: any) => {
attachment.url = attachmentsRelativeURL(attachment.key)
row[property].forEach((attachment: RowAttachment) => {
attachment.url = objectStore.getAppFileUrl(attachment.key)
})
}
}
@ -265,6 +274,6 @@ export async function cleanupAttachments(
}
}
if (files.length > 0) {
return objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
await objectStore.deleteFiles(ObjectStoreBuckets.APPS, files)
}
}
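
Taken together with the migration and test changes above, attachments now round-trip roughly like this (the app ID and file name are illustrative): inputProcessing drops any url before save so only the object store key is persisted, and outputProcessing rebuilds the URL on read.

// Persisted row value after inputProcessing - no url, just the key
const attachment = { key: "app_123/attachments/report.csv" }

// On read, outputProcessing fills the url back in from the key:
// attachment.url = objectStore.getAppFileUrl(attachment.key)
// which, served via the proxy, looks like
// "/files/signed/prod-budi-app-assets/app_123/attachments/report.csv"
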

View File

@ -1,23 +0,0 @@
import { enrichPluginURLs } from "../plugins"
const env = require("../../environment")
jest.mock("../../environment")
describe("plugins utility", () => {
let pluginsArray: any = [
{
name: "test-plugin",
},
]
it("enriches the plugins url self-hosted", async () => {
let result = enrichPluginURLs(pluginsArray)
expect(result[0].jsUrl).toEqual("/plugins/test-plugin/plugin.min.js")
})
it("enriches the plugins url cloud", async () => {
env.SELF_HOSTED = 0
let result = enrichPluginURLs(pluginsArray)
expect(result[0].jsUrl).toEqual(
"https://cdn.budi.live/test-plugin/plugin.min.js"
)
})
})

View File

@ -1273,14 +1273,16 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@2.1.46-alpha.6":
version "2.1.46-alpha.6"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.1.46-alpha.6.tgz#eb24abae6e3f6435a01b97978d25a466b672caff"
integrity sha512-oDPhUE1nPoBu74lWQFj+9p8Fxh42CbNiE+PqaIBrcjpgSmg88Ftcr82UHg3YPQSXGBa/7hVvIkyXqVYzhIfG/Q==
"@budibase/backend-core@2.2.4-alpha.2":
version "2.2.4-alpha.2"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.4-alpha.2.tgz#7942695880cd5677d91dd1fc85deb0f13769bed1"
integrity sha512-2VMZjKeojP9S0aDKeK8y/I1xY5XoIWHmioqwydppG9R0hIRk2MsxNAg6XxcPMHFEwV2AJc9TX9byDo+qXKyvlw==
dependencies:
"@budibase/types" "2.1.46-alpha.6"
"@budibase/nano" "10.1.1"
"@budibase/types" "2.2.4-alpha.2"
"@shopify/jest-koa-mocks" "5.0.1"
"@techpass/passport-openidconnect" "0.3.2"
aws-cloudfront-sign "2.2.0"
aws-sdk "2.1030.0"
bcrypt "5.0.1"
bcryptjs "2.4.3"
@ -1293,7 +1295,6 @@
koa-passport "4.1.4"
lodash "4.17.21"
lodash.isarguments "3.1.0"
nano "^10.1.0"
node-fetch "2.6.7"
passport-google-oauth "2.0.0"
passport-jwt "4.0.0"
@ -1360,16 +1361,29 @@
svelte-flatpickr "^3.2.3"
svelte-portal "^1.0.0"
"@budibase/pro@2.1.46-alpha.6":
version "2.1.46-alpha.6"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.1.46-alpha.6.tgz#c81465fe03c1a2dac69308ce5304e423bfbcabf4"
integrity sha512-76/29biUDsGfOE4nzMHuVyzTpXPXsNOSe1dkbhGvxBVn42CQGIaR17a+0do9XX5I9qn7zhFJmz2B3UYYb9rZ4g==
"@budibase/nano@10.1.1":
version "10.1.1"
resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038"
integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA==
dependencies:
"@budibase/backend-core" "2.1.46-alpha.6"
"@budibase/types" "2.1.46-alpha.6"
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
"@budibase/pro@2.2.4-alpha.2":
version "2.2.4-alpha.2"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.4-alpha.2.tgz#1e1a26b9194669aed1737d1245be45b448a6e889"
integrity sha512-jqOuLIUqFyphG9K0Ovly2ipVdAMZ7yvqLo74S3KwVu74Rk5Ab7rBw4DecOdKvGbu/r0ssNdXDXv8bns/P6I2ng==
dependencies:
"@budibase/backend-core" "2.2.4-alpha.2"
"@budibase/types" "2.2.4-alpha.2"
"@koa/router" "8.0.8"
bull "4.10.1"
joi "17.6.0"
jsonwebtoken "8.5.1"
node-fetch "^2.6.1"
"@budibase/standard-components@^0.9.139":
@ -1390,10 +1404,10 @@
svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0"
"@budibase/types@2.1.46-alpha.6":
version "2.1.46-alpha.6"
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.1.46-alpha.6.tgz#d80f47aa57ffa0685f03f5aaf5477d1e985fc9cf"
integrity sha512-ol0/j0h5A6ZCQrc+qGkigFcuQ8EsyTLhHEhBynh/TWyTbjbUWPJBGTeY5lYzWD2bqQWnRDXsDP4iNdpbuviZNA==
"@budibase/types@2.2.4-alpha.2":
version "2.2.4-alpha.2"
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.4-alpha.2.tgz#56503aee0e1294ca5b27338f229290dc4739c79a"
integrity sha512-Z8VcC/TjuBz0SoFPRD+Kj1B0w5w4lNpSXQWecsHp9ne72vG788LrMeoepBnXMw/icNof1cG/vu7J/sr4ONzQTg==
"@bull-board/api@3.7.0":
version "3.7.0"
@ -3124,10 +3138,10 @@
"@types/koa-compose" "*"
"@types/node" "*"
"@types/koa__router@8.0.11":
version "8.0.11"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.11.tgz#d7b37e6db934fc072ea1baa2ab92bc8ac4564f3e"
integrity sha512-WXgKWpBsbS14kzmzD9LeFapOIa678h7zvUHxDwXwSx4ETKXhXLVUAToX6jZ/U7EihM7qwyD9W/BZvB0MRu7MTQ==
"@types/koa__router@8.0.8":
version "8.0.8"
resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.8.tgz#b1e0e9a512498777d3366bbdf0e853df27ec831c"
integrity sha512-9pGCaDtzCsj4HJ8HmGuqzk8+s57sPj4njWd08GG5o92n5Xp9io2snc40CPpXFhoKcZ8OKhuu6ht4gNou9e1C2w==
dependencies:
"@types/koa" "*"
@ -3399,6 +3413,13 @@
"@types/mime" "^1"
"@types/node" "*"
"@types/server-destroy@1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/server-destroy/-/server-destroy-1.0.1.tgz#6010a89e2df4f2c15a265fe73c70fd3641486530"
integrity sha512-77QGr7waZbE0Y0uF+G+uH3H3SmhyA78Jf2r5r7QSrpg0U3kSXduWpGjzP9PvPLR/KCy+kHjjpnugRHsYTnHopg==
dependencies:
"@types/node" "*"
"@types/stack-utils@^1.0.1":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e"
@ -3417,6 +3438,14 @@
"@types/cookiejar" "*"
"@types/node" "*"
"@types/tar@6.1.3":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
dependencies:
"@types/node" "*"
minipass "^3.3.5"
"@types/tough-cookie@*", "@types/tough-cookie@^4.0.2":
version "4.0.2"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"
@ -4217,6 +4246,13 @@ available-typed-arrays@^1.0.5:
resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7"
integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==
aws-cloudfront-sign@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/aws-cloudfront-sign/-/aws-cloudfront-sign-2.2.0.tgz#3910f5a6d0d90fec07f2b4ef8ab07f3eefb5625d"
integrity sha512-qG+rwZMP3KRTPPbVmWY8DlrT56AkA4iVOeo23vkdK2EXeW/brJFN2haSNKzVz+oYhFMEIzVVloeAcrEzuRkuVQ==
dependencies:
lodash "^3.6.0"
aws-sdk@2.1030.0:
version "2.1030.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82"
@ -10197,6 +10233,11 @@ lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
lodash@^3.6.0:
version "3.10.1"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6"
integrity sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==
logform@^2.3.2, logform@^2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/logform/-/logform-2.4.0.tgz#131651715a17d50f09c2a2c1a524ff1a4164bcfe"
@ -10534,6 +10575,13 @@ minipass@^3.0.0:
dependencies:
yallist "^4.0.0"
minipass@^3.3.5:
version "3.3.6"
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
dependencies:
yallist "^4.0.0"
minizlib@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@ -10720,18 +10768,6 @@ nan@^2.12.1:
resolved "https://registry.yarnpkg.com/nan/-/nan-2.16.0.tgz#664f43e45460fb98faf00edca0bb0d7b8dce7916"
integrity sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==
nano@^10.1.0:
version "10.1.0"
resolved "https://registry.yarnpkg.com/nano/-/nano-10.1.0.tgz#afdd5a7440e62f09a8e23f41fcea328d27383922"
integrity sha512-COeN2TpLcHuSN44QLnPmfZCoCsKAg8/aelPOVqqm/2/MvRHDEA11/Kld5C4sLzDlWlhFZ3SO2WGJGevCsvcEzQ==
dependencies:
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
nanomatch@^1.2.9:
version "1.2.13"
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/types",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase types",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@ -13,11 +13,12 @@
},
"jest": {},
"devDependencies": {
"@types/json5": "^2.2.0",
"@budibase/nano": "10.1.1",
"@types/json5": "2.2.0",
"@types/koa": "2.13.4",
"@types/node": "14.18.20",
"@types/pouchdb": "6.4.0",
"nano": "10.1.0",
"koa-body": "4.2.0",
"rimraf": "3.0.2",
"typescript": "4.7.3"
}

View File

@ -19,9 +19,10 @@ export enum FieldType {
export interface RowAttachment {
size: number
name: string
url: string
extension: string
key: string
// Populated on read
url?: string
}
export interface Row extends Document {

View File

@ -2,6 +2,7 @@ import { Document } from "../document"
export interface Config extends Document {
type: ConfigType
config: any
}
export interface SMTPConfig extends Config {
@ -17,9 +18,12 @@ export interface SMTPConfig extends Config {
export interface SettingsConfig extends Config {
config: {
company: string
logoUrl: string
// Populated on read
logoUrl?: string
logoUrlEtag?: boolean
platformUrl: string
uniqueTenantId?: string
analyticsEnabled?: boolean
}
}
@ -57,18 +61,15 @@ export interface OIDCConfig extends Config {
}
}
export type NestedConfig =
| SMTPConfig
| SettingsConfig
| GoogleConfig
| OIDCConfig
export const isSettingsConfig = (config: Config): config is SettingsConfig =>
config.type === ConfigType.SETTINGS
export const isSMTPConfig = (config: Config): config is SMTPConfig =>
config.type === ConfigType.SMTP
export const isGoogleConfig = (config: Config): config is GoogleConfig =>
config.type === ConfigType.GOOGLE
export const isOIDCConfig = (config: Config): config is OIDCConfig =>
config.type === ConfigType.OIDC
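
A short usage sketch for the type guards above, assuming they are re-exported from the @budibase/types entry point; fetchConfig is a hypothetical helper used only for illustration.

import { Config, isSettingsConfig } from "@budibase/types"

// Hypothetical loader standing in for however a Config document is fetched.
declare function fetchConfig(type: string): Promise<Config>

export async function getLogoUrl(): Promise<string | undefined> {
  const config = await fetchConfig("settings")
  if (isSettingsConfig(config)) {
    // Narrowed to SettingsConfig: company, platformUrl and the optional
    // logoUrl (populated on read) are now typed.
    return config.config.logoUrl
  }
  return undefined
}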

View File

@ -20,8 +20,6 @@ export interface Plugin extends Document {
description: string
name: string
version: string
jsUrl?: string
iconUrl?: string
source: PluginSource
package: { [key: string]: any }
hash: string
@ -29,6 +27,11 @@ export interface Plugin extends Document {
type: PluginType
[key: string]: any
}
iconFileName?: string
// Populated on read
jsUrl?: string
// Populated on read
iconUrl?: string
}
export const PLUGIN_TYPE_ARR = Object.values(PluginType)

View File

@ -1,5 +1,5 @@
import PouchDB from "pouchdb"
import Nano from "nano"
import Nano from "@budibase/nano"
import { AllDocsResponse, AnyDocument, Document } from "../"
export type PouchOptions = {

View File

@ -1,7 +1,8 @@
import { BaseEvent } from "./event"
import { ConfigType } from "../../documents"
export type LoginSource = "local" | "google" | "oidc" | "google-internal"
export type SSOType = "oidc" | "google"
export type SSOType = ConfigType.OIDC | ConfigType.GOOGLE
export interface LoginEvent extends BaseEvent {
userId: string

View File

@ -1,6 +1,7 @@
import { Context, Request } from "koa"
import { User, Role, UserRoles, Account } from "../documents"
import { FeatureFlag, License } from "../sdk"
import { Files } from "formidable"
export interface ContextUser extends Omit<User, "roles"> {
globalId?: string
@ -15,12 +16,34 @@ export interface ContextUser extends Omit<User, "roles"> {
account?: Account
}
export interface BBRequest extends Request {
body: any
files?: any
/**
* Add support for koa-body in context.
*/
export interface BBRequest<RequestBody> extends Request {
body: RequestBody
files?: Files
}
export interface BBContext extends Context {
request: BBRequest
/**
* Basic context with no user.
*/
export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
request: BBRequest<RequestBody>
body: ResponseBody
}
/**
* Authenticated context.
*/
export interface UserCtx<RequestBody = any, ResponseBody = any>
extends Ctx<RequestBody, ResponseBody> {
user: ContextUser
}
/**
* Deprecated: Use UserCtx / Ctx appropriately
* Authenticated context.
*/
export interface BBContext extends Ctx {
user?: ContextUser
}
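
A brief sketch of how a controller can use the new generic contexts, assuming Ctx and UserCtx are exported from @budibase/types; the request/response shapes here are invented for illustration.

import { Ctx, UserCtx } from "@budibase/types"

interface RenameRequest {
  name: string
}

interface RenameResponse {
  renamed: boolean
}

// Authenticated endpoint: ctx.user is guaranteed, and both the request body
// and the response body are typed.
export async function rename(ctx: UserCtx<RenameRequest, RenameResponse>) {
  const { name } = ctx.request.body
  console.log(`rename requested by ${ctx.user._id}: ${name}`)
  ctx.body = { renamed: true }
}

// Unauthenticated endpoint: plain Ctx with no user on the context.
export async function healthz(ctx: Ctx<void, { status: string }>) {
  ctx.body = { status: "ok" }
}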

View File

@ -2,6 +2,18 @@
# yarn lockfile v1
"@budibase/nano@10.1.1":
version "10.1.1"
resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038"
integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA==
dependencies:
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
"@types/accepts@*":
version "1.3.5"
resolved "https://registry.yarnpkg.com/@types/accepts/-/accepts-1.3.5.tgz#c34bec115cfc746e04fe5a059df4ce7e7b391575"
@ -65,6 +77,13 @@
"@types/qs" "*"
"@types/serve-static" "*"
"@types/formidable@^1.0.31":
version "1.2.5"
resolved "https://registry.yarnpkg.com/@types/formidable/-/formidable-1.2.5.tgz#561d026e5f09179e5c8ef7b31e8f4652e11abe4c"
integrity sha512-zu3mQJa4hDNubEMViSj937602XdDGzK7Q5pJ5QmLUbNxclbo9tZGt5jtwM352ssZ+pqo5V4H14TBvT/ALqQQcA==
dependencies:
"@types/node" "*"
"@types/http-assert@*":
version "1.5.3"
resolved "https://registry.yarnpkg.com/@types/http-assert/-/http-assert-1.5.3.tgz#ef8e3d1a8d46c387f04ab0f2e8ab8cb0c5078661"
@ -75,7 +94,7 @@
resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1"
integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==
"@types/json5@^2.2.0":
"@types/json5@2.2.0":
version "2.2.0"
resolved "https://registry.yarnpkg.com/@types/json5/-/json5-2.2.0.tgz#afff29abf9182a7d4a7e39105ca051f11c603d13"
integrity sha512-NrVug5woqbvNZ0WX+Gv4R+L4TGddtmFek2u8RtccAgFZWtS9QXF2xCXY22/M4nzkaKF0q9Fc6M/5rxLDhfwc/A==
@ -345,6 +364,11 @@ brace-expansion@^1.1.7:
balanced-match "^1.0.0"
concat-map "0.0.1"
bytes@3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
call-bind@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c"
@ -353,6 +377,16 @@ call-bind@^1.0.0:
function-bind "^1.1.1"
get-intrinsic "^1.0.2"
co-body@^5.1.1:
version "5.2.0"
resolved "https://registry.yarnpkg.com/co-body/-/co-body-5.2.0.tgz#5a0a658c46029131e0e3a306f67647302f71c124"
integrity sha512-sX/LQ7LqUhgyaxzbe7IqwPeTr2yfpfUIQ/dgpKo6ZI4y4lpQA0YxAomWIY+7I7rHWcG02PG+OuPREzMW/5tszQ==
dependencies:
inflation "^2.0.0"
qs "^6.4.0"
raw-body "^2.2.0"
type-is "^1.6.14"
combined-stream@^1.0.8:
version "1.0.8"
resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
@ -377,6 +411,11 @@ delayed-stream@~1.0.0:
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
depd@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
@ -391,6 +430,11 @@ form-data@^4.0.0:
combined-stream "^1.0.8"
mime-types "^2.1.12"
formidable@^1.1.1:
version "1.2.6"
resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168"
integrity sha512-KcpbcpuLNOwrEjnbpMC0gS+X8ciDoZE1kkqzat4a8vrprf+s9pKNQ/QIwWfbfs4ltgmFl3MD177SNTkve3BwGQ==
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
@ -441,6 +485,29 @@ http-cookie-agent@^4.0.2:
dependencies:
agent-base "^6.0.2"
http-errors@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
dependencies:
depd "2.0.0"
inherits "2.0.4"
setprototypeof "1.2.0"
statuses "2.0.1"
toidentifier "1.0.1"
iconv-lite@0.4.24:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
dependencies:
safer-buffer ">= 2.1.2 < 3"
inflation@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/inflation/-/inflation-2.0.0.tgz#8b417e47c28f925a45133d914ca1fd389107f30f"
integrity sha512-m3xv4hJYR2oXw4o4Y5l6P5P16WYmazYof+el6Al3f+YlggGj6qT9kImBAnzDelRALnP5d3h4jGBPKzYCizjZZw==
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
@ -449,7 +516,7 @@ inflight@^1.0.4:
once "^1.3.0"
wrappy "1"
inherits@2:
inherits@2, inherits@2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
@ -459,12 +526,26 @@ json5@*:
resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c"
integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==
koa-body@4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/koa-body/-/koa-body-4.2.0.tgz#37229208b820761aca5822d14c5fc55cee31b26f"
integrity sha512-wdGu7b9amk4Fnk/ytH8GuWwfs4fsB5iNkY8kZPpgQVb04QZSv85T0M8reb+cJmvLE8cjPYvBzRikD3s6qz8OoA==
dependencies:
"@types/formidable" "^1.0.31"
co-body "^5.1.1"
formidable "^1.1.1"
media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==
mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
mime-types@^2.1.12:
mime-types@^2.1.12, mime-types@~2.1.24:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
@ -483,18 +564,6 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
nano@10.1.0:
version "10.1.0"
resolved "https://registry.yarnpkg.com/nano/-/nano-10.1.0.tgz#afdd5a7440e62f09a8e23f41fcea328d27383922"
integrity sha512-COeN2TpLcHuSN44QLnPmfZCoCsKAg8/aelPOVqqm/2/MvRHDEA11/Kld5C4sLzDlWlhFZ3SO2WGJGevCsvcEzQ==
dependencies:
"@types/tough-cookie" "^4.0.2"
axios "^1.1.3"
http-cookie-agent "^4.0.2"
node-abort-controller "^3.0.1"
qs "^6.11.0"
tough-cookie "^4.1.2"
node-abort-controller@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.0.1.tgz#f91fa50b1dee3f909afabb7e261b1e1d6b0cb74e"
@ -532,7 +601,7 @@ punycode@^2.1.1:
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
qs@^6.11.0:
qs@^6.11.0, qs@^6.4.0:
version "6.11.0"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
@ -544,6 +613,16 @@ querystringify@^2.1.1:
resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6"
integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==
raw-body@^2.2.0:
version "2.5.1"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857"
integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==
dependencies:
bytes "3.1.2"
http-errors "2.0.0"
iconv-lite "0.4.24"
unpipe "1.0.0"
requires-port@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
@ -556,6 +635,16 @@ rimraf@3.0.2:
dependencies:
glob "^7.1.3"
"safer-buffer@>= 2.1.2 < 3":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
setprototypeof@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
side-channel@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf"
@ -565,6 +654,16 @@ side-channel@^1.0.4:
get-intrinsic "^1.0.2"
object-inspect "^1.9.0"
statuses@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
toidentifier@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
tough-cookie@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.2.tgz#e53e84b85f24e0b65dd526f46628db6c85f6b874"
@ -575,6 +674,14 @@ tough-cookie@^4.1.2:
universalify "^0.2.0"
url-parse "^1.5.3"
type-is@^1.6.14:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
dependencies:
media-typer "0.3.0"
mime-types "~2.1.24"
typescript@4.7.3:
version "4.7.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.3.tgz#8364b502d5257b540f9de4c40be84c98e23a129d"
@ -585,6 +692,11 @@ universalify@^0.2.0:
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.2.0.tgz#6451760566fa857534745ab1dde952d1b1761be0"
integrity sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==
unpipe@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
url-parse@^1.5.3:
version "1.5.10"
resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "2.1.46-alpha.6",
"version": "2.2.4-alpha.2",
"description": "Budibase background service",
"main": "src/index.ts",
"repository": {
@ -36,10 +36,10 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "2.1.46-alpha.6",
"@budibase/pro": "2.1.46-alpha.6",
"@budibase/string-templates": "2.1.46-alpha.6",
"@budibase/types": "2.1.46-alpha.6",
"@budibase/backend-core": "2.2.4-alpha.2",
"@budibase/pro": "2.2.4-alpha.2",
"@budibase/string-templates": "2.2.4-alpha.2",
"@budibase/types": "2.2.4-alpha.2",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@techpass/passport-openidconnect": "0.3.2",
@ -73,10 +73,11 @@
"@types/jest": "26.0.23",
"@types/jsonwebtoken": "8.5.1",
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.11",
"@types/koa__router": "8.0.8",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/pouchdb": "6.4.0",
"@types/server-destroy": "1.0.1",
"@types/uuid": "8.3.4",
"@typescript-eslint/parser": "5.45.0",
"copyfiles": "2.4.1",

Some files were not shown because too many files have changed in this diff.