Merge branch 'develop' of github.com:Budibase/budibase into data-ui-pagination

Andrew Kingston 2021-09-29 10:34:03 +01:00
commit e4a57253fa
58 changed files with 417 additions and 257 deletions

View File

@@ -37,5 +37,5 @@ dependencies:
 condition: services.couchdb.enabled
 - name: ingress-nginx
 version: 3.35.0
-repository: https://github.com/kubernetes/ingress-nginx
+repository: https://kubernetes.github.io/ingress-nginx
 condition: services.ingress.nginx

View File

@@ -94,6 +94,8 @@ spec:
 value: {{ .Values.globals.sentryDSN }}
 - name: WORKER_URL
 value: worker-service:{{ .Values.services.worker.port }}
+- name: COOKIE_DOMAIN
+value: {{ .Values.globals.cookieDomain | quote }}
 image: budibase/apps
 imagePullPolicy: Always
 name: bbapps

View File

@@ -89,6 +89,8 @@ spec:
 value: {{ .Values.globals.selfHosted | quote }}
 - name: ACCOUNT_PORTAL_URL
 value: {{ .Values.globals.accountPortalUrl | quote }}
+- name: COOKIE_DOMAIN
+value: {{ .Values.globals.cookieDomain | quote }}
 image: budibase/worker
 imagePullPolicy: Always
 name: bbworker

View File

@@ -90,6 +90,7 @@ globals:
 logLevel: info
 selfHosted: 1
 accountPortalUrL: ""
+cookieDomain: ""
 createSecrets: true # creates an internal API key, JWT secrets and redis password for you
 # if createSecrets is set to false, you can hard-code your secrets here

View File

@@ -1,5 +1,5 @@
 {
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "npmClient": "yarn",
 "packages": [
 "packages/*"

View File

@@ -0,0 +1 @@
module.exports = require("./src/cloud/accounts")

View File

@@ -1,6 +1,6 @@
 {
 "name": "@budibase/auth",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "description": "Authentication middlewares for budibase builder and apps",
 "main": "src/index.js",
 "author": "Budibase",

View File

@@ -12,6 +12,7 @@ exports.StaticDatabases = {
 name: "global-info",
 docs: {
 tenants: "tenants",
+usageQuota: "usage_quota",
 },
 },
 }

View File

@@ -368,8 +368,33 @@ async function getScopedConfig(db, params) {
 return configDoc && configDoc.config ? configDoc.config : configDoc
 }
+function generateNewUsageQuotaDoc() {
+return {
+_id: StaticDatabases.PLATFORM_INFO.docs.usageQuota,
+quotaReset: Date.now() + 2592000000,
+usageQuota: {
+automationRuns: 0,
+rows: 0,
+storage: 0,
+apps: 0,
+users: 0,
+views: 0,
+emails: 0,
+},
+usageLimits: {
+automationRuns: 1000,
+rows: 4000,
+apps: 4,
+storage: 1000,
+users: 10,
+emails: 50,
+},
+}
+}
 exports.Replication = Replication
 exports.getScopedConfig = getScopedConfig
 exports.generateConfigID = generateConfigID
 exports.getConfigParams = getConfigParams
 exports.getScopedFullConfig = getScopedFullConfig
+exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc

View File

@@ -22,6 +22,7 @@ module.exports = {
 MULTI_TENANCY: process.env.MULTI_TENANCY,
 ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
 SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
+COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
 isTest,
 _set(key, value) {
 process.env[key] = value

View File

@@ -139,8 +139,7 @@ exports.doesHaveResourcePermission = (
 // set foundSub to not subResourceId, incase there is no subResource
 let foundMain = false,
 foundSub = false
-for (let [resource, level] of Object.entries(permissions)) {
-const levels = getAllowedLevels(level)
+for (let [resource, levels] of Object.entries(permissions)) {
 if (resource === resourceId && levels.indexOf(permLevel) !== -1) {
 foundMain = true
 }
@@ -177,10 +176,6 @@ exports.doesHaveBasePermission = (permType, permLevel, permissionIds) => {
 return false
 }
-exports.higherPermission = (perm1, perm2) => {
-return levelToNumber(perm1) > levelToNumber(perm2) ? perm1 : perm2
-}
 exports.isPermissionLevelHigherThanRead = level => {
 return levelToNumber(level) > 1
 }

View File

@@ -1,6 +1,6 @@
 const { getDB } = require("../db")
 const { cloneDeep } = require("lodash/fp")
-const { BUILTIN_PERMISSION_IDS, higherPermission } = require("./permissions")
+const { BUILTIN_PERMISSION_IDS } = require("./permissions")
 const {
 generateRoleID,
 getRoleParams,
@@ -193,8 +193,17 @@ exports.getUserPermissions = async (appId, userRoleId) => {
 const permissions = {}
 for (let role of rolesHierarchy) {
 if (role.permissions) {
-for (let [resource, level] of Object.entries(role.permissions)) {
-permissions[resource] = higherPermission(permissions[resource], level)
+for (let [resource, levels] of Object.entries(role.permissions)) {
+if (!permissions[resource]) {
+permissions[resource] = []
+}
+const permsSet = new Set(permissions[resource])
+if (Array.isArray(levels)) {
+levels.forEach(level => permsSet.add(level))
+} else {
+permsSet.add(levels)
+}
+permissions[resource] = [...permsSet]
 }
 }
 }
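
An aside (not part of the commit): the role documents this change migrates used to store a single permission level per resource and now store an array of levels, which getUserPermissions merges across the role hierarchy. A minimal sketch in plain JavaScript, with hypothetical resource IDs:

// Hypothetical role docs, before and after the migration
const legacyRole = { permissions: { table_1: "read" } }             // single level string
const migratedRole = { permissions: { table_1: ["read", "write"] } } // array of levels

// Equivalent of the new merging loop above
function mergePermissions(roles) {
  const permissions = {}
  for (let role of roles) {
    for (let [resource, levels] of Object.entries(role.permissions || {})) {
      const levelArr = Array.isArray(levels) ? levels : [levels]
      const permsSet = new Set(permissions[resource] || [])
      levelArr.forEach(level => permsSet.add(level))
      permissions[resource] = [...permsSet]
    }
  }
  return permissions
}

console.log(mergePermissions([legacyRole, migratedRole])) // { table_1: ["read", "write"] }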

View File

@@ -53,6 +53,11 @@ exports.setTenantId = (
 // processed later in the chain
 tenantId = user.tenantId || header || tenantId
+// Set the tenantId from the subdomain
+if (!tenantId) {
+tenantId = ctx.subdomains && ctx.subdomains[0]
+}
 if (!tenantId && !allowNoTenant) {
 ctx.throw(403, "Tenant id not set")
 }
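
An aside (not part of the commit): ctx.subdomains is Koa's request.subdomains, so this fallback derives the tenant from the host name. A hypothetical example:

// Host "acme.budibase.app" with Koa's default subdomainOffset of 2:
// ctx.subdomains === ["acme"], so tenantId falls back to "acme" when no user,
// header, or previously resolved tenant ID is available.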

View File

@@ -4,6 +4,7 @@ const { options } = require("./middleware/passport/jwt")
 const { createUserEmailView } = require("./db/views")
 const { Headers } = require("./constants")
 const { getGlobalDB } = require("./tenancy")
+const environment = require("./environment")
 const APP_PREFIX = DocumentTypes.APP + SEPARATOR
@@ -70,12 +71,19 @@ exports.setCookie = (ctx, value, name = "builder") => {
 ctx.cookies.set(name)
 } else {
 value = jwt.sign(value, options.secretOrKey)
-ctx.cookies.set(name, value, {
+const config = {
 maxAge: Number.MAX_SAFE_INTEGER,
 path: "/",
 httpOnly: false,
 overwrite: true,
-})
+}
+if (environment.COOKIE_DOMAIN) {
+config.domain = environment.COOKIE_DOMAIN
+}
+ctx.cookies.set(name, value, config)
 }
 }
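
An aside (not part of the commit): how the new setting flows end to end, with a hypothetical domain value:

// values.yaml:          globals.cookieDomain: ".example.com"  (new key in the chart above)
// deployment templates: COOKIE_DOMAIN: {{ .Values.globals.cookieDomain | quote }}
// this file:            config.domain = environment.COOKIE_DOMAIN
// Result: the auth cookie is scoped to .example.com, so it is sent to every subdomain.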

View File

@@ -1,7 +1,7 @@
 {
 "name": "@budibase/bbui",
 "description": "A UI solution used in the different Budibase projects.",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "license": "AGPL-3.0",
 "svelte": "src/index.js",
 "module": "dist/bbui.es.js",

View File

@@ -1,6 +1,6 @@
 {
 "name": "@budibase/builder",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "license": "AGPL-3.0",
 "private": true,
 "scripts": {
@@ -65,10 +65,10 @@
 }
 },
 "dependencies": {
-"@budibase/bbui": "^0.9.140-alpha.11",
-"@budibase/client": "^0.9.140-alpha.11",
+"@budibase/bbui": "^0.9.143-alpha.9",
+"@budibase/client": "^0.9.143-alpha.9",
 "@budibase/colorpicker": "1.1.2",
-"@budibase/string-templates": "^0.9.140-alpha.11",
+"@budibase/string-templates": "^0.9.143-alpha.9",
 "@sentry/browser": "5.19.1",
 "@spectrum-css/page": "^3.0.1",
 "@spectrum-css/vars": "^3.0.1",

View File

@@ -3,8 +3,6 @@ import PosthogClient from "./PosthogClient"
 import IntercomClient from "./IntercomClient"
 import SentryClient from "./SentryClient"
 import { Events } from "./constants"
-import { auth } from "stores/portal"
-import { get } from "svelte/store"
 const posthog = new PosthogClient(
 process.env.POSTHOG_TOKEN,
@@ -19,27 +17,13 @@ class AnalyticsHub {
 }
 async activate() {
+// Setting the analytics env var off in the backend overrides org/tenant settings
 const analyticsStatus = await api.get("/api/analytics")
 const json = await analyticsStatus.json()
-// Multitenancy disabled on the backend
+// Analytics disabled
 if (!json.enabled) return
-const tenantId = get(auth).tenantId
-if (tenantId) {
-const res = await api.get(
-`/api/global/configs/public?tenantId=${tenantId}`
-)
-const orgJson = await res.json()
-// analytics opted out for the tenant
-if (orgJson.config?.analytics === false) return
-}
 this.clients.forEach(client => client.init())
-this.enabled = true
 }
 identify(id, metadata) {

View File

@@ -8,7 +8,7 @@
 $: actionProviders = getActionProviderComponents(
 $currentAsset,
 $store.selectedComponentId,
-"RefreshDataProvider"
+"RefreshDatasource"
 )
 </script>

View File

@@ -135,7 +135,7 @@
 />
 {:else if ["string", "longform", "number"].includes(filter.type)}
 <Input disabled={filter.noValue} bind:value={filter.value} />
-{:else if filter.type === "options" || "array"}
+{:else if ["options", "array"].includes(filter.type)}
 <Combobox
 disabled={filter.noValue}
 options={getFieldOptions(filter.field)}

View File

@@ -7,13 +7,11 @@
 Divider,
 Label,
 Input,
-Toggle,
 Dropzone,
 notifications,
 } from "@budibase/bbui"
-import { auth, organisation } from "stores/portal"
+import { auth, organisation, admin } from "stores/portal"
 import { post } from "builderStore/api"
-import analytics from "analytics"
 import { writable } from "svelte/store"
 import { redirect } from "@roxi/routify"
@@ -25,7 +23,6 @@
 }
 const values = writable({
-analytics: analytics.enabled,
 company: $organisation.company,
 platformUrl: $organisation.platformUrl,
 logo: $organisation.logoUrl
@@ -57,7 +54,6 @@
 const config = {
 company: $values.company ?? "",
 platformUrl: $values.platformUrl ?? "",
-analytics: $values.analytics,
 }
 // remove logo if required
 if (!$values.logo) {
@@ -112,34 +108,22 @@
 </div>
 </div>
 </div>
-<Divider size="S" />
-<Layout gap="XS" noPadding>
-<Heading size="S">Platform</Heading>
-<Body size="S">Here you can set up general platform settings.</Body>
-</Layout>
-<div class="fields">
-<div class="field">
-<Label size="L">Platform URL</Label>
-<Input thin bind:value={$values.platformUrl} />
-</div>
-</div>
-<Divider size="S" />
-<Layout gap="S" noPadding>
-<Layout gap="XS" noPadding>
-<Heading size="S">Analytics</Heading>
-<Body size="S">
-If you would like to send analytics that help us make Budibase better,
-please let us know below.
-</Body>
-</Layout>
-<Toggle
-text="Send Analytics to Budibase"
-bind:value={$values.analytics}
-/>
-<div>
-<Button disabled={loading} on:click={saveConfig} cta>Save</Button>
-</div>
-</Layout>
+{#if !$admin.cloud}
+<Divider size="S" />
+<Layout gap="XS" noPadding>
+<Heading size="S">Platform</Heading>
+<Body size="S">Here you can set up general platform settings.</Body>
+</Layout>
+<div class="fields">
+<div class="field">
+<Label size="L">Platform URL</Label>
+<Input thin bind:value={$values.platformUrl} />
+</div>
+</div>
+{/if}
+<div>
+<Button disabled={loading} on:click={saveConfig} cta>Save</Button>
+</div>
 </Layout>
 {/if}

View File

@@ -54,15 +54,13 @@ export function createAuthStore() {
 if (user) {
 analytics.activate().then(() => {
 analytics.identify(user._id, user)
-if (user.size === "100+" || user.size === "10000+") {
-analytics.showChat({
-email: user.email,
-created_at: user.createdAt || Date.now(),
-name: user.name,
-user_id: user._id,
-tenant: user.tenantId,
-})
-}
+analytics.showChat({
+email: user.email,
+created_at: user.createdAt || Date.now(),
+name: user.name,
+user_id: user._id,
+tenant: user.tenantId,
+})
 })
 }
 }

View File

@@ -1,6 +1,6 @@
 {
 "name": "@budibase/cli",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "description": "Budibase CLI, for developers, self hosting and migrations.",
 "main": "src/index.js",
 "bin": {
"bin": { "bin": {

View File

@@ -1,6 +1,6 @@
 {
 "name": "@budibase/client",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "license": "MPL-2.0",
 "module": "dist/budibase-client.js",
 "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
 "dev:builder": "rollup -cw"
 },
 "dependencies": {
-"@budibase/bbui": "^0.9.140-alpha.11",
+"@budibase/bbui": "^0.9.143-alpha.9",
 "@budibase/standard-components": "^0.9.139",
-"@budibase/string-templates": "^0.9.140-alpha.11",
+"@budibase/string-templates": "^0.9.143-alpha.9",
 "regexparam": "^1.3.0",
 "shortid": "^2.2.15",
 "svelte-spa-router": "^3.0.5"

View File

@@ -1,7 +1,7 @@
 {
 "name": "@budibase/server",
 "email": "hi@budibase.com",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "description": "Budibase Web Server",
 "main": "src/index.js",
 "repository": {
@@ -25,7 +25,9 @@
 "lint:fix": "yarn run format && yarn run lint",
 "initialise": "node scripts/initialise.js",
 "multi:enable": "node scripts/multiTenancy.js enable",
-"multi:disable": "node scripts/multiTenancy.js disable"
+"multi:disable": "node scripts/multiTenancy.js disable",
+"selfhost:enable": "node scripts/selfhost.js enable",
+"selfhost:disable": "node scripts/selfhost.js disable"
 },
 "jest": {
 "preset": "ts-jest",
@@ -62,9 +64,9 @@
 "author": "Budibase",
 "license": "AGPL-3.0-or-later",
 "dependencies": {
-"@budibase/auth": "^0.9.140-alpha.11",
-"@budibase/client": "^0.9.140-alpha.11",
-"@budibase/string-templates": "^0.9.140-alpha.11",
+"@budibase/auth": "^0.9.143-alpha.9",
+"@budibase/client": "^0.9.143-alpha.9",
+"@budibase/string-templates": "^0.9.143-alpha.9",
 "@elastic/elasticsearch": "7.10.0",
 "@koa/router": "8.0.0",
 "@sendgrid/mail": "7.1.1",

View File

@@ -0,0 +1,28 @@
version: "3.8"
services:
db:
container_name: postgres-json
image: postgres
restart: always
environment:
POSTGRES_USER: root
POSTGRES_PASSWORD: root
POSTGRES_DB: main
ports:
- "5432:5432"
volumes:
#- pg_data:/var/lib/postgresql/data/
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
pgadmin:
container_name: pgadmin-json
image: dpage/pgadmin4
restart: always
environment:
PGADMIN_DEFAULT_EMAIL: root@root.com
PGADMIN_DEFAULT_PASSWORD: root
ports:
- "5050:80"
#volumes:
# pg_data:

View File

@@ -0,0 +1,22 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
CREATE TABLE jsonTable (
id character varying(32),
data jsonb,
text text
);
INSERT INTO jsonTable (id, data) VALUES ('1', '{"id": 1, "age": 1, "name": "Mike", "newline": "this is text with a\n newline in it"}');
CREATE VIEW jsonView AS SELECT
x.id,
x.age,
x.name,
x.newline
FROM
jsonTable c,
LATERAL jsonb_to_record(c.data) x (id character varying(32),
age BIGINT,
name TEXT,
newline TEXT
);

View File

@@ -0,0 +1,3 @@
#!/bin/bash
docker-compose down
docker volume prune -f

View File

@@ -1,9 +1,4 @@
-const {
-getBuiltinPermissions,
-PermissionLevels,
-isPermissionLevelHigherThanRead,
-higherPermission,
-} = require("@budibase/auth/permissions")
+const { getBuiltinPermissions } = require("@budibase/auth/permissions")
 const {
 isBuiltin,
 getDBRoleID,
@@ -16,6 +11,7 @@ const {
 CURRENTLY_SUPPORTED_LEVELS,
 getBasePermissions,
 } = require("../../utilities/security")
+const { removeFromArray } = require("../../utilities")
 const PermissionUpdateType = {
 REMOVE: "remove",
@@ -24,22 +20,6 @@ const PermissionUpdateType = {
 const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
-// quick function to perform a bit of weird logic, make sure fetch calls
-// always say a write role also has read permission
-function fetchLevelPerms(permissions, level, roleId) {
-if (!permissions) {
-permissions = {}
-}
-permissions[level] = roleId
-if (
-isPermissionLevelHigherThanRead(level) &&
-!permissions[PermissionLevels.READ]
-) {
-permissions[PermissionLevels.READ] = roleId
-}
-return permissions
-}
 // utility function to stop this repetition - permissions always stored under roles
 async function getAllDBRoles(db) {
 const body = await db.allDocs(
@@ -74,23 +54,31 @@ async function updatePermissionOnRole(
 for (let role of dbRoles) {
 let updated = false
 const rolePermissions = role.permissions ? role.permissions : {}
+// make sure its an array, also handle migrating
+if (
+!rolePermissions[resourceId] ||
+!Array.isArray(rolePermissions[resourceId])
+) {
+rolePermissions[resourceId] =
+typeof rolePermissions[resourceId] === "string"
+? [rolePermissions[resourceId]]
+: []
+}
 // handle the removal/updating the role which has this permission first
 // the updating (role._id !== dbRoleId) is required because a resource/level can
 // only be permitted in a single role (this reduces hierarchy confusion and simplifies
 // the general UI for this, rather than needing to show everywhere it is used)
 if (
 (role._id !== dbRoleId || remove) &&
-rolePermissions[resourceId] === level
+rolePermissions[resourceId].indexOf(level) !== -1
 ) {
-delete rolePermissions[resourceId]
+removeFromArray(rolePermissions[resourceId], level)
 updated = true
 }
 // handle the adding, we're on the correct role, at it to this
 if (!remove && role._id === dbRoleId) {
-rolePermissions[resourceId] = higherPermission(
-rolePermissions[resourceId],
-level
-)
+const set = new Set(rolePermissions[resourceId])
+rolePermissions[resourceId] = [...set.add(level)]
 updated = true
 }
 // handle the update, add it to bulk docs to perform at end
@@ -127,12 +115,11 @@ exports.fetch = async function (ctx) {
 continue
 }
 const roleId = getExternalRoleID(role._id)
-for (let [resource, level] of Object.entries(role.permissions)) {
-permissions[resource] = fetchLevelPerms(
-permissions[resource],
-level,
-roleId
-)
+for (let [resource, levelArr] of Object.entries(role.permissions)) {
+const levels = Array.isArray(levelArr) ? [levelArr] : levelArr
+const perms = {}
+levels.forEach(level => (perms[level] = roleId))
+permissions[resource] = perms
 }
 }
 // apply the base permissions
@@ -157,12 +144,13 @@ exports.getResourcePerms = async function (ctx) {
 for (let level of SUPPORTED_LEVELS) {
 // update the various roleIds in the resource permissions
 for (let role of roles) {
-if (role.permissions && role.permissions[resourceId] === level) {
-permissions = fetchLevelPerms(
-permissions,
-level,
-getExternalRoleID(role._id)
-)
+const rolePerms = role.permissions
+if (
+rolePerms &&
+(rolePerms[resourceId] === level ||
+rolePerms[resourceId].indexOf(level) !== -1)
+) {
+permissions[level] = getExternalRoleID(role._id)
 }
 }
 }

View File

@@ -546,7 +546,7 @@ module External {
 },
 meta: {
 table,
-}
+},
 }
 // can't really use response right now
 const response = await makeExternalQuery(appId, json)

View File

@@ -2,11 +2,12 @@ const Router = require("@koa/router")
 const controller = require("../controllers/application")
 const authorized = require("../../middleware/authorized")
 const { BUILDER } = require("@budibase/auth/permissions")
+const usage = require("../../middleware/usageQuota")
 const router = Router()
 router
-.post("/api/applications", authorized(BUILDER), controller.create)
+.post("/api/applications", authorized(BUILDER), usage, controller.create)
 .get("/api/applications/:appId/definition", controller.fetchAppDefinition)
 .get("/api/applications", controller.fetch)
 .get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@@ -21,6 +22,11 @@ router
 authorized(BUILDER),
 controller.revertClient
 )
-.delete("/api/applications/:appId", authorized(BUILDER), controller.delete)
+.delete(
+"/api/applications/:appId",
+authorized(BUILDER),
+usage,
+controller.delete
+)
 module.exports = router

View File

@@ -72,7 +72,7 @@ describe("/roles", () => {
 .expect(200)
 expect(res.body.length).toBeGreaterThan(0)
 const power = res.body.find(role => role._id === BUILTIN_ROLE_IDS.POWER)
-expect(power.permissions[table._id]).toEqual("read")
+expect(power.permissions[table._id]).toEqual(["read"])
 })
 })

View File

@@ -5,7 +5,6 @@ const {
 PermissionLevels,
 PermissionTypes,
 } = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
 const router = Router()
@@ -28,13 +27,11 @@ router
 .post(
 "/api/users/metadata/self",
 authorized(PermissionTypes.USER, PermissionLevels.WRITE),
-usage,
 controller.updateSelfMetadata
 )
 .delete(
 "/api/users/metadata/:id",
 authorized(PermissionTypes.USER, PermissionLevels.WRITE),
-usage,
 controller.destroyMetadata
 )

View File

@@ -8,7 +8,6 @@ const {
 PermissionTypes,
 PermissionLevels,
 } = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
 const router = Router()
@@ -25,9 +24,8 @@ router
 "/api/views/:viewName",
 paramResource("viewName"),
 authorized(BUILDER),
-usage,
 viewController.destroy
 )
-.post("/api/views", authorized(BUILDER), usage, viewController.save)
+.post("/api/views", authorized(BUILDER), viewController.save)
 module.exports = router

View File

@@ -60,7 +60,7 @@ exports.definition = {
 },
 }
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
 if (inputs.row == null || inputs.row.tableId == null) {
 return {
 success: false,
@@ -84,7 +84,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
 inputs.row
 )
 if (env.USE_QUOTAS) {
-await usage.update(apiKey, usage.Properties.ROW, 1)
+await usage.update(usage.Properties.ROW, 1)
 }
 await rowController.save(ctx)
 return {

View File

@@ -52,7 +52,7 @@ exports.definition = {
 },
 }
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
 if (inputs.id == null || inputs.revision == null) {
 return {
 success: false,
@@ -74,7 +74,7 @@ exports.run = async function ({ inputs, appId, emitter }) {
 try {
 if (env.isProd()) {
-await usage.update(apiKey, usage.Properties.ROW, -1)
+await usage.update(usage.Properties.ROW, -1)
 }
 await rowController.destroy(ctx)
 return {

View File

@@ -53,7 +53,7 @@ exports.run = async function ({ inputs }) {
 contents = "<h1>No content</h1>"
 }
 try {
-let response = await sendSmtpEmail(to, from, subject, contents)
+let response = await sendSmtpEmail(to, from, subject, contents, true)
 return {
 success: true,
 response,

View File

@@ -13,8 +13,6 @@ const { makePartial } = require("../../tests/utilities")
 const { cleanInputValues } = require("../automationUtils")
 const setup = require("./utilities")
-usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
 describe("Run through some parts of the automations system", () => {
 let config = setup.getConfig()

View File

@@ -46,7 +46,7 @@ describe("test the create row action", () => {
 await setup.runStep(setup.actions.CREATE_ROW.stepId, {
 row
 })
-expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
+expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
 })
 })
}) })

View File

@@ -37,7 +37,7 @@ describe("test the delete row action", () => {
 it("check usage quota attempts", async () => {
 await setup.runInProd(async () => {
 await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
-expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
 })
 })

View File

@@ -4,8 +4,10 @@ const AutomationEmitter = require("../events/AutomationEmitter")
 const { processObject } = require("@budibase/string-templates")
 const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants
 const CouchDB = require("../db")
-const { DocumentTypes } = require("../db/utils")
+const { DocumentTypes, isDevAppID } = require("../db/utils")
 const { doInTenant } = require("@budibase/auth/tenancy")
+const env = require("../environment")
+const usage = require("../utilities/usageQuota")
 const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
@@ -80,7 +82,6 @@ class Orchestrator {
 return stepFn({
 inputs: step.inputs,
 appId: this._appId,
-apiKey: automation.apiKey,
 emitter: this._emitter,
 context: this._context,
 })
@@ -95,6 +96,11 @@ class Orchestrator {
 return err
 }
 }
+// Increment quota for automation runs
+if (!env.SELF_HOSTED && !isDevAppID(this._appId)) {
+usage.update(usage.Properties.AUTOMATION, 1)
+}
 return this.executionOutput
 }
 }

View File

@@ -1,4 +1,4 @@
-import {Table} from "./common";
+import { Table } from "./common"
 export enum Operation {
 CREATE = "CREATE",
@@ -139,7 +139,7 @@ export interface QueryJson {
 paginate?: PaginationJson
 body?: object
 meta?: {
-table?: Table,
+table?: Table
 }
 extra?: {
 idFilter?: SearchFilters

View File

@@ -148,7 +148,7 @@ function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
 if (!resource) {
 resource = { fields: [] }
 }
-let selectStatement: string|string[] = "*"
+let selectStatement: string | string[] = "*"
 // handle select
 if (resource.fields && resource.fields.length > 0) {
 // select the resources as the format "table.columnName" - this is what is provided

View File

@@ -12,7 +12,11 @@ import { getSqlQuery } from "./utils"
 module MySQLModule {
 const mysql = require("mysql")
 const Sql = require("./base/sql")
-const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
+const {
+buildExternalTableId,
+convertType,
+copyExistingPropsOver,
+} = require("./utils")
 const { FieldTypes } = require("../constants")
 interface MySQLConfig {
@@ -104,7 +108,7 @@
 client: any,
 query: SqlQuery,
 connect: boolean = true
-): Promise<any[]|any> {
+): Promise<any[] | any> {
 // Node MySQL is callback based, so we must wrap our call in a promise
 return new Promise((resolve, reject) => {
 if (connect) {
@@ -248,9 +252,9 @@ module MySQLModule {
 json.extra = {
 idFilter: {
 equal: {
-[primaryKey]: results.insertId
+[primaryKey]: results.insertId,
 },
-}
+},
 }
 return json
 }

View File

@@ -12,7 +12,14 @@ module PostgresModule {
 const { Pool } = require("pg")
 const Sql = require("./base/sql")
 const { FieldTypes } = require("../constants")
-const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
+const {
+buildExternalTableId,
+convertType,
+copyExistingPropsOver,
+} = require("./utils")
+const { escapeDangerousCharacters } = require("../utilities")
+const JSON_REGEX = /'{.*}'::json/s
 interface PostgresConfig {
 host: string
@@ -94,6 +101,17 @@ module PostgresModule {
 }
 async function internalQuery(client: any, query: SqlQuery) {
+// need to handle a specific issue with json data types in postgres,
+// new lines inside the JSON data will break it
+if (query && query.sql) {
+const matches = query.sql.match(JSON_REGEX)
+if (matches && matches.length > 0) {
+for (let match of matches) {
+const escaped = escapeDangerousCharacters(match)
+query.sql = query.sql.replace(match, escaped)
+}
+}
+}
 try {
 return await client.query(query.sql, query.bindings || [])
 } catch (err) {
@@ -108,7 +126,7 @@
 private readonly config: PostgresConfig
 COLUMNS_SQL =
-"select * from information_schema.columns where table_schema = 'public'"
+"select * from information_schema.columns where not table_schema = 'information_schema' and not table_schema = 'pg_catalog'"
 PRIMARY_KEYS_SQL = `
 select tc.table_schema, tc.table_name, kc.column_name as primary_key
@@ -179,10 +197,16 @@
 }
 const type: string = convertType(column.data_type, TYPE_MAP)
-const identity = !!(column.identity_generation || column.identity_start || column.identity_increment)
-const hasDefault = typeof column.column_default === "string" &&
+const identity = !!(
+column.identity_generation ||
+column.identity_start ||
+column.identity_increment
+)
+const hasDefault =
+typeof column.column_default === "string" &&
 column.column_default.startsWith("nextval")
-const isGenerated = column.is_generated && column.is_generated !== "NEVER"
+const isGenerated =
+column.is_generated && column.is_generated !== "NEVER"
 const isAuto: boolean = hasDefault || identity || isGenerated
 tables[tableName].schema[columnName] = {
 autocolumn: isAuto,
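
A standalone illustration (not from this commit) of the problem JSON_REGEX and escapeDangerousCharacters address: a literal newline inside a '{...}'::json value breaks the generated statement, so control characters in the matched literal are escaped before the query runs. The query text here is hypothetical:

const JSON_REGEX = /'{.*}'::json/s

// same replacements as the escapeDangerousCharacters utility added in this commit
const escapeDangerousCharacters = string =>
  string
    .replace(/[\\]/g, "\\\\")
    .replace(/[\b]/g, "\\b")
    .replace(/[\f]/g, "\\f")
    .replace(/[\n]/g, "\\n")
    .replace(/[\r]/g, "\\r")
    .replace(/[\t]/g, "\\t")

let sql = `update jsonTable set data = '{"newline": "line one
line two"}'::json where id = '1'`

const matches = sql.match(JSON_REGEX)
if (matches && matches.length > 0) {
  for (let match of matches) {
    sql = sql.replace(match, escapeDangerousCharacters(match))
  }
}
// the raw newline inside the JSON literal is now the two-character sequence \n
console.log(sql)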

View File

@@ -84,7 +84,11 @@ export function isIsoDateString(str: string) {
 }
 // add the existing relationships from the entities if they exist, to prevent them from being overridden
-export function copyExistingPropsOver(tableName: string, tables: { [key: string]: any }, entities: { [key: string]: any }) {
+export function copyExistingPropsOver(
+tableName: string,
+tables: { [key: string]: any },
+entities: { [key: string]: any }
+) {
 if (entities && entities[tableName]) {
 if (entities[tableName].primaryDisplay) {
 tables[tableName].primaryDisplay = entities[tableName].primaryDisplay

View File

@@ -39,7 +39,7 @@ class TestConfiguration {
 if (bool) {
 env.isDev = () => false
 env.isProd = () => true
-this.ctx.auth = { apiKey: "test" }
+this.ctx.user = { tenantId: "test" }
 } else {
 env.isDev = () => true
 env.isProd = () => false
@@ -114,7 +114,7 @@ describe("usageQuota middleware", () => {
 await config.executeMiddleware()
-expect(usageQuota.update).toHaveBeenCalledWith("test", "rows", 1)
+expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
 expect(config.next).toHaveBeenCalled()
 })
@@ -131,7 +131,7 @@
 ])
 await config.executeMiddleware()
-expect(usageQuota.update).toHaveBeenCalledWith("test", "storage", 10100)
+expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
 expect(config.next).toHaveBeenCalled()
 })
 })

View File

@@ -13,6 +13,7 @@ const DOMAIN_MAP = {
 upload: usageQuota.Properties.UPLOAD,
 views: usageQuota.Properties.VIEW,
 users: usageQuota.Properties.USER,
+applications: usageQuota.Properties.APPS,
 // this will not be updated by endpoint calls
 // instead it will be updated by triggerInfo
 automationRuns: usageQuota.Properties.AUTOMATION,
@@ -57,9 +58,9 @@ module.exports = async (ctx, next) => {
 usage = files.map(file => file.size).reduce((total, size) => total + size)
 }
 try {
-await usageQuota.update(ctx.auth.apiKey, property, usage)
+await usageQuota.update(property, usage)
 return next()
 } catch (err) {
-ctx.throw(403, err)
+ctx.throw(400, err)
 }
 }

View File

@@ -10,6 +10,14 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
 exports.isDev = env.isDev
+exports.removeFromArray = (array, element) => {
+const index = array.indexOf(element)
+if (index !== -1) {
+array.splice(index, 1)
+}
+return array
+}
 /**
 * Makes sure that a URL has the correct number of slashes, while maintaining the
 * http(s):// double slashes.
@@ -106,3 +114,13 @@ exports.deleteEntityMetadata = async (appId, type, entityId) => {
 await db.remove(id, rev)
 }
 }
+exports.escapeDangerousCharacters = string => {
+return string
+.replace(/[\\]/g, "\\\\")
+.replace(/[\b]/g, "\\b")
+.replace(/[\f]/g, "\\f")
+.replace(/[\n]/g, "\\n")
+.replace(/[\r]/g, "\\r")
+.replace(/[\t]/g, "\\t")
+}

View File

@@ -1,41 +1,9 @@
 const env = require("../environment")
-const { apiKeyTable } = require("../db/dynamoClient")
-const DEFAULT_USAGE = {
-rows: 0,
-storage: 0,
-views: 0,
-automationRuns: 0,
-users: 0,
-}
-const DEFAULT_PLAN = {
-rows: 1000,
-// 1 GB
-storage: 8589934592,
-views: 10,
-automationRuns: 100,
-users: 10000,
-}
-function buildUpdateParams(key, property, usage) {
-return {
-primary: key,
-condition:
-"attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
-expression: "ADD #quota.#prop :usage",
-names: {
-"#quota": "usageQuota",
-"#prop": property,
-"#limits": "usageLimits",
-"#quotaReset": "quotaReset",
-},
-values: {
-":usage": usage,
-":now": Date.now(),
-},
-}
-}
+const { getGlobalDB } = require("@budibase/auth/tenancy")
+const {
+StaticDatabases,
+generateNewUsageQuotaDoc,
+} = require("@budibase/auth/db")
 function getNewQuotaReset() {
 return Date.now() + 2592000000
@@ -47,59 +15,59 @@ exports.Properties = {
 VIEW: "views",
 USER: "users",
 AUTOMATION: "automationRuns",
+APPS: "apps",
+EMAILS: "emails",
 }
-exports.getAPIKey = async appId => {
-if (!env.USE_QUOTAS) {
-return { apiKey: null }
-}
-return apiKeyTable.get({ primary: appId })
-}
+async function getUsageQuotaDoc(db) {
+let quota
+try {
+quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota)
+} catch (err) {
+// doc doesn't exist. Create it
+quota = await db.post(generateNewUsageQuotaDoc())
+}
+return quota
+}
 /**
-* Given a specified API key this will add to the usage object for the specified property.
-* @param {string} apiKey The API key which is to be updated.
+* Given a specified tenantId this will add to the usage object for the specified property.
 * @param {string} property The property which is to be added to (within the nested usageQuota object).
 * @param {number} usage The amount (this can be negative) to adjust the number by.
 * @returns {Promise<void>} When this completes the API key will now be up to date - the quota period may have
 * also been reset after this call.
 */
-exports.update = async (apiKey, property, usage) => {
+exports.update = async (property, usage) => {
 if (!env.USE_QUOTAS) {
 return
 }
 try {
-await apiKeyTable.update(buildUpdateParams(apiKey, property, usage))
-} catch (err) {
-// conditional check means the condition failed, need to check why
-if (err.code === "ConditionalCheckFailedException") {
-// get the API key so we can check it
-const keyObj = await apiKeyTable.get({ primary: apiKey })
-// the usage quota or usage limits didn't exist
-if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) {
-keyObj.usageQuota =
-keyObj.usageQuota == null ? DEFAULT_USAGE : keyObj.usageQuota
-keyObj.usageLimits =
-keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits
-keyObj.quotaReset = getNewQuotaReset()
-await apiKeyTable.put({ item: keyObj })
-return
-}
-// we have in fact breached the reset period
-else if (keyObj && keyObj.quotaReset <= Date.now()) {
-// update the quota reset period and reset the values for all properties
-keyObj.quotaReset = getNewQuotaReset()
-for (let prop of Object.keys(keyObj.usageQuota)) {
-if (prop === property) {
-keyObj.usageQuota[prop] = usage > 0 ? usage : 0
-} else {
-keyObj.usageQuota[prop] = 0
-}
-}
-await apiKeyTable.put({ item: keyObj })
-return
-}
-}
+const db = getGlobalDB()
+const quota = await getUsageQuotaDoc(db)
+// Check if the quota needs reset
+if (Date.now() >= quota.quotaReset) {
+quota.quotaReset = getNewQuotaReset()
+for (let prop of Object.keys(quota.usageQuota)) {
+quota.usageQuota[prop] = 0
+}
+}
+// increment the quota
+quota.usageQuota[property] += usage
+if (quota.usageQuota[property] >= quota.usageLimits[property]) {
+throw new Error(
+`You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
+)
+}
+// update the usage quotas
+await db.put(quota)
+} catch (err) {
+console.error(`Error updating usage quotas for ${property}`, err)
 throw err
 }
 }
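
A brief usage sketch (not from this commit): callers no longer pass an API key; the quota document lives in the tenant's global DB under the usage_quota ID created by generateNewUsageQuotaDoc, and update() bumps one counter and throws once the matching limit is reached. The property names mirror exports.Properties:

const usage = require("../utilities/usageQuota") // path as required elsewhere in this commit

async function afterAutomationRun() {
  // what the orchestrator now does once a cloud automation run completes
  await usage.update(usage.Properties.AUTOMATION, 1)
}

// Per-tenant doc shape written to the global DB (see generateNewUsageQuotaDoc above):
// { _id: "usage_quota",
//   quotaReset: <now + 30 days>,
//   usageQuota:  { automationRuns: 0, rows: 0, storage: 0, apps: 0, users: 0, views: 0, emails: 0 },
//   usageLimits: { automationRuns: 1000, rows: 4000, apps: 4, storage: 1000, users: 10, emails: 50 } }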

View File

@@ -34,7 +34,7 @@ function request(ctx, request) {
 exports.request = request
 // have to pass in the tenant ID as this could be coming from an automation
-exports.sendSmtpEmail = async (to, from, subject, contents) => {
+exports.sendSmtpEmail = async (to, from, subject, contents, automation) => {
 // tenant ID will be set in header
 const response = await fetch(
 checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
@@ -46,6 +46,7 @@ exports.sendSmtpEmail = async (to, from, subject, contents) => {
 contents,
 subject,
 purpose: "custom",
+automation,
 },
 })
 )

View File

@@ -1,6 +1,6 @@
 {
 "name": "@budibase/string-templates",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "description": "Handlebars wrapper for Budibase templating.",
 "main": "src/index.cjs",
 "module": "dist/bundle.mjs",

View File

@@ -1,7 +1,7 @@
 {
 "name": "@budibase/worker",
 "email": "hi@budibase.com",
-"version": "0.9.140-alpha.11",
+"version": "0.9.143-alpha.9",
 "description": "Budibase background service",
 "main": "src/index.js",
 "repository": {
@@ -25,8 +25,8 @@
 "author": "Budibase",
 "license": "AGPL-3.0-or-later",
 "dependencies": {
-"@budibase/auth": "^0.9.140-alpha.11",
-"@budibase/string-templates": "^0.9.140-alpha.11",
+"@budibase/auth": "^0.9.143-alpha.9",
+"@budibase/string-templates": "^0.9.143-alpha.9",
 "@koa/router": "^8.0.0",
 "@techpass/passport-openidconnect": "^0.3.0",
 "aws-sdk": "^2.811.0",

View File

@@ -10,6 +10,7 @@ const email = require("../../../utilities/email")
 const { upload, ObjectStoreBuckets } = require("@budibase/auth").objectStore
 const CouchDB = require("../../../db")
 const { getGlobalDB } = require("@budibase/auth/tenancy")
+const env = require("../../../environment")
 exports.save = async function (ctx) {
 const db = getGlobalDB()
@@ -174,7 +175,13 @@ exports.upload = async function (ctx) {
 const file = ctx.request.files.file
 const { type, name } = ctx.params
-const bucket = ObjectStoreBuckets.GLOBAL
+let bucket
+if (env.SELF_HOSTED) {
+bucket = ObjectStoreBuckets.GLOBAL
+} else {
+bucket = ObjectStoreBuckets.GLOBAL_CLOUD
+}
 const key = `${type}/${name}`
 await upload({
 bucket,

View File

@@ -2,8 +2,16 @@ const { sendEmail } = require("../../../utilities/email")
 const { getGlobalDB } = require("@budibase/auth/tenancy")
 exports.sendEmail = async ctx => {
-let { workspaceId, email, userId, purpose, contents, from, subject } =
-ctx.request.body
+let {
+workspaceId,
+email,
+userId,
+purpose,
+contents,
+from,
+subject,
+automation,
+} = ctx.request.body
 let user
 if (userId) {
 const db = getGlobalDB()
@@ -15,6 +23,7 @@ exports.sendEmail = async ctx => {
 contents,
 from,
 subject,
+automation,
 })
 ctx.body = {
 ...response,

View File

@@ -1,8 +1,8 @@
 const {
 generateGlobalUserID,
 getGlobalUserParams,
 StaticDatabases,
+generateNewUsageQuotaDoc,
 } = require("@budibase/auth/db")
 const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils
 const { UserStatus, EmailTemplatePurpose } = require("../../../constants")
@@ -11,6 +12,7 @@ const { sendEmail } = require("../../../utilities/email")
 const { user: userCache } = require("@budibase/auth/cache")
 const { invalidateSessions } = require("@budibase/auth/sessions")
 const CouchDB = require("../../../db")
+const accounts = require("@budibase/auth/accounts")
 const {
 getGlobalDB,
 getTenantId,
@@ -18,6 +19,7 @@ const {
 tryAddTenant,
 updateTenantId,
 } = require("@budibase/auth/tenancy")
+const env = require("../../../environment")
 const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@@ -48,10 +50,27 @@
 // make sure another user isn't using the same email
 let dbUser
 if (email) {
+// check budibase users inside the tenant
 dbUser = await getGlobalUserByEmail(email)
 if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) {
 throw "Email address already in use."
 }
+// check budibase users in other tenants
+if (env.MULTI_TENANCY) {
+dbUser = await getTenantUser(email)
+if (dbUser != null) {
+throw "Email address already in use."
+}
+}
+// check root account users in account portal
+if (!env.SELF_HOSTED) {
+const account = await accounts.getAccount(email)
+if (account) {
+throw "Email address already in use."
+}
+}
 } else {
 dbUser = await db.get(_id)
 }
@@ -139,6 +158,11 @@ exports.adminUser = async ctx => {
 })
 )
+// write usage quotas for cloud
+if (!env.SELF_HOSTED) {
+await db.post(generateNewUsageQuotaDoc())
+}
 if (response.rows.some(row => row.doc.admin)) {
 ctx.throw(
 403,
@@ -261,13 +285,22 @@
 ctx.body = user
 }
-exports.tenantUserLookup = async ctx => {
-const id = ctx.params.id
-// lookup, could be email or userId, either will return a doc
+// lookup, could be email or userId, either will return a doc
+const getTenantUser = async identifier => {
 const db = new CouchDB(PLATFORM_INFO_DB)
 try {
-ctx.body = await db.get(id)
+return await db.get(identifier)
 } catch (err) {
+return null
+}
+}
+exports.tenantUserLookup = async ctx => {
+const id = ctx.params.id
+const user = await getTenantUser(id)
+if (user) {
+ctx.body = user
+} else {
 ctx.throw(400, "No tenant user found.")
 }
 }

View File

@@ -33,6 +33,12 @@ module.exports = {
 INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
 MULTI_TENANCY: process.env.MULTI_TENANCY,
 ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
+SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
+SMTP_USER: process.env.SMTP_USER,
+SMTP_PASSWORD: process.env.SMTP_PASSWORD,
+SMTP_HOST: process.env.SMTP_HOST,
+SMTP_PORT: process.env.SMTP_PORT,
+SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
 _set(key, value) {
 process.env[key] = value
 module.exports[key] = value

View File

@@ -1,4 +1,5 @@
 const nodemailer = require("nodemailer")
+const env = require("../environment")
 const { getScopedConfig } = require("@budibase/auth/db")
 const { EmailTemplatePurpose, TemplateTypes, Configs } = require("../constants")
 const { getTemplateByPurpose } = require("../constants/templates")
@@ -101,16 +102,35 @@ async function buildEmail(purpose, email, context, { user, contents } = {}) {
 * Utility function for finding most valid SMTP configuration.
 * @param {object} db The CouchDB database which is to be looked up within.
 * @param {string|null} workspaceId If using finer grain control of configs a workspace can be used.
+* @param {boolean|null} automation Whether or not the configuration is being fetched for an email automation.
 * @return {Promise<object|null>} returns the SMTP configuration if it exists
 */
-async function getSmtpConfiguration(db, workspaceId = null) {
+async function getSmtpConfiguration(db, workspaceId = null, automation) {
 const params = {
 type: Configs.SMTP,
 }
 if (workspaceId) {
 params.workspace = workspaceId
 }
-return getScopedConfig(db, params)
+const customConfig = getScopedConfig(db, params)
+if (customConfig) {
+return customConfig
+}
+// Use an SMTP fallback configuration from env variables
+if (!automation && env.SMTP_FALLBACK_ENABLED) {
+return {
+port: env.SMTP_PORT,
+host: env.SMTP_HOST,
+secure: false,
+auth: {
+user: env.SMTP_USER,
+pass: env.SMTP_PASSWORD,
+},
+}
+}
 }
@@ -118,8 +138,8 @@ async function getSmtpConfiguration(db, workspaceId = null) {
 * @return {Promise<boolean>} returns true if there is a configuration that can be used.
 */
 exports.isEmailConfigured = async (workspaceId = null) => {
-// when "testing" simply return true
-if (TEST_MODE) {
+// when "testing" or smtp fallback is enabled simply return true
+if (TEST_MODE || env.SMTP_FALLBACK_ENABLED) {
 return true
 }
 const db = getGlobalDB()
@@ -138,16 +158,17 @@ exports.isEmailConfigured = async (workspaceId = null) => {
 * @param {string|undefined} contents If sending a custom email then can supply contents which will be added to it.
 * @param {string|undefined} subject A custom subject can be specified if the config one is not desired.
 * @param {object|undefined} info Pass in a structure of information to be stored alongside the invitation.
+* @param {boolean|undefined} disableFallback Prevent email being sent from SMTP fallback to avoid spam.
 * @return {Promise<object>} returns details about the attempt to send email, e.g. if it is successful; based on
 * nodemailer response.
 */
 exports.sendEmail = async (
 email,
 purpose,
-{ workspaceId, user, from, contents, subject, info } = {}
+{ workspaceId, user, from, contents, subject, info, automation } = {}
 ) => {
 const db = getGlobalDB()
-let config = (await getSmtpConfiguration(db, workspaceId)) || {}
+let config = (await getSmtpConfiguration(db, workspaceId, automation)) || {}
 if (Object.keys(config).length === 0 && !TEST_MODE) {
 throw "Unable to find SMTP configuration."
 }
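
A sketch (not from this commit) of the environment-driven fallback the worker now supports: when no SMTP config document exists and the send is not an automation, these variables are read and turned into the transport options returned by getSmtpConfiguration. All values below are placeholders:

// Placeholder environment for the SMTP fallback path
process.env.SMTP_FALLBACK_ENABLED = "1"
process.env.SMTP_HOST = "smtp.example.com"
process.env.SMTP_PORT = "587"
process.env.SMTP_USER = "mailer@example.com"
process.env.SMTP_PASSWORD = "secret"
process.env.SMTP_FROM_ADDRESS = "mailer@example.com"

// Resulting fallback config object, mirroring the branch added above
const fallbackConfig = {
  port: process.env.SMTP_PORT,
  host: process.env.SMTP_HOST,
  secure: false,
  auth: {
    user: process.env.SMTP_USER,
    pass: process.env.SMTP_PASSWORD,
  },
}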