diff --git a/.github/workflows/release-develop.yml b/.github/workflows/release-develop.yml
index ce41fcc3e6..cf0d6f848c 100644
--- a/.github/workflows/release-develop.yml
+++ b/.github/workflows/release-develop.yml
@@ -7,6 +7,7 @@ on:
env:
POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+ INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 6316bf1837..7b38a70eb7 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -13,6 +13,7 @@ on:
env:
POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+ INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
diff --git a/lerna.json b/lerna.json
index a8cda22a63..01c429c73d 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"npmClient": "yarn",
"packages": [
"packages/*"
diff --git a/packages/auth/package.json b/packages/auth/package.json
index f62e4cb5e6..0457520d91 100644
--- a/packages/auth/package.json
+++ b/packages/auth/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",
diff --git a/packages/auth/src/db/constants.js b/packages/auth/src/db/constants.js
index 77643ce4c5..ad4f6c9f66 100644
--- a/packages/auth/src/db/constants.js
+++ b/packages/auth/src/db/constants.js
@@ -12,6 +12,7 @@ exports.StaticDatabases = {
name: "global-info",
docs: {
tenants: "tenants",
+ usageQuota: "usage_quota",
},
},
}
diff --git a/packages/auth/src/db/utils.js b/packages/auth/src/db/utils.js
index a1a831523e..09e2ff6314 100644
--- a/packages/auth/src/db/utils.js
+++ b/packages/auth/src/db/utils.js
@@ -368,8 +368,33 @@ async function getScopedConfig(db, params) {
return configDoc && configDoc.config ? configDoc.config : configDoc
}
+function generateNewUsageQuotaDoc() {
+ return {
+ _id: StaticDatabases.PLATFORM_INFO.docs.usageQuota,
+ quotaReset: Date.now() + 2592000000, // 30 days in ms
+ usageQuota: {
+ automationRuns: 0,
+ rows: 0,
+ storage: 0,
+ apps: 0,
+ users: 0,
+ views: 0,
+ emails: 0,
+ },
+ usageLimits: {
+ automationRuns: 1000,
+ rows: 4000,
+ apps: 4,
+ storage: 1000,
+ users: 10,
+ emails: 50,
+ },
+ }
+}
+
exports.Replication = Replication
exports.getScopedConfig = getScopedConfig
exports.generateConfigID = generateConfigID
exports.getConfigParams = getConfigParams
exports.getScopedFullConfig = getScopedFullConfig
+exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc
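
Note: the new generateNewUsageQuotaDoc export only builds the document. A minimal sketch of how a consumer might read-or-create the singleton quota doc in PouchDB/CouchDB (getUsageQuotaDoc is a hypothetical helper, not part of this diff):

```js
const { StaticDatabases } = require("./constants")
const { generateNewUsageQuotaDoc } = require("./utils")

async function getUsageQuotaDoc(db) {
  try {
    // the doc lives under a fixed _id, so get() either finds it or 404s
    return await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota)
  } catch (err) {
    // first access - write the default quotas and limits
    const doc = generateNewUsageQuotaDoc()
    const response = await db.put(doc)
    doc._rev = response.rev
    return doc
  }
}
```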
diff --git a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
index ff38a01fbb..1ace65ba40 100644
--- a/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
+++ b/packages/auth/src/middleware/passport/tests/third-party-common.spec.js
@@ -104,7 +104,7 @@ describe("third party common", () => {
_id: id,
email: email,
}
- const response = await db.post(dbUser)
+ const response = await db.put(dbUser)
dbUser._rev = response.rev
}
diff --git a/packages/auth/src/middleware/passport/third-party-common.js b/packages/auth/src/middleware/passport/third-party-common.js
index 7c03944232..c25aa3e0b0 100644
--- a/packages/auth/src/middleware/passport/third-party-common.js
+++ b/packages/auth/src/middleware/passport/third-party-common.js
@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
dbUser = await syncUser(dbUser, thirdPartyUser)
// create or sync the user
- const response = await db.post(dbUser)
+ const response = await db.put(dbUser)
dbUser._rev = response.rev
// authenticate
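
Context for the post → put swaps in this file and the test above: PouchDB's post() mints a fresh _id on every call, so using it with a doc that already carries an _id can produce duplicates, while put() creates or updates the doc under its own _id. Illustrative sketch:

```js
const PouchDB = require("pouchdb")
const db = new PouchDB("users")

async function upsertUser(dbUser) {
  // put() writes under dbUser._id (create or update);
  // post() would have generated a new random _id each time
  const response = await db.put(dbUser)
  dbUser._rev = response.rev
  return dbUser
}

// usage: the same _id always maps to the same doc
upsertUser({ _id: "us_123", email: "user@example.com" })
```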
diff --git a/packages/auth/src/security/permissions.js b/packages/auth/src/security/permissions.js
index 03fa5fa562..d0308d783e 100644
--- a/packages/auth/src/security/permissions.js
+++ b/packages/auth/src/security/permissions.js
@@ -139,8 +139,7 @@ exports.doesHaveResourcePermission = (
// set foundSub to not subResourceId, in case there is no subResource
let foundMain = false,
foundSub = false
- for (let [resource, level] of Object.entries(permissions)) {
- const levels = getAllowedLevels(level)
+ for (let [resource, levels] of Object.entries(permissions)) {
if (resource === resourceId && levels.indexOf(permLevel) !== -1) {
foundMain = true
}
@@ -177,10 +176,6 @@ exports.doesHaveBasePermission = (permType, permLevel, permissionIds) => {
return false
}
-exports.higherPermission = (perm1, perm2) => {
- return levelToNumber(perm1) > levelToNumber(perm2) ? perm1 : perm2
-}
-
exports.isPermissionLevelHigherThanRead = level => {
return levelToNumber(level) > 1
}
diff --git a/packages/auth/src/security/roles.js b/packages/auth/src/security/roles.js
index baa8fc40dc..71fbc10132 100644
--- a/packages/auth/src/security/roles.js
+++ b/packages/auth/src/security/roles.js
@@ -1,6 +1,6 @@
const { getDB } = require("../db")
const { cloneDeep } = require("lodash/fp")
-const { BUILTIN_PERMISSION_IDS, higherPermission } = require("./permissions")
+const { BUILTIN_PERMISSION_IDS } = require("./permissions")
const {
generateRoleID,
getRoleParams,
@@ -193,8 +193,17 @@ exports.getUserPermissions = async (appId, userRoleId) => {
const permissions = {}
for (let role of rolesHierarchy) {
if (role.permissions) {
- for (let [resource, level] of Object.entries(role.permissions)) {
- permissions[resource] = higherPermission(permissions[resource], level)
+ for (let [resource, levels] of Object.entries(role.permissions)) {
+ if (!permissions[resource]) {
+ permissions[resource] = []
+ }
+ const permsSet = new Set(permissions[resource])
+ if (Array.isArray(levels)) {
+ levels.forEach(level => permsSet.add(level))
+ } else {
+ permsSet.add(levels)
+ }
+ permissions[resource] = [...permsSet]
}
}
}
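
The getUserPermissions change replaces the old single highest level per resource (via the removed higherPermission) with a de-duplicated array of levels, tolerating the legacy string form. Roughly how the merge behaves, with illustrative values:

```js
const rolesHierarchy = [
  { permissions: { ta_users: ["read"] } },
  { permissions: { ta_users: ["read", "write"], vi_sales: "read" } }, // legacy string form
]

const permissions = {}
for (let role of rolesHierarchy) {
  for (let [resource, levels] of Object.entries(role.permissions)) {
    const permsSet = new Set(permissions[resource] || [])
    if (Array.isArray(levels)) {
      levels.forEach(level => permsSet.add(level))
    } else {
      permsSet.add(levels)
    }
    permissions[resource] = [...permsSet]
  }
}

console.log(permissions)
// { ta_users: ["read", "write"], vi_sales: ["read"] }
```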
diff --git a/packages/bbui/package.json b/packages/bbui/package.json
index ddede26a86..42e4215caa 100644
--- a/packages/bbui/package.json
+++ b/packages/bbui/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
diff --git a/packages/builder/package.json b/packages/builder/package.json
index 7ab313974b..2c255835ce 100644
--- a/packages/builder/package.json
+++ b/packages/builder/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@@ -65,10 +65,10 @@
}
},
"dependencies": {
- "@budibase/bbui": "^0.9.142",
- "@budibase/client": "^0.9.142",
+ "@budibase/bbui": "^0.9.143-alpha.0",
+ "@budibase/client": "^0.9.143-alpha.0",
"@budibase/colorpicker": "1.1.2",
- "@budibase/string-templates": "^0.9.142",
+ "@budibase/string-templates": "^0.9.143-alpha.0",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
diff --git a/packages/builder/src/App.svelte b/packages/builder/src/App.svelte
index 0624690b27..60051ea043 100644
--- a/packages/builder/src/App.svelte
+++ b/packages/builder/src/App.svelte
@@ -1,16 +1,10 @@
diff --git a/packages/builder/src/analytics.js b/packages/builder/src/analytics.js
deleted file mode 100644
index 5b130a8e6b..0000000000
--- a/packages/builder/src/analytics.js
+++ /dev/null
@@ -1,139 +0,0 @@
-import * as Sentry from "@sentry/browser"
-import posthog from "posthog-js"
-import api from "builderStore/api"
-
-let analyticsEnabled
-const posthogConfigured = process.env.POSTHOG_TOKEN && process.env.POSTHOG_URL
-const sentryConfigured = process.env.SENTRY_DSN
-
-const FEEDBACK_SUBMITTED_KEY = "budibase:feedback_submitted"
-const APP_FIRST_STARTED_KEY = "budibase:first_run"
-const feedbackHours = 12
-
-async function activate() {
- if (analyticsEnabled === undefined) {
- // only the server knows the true NODE_ENV
- // this was an issue as NODE_ENV = 'cypress' on the server,
- // but 'production' on the client
- const response = await api.get("/api/analytics")
- analyticsEnabled = (await response.json()).enabled === true
- }
- if (!analyticsEnabled) return
- if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
- if (posthogConfigured) {
- posthog.init(process.env.POSTHOG_TOKEN, {
- autocapture: false,
- capture_pageview: false,
- api_host: process.env.POSTHOG_URL,
- })
- posthog.set_config({ persistence: "cookie" })
- }
-}
-
-function identify(id) {
- if (!analyticsEnabled || !id) return
- if (posthogConfigured) posthog.identify(id)
- if (sentryConfigured)
- Sentry.configureScope(scope => {
- scope.setUser({ id: id })
- })
-}
-
-async function identifyByApiKey(apiKey) {
- if (!analyticsEnabled) return true
- try {
- const response = await fetch(
- `https://03gaine137.execute-api.eu-west-1.amazonaws.com/prod/account/id?api_key=${apiKey.trim()}`
- )
- if (response.status === 200) {
- const id = await response.json()
-
- await api.put("/api/keys/userId", { value: id })
- identify(id)
- return true
- }
-
- return false
- } catch (error) {
- console.log(error)
- }
-}
-
-function captureException(err) {
- if (!analyticsEnabled) return
- Sentry.captureException(err)
- captureEvent("Error", { error: err.message ? err.message : err })
-}
-
-function captureEvent(eventName, props = {}) {
- if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
- props.sourceApp = "builder"
- posthog.capture(eventName, props)
-}
-
-if (!localStorage.getItem(APP_FIRST_STARTED_KEY)) {
- localStorage.setItem(APP_FIRST_STARTED_KEY, Date.now())
-}
-
-const isFeedbackTimeElapsed = sinceDateStr => {
- const sinceDate = parseFloat(sinceDateStr)
- const feedbackMilliseconds = feedbackHours * 60 * 60 * 1000
- return Date.now() > sinceDate + feedbackMilliseconds
-}
-
-function submitFeedback(values) {
- if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
- localStorage.setItem(FEEDBACK_SUBMITTED_KEY, Date.now())
-
- const prefixedValues = Object.entries(values).reduce((obj, [key, value]) => {
- obj[`feedback_${key}`] = value
- return obj
- }, {})
-
- posthog.capture("Feedback Submitted", prefixedValues)
-}
-
-function requestFeedbackOnDeploy() {
- if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
- const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
- if (!lastSubmittedStr) return true
- return isFeedbackTimeElapsed(lastSubmittedStr)
-}
-
-function highlightFeedbackIcon() {
- if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
- const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
- if (lastSubmittedStr) return isFeedbackTimeElapsed(lastSubmittedStr)
- const firstRunStr = localStorage.getItem(APP_FIRST_STARTED_KEY)
- if (!firstRunStr) return false
- return isFeedbackTimeElapsed(firstRunStr)
-}
-
-// Opt In/Out
-const ifAnalyticsEnabled = func => () => {
- if (analyticsEnabled && process.env.POSTHOG_TOKEN) {
- return func()
- }
-}
-const disabled = () => posthog.has_opted_out_capturing()
-const optIn = () => posthog.opt_in_capturing()
-const optOut = () => posthog.opt_out_capturing()
-
-export default {
- activate,
- identify,
- identifyByApiKey,
- captureException,
- captureEvent,
- requestFeedbackOnDeploy,
- submitFeedback,
- highlightFeedbackIcon,
- disabled: () => {
- if (analyticsEnabled == null) {
- return true
- }
- return ifAnalyticsEnabled(disabled)
- },
- optIn: ifAnalyticsEnabled(optIn),
- optOut: ifAnalyticsEnabled(optOut),
-}
diff --git a/packages/builder/src/analytics/IntercomClient.js b/packages/builder/src/analytics/IntercomClient.js
new file mode 100644
index 0000000000..8cc7e35bbf
--- /dev/null
+++ b/packages/builder/src/analytics/IntercomClient.js
@@ -0,0 +1,94 @@
+export default class IntercomClient {
+ constructor(token) {
+ this.token = token
+ }
+
+ /**
+ * Instantiate intercom using their provided script.
+ */
+ init() {
+ if (!this.token) return
+
+ const token = this.token
+
+ var w = window
+ var ic = w.Intercom
+ if (typeof ic === "function") {
+ ic("reattach_activator")
+ ic("update", w.intercomSettings)
+ } else {
+ var d = document
+ var i = function () {
+ i.c(arguments)
+ }
+ i.q = []
+ i.c = function (args) {
+ i.q.push(args)
+ }
+ w.Intercom = i
+ var l = function () {
+ var s = d.createElement("script")
+ s.type = "text/javascript"
+ s.async = true
+ s.src = "https://widget.intercom.io/widget/" + token
+ var x = d.getElementsByTagName("script")[0]
+ x.parentNode.insertBefore(s, x)
+ }
+ if (document.readyState === "complete") {
+ l()
+ } else if (w.attachEvent) {
+ w.attachEvent("onload", l)
+ } else {
+ w.addEventListener("load", l, false)
+ }
+
+ this.initialised = true
+ }
+ }
+
+ /**
+ * Show the intercom chat bubble.
+ * @param {Object} user - user to identify
+ * @returns Intercom global object
+ */
+ show(user = {}) {
+ if (!this.initialised) return
+
+ return window.Intercom("boot", {
+ app_id: this.token,
+ ...user,
+ })
+ }
+
+ /**
+ * Update intercom user details and messages.
+ * @returns Intercom global object
+ */
+ update() {
+ if (!this.initialised) return
+
+ return window.Intercom("update")
+ }
+
+ /**
+ * Capture analytics events and send them to intercom.
+ * @param {String} event - event identifier
+ * @param {Object} props - properties for the event
+ * @returns Intercom global object
+ */
+ captureEvent(event, props = {}) {
+ if (!this.initialised) return
+
+ return window.Intercom("trackEvent", event, props)
+ }
+
+ /**
+ * Disassociate the user from the current session.
+ * @returns Intercom global object
+ */
+ logout() {
+ if (!this.initialised) return
+
+ return window.Intercom("shutdown")
+ }
+}
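
Typical lifecycle for the new client; the token comes from the INTERCOM_TOKEN env var wired up elsewhere in this diff (user values illustrative):

```js
import IntercomClient from "./IntercomClient"

const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
intercom.init() // no-op when the token is missing

// once the user is known:
intercom.show({ email: "user@example.com", user_id: "us_123" })
intercom.captureEvent("Builder Started")
intercom.logout() // shuts the messenger down on sign-out
```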
diff --git a/packages/builder/src/analytics/PosthogClient.js b/packages/builder/src/analytics/PosthogClient.js
new file mode 100644
index 0000000000..0a1fde42ea
--- /dev/null
+++ b/packages/builder/src/analytics/PosthogClient.js
@@ -0,0 +1,80 @@
+import posthog from "posthog-js"
+import { Events } from "./constants"
+
+export default class PosthogClient {
+ constructor(token, url) {
+ this.token = token
+ this.url = url
+ }
+
+ init() {
+ if (!this.token || !this.url) return
+
+ posthog.init(this.token, {
+ autocapture: false,
+ capture_pageview: false,
+ api_host: this.url,
+ })
+ posthog.set_config({ persistence: "cookie" })
+
+ this.initialised = true
+ }
+
+ /**
+ * Set the posthog context to the current user
+ * @param {String} id - unique user id
+ */
+ identify(id) {
+ if (!this.initialised) return
+
+ posthog.identify(id)
+ }
+
+ /**
+ * Update user metadata associated with current user in posthog
+ * @param {Object} meta - user fields
+ */
+ updateUser(meta) {
+ if (!this.initialised) return
+
+ posthog.people.set(meta)
+ }
+
+ /**
+ * Capture analytics events and send them to posthog.
+ * @param {String} eventName - event identifier
+ * @param {Object} props - properties for the event
+ */
+ captureEvent(eventName, props = {}) {
+ if (!this.initialised) return
+
+ props.sourceApp = "builder"
+ posthog.capture(eventName, props)
+ }
+
+ /**
+ * Submit NPS feedback to posthog.
+ * @param {Object} values - NPS Values
+ */
+ npsFeedback(values) {
+ if (!this.initialised) return
+
+ localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
+
+ const prefixedFeedback = {}
+ for (let key in values) {
+ prefixedFeedback[`feedback_${key}`] = values[key]
+ }
+
+ posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
+ }
+
+ /**
+ * Reset posthog user back to initial state on logout.
+ */
+ logout() {
+ if (!this.initialised) return
+
+ posthog.reset()
+ }
+}
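
For reference, npsFeedback namespaces every field before capture; illustrative input/output of the prefixing loop:

```js
const values = { rating: 9, comment: "Great" }

const prefixedFeedback = {}
for (let key in values) {
  prefixedFeedback[`feedback_${key}`] = values[key]
}

console.log(prefixedFeedback)
// { feedback_rating: 9, feedback_comment: "Great" }
```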
diff --git a/packages/builder/src/analytics/SentryClient.js b/packages/builder/src/analytics/SentryClient.js
new file mode 100644
index 0000000000..2a1f8732e3
--- /dev/null
+++ b/packages/builder/src/analytics/SentryClient.js
@@ -0,0 +1,37 @@
+import * as Sentry from "@sentry/browser"
+
+export default class SentryClient {
+ constructor(dsn) {
+ this.dsn = dsn
+ }
+
+ init() {
+ if (this.dsn) {
+ Sentry.init({ dsn: this.dsn })
+
+ this.initialised = true
+ }
+ }
+
+ /**
+ * Capture an exception and send it to sentry.
+ * @param {Error} err - JS error object
+ */
+ captureException(err) {
+ if (!this.initialised) return
+
+ Sentry.captureException(err)
+ }
+
+ /**
+ * Identify user in sentry.
+ * @param {String} id - Unique user id
+ */
+ identify(id) {
+ if (!this.initialised) return
+
+ Sentry.configureScope(scope => {
+ scope.setUser({ id })
+ })
+ }
+}
diff --git a/packages/builder/src/analytics/constants.js b/packages/builder/src/analytics/constants.js
new file mode 100644
index 0000000000..d38b7bba4f
--- /dev/null
+++ b/packages/builder/src/analytics/constants.js
@@ -0,0 +1,49 @@
+export const Events = {
+ BUILDER: {
+ STARTED: "Builder Started",
+ },
+ COMPONENT: {
+ CREATED: "Added Component",
+ },
+ DATASOURCE: {
+ CREATED: "Datasource Created",
+ UPDATED: "Datasource Updated",
+ },
+ TABLE: {
+ CREATED: "Table Created",
+ },
+ VIEW: {
+ CREATED: "View Created",
+ ADDED_FILTER: "Added View Filter",
+ ADDED_CALCULATE: "Added View Calculate",
+ },
+ SCREEN: {
+ CREATED: "Screen Created",
+ },
+ AUTOMATION: {
+ CREATED: "Automation Created",
+ SAVED: "Automation Saved",
+ BLOCK_ADDED: "Added Automation Block",
+ },
+ NPS: {
+ SUBMITTED: "budibase:feedback_submitted",
+ },
+ APP: {
+ CREATED: "budibase:app_created",
+ PUBLISHED: "budibase:app_published",
+ UNPUBLISHED: "budibase:app_unpublished",
+ },
+ ANALYTICS: {
+ OPT_IN: "budibase:analytics_opt_in",
+ OPT_OUT: "budibase:analytics_opt_out",
+ },
+ USER: {
+ INVITE: "budibase:portal_user_invite",
+ },
+ SMTP: {
+ SAVED: "budibase:smtp_saved",
+ },
+ SSO: {
+ SAVED: "budibase:sso_saved",
+ },
+}
diff --git a/packages/builder/src/analytics/index.js b/packages/builder/src/analytics/index.js
new file mode 100644
index 0000000000..b79ab67e0c
--- /dev/null
+++ b/packages/builder/src/analytics/index.js
@@ -0,0 +1,79 @@
+import api from "builderStore/api"
+import PosthogClient from "./PosthogClient"
+import IntercomClient from "./IntercomClient"
+import SentryClient from "./SentryClient"
+import { Events } from "./constants"
+import { auth } from "stores/portal"
+import { get } from "svelte/store"
+
+const posthog = new PosthogClient(
+ process.env.POSTHOG_TOKEN,
+ process.env.POSTHOG_URL
+)
+const sentry = new SentryClient(process.env.SENTRY_DSN)
+const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
+
+class AnalyticsHub {
+ constructor() {
+ this.clients = [posthog, sentry, intercom]
+ }
+
+ async activate() {
+ // Setting the analytics env var off in the backend overrides org/tenant settings
+ const analyticsStatus = await api.get("/api/analytics")
+ const json = await analyticsStatus.json()
+
+ // Analytics disabled on the backend (self-hosted or env var off)
+ if (!json.enabled) return
+
+ const tenantId = get(auth).tenantId
+
+ if (tenantId) {
+ const res = await api.get(
+ `/api/global/configs/public?tenantId=${tenantId}`
+ )
+ const orgJson = await res.json()
+
+ // analytics opted out for the tenant
+ if (orgJson.config?.analytics === false) return
+ }
+
+ this.clients.forEach(client => client.init())
+ this.enabled = true
+ }
+
+ identify(id, metadata) {
+ posthog.identify(id)
+ if (metadata) {
+ posthog.updateUser(metadata)
+ }
+ sentry.identify(id)
+ }
+
+ captureException(err) {
+ sentry.captureException(err)
+ }
+
+ captureEvent(eventName, props = {}) {
+ posthog.captureEvent(eventName, props)
+ intercom.captureEvent(eventName, props)
+ }
+
+ showChat(user) {
+ intercom.show(user)
+ }
+
+ submitFeedback(values) {
+ posthog.npsFeedback(values)
+ }
+
+ async logout() {
+ posthog.logout()
+ intercom.logout()
+ }
+}
+
+const analytics = new AnalyticsHub()
+
+export { Events }
+export default analytics
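
How the hub is consumed across this diff - activate once, then fire namespaced events; every client no-ops silently if it never initialised (id and metadata illustrative):

```js
import analytics, { Events } from "analytics"

await analytics.activate() // respects the backend flag and per-tenant opt-out
analytics.captureEvent(Events.BUILDER.STARTED)
analytics.identify("us_123", { email: "user@example.com" })
```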
diff --git a/packages/builder/src/builderStore/dataBinding.js b/packages/builder/src/builderStore/dataBinding.js
index d3af6799f3..0858b29bcb 100644
--- a/packages/builder/src/builderStore/dataBinding.js
+++ b/packages/builder/src/builderStore/dataBinding.js
@@ -443,10 +443,9 @@ function bindingReplacement(bindableProperties, textWithBindings, convertTo) {
for (let from of convertFromProps) {
if (shouldReplaceBinding(newBoundValue, from, convertTo)) {
const binding = bindableProperties.find(el => el[convertFrom] === from)
- newBoundValue = newBoundValue.replace(
- new RegExp(from, "gi"),
- binding[convertTo]
- )
+ while (newBoundValue.includes(from)) {
+ newBoundValue = newBoundValue.replace(from, binding[convertTo])
+ }
}
}
result = result.replace(boundValue, newBoundValue)
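
Rationale for the loop above: from is an arbitrary binding string that can contain regex metacharacters ((, ., [ and so on), so new RegExp(from, "gi") could mis-match or throw. An equivalent literal global replacement can also be written with split/join, which terminates even when the replacement contains the search string:

```js
// literal (non-regex) global replacement
function replaceAll(str, from, to) {
  return str.split(from).join(to)
}

console.log(replaceAll("{{ a.b }} x {{ a.b }}", "{{ a.b }}", "{{ c }}"))
// "{{ c }} x {{ c }}"
```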
diff --git a/packages/builder/src/builderStore/index.js b/packages/builder/src/builderStore/index.js
index 6fecda84c0..f32dedd47e 100644
--- a/packages/builder/src/builderStore/index.js
+++ b/packages/builder/src/builderStore/index.js
@@ -3,7 +3,6 @@ import { getAutomationStore } from "./store/automation"
import { getHostingStore } from "./store/hosting"
import { getThemeStore } from "./store/theme"
import { derived, writable } from "svelte/store"
-import analytics from "analytics"
import { FrontendTypes, LAYOUT_NAMES } from "../constants"
import { findComponent } from "./storeUtils"
@@ -55,13 +54,4 @@ export const mainLayout = derived(store, $store => {
export const selectedAccessRole = writable("BASIC")
-export const initialise = async () => {
- try {
- await analytics.activate()
- analytics.captureEvent("Builder Started")
- } catch (err) {
- console.log(err)
- }
-}
-
export const screenSearchString = writable(null)
diff --git a/packages/builder/src/builderStore/store/automation/index.js b/packages/builder/src/builderStore/store/automation/index.js
index e60553070b..0a47970d28 100644
--- a/packages/builder/src/builderStore/store/automation/index.js
+++ b/packages/builder/src/builderStore/store/automation/index.js
@@ -2,7 +2,7 @@ import { writable } from "svelte/store"
import api from "../../api"
import Automation from "./Automation"
import { cloneDeep } from "lodash/fp"
-import analytics from "analytics"
+import analytics, { Events } from "analytics"
const automationActions = store => ({
fetch: async () => {
@@ -110,7 +110,7 @@ const automationActions = store => ({
state.selectedBlock = newBlock
return state
})
- analytics.captureEvent("Added Automation Block", {
+ analytics.captureEvent(Events.AUTOMATION.BLOCK_ADDED, {
name: block.name,
})
},
diff --git a/packages/builder/src/builderStore/store/frontend.js b/packages/builder/src/builderStore/store/frontend.js
index 603fa88b09..09132f28cb 100644
--- a/packages/builder/src/builderStore/store/frontend.js
+++ b/packages/builder/src/builderStore/store/frontend.js
@@ -19,7 +19,7 @@ import {
import { fetchComponentLibDefinitions } from "../loadComponentLibraries"
import api from "../api"
import { FrontendTypes } from "constants"
-import analytics from "analytics"
+import analytics, { Events } from "analytics"
import {
findComponentType,
findComponentParent,
@@ -215,6 +215,13 @@ export const getFrontendStore = () => {
if (screenToDelete._id === state.selectedScreenId) {
state.selectedScreenId = null
}
+ // remove the link for this screen
+ screenDeletePromises.push(
+ store.actions.components.links.delete(
+ screenToDelete.routing.route,
+ screenToDelete.props._instanceName
+ )
+ )
}
return state
})
@@ -443,7 +450,7 @@ export const getFrontendStore = () => {
})
// Log event
- analytics.captureEvent("Added Component", {
+ analytics.captureEvent(Events.COMPONENT.CREATED, {
name: componentInstance._component,
})
@@ -646,6 +653,36 @@ export const getFrontendStore = () => {
// Save layout
await store.actions.layouts.save(layout)
},
+ delete: async (url, title) => {
+ const layout = get(mainLayout)
+ if (!layout) {
+ return
+ }
+
+ // Remove the link from the main layout
+ if (layout.props._component.endsWith("layout")) {
+ // If using a new SDK, remove from the layout component settings
+ layout.props.links = layout.props.links.filter(
+ link => !(link.text === title && link.url === url)
+ )
+ } else {
+ // If using an old SDK, remove from the navigation component
+ // TODO: remove this when we can assume everyone has updated
+ const nav = findComponentType(
+ layout.props,
+ "@budibase/standard-components/navigation"
+ )
+ if (!nav) {
+ return
+ }
+
+ nav._children = nav._children.filter(
+ child => !(child.url === url && child.text === title)
+ )
+ }
+ // Save layout
+ await store.actions.layouts.save(layout)
+ },
},
},
}
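
The new links.delete mirrors the save path, filtering the layout's links (or the legacy nav's children) by an exact (text, url) match. The predicate in isolation, with sample data:

```js
const links = [
  { text: "Home", url: "/" },
  { text: "Sales", url: "/sales" },
]

const url = "/sales"
const title = "Sales"
const filtered = links.filter(link => !(link.text === title && link.url === url))

console.log(filtered) // [ { text: "Home", url: "/" } ]
```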
diff --git a/packages/builder/src/components/automation/AutomationPanel/CreateAutomationModal.svelte b/packages/builder/src/components/automation/AutomationPanel/CreateAutomationModal.svelte
index 7700a4a1c2..f3273aa5ec 100644
--- a/packages/builder/src/components/automation/AutomationPanel/CreateAutomationModal.svelte
+++ b/packages/builder/src/components/automation/AutomationPanel/CreateAutomationModal.svelte
@@ -4,7 +4,7 @@
import { automationStore } from "builderStore"
import { notifications } from "@budibase/bbui"
import { Input, ModalContent, Layout, Body, Icon } from "@budibase/bbui"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
let name
let selectedTrigger
@@ -36,7 +36,7 @@
notifications.success(`Automation ${name} created.`)
$goto(`./${$automationStore.selectedAutomation.automation._id}`)
- analytics.captureEvent("Automation Created", { name })
+ analytics.captureEvent(Events.AUTOMATION.CREATED, { name })
}
$: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
diff --git a/packages/builder/src/components/automation/AutomationPanel/UpdateAutomationModal.svelte b/packages/builder/src/components/automation/AutomationPanel/UpdateAutomationModal.svelte
index 29966ec372..64197c3a77 100644
--- a/packages/builder/src/components/automation/AutomationPanel/UpdateAutomationModal.svelte
+++ b/packages/builder/src/components/automation/AutomationPanel/UpdateAutomationModal.svelte
@@ -2,7 +2,7 @@
import { automationStore } from "builderStore"
import { notifications } from "@budibase/bbui"
import { Icon, Input, ModalContent, Modal } from "@budibase/bbui"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
let name
let error = ""
@@ -26,7 +26,7 @@
}
await automationStore.actions.save(updatedAutomation)
notifications.success(`Automation ${name} updated successfully.`)
- analytics.captureEvent("Automation Saved", { name })
+ analytics.captureEvent(Events.AUTOMATION.SAVED, { name })
hide()
}
diff --git a/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte b/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte
index 660a822898..50d44eca88 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CalculateModal.svelte
@@ -1,7 +1,7 @@
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte
index 61777c0b7e..2f6ec51233 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateViewModal.svelte
@@ -3,7 +3,7 @@
import { goto } from "@roxi/routify"
import { views as viewsStore } from "stores/backend"
import { tables } from "stores/backend"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
let name
let field
@@ -21,7 +21,7 @@
field,
})
notifications.success(`View ${name} created`)
- analytics.captureEvent("View Created", { name })
+ analytics.captureEvent(Events.VIEW.CREATED, { name })
$goto(`../../view/${name}`)
}
diff --git a/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte b/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte
index 170bb75142..9c6f4956b0 100644
--- a/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/FilterModal.svelte
@@ -11,7 +11,7 @@
Icon,
} from "@budibase/bbui"
import { tables, views } from "stores/backend"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
const CONDITIONS = [
{
@@ -65,7 +65,7 @@
function saveView() {
views.save(view)
notifications.success(`View ${view.name} saved.`)
- analytics.captureEvent("Added View Filter", {
+ analytics.captureEvent(Events.VIEW.ADDED_FILTER, {
filters: JSON.stringify(view.filters),
})
}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte
index 84c737eb67..6ba8e4042f 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/DatasourceNavigator.svelte
@@ -1,8 +1,9 @@
{#if $database?._id}
- {#each $datasources.list as datasource, idx}
+ {#each enrichedDataSources as datasource, idx}
<NavItem
border={idx > 0}
text={datasource.name}
- opened={openDataSources.includes(datasource._id)}
- selected={$datasources.selected === datasource._id}
+ opened={datasource.open}
+ selected={datasource.selected}
withArrow={true}
on:click={() => selectDatasource(datasource)}
on:iconClick={() => toggleNode(datasource)}
@@ -61,22 +81,21 @@
{/if}
- {#if openDataSources.includes(datasource._id)}
+ {#if datasource.open}
+ {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
+ <NavItem
+ indentLevel={1}
+ icon="SQLQuery"
+ text={query.name}
+ selected={$queries.selected === query._id}
+ on:click={() => onClickQuery(query)}
+ >
+ <EditQueryPopover {query} />
+ </NavItem>
+ {/each}
{/if}
-
- {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
- <NavItem
- indentLevel={1}
- icon="SQLQuery"
- text={query.name}
- selected={$queries.selected === query._id}
- on:click={() => onClickQuery(query)}
- >
- <EditQueryPopover {query} />
- </NavItem>
- {/each}
{/each}
{/if}
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte
index 9cdd893230..e7affb30c4 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte
@@ -5,7 +5,7 @@
import { Input, Label, ModalContent, Modal, Context } from "@budibase/bbui"
import TableIntegrationMenu from "../TableIntegrationMenu/index.svelte"
import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
import { getContext } from "svelte"
const modalContext = getContext(Context.Modal)
@@ -45,7 +45,7 @@
plus,
})
notifications.success(`Datasource ${name} created successfully.`)
- analytics.captureEvent("Datasource Created", { name, type })
+ analytics.captureEvent(Events.DATASOURCE.CREATED, { name, type })
// Navigate to new datasource
$goto(`./datasource/${response._id}`)
diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte
index f93af59a38..28625aa86e 100644
--- a/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte
+++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/UpdateDatasourceModal.svelte
@@ -2,7 +2,7 @@
import { datasources } from "stores/backend"
import { notifications } from "@budibase/bbui"
import { Input, ModalContent, Modal } from "@budibase/bbui"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
let error = ""
let modal
@@ -35,7 +35,7 @@
}
await datasources.save(updatedDatasource)
notifications.success(`Datasource ${name} updated successfully.`)
- analytics.captureEvent("Datasource Updated", updatedDatasource)
+ analytics.captureEvent(Events.DATASOURCE.UPDATED, updatedDatasource)
hide()
}
diff --git a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
index b59e5cda5e..dd8876be27 100644
--- a/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/modals/CreateTableModal.svelte
@@ -12,7 +12,7 @@
Layout,
} from "@budibase/bbui"
import TableDataImport from "../TableDataImport.svelte"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
import screenTemplates from "builderStore/store/screenTemplates"
import { buildAutoColumn, getAutoColumnInformation } from "builderStore/utils"
import { NEW_ROW_TEMPLATE } from "builderStore/store/screenTemplates/newRowScreen"
@@ -67,7 +67,7 @@
// Create table
const table = await tables.save(newTable)
notifications.success(`Table ${name} created successfully.`)
- analytics.captureEvent("Table Created", { name })
+ analytics.captureEvent(Events.TABLE.CREATED, { name })
// Create auto screens
if (createAutoscreens) {
diff --git a/packages/builder/src/components/deploy/DeployModal.svelte b/packages/builder/src/components/deploy/DeployModal.svelte
index 4daa16c7c4..3dcf0c27b1 100644
--- a/packages/builder/src/components/deploy/DeployModal.svelte
+++ b/packages/builder/src/components/deploy/DeployModal.svelte
@@ -2,7 +2,8 @@
import { onMount, onDestroy } from "svelte"
import { Button, Modal, notifications, ModalContent } from "@budibase/bbui"
import api from "builderStore/api"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
+ import { store } from "builderStore"
const DeploymentStatus = {
SUCCESS: "SUCCESS",
@@ -23,6 +24,9 @@
if (response.status !== 200) {
throw new Error(`status ${response.status}`)
} else {
+ analytics.captureEvent(Events.APP.PUBLISHED, {
+ appId: $store.appId,
+ })
notifications.success(`Application published successfully`)
}
} catch (err) {
diff --git a/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte b/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte
index ed0c764956..e02f9d87e5 100644
--- a/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte
+++ b/packages/builder/src/components/design/NavigationPanel/NewScreenModal.svelte
@@ -4,7 +4,7 @@
import { roles } from "stores/backend"
import { Input, Select, ModalContent, Toggle } from "@budibase/bbui"
import getTemplates from "builderStore/store/screenTemplates"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
const CONTAINER = "@budibase/standard-components/container"
@@ -66,7 +66,7 @@
if (templateIndex !== undefined) {
const template = templates[templateIndex]
- analytics.captureEvent("Screen Created", {
+ analytics.captureEvent(Events.SCREEN.CREATED, {
template: template.id || template.name,
})
}
diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte
new file mode 100644
index 0000000000..fe251a0320
--- /dev/null
+++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/RefreshDataProvider.svelte
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
diff --git a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/index.js b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/index.js
index eaab22d89d..cca8ece484 100644
--- a/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/index.js
+++ b/packages/builder/src/components/design/PropertiesPanel/PropertyControls/EventsEditor/actions/index.js
@@ -12,6 +12,7 @@ import ClearForm from "./ClearForm.svelte"
import CloseScreenModal from "./CloseScreenModal.svelte"
import ChangeFormStep from "./ChangeFormStep.svelte"
import UpdateStateStep from "./UpdateState.svelte"
+import RefreshDataProvider from "./RefreshDataProvider.svelte"
// Defines which actions are available to configure in the front end.
// Unfortunately the "name" property is used as the identifier so please don't
@@ -62,6 +63,10 @@ export const getAvailableActions = () => {
name: "Change Form Step",
component: ChangeFormStep,
},
+ {
+ name: "Refresh Data Provider",
+ component: RefreshDataProvider,
+ },
]
if (get(store).clientFeatures?.state) {
diff --git a/packages/builder/src/components/start/CreateAppModal.svelte b/packages/builder/src/components/start/CreateAppModal.svelte
index 4310d3322e..9ce9d746d7 100644
--- a/packages/builder/src/components/start/CreateAppModal.svelte
+++ b/packages/builder/src/components/start/CreateAppModal.svelte
@@ -12,7 +12,7 @@
import { admin } from "stores/portal"
import { string, mixed, object } from "yup"
import api, { get, post } from "builderStore/api"
- import analytics from "analytics"
+ import analytics, { Events } from "analytics"
import { onMount } from "svelte"
import { capitalise } from "helpers"
import { goto } from "@roxi/routify"
@@ -98,9 +98,9 @@
throw new Error(appJson.message)
}
- analytics.captureEvent("App Created", {
+ analytics.captureEvent(Events.APP.CREATED, {
name: $values.name,
- appId: appJson._id,
+ appId: appJson.instance._id,
template,
})
diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte
new file mode 100644
index 0000000000..ed271aae34
--- /dev/null
+++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/bb_internal/_layout.svelte
@@ -0,0 +1,7 @@
+
+
+
diff --git a/packages/builder/src/pages/builder/auth/login.svelte b/packages/builder/src/pages/builder/auth/login.svelte
index 783e5a4903..f9f2b34578 100644
--- a/packages/builder/src/pages/builder/auth/login.svelte
+++ b/packages/builder/src/pages/builder/auth/login.svelte
@@ -29,6 +29,7 @@
username,
password,
})
+
if ($auth?.user?.forceResetPassword) {
$goto("./reset")
} else {
diff --git a/packages/builder/src/pages/builder/portal/apps/index.svelte b/packages/builder/src/pages/builder/portal/apps/index.svelte
index a18ec6a8bd..d84b327e90 100644
--- a/packages/builder/src/pages/builder/portal/apps/index.svelte
+++ b/packages/builder/src/pages/builder/portal/apps/index.svelte
@@ -15,8 +15,7 @@
} from "@budibase/bbui"
import CreateAppModal from "components/start/CreateAppModal.svelte"
import UpdateAppModal from "components/start/UpdateAppModal.svelte"
- import api, { del } from "builderStore/api"
- import analytics from "analytics"
+ import { del } from "builderStore/api"
import { onMount } from "svelte"
import { apps, auth, admin } from "stores/portal"
import download from "downloadjs"
@@ -66,14 +65,6 @@
}
}
- const checkKeys = async () => {
- const response = await api.get(`/api/keys/`)
- const keys = await response.json()
- if (keys.userId) {
- analytics.identify(keys.userId)
- }
- }
-
const initiateAppCreation = () => {
creationModal.show()
creatingApp = true
@@ -188,7 +179,6 @@
}
onMount(async () => {
- checkKeys()
await apps.load()
loaded = true
})
diff --git a/packages/builder/src/pages/builder/portal/manage/auth/index.svelte b/packages/builder/src/pages/builder/portal/manage/auth/index.svelte
index 48d9da18f9..c2445e14ae 100644
--- a/packages/builder/src/pages/builder/portal/manage/auth/index.svelte
+++ b/packages/builder/src/pages/builder/portal/manage/auth/index.svelte
@@ -23,6 +23,7 @@
import api from "builderStore/api"
import { organisation, auth, admin } from "stores/portal"
import { uuid } from "builderStore/uuid"
+ import analytics, { Events } from "analytics"
$: tenantId = $auth.tenantId
$: multiTenancyEnabled = $admin.multiTenancy
@@ -209,6 +210,7 @@
providers[res.type]._id = res._id
})
notifications.success(`Settings saved.`)
+ analytics.captureEvent(Events.SSO.SAVED)
})
.catch(err => {
notifications.error(`Failed to update auth settings. ${err}`)
diff --git a/packages/builder/src/pages/builder/portal/manage/email/index.svelte b/packages/builder/src/pages/builder/portal/manage/email/index.svelte
index 76d98ed545..5a78623b81 100644
--- a/packages/builder/src/pages/builder/portal/manage/email/index.svelte
+++ b/packages/builder/src/pages/builder/portal/manage/email/index.svelte
@@ -16,6 +16,7 @@
import { email } from "stores/portal"
import api from "builderStore/api"
import { cloneDeep } from "lodash/fp"
+ import analytics, { Events } from "analytics"
const ConfigTypes = {
SMTP: "smtp",
@@ -69,6 +70,7 @@
smtpConfig._rev = json._rev
smtpConfig._id = json._id
notifications.success(`Settings saved.`)
+ analytics.captureEvent(Events.SMTP.SAVED)
}
}
diff --git a/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte b/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte
index 9504f73b68..25a69af1c8 100644
--- a/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte
+++ b/packages/builder/src/pages/builder/portal/manage/users/_components/AddUserModal.svelte
@@ -10,6 +10,7 @@
} from "@budibase/bbui"
import { createValidationStore, emailValidator } from "helpers/validation"
import { users } from "stores/portal"
+ import analytics, { Events } from "analytics"
export let disabled
@@ -25,6 +26,7 @@
notifications.error(res.message)
} else {
notifications.success(res.message)
+ analytics.captureEvent(Events.USER.INVITE, { type: selected })
}
}
diff --git a/packages/builder/src/pages/builder/portal/settings/organisation.svelte b/packages/builder/src/pages/builder/portal/settings/organisation.svelte
index be8b60e6e7..79eaebb28b 100644
--- a/packages/builder/src/pages/builder/portal/settings/organisation.svelte
+++ b/packages/builder/src/pages/builder/portal/settings/organisation.svelte
@@ -25,7 +25,7 @@
}
const values = writable({
- analytics: !analytics.disabled(),
+ analytics: analytics.enabled,
company: $organisation.company,
platformUrl: $organisation.platformUrl,
logo: $organisation.logoUrl
@@ -48,13 +48,6 @@
async function saveConfig() {
loading = true
- // Set analytics preference
- if ($values.analytics) {
- analytics.optIn()
- } else {
- analytics.optOut()
- }
-
// Upload logo if required
if ($values.logo && !$values.logo.url) {
await uploadLogo($values.logo)
@@ -64,6 +57,7 @@
const config = {
company: $values.company ?? "",
platformUrl: $values.platformUrl ?? "",
+ analytics: $values.analytics,
}
// remove logo if required
if (!$values.logo) {
diff --git a/packages/builder/src/stores/backend/datasources.js b/packages/builder/src/stores/backend/datasources.js
index 5c6ed3f2cb..5e42315948 100644
--- a/packages/builder/src/stores/backend/datasources.js
+++ b/packages/builder/src/stores/backend/datasources.js
@@ -1,4 +1,4 @@
-import { writable } from "svelte/store"
+import { writable, get } from "svelte/store"
import { queries, tables, views } from "./"
import api from "../../builderStore/api"
@@ -8,7 +8,8 @@ export const INITIAL_DATASOURCE_VALUES = {
}
export function createDatasourcesStore() {
- const { subscribe, update, set } = writable(INITIAL_DATASOURCE_VALUES)
+ const store = writable(INITIAL_DATASOURCE_VALUES)
+ const { subscribe, update, set } = store
return {
subscribe,
@@ -21,7 +22,15 @@ export function createDatasourcesStore() {
fetch: async () => {
const response = await api.get(`/api/datasources`)
const json = await response.json()
- update(state => ({ ...state, list: json, selected: null }))
+
+ // Clear selected if it no longer exists, otherwise keep it
+ const selected = get(store).selected
+ let nextSelected = null
+ if (selected && json.find(source => source._id === selected)) {
+ nextSelected = selected
+ }
+
+ update(state => ({ ...state, list: json, selected: nextSelected }))
return json
},
select: async datasourceId => {
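
The fetch change stops a refresh from clobbering the current selection; it survives only while it still resolves to a fetched datasource. The same logic on plain data, for illustration:

```js
function nextSelected(selected, list) {
  // keep the selection only if it still exists in the fetched list
  return selected && list.find(source => source._id === selected)
    ? selected
    : null
}

console.log(nextSelected("ds_1", [{ _id: "ds_1" }])) // "ds_1"
console.log(nextSelected("ds_2", [{ _id: "ds_1" }])) // null
```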
diff --git a/packages/builder/src/stores/portal/auth.js b/packages/builder/src/stores/portal/auth.js
index fe8f87cfb2..e33a1f22ac 100644
--- a/packages/builder/src/stores/portal/auth.js
+++ b/packages/builder/src/stores/portal/auth.js
@@ -1,6 +1,7 @@
import { derived, writable, get } from "svelte/store"
import api from "../../builderStore/api"
import { admin } from "stores/portal"
+import analytics from "analytics"
export function createAuthStore() {
const auth = writable({
@@ -49,6 +50,21 @@ export function createAuthStore() {
}
return store
})
+
+ if (user) {
+ analytics.activate().then(() => {
+ analytics.identify(user._id, user)
+ if (user.size === "100+" || user.size === "10000+") {
+ analytics.showChat({
+ email: user.email,
+ created_at: user.createdAt || Date.now(),
+ name: user.name,
+ user_id: user._id,
+ tenant: user.tenantId,
+ })
+ }
+ })
+ }
}
async function setOrganisation(tenantId) {
diff --git a/packages/builder/vite.config.js b/packages/builder/vite.config.js
index d8b8dbba1d..12b45e7cf8 100644
--- a/packages/builder/vite.config.js
+++ b/packages/builder/vite.config.js
@@ -22,6 +22,9 @@ export default ({ mode }) => {
isProduction ? "production" : "development"
),
"process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN),
+ "process.env.INTERCOM_TOKEN": JSON.stringify(
+ process.env.INTERCOM_TOKEN
+ ),
"process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL),
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
}),
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 0340759e45..29763b9da6 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
diff --git a/packages/client/manifest.json b/packages/client/manifest.json
index 7bef9c2e4b..2e64b1fb4c 100644
--- a/packages/client/manifest.json
+++ b/packages/client/manifest.json
@@ -2389,6 +2389,7 @@
"icon": "Data",
"illegalChildren": ["section"],
"hasChildren": true,
+ "actions": ["RefreshDatasource"],
"settings": [
{
"type": "dataSource",
diff --git a/packages/client/package.json b/packages/client/package.json
index 3d25a1b5d6..f9777629f4 100644
--- a/packages/client/package.json
+++ b/packages/client/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
- "@budibase/bbui": "^0.9.142",
+ "@budibase/bbui": "^0.9.143-alpha.0",
"@budibase/standard-components": "^0.9.139",
- "@budibase/string-templates": "^0.9.142",
+ "@budibase/string-templates": "^0.9.143-alpha.0",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"
diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js
index aeefe6163c..11aa033c1d 100644
--- a/packages/client/src/utils/buttonActions.js
+++ b/packages/client/src/utils/buttonActions.js
@@ -88,7 +88,7 @@ const validateFormHandler = async (action, context) => {
)
}
-const refreshDatasourceHandler = async (action, context) => {
+const refreshDataProviderHandler = async (action, context) => {
return await executeActionHandler(
context,
action.parameters.componentId,
@@ -139,7 +139,7 @@ const handlerMap = {
["Execute Query"]: queryExecutionHandler,
["Trigger Automation"]: triggerAutomationHandler,
["Validate Form"]: validateFormHandler,
- ["Refresh Datasource"]: refreshDatasourceHandler,
+ ["Refresh Data Provider"]: refreshDataProviderHandler,
["Log Out"]: logoutHandler,
["Clear Form"]: clearFormHandler,
["Close Screen Modal"]: closeScreenModalHandler,
diff --git a/packages/server/package.json b/packages/server/package.json
index 9397ab16f3..ad289e8c0d 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@@ -23,10 +23,9 @@
"format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
"lint": "eslint --fix src/",
"lint:fix": "yarn run format && yarn run lint",
+ "initialise": "node scripts/initialise.js",
"multi:enable": "node scripts/multiTenancy.js enable",
- "multi:disable": "node scripts/multiTenancy.js disable",
- "selfhost:enable": "node scripts/selfhost.js enable",
- "selfhost:disable": "node scripts/selfhost.js disable"
+ "multi:disable": "node scripts/multiTenancy.js disable"
},
"jest": {
"preset": "ts-jest",
@@ -49,8 +48,7 @@
"!src/automations/tests/**/*",
"!src/utilities/fileProcessor.js",
"!src/utilities/fileSystem/**/*",
- "!src/utilities/redis.js",
- "!src/api/controllers/row/internalSearch.js"
+ "!src/utilities/redis.js"
],
"coverageReporters": [
"lcov",
@@ -64,9 +62,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.142",
- "@budibase/client": "^0.9.142",
- "@budibase/string-templates": "^0.9.142",
+ "@budibase/auth": "^0.9.143-alpha.0",
+ "@budibase/client": "^0.9.143-alpha.0",
+ "@budibase/string-templates": "^0.9.143-alpha.0",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
@@ -98,12 +96,13 @@
"lodash": "4.17.21",
"mongodb": "3.6.3",
"mssql": "6.2.3",
- "mysql": "^2.18.1",
+ "mysql": "2.18.1",
"node-fetch": "2.6.0",
"open": "7.3.0",
"pg": "8.5.1",
"pino-pretty": "4.0.0",
"pouchdb": "7.2.1",
+ "pouchdb-adapter-memory": "^7.2.1",
"pouchdb-all-dbs": "1.0.2",
"pouchdb-find": "^7.2.2",
"pouchdb-replication-stream": "1.2.9",
@@ -133,7 +132,6 @@
"express": "^4.17.1",
"jest": "^27.0.5",
"nodemon": "^2.0.4",
- "pouchdb-adapter-memory": "^7.2.1",
"prettier": "^2.3.1",
"rimraf": "^3.0.2",
"supertest": "^4.0.2",
diff --git a/packages/server/scripts/integrations/pg-json/docker-compose.yml b/packages/server/scripts/integrations/pg-json/docker-compose.yml
new file mode 100644
index 0000000000..6bc307a86d
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/docker-compose.yml
@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+ db:
+ container_name: postgres-json
+ image: postgres
+ restart: always
+ environment:
+ POSTGRES_USER: root
+ POSTGRES_PASSWORD: root
+ POSTGRES_DB: main
+ ports:
+ - "5432:5432"
+ volumes:
+ #- pg_data:/var/lib/postgresql/data/
+ - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+ pgadmin:
+ container_name: pgadmin-json
+ image: dpage/pgadmin4
+ restart: always
+ environment:
+ PGADMIN_DEFAULT_EMAIL: root@root.com
+ PGADMIN_DEFAULT_PASSWORD: root
+ ports:
+ - "5050:80"
+
+#volumes:
+# pg_data:
diff --git a/packages/server/scripts/integrations/pg-json/init.sql b/packages/server/scripts/integrations/pg-json/init.sql
new file mode 100644
index 0000000000..06a5b4901d
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/init.sql
@@ -0,0 +1,22 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE jsonTable (
+ id character varying(32),
+ data jsonb,
+ text text
+);
+
+INSERT INTO jsonTable (id, data) VALUES ('1', '{"id": 1, "age": 1, "name": "Mike", "newline": "this is text with a\n newline in it"}');
+
+CREATE VIEW jsonView AS SELECT
+ x.id,
+ x.age,
+ x.name,
+ x.newline
+FROM
+ jsonTable c,
+ LATERAL jsonb_to_record(c.data) x (id character varying(32),
+ age BIGINT,
+ name TEXT,
+ newline TEXT
+ );
diff --git a/packages/server/scripts/integrations/pg-json/reset.sh b/packages/server/scripts/integrations/pg-json/reset.sh
new file mode 100755
index 0000000000..32778bd11f
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/reset.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
diff --git a/packages/server/scripts/integrations/postgres/docker-compose.yml b/packages/server/scripts/integrations/postgres/docker-compose.yml
index e2bba9f38e..4dfcb0e1ad 100644
--- a/packages/server/scripts/integrations/postgres/docker-compose.yml
+++ b/packages/server/scripts/integrations/postgres/docker-compose.yml
@@ -15,7 +15,7 @@ services:
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
pgadmin:
- container_name: pgadmin
+ container_name: pgadmin-pg
image: dpage/pgadmin4
restart: always
environment:
diff --git a/packages/server/scripts/integrations/service-vehicles/docker-compose.yml b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml
new file mode 100644
index 0000000000..7473e540db
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml
@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+ db:
+ container_name: postgres-vehicle
+ image: postgres
+ restart: always
+ environment:
+ POSTGRES_USER: root
+ POSTGRES_PASSWORD: root
+ POSTGRES_DB: main
+ ports:
+ - "5432:5432"
+ volumes:
+ #- pg_data:/var/lib/postgresql/data/
+ - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+ pgadmin:
+ container_name: pgadmin
+ image: dpage/pgadmin4
+ restart: always
+ environment:
+ PGADMIN_DEFAULT_EMAIL: root@root.com
+ PGADMIN_DEFAULT_PASSWORD: root
+ ports:
+ - "5050:80"
+
+#volumes:
+# pg_data:
diff --git a/packages/server/scripts/integrations/service-vehicles/init.sql b/packages/server/scripts/integrations/service-vehicles/init.sql
new file mode 100644
index 0000000000..3e0485313e
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/init.sql
@@ -0,0 +1,52 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE Vehicles (
+ id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+ Registration text COLLATE pg_catalog."default",
+ Make text COLLATE pg_catalog."default",
+ Model text COLLATE pg_catalog."default",
+ Colour text COLLATE pg_catalog."default",
+ Year smallint,
+ CONSTRAINT Vehicles_pkey PRIMARY KEY (id)
+);
+
+CREATE TABLE ServiceLog (
+ id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+ Description text COLLATE pg_catalog."default",
+ VehicleId bigint,
+ ServiceDate timestamp without time zone,
+ Category text COLLATE pg_catalog."default",
+ Mileage bigint,
+ CONSTRAINT ServiceLog_pkey PRIMARY KEY (id),
+ CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId)
+ REFERENCES Vehicles (id) MATCH SIMPLE
+ ON UPDATE NO ACTION
+ ON DELETE NO ACTION
+);
+
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('FAZ 9837','Volkswagen','Polo','White',2002);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('JHI 8827','BMW','M3','Black',2013);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('D903PI','Volvo','XC40','Grey',2014);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('HGT5677','Skoda','Octavia','Graphite',2009);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('PPF9276','Skoda','Octavia','Graphite',2021);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('J893FT','Toyota','Corolla','Red',2015);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('MJK776','Honda','HR-V','Silver',2015);
+
+
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Change front brakes', 1, '2021-05-04', 'Brakes', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Tyres - full set', 1, '2021-05-04', 'Tyres', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Engine tune up', 2, '2021-07-14', 'Engine', 50889);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002);
diff --git a/packages/server/scripts/integrations/service-vehicles/reset.sh b/packages/server/scripts/integrations/service-vehicles/reset.sh
new file mode 100755
index 0000000000..32778bd11f
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/reset.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
diff --git a/packages/server/src/api/controllers/analytics.js b/packages/server/src/api/controllers/analytics.js
index d6e1a9ce5b..eb64bc87b9 100644
--- a/packages/server/src/api/controllers/analytics.js
+++ b/packages/server/src/api/controllers/analytics.js
@@ -2,6 +2,6 @@ const env = require("../../environment")
exports.isEnabled = async function (ctx) {
ctx.body = {
- enabled: env.ENABLE_ANALYTICS === "true",
+ enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true",
}
}
diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js
index da0014c5f8..76675116c2 100644
--- a/packages/server/src/api/controllers/application.js
+++ b/packages/server/src/api/controllers/application.js
@@ -230,7 +230,12 @@ exports.create = async function (ctx) {
const response = await db.put(newApplication, { force: true })
newApplication._rev = response.rev
- await createEmptyAppPackage(ctx, newApplication)
+ // Only create the default home screens and layout if we aren't importing
+ // an app
+ if (useTemplate !== "true") {
+ await createEmptyAppPackage(ctx, newApplication)
+ }
+
/* istanbul ignore next */
if (!env.isTest()) {
await createApp(appId)
diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js
index 38b6e68932..4a2fd7d86a 100644
--- a/packages/server/src/api/controllers/datasource.js
+++ b/packages/server/src/api/controllers/datasource.js
@@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables
- const response = await db.post(datasource)
+ const response = await db.put(datasource)
datasource._rev = response.rev
ctx.body = datasource
@@ -89,7 +89,7 @@ exports.save = async function (ctx) {
...ctx.request.body,
}
- const response = await db.post(datasource)
+ const response = await db.put(datasource)
datasource._rev = response.rev
// Drain connection pools when configuration is changed
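Note: the post-to-put swaps in this file (and the similar ones in table/index.js below) are about ID ownership - PouchDB's post() is meant for letting the database generate the _id, while put() writes a document under the _id it already carries. A minimal sketch of the pattern these controllers now follow, assuming a plain PouchDB instance:

    const PouchDB = require("pouchdb")
    const db = new PouchDB("example")

    async function saveDatasource(datasource) {
      // datasources always arrive with their own _id, so put() - "write this
      // document under this exact _id" - states the intent; post() is reserved
      // for documents where the database should mint the identifier
      const response = await db.put(datasource)
      datasource._rev = response.rev
      return datasource
    }

    // usage (hypothetical ID)
    saveDatasource({ _id: "datasource_abc123", source: "POSTGRES", config: {} })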
diff --git a/packages/server/src/api/controllers/permission.js b/packages/server/src/api/controllers/permission.js
index e269f8c41d..6c02663649 100644
--- a/packages/server/src/api/controllers/permission.js
+++ b/packages/server/src/api/controllers/permission.js
@@ -1,9 +1,4 @@
-const {
- getBuiltinPermissions,
- PermissionLevels,
- isPermissionLevelHigherThanRead,
- higherPermission,
-} = require("@budibase/auth/permissions")
+const { getBuiltinPermissions } = require("@budibase/auth/permissions")
const {
isBuiltin,
getDBRoleID,
@@ -16,6 +11,7 @@ const {
CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions,
} = require("../../utilities/security")
+const { removeFromArray } = require("../../utilities")
const PermissionUpdateType = {
REMOVE: "remove",
@@ -24,22 +20,6 @@ const PermissionUpdateType = {
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
-// quick function to perform a bit of weird logic, make sure fetch calls
-// always say a write role also has read permission
-function fetchLevelPerms(permissions, level, roleId) {
- if (!permissions) {
- permissions = {}
- }
- permissions[level] = roleId
- if (
- isPermissionLevelHigherThanRead(level) &&
- !permissions[PermissionLevels.READ]
- ) {
- permissions[PermissionLevels.READ] = roleId
- }
- return permissions
-}
-
// utility function to stop this repetition - permissions always stored under roles
async function getAllDBRoles(db) {
const body = await db.allDocs(
@@ -74,23 +54,31 @@ async function updatePermissionOnRole(
for (let role of dbRoles) {
let updated = false
const rolePermissions = role.permissions ? role.permissions : {}
+    // make sure it's an array, also handle migrating the old string format
+ if (
+ !rolePermissions[resourceId] ||
+ !Array.isArray(rolePermissions[resourceId])
+ ) {
+ rolePermissions[resourceId] =
+ typeof rolePermissions[resourceId] === "string"
+ ? [rolePermissions[resourceId]]
+ : []
+ }
// handle the removal/updating the role which has this permission first
// the updating (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
- rolePermissions[resourceId] === level
+ rolePermissions[resourceId].indexOf(level) !== -1
) {
- delete rolePermissions[resourceId]
+ removeFromArray(rolePermissions[resourceId], level)
updated = true
}
    // handle the adding - we're on the correct role, add it to this
if (!remove && role._id === dbRoleId) {
- rolePermissions[resourceId] = higherPermission(
- rolePermissions[resourceId],
- level
- )
+ const set = new Set(rolePermissions[resourceId])
+ rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
@@ -127,12 +115,11 @@ exports.fetch = async function (ctx) {
continue
}
const roleId = getExternalRoleID(role._id)
- for (let [resource, level] of Object.entries(role.permissions)) {
- permissions[resource] = fetchLevelPerms(
- permissions[resource],
- level,
- roleId
- )
+ for (let [resource, levelArr] of Object.entries(role.permissions)) {
+      const levels = Array.isArray(levelArr) ? levelArr : [levelArr]
+ const perms = {}
+ levels.forEach(level => (perms[level] = roleId))
+ permissions[resource] = perms
}
}
// apply the base permissions
@@ -157,12 +144,13 @@ exports.getResourcePerms = async function (ctx) {
for (let level of SUPPORTED_LEVELS) {
// update the various roleIds in the resource permissions
for (let role of roles) {
- if (role.permissions && role.permissions[resourceId] === level) {
- permissions = fetchLevelPerms(
- permissions,
- level,
- getExternalRoleID(role._id)
- )
+ const rolePerms = role.permissions
+ if (
+ rolePerms &&
+        (rolePerms[resourceId] === level ||
+          (Array.isArray(rolePerms[resourceId]) &&
+            rolePerms[resourceId].indexOf(level) !== -1))
+ ) {
+ permissions[level] = getExternalRoleID(role._id)
}
}
}
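Note: the shape change in this file is that a role's permission entry per resource goes from a single level string to an array of levels, with old documents migrated lazily as they are touched. The normalisation step in isolation, over hypothetical data:

    // migrate an old-format entry (single string) to the new array format
    function normalisePermissions(rolePermissions, resourceId) {
      const current = rolePermissions[resourceId]
      if (!current || !Array.isArray(current)) {
        rolePermissions[resourceId] = typeof current === "string" ? [current] : []
      }
      return rolePermissions
    }

    const perms = { table_a: "read", table_b: ["read", "write"] }
    normalisePermissions(perms, "table_a") // perms.table_a is now ["read"]
    normalisePermissions(perms, "table_c") // perms.table_c is now []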
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index eced518604..75c3e9b492 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -437,7 +437,11 @@ module External {
for (let [colName, { isMany, rows, tableId }] of Object.entries(
related
)) {
- const table = this.getTable(tableId)
+ const table: Table = this.getTable(tableId)
+        // if the column is the table's primary key (not the foreign key) skip it, nothing to do
+ if (table.primary && table.primary.indexOf(colName) !== -1) {
+ continue
+ }
for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen
@@ -540,6 +544,9 @@ module External {
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
+ meta: {
+ table,
+ },
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js
index 2299a20580..d429c14cc7 100644
--- a/packages/server/src/api/controllers/row/internal.js
+++ b/packages/server/src/api/controllers/row/internal.js
@@ -5,17 +5,22 @@ const {
generateRowID,
DocumentTypes,
InternalTables,
+ generateMemoryViewID,
} = require("../../../db/utils")
const userController = require("../user")
const {
inputProcessing,
outputProcessing,
+ processAutoColumn,
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
+const inMemoryViews = require("../../../db/inMemoryView")
+const env = require("../../../environment")
+const { migrateToInMemoryView } = require("../view/utils")
const CALCULATION_TYPES = {
SUM: "sum",
@@ -25,17 +30,84 @@ const CALCULATION_TYPES = {
async function storeResponse(ctx, db, row, oldTable, table) {
row.type = "row"
- const response = await db.put(row)
// don't worry about rev, tables handle rev/lastID updates
+  // if another row has been written since processing, this will
+ // handle the auto ID clash
if (!isEqual(oldTable, table)) {
- await db.put(table)
+ try {
+ await db.put(table)
+ } catch (err) {
+ if (err.status === 409) {
+ const updatedTable = await db.get(table._id)
+ let response = processAutoColumn(null, updatedTable, row, {
+ reprocessing: true,
+ })
+ await db.put(response.table)
+ row = response.row
+ } else {
+ throw err
+ }
+ }
}
+ const response = await db.put(row)
row._rev = response.rev
// process the row before return, to include relationships
row = await outputProcessing(ctx, table, row, { squash: false })
return { row, table }
}
+// fetch the raw rows for a table, without running outputProcessing
+async function getRawTableData(ctx, db, tableId) {
+ let rows
+ if (tableId === InternalTables.USER_METADATA) {
+ await userController.fetchMetadata(ctx)
+ rows = ctx.body
+ } else {
+ const response = await db.allDocs(
+ getRowParams(tableId, null, {
+ include_docs: true,
+ })
+ )
+ rows = response.rows.map(row => row.doc)
+ }
+ return rows
+}
+
+async function getView(db, viewName) {
+ let viewInfo
+ async function getFromDesignDoc() {
+ const designDoc = await db.get("_design/database")
+ viewInfo = designDoc.views[viewName]
+ return viewInfo
+ }
+ let migrate = false
+ if (env.SELF_HOSTED) {
+ viewInfo = await getFromDesignDoc()
+ } else {
+ try {
+ viewInfo = await db.get(generateMemoryViewID(viewName))
+ if (viewInfo) {
+ viewInfo = viewInfo.view
+ }
+ } catch (err) {
+      // check if it can be retrieved from the design doc (needs migrating)
+ if (err.status !== 404) {
+ viewInfo = null
+ } else {
+ viewInfo = await getFromDesignDoc()
+ migrate = !!viewInfo
+ }
+ }
+ }
+ if (migrate) {
+ await migrateToInMemoryView(db, viewName)
+ }
+ if (!viewInfo) {
+ throw "View does not exist."
+ }
+ return viewInfo
+}
+
exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
@@ -139,15 +211,18 @@ exports.fetchView = async ctx => {
const db = new CouchDB(appId)
const { calculation, group, field } = ctx.query
- const designDoc = await db.get("_design/database")
- const viewInfo = designDoc.views[viewName]
- if (!viewInfo) {
- throw "View does not exist."
+ const viewInfo = await getView(db, viewName)
+ let response
+ if (env.SELF_HOSTED) {
+ response = await db.query(`database/${viewName}`, {
+ include_docs: !calculation,
+ group: !!group,
+ })
+ } else {
+ const tableId = viewInfo.meta.tableId
+ const data = await getRawTableData(ctx, db, tableId)
+ response = await inMemoryViews.runView(viewInfo, calculation, group, data)
}
- const response = await db.query(`database/${viewName}`, {
- include_docs: !calculation,
- group: !!group,
- })
let rows
if (!calculation) {
@@ -191,19 +266,9 @@ exports.fetch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
- let rows,
- table = await db.get(ctx.params.tableId)
- if (ctx.params.tableId === InternalTables.USER_METADATA) {
- await userController.fetchMetadata(ctx)
- rows = ctx.body
- } else {
- const response = await db.allDocs(
- getRowParams(ctx.params.tableId, null, {
- include_docs: true,
- })
- )
- rows = response.rows.map(row => row.doc)
- }
+ const tableId = ctx.params.tableId
+ let table = await db.get(tableId)
+ let rows = await getRawTableData(ctx, db, tableId)
return outputProcessing(ctx, table, rows)
}
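Note: storeResponse above now writes the table document (which tracks the last auto IDs) before the row, and treats a 409 as "another row claimed the auto ID first": it re-reads the table, reprocesses the row's auto columns against the fresh revision, and writes again. The conflict-retry pattern on its own, with a hypothetical reapply callback:

    // write a doc; on a revision conflict, re-read the latest revision, let the
    // caller reapply their change to it, and write once more
    async function putWithRetry(db, doc, reapply) {
      try {
        return await db.put(doc)
      } catch (err) {
        if (err.status !== 409) {
          throw err
        }
        const latest = await db.get(doc._id)
        return db.put(reapply(latest))
      }
    }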
diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js
index cb9a5e166c..ca6c782713 100644
--- a/packages/server/src/api/controllers/row/utils.js
+++ b/packages/server/src/api/controllers/row/utils.js
@@ -5,6 +5,7 @@ const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
+const { processStringSync } = require("@budibase/string-templates")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
@@ -73,6 +74,11 @@ exports.validate = async ({ appId, tableId, row, table }) => {
errors[fieldName] = "Field not in list"
}
})
+ } else if (table.schema[fieldName].type === FieldTypes.FORMULA) {
+ res = validateJs.single(
+ processStringSync(table.schema[fieldName].formula, row),
+ constraints
+ )
} else {
res = validateJs.single(row[fieldName], constraints)
}
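Note: formula columns store no value on the row itself, so the branch above evaluates the formula first and validates what it produces. A sketch, assuming handlebars-style bindings as used by @budibase/string-templates (the formula and constraints below are hypothetical):

    const { processStringSync } = require("@budibase/string-templates")
    const validateJs = require("validate.js")

    const row = { firstName: "Ada", lastName: "Lovelace" }
    // hypothetical formula column combining two other fields
    const formula = "{{ firstName }} {{ lastName }}"
    const constraints = { presence: true, length: { maximum: 50 } }

    // evaluate the formula against the row, then validate the computed value
    const errors = validateJs.single(processStringSync(formula, row), constraints)
    // errors is undefined when the computed value passes the constraints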
diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js
index 60b5167f66..c7b72cf1c8 100644
--- a/packages/server/src/api/controllers/table/index.js
+++ b/packages/server/src/api/controllers/table/index.js
@@ -145,7 +145,7 @@ exports.save = async function (ctx) {
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
- const result = await db.post(tableToSave)
+ const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js
index 154a9ba8f5..d263002da6 100644
--- a/packages/server/src/api/controllers/table/utils.js
+++ b/packages/server/src/api/controllers/table/utils.js
@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
// Populate the table with rows imported from CSV in a bulk update
const data = await csvParser.transform(dataImport)
+ let finalData = []
for (let i = 0; i < data.length; i++) {
let row = data[i]
row._id = generateRowID(table._id)
row.tableId = table._id
- const processed = inputProcessing(user, table, row)
+ const processed = inputProcessing(user, table, row, {
+ noAutoRelationships: true,
+ })
table = processed.table
row = processed.row
- // make sure link rows are up to date
- row = await linkRows.updateLinks({
- appId,
- eventType: linkRows.EventType.ROW_SAVE,
- row,
- tableId: row.tableId,
- table,
- })
-
for (let [fieldName, schema] of Object.entries(table.schema)) {
// check whether the options need to be updated for inclusion as part of the data import
if (
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
]
}
}
- data[i] = row
+
+ // make sure link rows are up to date
+ finalData.push(
+ linkRows.updateLinks({
+ appId,
+ eventType: linkRows.EventType.ROW_SAVE,
+ row,
+ tableId: row.tableId,
+ table,
+ })
+ )
}
- await db.bulkDocs(data)
+ await db.bulkDocs(await Promise.all(finalData))
let response = await db.put(table)
table._rev = response._rev
}
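Note: the import loop above no longer awaits each updateLinks call in sequence; it collects the promises and resolves them together just before the bulk write. The shape of that change, with a hypothetical per-row transform:

    // kick off every (independent) async transform without awaiting in the
    // loop, then resolve them all at once
    async function transformRows(rows, transform) {
      const pending = []
      for (const row of rows) {
        pending.push(transform(row))
      }
      return Promise.all(pending)
    }

    // usage: await db.bulkDocs(await transformRows(data, row => updateLinks({ row /* ... */ })))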
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index 3d0f236fce..ecaee0f32f 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters")
+const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row")
-const { ViewNames } = require("../../../db/utils")
-const controller = {
- fetch: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const response = []
-
- for (let name of Object.keys(designDoc.views)) {
- // Only return custom views, not built ins
- if (Object.values(ViewNames).indexOf(name) !== -1) {
- continue
- }
- response.push({
- name,
- ...designDoc.views[name],
- })
- }
-
- ctx.body = response
- },
- save: async ctx => {
- const db = new CouchDB(ctx.appId)
- const { originalName, ...viewToSave } = ctx.request.body
- const designDoc = await db.get("_design/database")
- const view = viewTemplate(viewToSave)
-
- if (!viewToSave.name) {
- ctx.throw(400, "Cannot create view without a name")
- }
-
- designDoc.views = {
- ...designDoc.views,
- [viewToSave.name]: view,
- }
-
- // view has been renamed
- if (originalName) {
- delete designDoc.views[originalName]
- }
-
- await db.put(designDoc)
-
- // add views to table document
- const table = await db.get(ctx.request.body.tableId)
- if (!table.views) table.views = {}
- if (!view.meta.schema) {
- view.meta.schema = table.schema
- }
- table.views[viewToSave.name] = view.meta
-
- if (originalName) {
- delete table.views[originalName]
- }
-
- await db.put(table)
-
- ctx.body = {
- ...table.views[viewToSave.name],
- name: viewToSave.name,
- }
- },
- destroy: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const viewName = decodeURI(ctx.params.viewName)
- const view = designDoc.views[viewName]
- delete designDoc.views[viewName]
-
- await db.put(designDoc)
-
- const table = await db.get(view.meta.tableId)
- delete table.views[viewName]
- await db.put(table)
-
- ctx.body = view
- },
- exportView: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const viewName = decodeURI(ctx.query.view)
-
- const view = designDoc.views[viewName]
- const format = ctx.query.format
- if (!format) {
- ctx.throw(400, "Format must be specified, either csv or json")
- }
-
- if (view) {
- ctx.params.viewName = viewName
- // Fetch view rows
- ctx.query = {
- group: view.meta.groupBy,
- calculation: view.meta.calculation,
- stats: !!view.meta.field,
- field: view.meta.field,
- }
- } else {
- // table all_ view
- /* istanbul ignore next */
- ctx.params.viewName = viewName
- }
-
- await fetchView(ctx)
-
- let schema = view && view.meta && view.meta.schema
- if (!schema) {
- const tableId = ctx.params.tableId || view.meta.tableId
- const table = await db.get(tableId)
- schema = table.schema
- }
-
- // Export part
- let headers = Object.keys(schema)
- const exporter = exporters[format]
- const filename = `${viewName}.${format}`
- // send down the file
- ctx.attachment(filename)
- ctx.body = apiFileReturn(exporter(headers, ctx.body))
- },
+exports.fetch = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ ctx.body = await getViews(db)
}
-module.exports = controller
+exports.save = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const { originalName, ...viewToSave } = ctx.request.body
+ const view = viewTemplate(viewToSave)
+
+ if (!viewToSave.name) {
+ ctx.throw(400, "Cannot create view without a name")
+ }
+
+ await saveView(db, originalName, viewToSave.name, view)
+
+ // add views to table document
+ const table = await db.get(ctx.request.body.tableId)
+ if (!table.views) table.views = {}
+ if (!view.meta.schema) {
+ view.meta.schema = table.schema
+ }
+ table.views[viewToSave.name] = view.meta
+ if (originalName) {
+ delete table.views[originalName]
+ }
+ await db.put(table)
+
+ ctx.body = {
+ ...table.views[viewToSave.name],
+ name: viewToSave.name,
+ }
+}
+
+exports.destroy = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const viewName = decodeURI(ctx.params.viewName)
+ const view = await deleteView(db, viewName)
+ const table = await db.get(view.meta.tableId)
+ delete table.views[viewName]
+ await db.put(table)
+
+ ctx.body = view
+}
+
+exports.exportView = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const viewName = decodeURI(ctx.query.view)
+ const view = await getView(db, viewName)
+
+ const format = ctx.query.format
+ if (!format) {
+ ctx.throw(400, "Format must be specified, either csv or json")
+ }
+
+ if (view) {
+ ctx.params.viewName = viewName
+ // Fetch view rows
+ ctx.query = {
+ group: view.meta.groupBy,
+ calculation: view.meta.calculation,
+ stats: !!view.meta.field,
+ field: view.meta.field,
+ }
+ } else {
+ // table all_ view
+ /* istanbul ignore next */
+ ctx.params.viewName = viewName
+ }
+
+ await fetchView(ctx)
+
+ let schema = view && view.meta && view.meta.schema
+ if (!schema) {
+ const tableId = ctx.params.tableId || view.meta.tableId
+ const table = await db.get(tableId)
+ schema = table.schema
+ }
+
+ // Export part
+ let headers = Object.keys(schema)
+ const exporter = exporters[format]
+ const filename = `${viewName}.${format}`
+ // send down the file
+ ctx.attachment(filename)
+ ctx.body = apiFileReturn(exporter(headers, ctx.body))
+}
diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js
new file mode 100644
index 0000000000..c93604177f
--- /dev/null
+++ b/packages/server/src/api/controllers/view/utils.js
@@ -0,0 +1,109 @@
+const {
+ ViewNames,
+ generateMemoryViewID,
+ getMemoryViewParams,
+} = require("../../../db/utils")
+const env = require("../../../environment")
+
+exports.getView = async (db, viewName) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ return designDoc.views[viewName]
+ } else {
+ const viewDoc = await db.get(generateMemoryViewID(viewName))
+ return viewDoc.view
+ }
+}
+
+exports.getViews = async db => {
+ const response = []
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ for (let name of Object.keys(designDoc.views)) {
+      // Only return custom views, not built-ins
+ if (Object.values(ViewNames).indexOf(name) !== -1) {
+ continue
+ }
+ response.push({
+ name,
+ ...designDoc.views[name],
+ })
+ }
+ } else {
+ const views = (
+ await db.allDocs(
+ getMemoryViewParams({
+ include_docs: true,
+ })
+ )
+ ).rows.map(row => row.doc)
+ for (let viewDoc of views) {
+ response.push({
+ name: viewDoc.name,
+ ...viewDoc.view,
+ })
+ }
+ }
+ return response
+}
+
+exports.saveView = async (db, originalName, viewName, viewTemplate) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ designDoc.views = {
+ ...designDoc.views,
+ [viewName]: viewTemplate,
+ }
+ // view has been renamed
+ if (originalName) {
+ delete designDoc.views[originalName]
+ }
+ await db.put(designDoc)
+ } else {
+ const id = generateMemoryViewID(viewName)
+ const originalId = originalName ? generateMemoryViewID(originalName) : null
+ const viewDoc = {
+ _id: id,
+ view: viewTemplate,
+ name: viewName,
+ tableId: viewTemplate.meta.tableId,
+ }
+ try {
+ const old = await db.get(id)
+ if (originalId) {
+ const originalDoc = await db.get(originalId)
+ await db.remove(originalDoc._id, originalDoc._rev)
+ }
+ if (old && old._rev) {
+ viewDoc._rev = old._rev
+ }
+ } catch (err) {
+ // didn't exist, just skip
+ }
+ await db.put(viewDoc)
+ }
+}
+
+exports.deleteView = async (db, viewName) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ const view = designDoc.views[viewName]
+ delete designDoc.views[viewName]
+ await db.put(designDoc)
+ return view
+ } else {
+ const id = generateMemoryViewID(viewName)
+ const viewDoc = await db.get(id)
+ await db.remove(viewDoc._id, viewDoc._rev)
+ return viewDoc.view
+ }
+}
+
+exports.migrateToInMemoryView = async (db, viewName) => {
+ // delete the view initially
+ const designDoc = await db.get("_design/database")
+ const view = designDoc.views[viewName]
+ delete designDoc.views[viewName]
+ await db.put(designDoc)
+ await exports.saveView(db, null, viewName, view)
+}
diff --git a/packages/server/src/api/routes/application.js b/packages/server/src/api/routes/application.js
index c1d39acbd5..4d67a0f4f4 100644
--- a/packages/server/src/api/routes/application.js
+++ b/packages/server/src/api/routes/application.js
@@ -2,11 +2,12 @@ const Router = require("@koa/router")
const controller = require("../controllers/application")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/auth/permissions")
+const usage = require("../../middleware/usageQuota")
const router = Router()
router
- .post("/api/applications", authorized(BUILDER), controller.create)
+ .post("/api/applications", authorized(BUILDER), usage, controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@@ -21,6 +22,11 @@ router
authorized(BUILDER),
controller.revertClient
)
- .delete("/api/applications/:appId", authorized(BUILDER), controller.delete)
+ .delete(
+ "/api/applications/:appId",
+ authorized(BUILDER),
+ usage,
+ controller.delete
+ )
module.exports = router
diff --git a/packages/server/src/api/routes/tests/datasource.spec.js b/packages/server/src/api/routes/tests/datasource.spec.js
index 98a99717fd..b6d94f714d 100644
--- a/packages/server/src/api/routes/tests/datasource.spec.js
+++ b/packages/server/src/api/routes/tests/datasource.spec.js
@@ -94,7 +94,8 @@ describe("/datasources", () => {
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
- expect(pg.queryMock).toHaveBeenCalledWith(`select "users"."name" as "users.name", "users"."age" as "users.age" from "users" where "users"."name" ilike $1 limit $2`, ["John%", 5000])
+ const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
+ expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
})
})
diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js
index ad42ef180a..d74a84b2b2 100644
--- a/packages/server/src/api/routes/tests/role.spec.js
+++ b/packages/server/src/api/routes/tests/role.spec.js
@@ -72,7 +72,7 @@ describe("/roles", () => {
.expect(200)
expect(res.body.length).toBeGreaterThan(0)
const power = res.body.find(role => role._id === BUILTIN_ROLE_IDS.POWER)
- expect(power.permissions[table._id]).toEqual("read")
+ expect(power.permissions[table._id]).toEqual(["read"])
})
})
diff --git a/packages/server/src/api/routes/tests/view.spec.js b/packages/server/src/api/routes/tests/view.spec.js
index 458da6e023..b1c5f655c6 100644
--- a/packages/server/src/api/routes/tests/view.spec.js
+++ b/packages/server/src/api/routes/tests/view.spec.js
@@ -205,7 +205,7 @@ describe("/views", () => {
})
describe("exportView", () => {
- it("should be able to delete a view", async () => {
+ it("should be able to export a view", async () => {
await config.createTable(priceTable())
await config.createRow()
const view = await config.createView()
diff --git a/packages/server/src/api/routes/user.js b/packages/server/src/api/routes/user.js
index b3b486fe45..d171870215 100644
--- a/packages/server/src/api/routes/user.js
+++ b/packages/server/src/api/routes/user.js
@@ -5,7 +5,6 @@ const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
const router = Router()
@@ -28,13 +27,11 @@ router
.post(
"/api/users/metadata/self",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
- usage,
controller.updateSelfMetadata
)
.delete(
"/api/users/metadata/:id",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
- usage,
controller.destroyMetadata
)
diff --git a/packages/server/src/api/routes/view.js b/packages/server/src/api/routes/view.js
index 7d390805c6..b72fe1ac26 100644
--- a/packages/server/src/api/routes/view.js
+++ b/packages/server/src/api/routes/view.js
@@ -8,7 +8,6 @@ const {
PermissionTypes,
PermissionLevels,
} = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
const router = Router()
@@ -25,9 +24,8 @@ router
"/api/views/:viewName",
paramResource("viewName"),
authorized(BUILDER),
- usage,
viewController.destroy
)
- .post("/api/views", authorized(BUILDER), usage, viewController.save)
+ .post("/api/views", authorized(BUILDER), viewController.save)
module.exports = router
diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js
index 9706126438..47d0b4eb99 100644
--- a/packages/server/src/automations/steps/createRow.js
+++ b/packages/server/src/automations/steps/createRow.js
@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
exports.definition = {
name: "Create Row",
@@ -59,7 +60,7 @@ exports.definition = {
},
}
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
}
}
// have to clean up the row, remove the table from it
- const ctx = {
+ const ctx = buildCtx(appId, emitter, {
+ body: inputs.row,
params: {
tableId: inputs.row.tableId,
},
- request: {
- body: inputs.row,
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
inputs.row = await automationUtils.cleanUpRow(
@@ -86,8 +83,8 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
inputs.row.tableId,
inputs.row
)
- if (env.isProd()) {
- await usage.update(apiKey, usage.Properties.ROW, 1)
+ if (env.USE_QUOTAS) {
+ await usage.update(usage.Properties.ROW, 1)
}
await rowController.save(ctx)
return {
diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js
index 26623d628b..225f00c5df 100644
--- a/packages/server/src/automations/steps/deleteRow.js
+++ b/packages/server/src/automations/steps/deleteRow.js
@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
exports.definition = {
description: "Delete a row from your database",
@@ -51,7 +52,7 @@ exports.definition = {
},
}
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.id == null || inputs.revision == null) {
return {
success: false,
@@ -60,23 +61,20 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
},
}
}
- let ctx = {
+
+ let ctx = buildCtx(appId, emitter, {
+ body: {
+ _id: inputs.id,
+ _rev: inputs.revision,
+ },
params: {
tableId: inputs.tableId,
},
- request: {
- body: {
- _id: inputs.id,
- _rev: inputs.revision,
- },
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
if (env.isProd()) {
- await usage.update(apiKey, usage.Properties.ROW, -1)
+ await usage.update(usage.Properties.ROW, -1)
}
await rowController.destroy(ctx)
return {
diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.js
index 64b757418e..3c4bb422a0 100644
--- a/packages/server/src/automations/steps/queryRows.js
+++ b/packages/server/src/automations/steps/queryRows.js
@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const tableController = require("../../api/controllers/table")
const { FieldTypes } = require("../../constants")
+const { buildCtx } = require("./utils")
const SortOrders = {
ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
}
async function getTable(appId, tableId) {
- const ctx = {
+ const ctx = buildCtx(appId, null, {
params: {
id: tableId,
},
- appId,
- }
+ })
await tableController.find(ctx)
return ctx.body
}
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
sortType =
fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
}
- const ctx = {
+ const ctx = buildCtx(appId, null, {
params: {
tableId,
},
- request: {
- body: {
- sortOrder,
- sortType,
- sort: sortColumn,
- query: filters || {},
- limit,
- },
+ body: {
+ sortOrder,
+ sortType,
+ sort: sortColumn,
+ query: filters || {},
+ limit,
},
- appId,
- }
+ })
try {
await rowController.search(ctx)
return {
diff --git a/packages/server/src/automations/steps/sendSmtpEmail.js b/packages/server/src/automations/steps/sendSmtpEmail.js
index 9e4b5a6a3c..07a3059215 100644
--- a/packages/server/src/automations/steps/sendSmtpEmail.js
+++ b/packages/server/src/automations/steps/sendSmtpEmail.js
@@ -53,7 +53,7 @@ exports.run = async function ({ inputs }) {
contents = "<h1>No content</h1>"
}
try {
- let response = await sendSmtpEmail(to, from, subject, contents)
+ let response = await sendSmtpEmail(to, from, subject, contents, true)
return {
success: true,
response,
diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js
index ac5eb16fcd..94f77bc801 100644
--- a/packages/server/src/automations/steps/updateRow.js
+++ b/packages/server/src/automations/steps/updateRow.js
@@ -1,5 +1,6 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
+const { buildCtx } = require("./utils")
exports.definition = {
name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
// have to clean up the row, remove the table from it
- const ctx = {
+ const ctx = buildCtx(appId, emitter, {
+ body: {
+ ...inputs.row,
+ _id: inputs.rowId,
+ },
params: {
rowId: inputs.rowId,
},
- request: {
- body: {
- ...inputs.row,
- _id: inputs.rowId,
- },
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
inputs.row = await automationUtils.cleanUpRowById(
diff --git a/packages/server/src/automations/tests/automation.spec.js b/packages/server/src/automations/tests/automation.spec.js
index 83b7b81a75..9444995ca1 100644
--- a/packages/server/src/automations/tests/automation.spec.js
+++ b/packages/server/src/automations/tests/automation.spec.js
@@ -13,8 +13,6 @@ const { makePartial } = require("../../tests/utilities")
const { cleanInputValues } = require("../automationUtils")
const setup = require("./utilities")
-usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
-
describe("Run through some parts of the automations system", () => {
let config = setup.getConfig()
diff --git a/packages/server/src/automations/tests/createRow.spec.js b/packages/server/src/automations/tests/createRow.spec.js
index 1004711d87..a04fc7aad4 100644
--- a/packages/server/src/automations/tests/createRow.spec.js
+++ b/packages/server/src/automations/tests/createRow.spec.js
@@ -46,7 +46,7 @@ describe("test the create row action", () => {
await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row
})
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
})
})
diff --git a/packages/server/src/automations/tests/deleteRow.spec.js b/packages/server/src/automations/tests/deleteRow.spec.js
index a3d73d3bf6..21246f22d0 100644
--- a/packages/server/src/automations/tests/deleteRow.spec.js
+++ b/packages/server/src/automations/tests/deleteRow.spec.js
@@ -37,7 +37,7 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})
diff --git a/packages/server/src/automations/thread.js b/packages/server/src/automations/thread.js
index a3e81a2274..ef12494165 100644
--- a/packages/server/src/automations/thread.js
+++ b/packages/server/src/automations/thread.js
@@ -4,8 +4,10 @@ const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants
const CouchDB = require("../db")
-const { DocumentTypes } = require("../db/utils")
+const { DocumentTypes, isDevAppID } = require("../db/utils")
const { doInTenant } = require("@budibase/auth/tenancy")
+const env = require("../environment")
+const usage = require("../utilities/usageQuota")
const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
@@ -80,7 +82,6 @@ class Orchestrator {
return stepFn({
inputs: step.inputs,
appId: this._appId,
- apiKey: automation.apiKey,
emitter: this._emitter,
context: this._context,
})
@@ -95,6 +96,11 @@ class Orchestrator {
return err
}
}
+
+ // Increment quota for automation runs
+ if (!env.SELF_HOSTED && !isDevAppID(this._appId)) {
+ usage.update(usage.Properties.AUTOMATION, 1)
+ }
return this.executionOutput
}
}
diff --git a/packages/server/src/db/inMemoryView.js b/packages/server/src/db/inMemoryView.js
new file mode 100644
index 0000000000..892617e068
--- /dev/null
+++ b/packages/server/src/db/inMemoryView.js
@@ -0,0 +1,48 @@
+const PouchDB = require("pouchdb")
+const memory = require("pouchdb-adapter-memory")
+const newid = require("./newid")
+
+PouchDB.plugin(memory)
+const Pouch = PouchDB.defaults({
+ prefix: undefined,
+ adapter: "memory",
+})
+
+exports.runView = async (view, calculation, group, data) => {
+ // use a different ID each time for the DB, make sure they
+ // are always unique for each query, don't want overlap
+ // which could cause 409s
+ const db = new Pouch(newid())
+ // write all the docs to the in memory Pouch (remove revs)
+ await db.bulkDocs(
+ data.map(row => ({
+ ...row,
+ _rev: undefined,
+ }))
+ )
+ let fn = (doc, emit) => emit(doc._id)
+ eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
+ const queryFns = {
+ meta: view.meta,
+ map: fn,
+ }
+ if (view.reduce) {
+ queryFns.reduce = view.reduce
+ }
+ const response = await db.query(queryFns, {
+ include_docs: !calculation,
+ group: !!group,
+ })
+ // need to fix the revs to be totally accurate
+ for (let row of response.rows) {
+ if (!row._rev || !row._id) {
+ continue
+ }
+ const found = data.find(possible => possible._id === row._id)
+ if (found) {
+ row._rev = found._rev
+ }
+ }
+ await db.destroy()
+ return response
+}
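Note: a usage sketch for runView - it loads the raw rows into a throwaway memory-adapter PouchDB, rebuilds the stored map string into a real function, and queries it like a design-doc view. The view definition below is hypothetical:

    const inMemoryViews = require("./inMemoryView")

    const view = {
      meta: { tableId: "ta_weather" },
      // map functions are stored as source strings, as they would be in a design doc
      map: `function (doc) {
        if (doc.tableId === "ta_weather") {
          emit(doc._id)
        }
      }`,
    }

    async function fetchViewRows(rows) {
      // calculation/group left empty: include_docs is on, grouping is off
      const response = await inMemoryViews.runView(view, null, null, rows)
      return response.rows.map(row => row.doc)
    }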
diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js
index 67412e7e89..303cd085c1 100644
--- a/packages/server/src/db/linkedRows/index.js
+++ b/packages/server/src/db/linkedRows/index.js
@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
// create DBs
const db = new CouchDB(appId)
const linkedRowIds = links.map(link => link.id)
- let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+ const uniqueRowIds = [...new Set(linkedRowIds)]
+ let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
row => row.doc
)
+ // convert the unique db rows back to a full list of linked rows
+ const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
// need to handle users as specific cases
let [users, other] = partition(linked, linkRow =>
linkRow._id.startsWith(USER_METDATA_PREFIX)
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
let linkController = new LinkController(args)
try {
if (
- !(await linkController.doesTableHaveLinkedFields()) &&
+ !(await linkController.doesTableHaveLinkedFields(table)) &&
(oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
) {
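Note: the linked-row fix above is subtle - allDocs is now queried with de-duplicated IDs (repeated keys would return repeated rows), and the result is then mapped back over the original ID list so callers still receive one entry per link. In isolation:

    // fetch each unique id once, then restore the original (possibly repeating) order
    function expandToLinks(linkedRowIds, dbRows) {
      return linkedRowIds.map(id => dbRows.find(row => row._id === id))
    }

    const ids = ["ro_1", "ro_2", "ro_1"]
    const docs = [{ _id: "ro_1" }, { _id: "ro_2" }]
    expandToLinks(ids, docs) // => [ro_1 doc, ro_2 doc, ro_1 doc]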
diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js
index ec1c267fa2..3e20b30869 100644
--- a/packages/server/src/db/utils.js
+++ b/packages/server/src/db/utils.js
@@ -39,6 +39,7 @@ const DocumentTypes = {
QUERY: "query",
DEPLOYMENTS: "deployments",
METADATA: "metadata",
+ MEM_VIEW: "view",
}
const ViewNames = {
@@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
}
+exports.generateMemoryViewID = viewName => {
+ return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
+}
+
+exports.getMemoryViewParams = (otherProps = {}) => {
+ return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
+}
+
/**
* This can be used with the db.allDocs to get a list of IDs
*/
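Note: assuming the module's SEPARATOR constant is the usual underscore, the IDs these helpers produce look like:

    generateMemoryViewID("active-rows") // => "view_active-rows"
    // getMemoryViewParams() then matches every doc with the "view_" prefix via allDocs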
diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts
index 48fd24e1cf..2daef8eda7 100644
--- a/packages/server/src/definitions/datasource.ts
+++ b/packages/server/src/definitions/datasource.ts
@@ -1,3 +1,5 @@
+import { Table } from "./common"
+
export enum Operation {
CREATE = "CREATE",
READ = "READ",
@@ -136,6 +138,9 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
+ meta?: {
+ table?: Table
+ }
extra?: {
idFilter?: SearchFilters
}
diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js
index c5739a37e1..89e015b6f5 100644
--- a/packages/server/src/environment.js
+++ b/packages/server/src/environment.js
@@ -26,7 +26,7 @@ module.exports = {
COUCH_DB_URL: process.env.COUCH_DB_URL,
MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL,
- SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
+ SELF_HOSTED: process.env.SELF_HOSTED,
AWS_REGION: process.env.AWS_REGION,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -66,3 +66,10 @@ module.exports = {
return !isDev()
},
}
+
+// convert numeric strings to numbers - otherwise a string like "0" would be truthy
+for (let [key, value] of Object.entries(module.exports)) {
+ if (typeof value === "string" && !isNaN(parseInt(value))) {
+ module.exports[key] = parseInt(value)
+ }
+}
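Note: the coercion loop is what makes dropping the explicit parseInt on SELF_HOSTED safe - the raw string "0" would be truthy, so numeric-looking values are converted after export. Its behaviour over hypothetical values:

    const vars = { SELF_HOSTED: "0", WORKER_URL: "http://worker:4002" }
    for (let [key, value] of Object.entries(vars)) {
      if (typeof value === "string" && !isNaN(parseInt(value))) {
        vars[key] = parseInt(value)
      }
    }
    // vars.SELF_HOSTED === 0 (falsy), while WORKER_URL is untouched
    // because parseInt("http://worker:4002") is NaN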
diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts
index b59bac5a5a..c5e9bdb0bb 100644
--- a/packages/server/src/integrations/base/sql.ts
+++ b/packages/server/src/integrations/base/sql.ts
@@ -1,7 +1,5 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
-// if requesting a single row then need to up the limit for the sake of joins
-const SINGLE_ROW_LIMIT = 100
import {
QueryJson,
SearchFilters,
@@ -146,46 +144,48 @@ function buildCreate(
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
const tableName = endpoint.entityId
- let query: KnexQuery = knex(tableName)
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
+ let selectStatement: string | string[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
- query = query.select(resource.fields.map(field => `${field} as ${field}`))
- } else {
- query = query.select("*")
+ selectStatement = resource.fields.map(field => `${field} as ${field}`)
+ }
+ let foundLimit = limit || BASE_LIMIT
+ // handle pagination
+ let foundOffset: number | null = null
+ if (paginate && paginate.page && paginate.limit) {
+ // @ts-ignore
+ const page = paginate.page <= 1 ? 0 : paginate.page - 1
+ const offset = page * paginate.limit
+ foundLimit = paginate.limit
+ foundOffset = offset
+ } else if (paginate && paginate.limit) {
+ foundLimit = paginate.limit
+ }
+ // start building the query
+ let query: KnexQuery = knex(tableName).limit(foundLimit)
+ if (foundOffset) {
+ query = query.offset(foundOffset)
}
- // handle where
- query = addFilters(tableName, query, filters)
- // handle join
- query = addRelationships(query, tableName, relationships)
- // handle sorting
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
}
}
- let foundLimit = limit || BASE_LIMIT
- // handle pagination
- if (paginate && paginate.page && paginate.limit) {
+ query = addFilters(tableName, query, filters)
+ // @ts-ignore
+ let preQuery: KnexQuery = knex({
// @ts-ignore
- const page = paginate.page <= 1 ? 0 : paginate.page - 1
- const offset = page * paginate.limit
- foundLimit = paginate.limit
- query = query.offset(offset)
- } else if (paginate && paginate.limit) {
- foundLimit = paginate.limit
- }
- if (foundLimit === 1) {
- foundLimit = SINGLE_ROW_LIMIT
- }
- query = query.limit(foundLimit)
- return query
+ [tableName]: query,
+ }).select(selectStatement)
+ // handle joins
+ return addRelationships(preQuery, tableName, relationships)
}
function buildUpdate(
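Note: the net effect of the buildRead rewrite is that filters, sorting and the limit now live in an inner query, with the field selection and relationship joins applied to a wrapper aliased back to the table name - so a limit of 1 no longer truncates joined relationship rows, which is why SINGLE_ROW_LIMIT could be removed. Roughly, expressed with knex directly:

    const knex = require("knex")({ client: "pg" })

    // inner query: table + filters + limit
    const inner = knex("users").where("users.name", "ilike", "John%").limit(5000)
    // wrapper: selection (and any joins) run over the already-limited set
    const query = knex({ users: inner }).select("*")

    query.toString()
    // select * from (select * from "users" where "users"."name" ilike 'John%' limit 5000) as "users"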
diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts
index 3ce21675d9..c17cca0745 100644
--- a/packages/server/src/integrations/mysql.ts
+++ b/packages/server/src/integrations/mysql.ts
@@ -12,7 +12,11 @@ import { getSqlQuery } from "./utils"
module MySQLModule {
const mysql = require("mysql")
const Sql = require("./base/sql")
- const { buildExternalTableId, convertType } = require("./utils")
+ const {
+ buildExternalTableId,
+ convertType,
+ copyExistingPropsOver,
+ } = require("./utils")
const { FieldTypes } = require("../constants")
interface MySQLConfig {
@@ -104,7 +108,7 @@ module MySQLModule {
client: any,
query: SqlQuery,
connect: boolean = true
-  ): Promise<any[]> {
+  ): Promise<any[] | any> {
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
if (connect) {
@@ -194,18 +198,7 @@ module MySQLModule {
}
}
- // add the existing relationships from the entities if they exist, to prevent them from being overridden
- if (entities && entities[tableName]) {
- const existingTableSchema = entities[tableName].schema
- for (let key in existingTableSchema) {
- if (!existingTableSchema.hasOwnProperty(key)) {
- continue
- }
- if (existingTableSchema[key].type === "link") {
- tables[tableName].schema[key] = existingTableSchema[key]
- }
- }
- }
+ copyExistingPropsOver(tableName, tables, entities)
}
this.client.end()
@@ -249,6 +242,23 @@ module MySQLModule {
return internalQuery(this.client, input, false)
}
+  // when creating, if an ID has been inserted we need to make sure
+  // the ID filter is enriched with it before trying to retrieve the row
+ checkLookupKeys(results: any, json: QueryJson) {
+ if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) {
+ return json
+ }
+ const primaryKey = json.meta.table.primary?.[0]
+ json.extra = {
+ idFilter: {
+ equal: {
+ [primaryKey]: results.insertId,
+ },
+ },
+ }
+ return json
+ }
+
async query(json: QueryJson) {
const operation = this._operation(json)
this.client.connect()
@@ -261,7 +271,7 @@ module MySQLModule {
const results = await internalQuery(this.client, input, false)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
- row = this.getReturningRow(json)
+ row = this.getReturningRow(this.checkLookupKeys(results, json))
}
this.client.end()
if (operation !== Operation.READ) {
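Note: MySQL has no RETURNING clause, so after a CREATE the new row has to be read back; checkLookupKeys uses the driver's insertId plus the table metadata now carried on json.meta to build that lookup filter. The enrichment on its own, with hypothetical inputs:

    // results: what the mysql driver returns for an INSERT
    const results = { insertId: 42 }
    // json: a QueryJson whose meta carries the table's primary key
    const json = { meta: { table: { primary: ["id"] } } }

    if (results.insertId && json.meta.table.primary) {
      const primaryKey = json.meta.table.primary[0]
      json.extra = { idFilter: { equal: { [primaryKey]: results.insertId } } }
    }
    // json.extra.idFilter is now { equal: { id: 42 } } - enough to re-read the row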
diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts
index dd46652871..db81e183d1 100644
--- a/packages/server/src/integrations/postgres.ts
+++ b/packages/server/src/integrations/postgres.ts
@@ -12,7 +12,14 @@ module PostgresModule {
const { Pool } = require("pg")
const Sql = require("./base/sql")
const { FieldTypes } = require("../constants")
- const { buildExternalTableId, convertType } = require("./utils")
+ const {
+ buildExternalTableId,
+ convertType,
+ copyExistingPropsOver,
+ } = require("./utils")
+ const { escapeDangerousCharacters } = require("../utilities")
+
+ const JSON_REGEX = /'{.*}'::json/s
interface PostgresConfig {
host: string
@@ -84,13 +91,27 @@ module PostgresModule {
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
+ real: FieldTypes.NUMBER,
+ "double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
+ date: FieldTypes.DATETIME,
}
async function internalQuery(client: any, query: SqlQuery) {
+ // need to handle a specific issue with json data types in postgres,
+ // new lines inside the JSON data will break it
+ if (query && query.sql) {
+ const matches = query.sql.match(JSON_REGEX)
+ if (matches && matches.length > 0) {
+ for (let match of matches) {
+ const escaped = escapeDangerousCharacters(match)
+ query.sql = query.sql.replace(match, escaped)
+ }
+ }
+ }
try {
return await client.query(query.sql, query.bindings || [])
} catch (err) {
@@ -173,31 +194,30 @@ module PostgresModule {
name: tableName,
schema: {},
}
-
- // add the existing relationships from the entities if they exist, to prevent them from being overridden
- if (entities && entities[tableName]) {
- const existingTableSchema = entities[tableName].schema
- for (let key in existingTableSchema) {
- if (!existingTableSchema.hasOwnProperty(key)) {
- continue
- }
- if (existingTableSchema[key].type === "link") {
- tables[tableName].schema[key] = existingTableSchema[key]
- }
- }
- }
}
const type: string = convertType(column.data_type, TYPE_MAP)
- const isAuto: boolean =
+ const identity = !!(
+ column.identity_generation ||
+ column.identity_start ||
+ column.identity_increment
+ )
+ const hasDefault =
typeof column.column_default === "string" &&
column.column_default.startsWith("nextval")
+ const isGenerated =
+ column.is_generated && column.is_generated !== "NEVER"
+ const isAuto: boolean = hasDefault || identity || isGenerated
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,
type,
}
}
+
+ for (let tableName of Object.keys(tables)) {
+ copyExistingPropsOver(tableName, tables, entities)
+ }
this.tables = tables
}
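Note: autocolumn detection now covers the three ways Postgres produces generated values - serial-style nextval defaults, identity columns, and generated columns. Condensed into one predicate over an information_schema.columns row:

    // column: one row from information_schema.columns (fields abbreviated)
    function isAutoColumn(column) {
      const identity = !!(
        column.identity_generation ||
        column.identity_start ||
        column.identity_increment
      )
      const hasDefault =
        typeof column.column_default === "string" &&
        column.column_default.startsWith("nextval")
      const isGenerated = column.is_generated && column.is_generated !== "NEVER"
      return hasDefault || identity || isGenerated
    }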
diff --git a/packages/server/src/integrations/tests/sql.spec.js b/packages/server/src/integrations/tests/sql.spec.js
index fa8bcd1d86..64cdda215f 100644
--- a/packages/server/src/integrations/tests/sql.spec.js
+++ b/packages/server/src/integrations/tests/sql.spec.js
@@ -57,7 +57,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateReadJson())
expect(query).toEqual({
bindings: [limit],
- sql: `select * from "${TABLE_NAME}" limit $1`
+ sql: `select * from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -68,7 +68,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [limit],
- sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from "${TABLE_NAME}" limit $1`
+ sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -82,7 +82,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: ["John%", limit],
- sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2`
+ sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2) as "${TABLE_NAME}"`
})
})
@@ -99,7 +99,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [2, 10, limit],
- sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3`
+ sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3) as "${TABLE_NAME}"`
})
})
@@ -115,7 +115,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [10, "John", limit],
- sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3`
+ sql: `select * from (select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3) as "${TABLE_NAME}"`
})
})
@@ -160,7 +160,7 @@ describe("SQL query builder", () => {
const query = new Sql("mssql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
- sql: `select top (@p0) * from [${TABLE_NAME}]`
+ sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]`
})
})
@@ -168,7 +168,7 @@ describe("SQL query builder", () => {
const query = new Sql("mysql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
- sql: `select * from \`${TABLE_NAME}\` limit ?`
+ sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) as \`${TABLE_NAME}\``
})
})
})
diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts
index 5b247213c0..6e3dc6f684 100644
--- a/packages/server/src/integrations/utils.ts
+++ b/packages/server/src/integrations/utils.ts
@@ -82,3 +82,25 @@ export function isIsoDateString(str: string) {
let d = new Date(str)
return d.toISOString() === str
}
+
+// add the existing relationships from the entities if they exist, to prevent them from being overridden
+export function copyExistingPropsOver(
+ tableName: string,
+ tables: { [key: string]: any },
+ entities: { [key: string]: any }
+) {
+ if (entities && entities[tableName]) {
+ if (entities[tableName].primaryDisplay) {
+ tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
+ }
+ const existingTableSchema = entities[tableName].schema
+ for (let key in existingTableSchema) {
+ if (!existingTableSchema.hasOwnProperty(key)) {
+ continue
+ }
+ if (existingTableSchema[key].type === "link") {
+ tables[tableName].schema[key] = existingTableSchema[key]
+ }
+ }
+ }
+}
diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js
index 97d9c7794a..d828f2ca60 100644
--- a/packages/server/src/middleware/tests/usageQuota.spec.js
+++ b/packages/server/src/middleware/tests/usageQuota.spec.js
@@ -39,7 +39,7 @@ class TestConfiguration {
if (bool) {
env.isDev = () => false
env.isProd = () => true
- this.ctx.auth = { apiKey: "test" }
+ this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
@@ -114,7 +114,7 @@ describe("usageQuota middleware", () => {
await config.executeMiddleware()
- expect(usageQuota.update).toHaveBeenCalledWith("test", "rows", 1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})
@@ -131,7 +131,7 @@ describe("usageQuota middleware", () => {
])
await config.executeMiddleware()
- expect(usageQuota.update).toHaveBeenCalledWith("test", "storage", 10100)
+ expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
expect(config.next).toHaveBeenCalled()
})
})
\ No newline at end of file
diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js
index 4647878721..3a244ef5bc 100644
--- a/packages/server/src/middleware/usageQuota.js
+++ b/packages/server/src/middleware/usageQuota.js
@@ -13,6 +13,7 @@ const DOMAIN_MAP = {
upload: usageQuota.Properties.UPLOAD,
views: usageQuota.Properties.VIEW,
users: usageQuota.Properties.USER,
+ applications: usageQuota.Properties.APPS,
// this will not be updated by endpoint calls
// instead it will be updated by triggerInfo
automationRuns: usageQuota.Properties.AUTOMATION,
@@ -57,9 +58,9 @@ module.exports = async (ctx, next) => {
usage = files.map(file => file.size).reduce((total, size) => total + size)
}
try {
- await usageQuota.update(ctx.auth.apiKey, property, usage)
+ await usageQuota.update(property, usage)
return next()
} catch (err) {
- ctx.throw(403, err)
+ ctx.throw(400, err)
}
}
diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js
index a81f9ddcf5..b16a687fe5 100644
--- a/packages/server/src/utilities/index.js
+++ b/packages/server/src/utilities/index.js
@@ -10,6 +10,14 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
exports.isDev = env.isDev
+exports.removeFromArray = (array, element) => {
+ const index = array.indexOf(element)
+ if (index !== -1) {
+ array.splice(index, 1)
+ }
+ return array
+}
+
/**
* Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes.
@@ -106,3 +114,13 @@ exports.deleteEntityMetadata = async (appId, type, entityId) => {
await db.remove(id, rev)
}
}
+
+exports.escapeDangerousCharacters = string => {
+ return string
+ .replace(/[\\]/g, "\\\\")
+ .replace(/[\b]/g, "\\b")
+ .replace(/[\f]/g, "\\f")
+ .replace(/[\n]/g, "\\n")
+ .replace(/[\r]/g, "\\r")
+ .replace(/[\t]/g, "\\t")
+}
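Note: escapeDangerousCharacters pairs with the JSON_REGEX handling in postgres.ts above - a raw newline inside an inline '{...}'::json literal breaks the statement, so control characters are rewritten to their two-character escape sequences first. For example:

    const { escapeDangerousCharacters } = require("./index")

    const literal = "'{\"note\": \"line one\nline two\"}'::json"
    escapeDangerousCharacters(literal)
    // => '{"note": "line one\\nline two"}'::json - the real newline is now a literal \n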
diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js
index bb4ac98bb7..07549dd8a8 100644
--- a/packages/server/src/utilities/rowProcessor/index.js
+++ b/packages/server/src/utilities/rowProcessor/index.js
@@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param {Object} row The row which is to be updated with information for the auto columns.
+ * @param {Object} opts specific options for the function's optional features.
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
* for automatic ID purposes.
*/
-function processAutoColumn(user, table, row) {
+function processAutoColumn(
+ user,
+ table,
+ row,
+ opts = { reprocessing: false, noAutoRelationships: false }
+) {
let now = new Date().toISOString()
// if a row doesn't have a revision then it doesn't exist yet
const creating = !row._rev
@@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
}
switch (schema.subtype) {
case AutoFieldSubTypes.CREATED_BY:
- if (creating) {
+ if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId]
}
break
@@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
}
break
case AutoFieldSubTypes.UPDATED_BY:
- row[key] = [user.userId]
+ if (!opts.reprocessing && !opts.noAutoRelationships) {
+ row[key] = [user.userId]
+ }
break
case AutoFieldSubTypes.UPDATED_AT:
row[key] = now
@@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
}
return { table, row }
}
+exports.processAutoColumn = processAutoColumn
/**
* This will coerce a value to the correct types based on the type transform map
@@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
* @param {object} user the user which is performing the input.
* @param {object} row the row which is being created/updated.
* @param {object} table the table which the row is being saved to.
+ * @param {object} opts Input processing options, e.g. { noAutoRelationships: true } to skip auto-column relationships.
* @returns {object} the row which has been prepared to be written to the DB.
*/
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+ user = {},
+ table,
+ row,
+ opts = { noAutoRelationships: false }
+) => {
let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out
const copiedTable = cloneDeep(table)
@@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
}
}
// handle auto columns - this returns an object like {table, row}
- return processAutoColumn(user, copiedTable, clonedRow)
+ return processAutoColumn(user, copiedTable, clonedRow, opts)
}
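
To make the new opts concrete: a sketch of how a caller opts out of the user-relationship auto-columns (the table and row fixtures here are hypothetical):

```js
const { inputProcessing } = require("./utilities/rowProcessor") // path as in the diff

const user = { userId: "us_123" }
const table = { /* schema with CREATED_BY/UPDATED_BY auto-columns elided */ }
const row = { name: "example" }

// Default: createdBy/updatedBy auto-columns are stamped with user.userId.
inputProcessing(user, table, row)

// With noAutoRelationships (or reprocessing) set, those user-relationship
// columns are left alone; createdAt/updatedAt timestamps are still written.
inputProcessing(user, table, row, { noAutoRelationships: true })
```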
/**
diff --git a/packages/server/src/utilities/usageQuota.js b/packages/server/src/utilities/usageQuota.js
index bfe71a4093..80fddb8303 100644
--- a/packages/server/src/utilities/usageQuota.js
+++ b/packages/server/src/utilities/usageQuota.js
@@ -1,41 +1,9 @@
const env = require("../environment")
-const { apiKeyTable } = require("../db/dynamoClient")
-
-const DEFAULT_USAGE = {
- rows: 0,
- storage: 0,
- views: 0,
- automationRuns: 0,
- users: 0,
-}
-
-const DEFAULT_PLAN = {
- rows: 1000,
- // 1 GB
- storage: 8589934592,
- views: 10,
- automationRuns: 100,
- users: 10000,
-}
-
-function buildUpdateParams(key, property, usage) {
- return {
- primary: key,
- condition:
- "attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
- expression: "ADD #quota.#prop :usage",
- names: {
- "#quota": "usageQuota",
- "#prop": property,
- "#limits": "usageLimits",
- "#quotaReset": "quotaReset",
- },
- values: {
- ":usage": usage,
- ":now": Date.now(),
- },
- }
-}
+const { getGlobalDB } = require("@budibase/auth/tenancy")
+const {
+ StaticDatabases,
+ generateNewUsageQuotaDoc,
+} = require("@budibase/auth/db")
function getNewQuotaReset() {
return Date.now() + 2592000000
@@ -47,59 +15,61 @@ exports.Properties = {
VIEW: "views",
USER: "users",
AUTOMATION: "automationRuns",
+ APPS: "apps",
+ EMAILS: "emails",
}
-exports.getAPIKey = async appId => {
- if (!env.USE_QUOTAS) {
- return { apiKey: null }
+async function getUsageQuotaDoc(db) {
+ let quota
+ try {
+ quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota)
+ } catch (err) {
+    // doc doesn't exist yet - create it and store the new revision
+    quota = generateNewUsageQuotaDoc()
+    const response = await db.post(quota)
+    quota._rev = response.rev
}
- return apiKeyTable.get({ primary: appId })
+
+ return quota
}
/**
- * Given a specified API key this will add to the usage object for the specified property.
- * @param {string} apiKey The API key which is to be updated.
+ * For the current tenant (taken from the request context) this will add to the usage object for the specified property.
* @param {string} property The property which is to be added to (within the nested usageQuota object).
* @param {number} usage The amount (this can be negative) to adjust the number by.
- * @returns {Promise} When this completes the API key will now be up to date - the quota period may have
- * also been reset after this call.
+ * @returns {Promise} When this completes the usage doc will be up to date - the quota period may have
+ * also been reset after this call.
*/
-exports.update = async (apiKey, property, usage) => {
+exports.update = async (property, usage) => {
if (!env.USE_QUOTAS) {
return
}
+
try {
- await apiKeyTable.update(buildUpdateParams(apiKey, property, usage))
- } catch (err) {
- // conditional check means the condition failed, need to check why
- if (err.code === "ConditionalCheckFailedException") {
- // get the API key so we can check it
- const keyObj = await apiKeyTable.get({ primary: apiKey })
- // the usage quota or usage limits didn't exist
- if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) {
- keyObj.usageQuota =
- keyObj.usageQuota == null ? DEFAULT_USAGE : keyObj.usageQuota
- keyObj.usageLimits =
- keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits
- keyObj.quotaReset = getNewQuotaReset()
- await apiKeyTable.put({ item: keyObj })
- return
- }
- // we have in fact breached the reset period
- else if (keyObj && keyObj.quotaReset <= Date.now()) {
- // update the quota reset period and reset the values for all properties
- keyObj.quotaReset = getNewQuotaReset()
- for (let prop of Object.keys(keyObj.usageQuota)) {
- if (prop === property) {
- keyObj.usageQuota[prop] = usage > 0 ? usage : 0
- } else {
- keyObj.usageQuota[prop] = 0
- }
- }
- await apiKeyTable.put({ item: keyObj })
- return
+ const db = getGlobalDB()
+ const quota = await getUsageQuotaDoc(db)
+
+ // Check if the quota needs reset
+ if (Date.now() >= quota.quotaReset) {
+ quota.quotaReset = getNewQuotaReset()
+ for (let prop of Object.keys(quota.usageQuota)) {
+ quota.usageQuota[prop] = 0
}
}
+
+ // increment the quota
+ quota.usageQuota[property] += usage
+
+ if (quota.usageQuota[property] >= quota.usageLimits[property]) {
+ throw new Error(
+ `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
+ )
+ }
+
+ // update the usage quotas
+ await db.put(quota)
+ } catch (err) {
+ console.error(`Error updating usage quotas for ${property}`, err)
throw err
}
}
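
The net effect of the rewrite above: quota state moves from a per-API-key DynamoDB row into a single `usage_quota` doc in the tenant's global CouchDB database, and `update` itself handles period resets and limit enforcement. A sketch of a caller, assuming `USE_QUOTAS` is enabled; the `"rows"` string matches the doc's property names, and the 4000 limit is the default from `generateNewUsageQuotaDoc`:

```js
const usageQuota = require("./utilities/usageQuota") // path as in the diff

async function chargeRowQuota() {
  // Adds 1 to usageQuota.rows in the tenant's usage_quota doc. If the
  // 30-day window (quotaReset) has lapsed, all counters reset first.
  // Once rows would reach usageLimits.rows (4000 by default) this throws
  // instead of persisting, and the middleware turns that into a 400.
  await usageQuota.update("rows", 1)
}
```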
diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js
index 377658084f..2ace265ca0 100644
--- a/packages/server/src/utilities/workerRequests.js
+++ b/packages/server/src/utilities/workerRequests.js
@@ -34,7 +34,7 @@ function request(ctx, request) {
exports.request = request
// have to pass in the tenant ID as this could be coming from an automation
-exports.sendSmtpEmail = async (to, from, subject, contents) => {
+exports.sendSmtpEmail = async (to, from, subject, contents, automation) => {
// tenant ID will be set in header
const response = await fetch(
checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
@@ -46,6 +46,7 @@ exports.sendSmtpEmail = async (to, from, subject, contents) => {
contents,
subject,
purpose: "custom",
+ automation,
},
})
)
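
For completeness, how an automation step would call the updated helper; the addresses and content are placeholders:

```js
const { sendSmtpEmail } = require("./utilities/workerRequests") // path as in the diff

async function sendAutomationEmail() {
  await sendSmtpEmail(
    "to@example.com",   // to
    "from@example.com", // from
    "Weekly report",    // subject
    "<p>Hello</p>",     // contents
    true                // automation - lets the worker pick the fallback SMTP config
  )
}
```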
diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json
index 44a3290696..9b3c2487b9 100644
--- a/packages/string-templates/package.json
+++ b/packages/string-templates/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",
diff --git a/packages/worker/package.json b/packages/worker/package.json
index 83e53e9b45..a1cd5c90e1 100644
--- a/packages/worker/package.json
+++ b/packages/worker/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
- "version": "0.9.142",
+ "version": "0.9.143-alpha.0",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@@ -25,8 +25,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.142",
- "@budibase/string-templates": "^0.9.142",
+ "@budibase/auth": "^0.9.143-alpha.0",
+ "@budibase/string-templates": "^0.9.143-alpha.0",
"@koa/router": "^8.0.0",
"@techpass/passport-openidconnect": "^0.3.0",
"aws-sdk": "^2.811.0",
diff --git a/packages/worker/src/api/controllers/global/email.js b/packages/worker/src/api/controllers/global/email.js
index 57b78a6d7a..e194a30862 100644
--- a/packages/worker/src/api/controllers/global/email.js
+++ b/packages/worker/src/api/controllers/global/email.js
@@ -2,8 +2,16 @@ const { sendEmail } = require("../../../utilities/email")
const { getGlobalDB } = require("@budibase/auth/tenancy")
exports.sendEmail = async ctx => {
- let { workspaceId, email, userId, purpose, contents, from, subject } =
- ctx.request.body
+ let {
+ workspaceId,
+ email,
+ userId,
+ purpose,
+ contents,
+ from,
+ subject,
+ automation,
+ } = ctx.request.body
let user
if (userId) {
const db = getGlobalDB()
@@ -15,6 +23,7 @@ exports.sendEmail = async ctx => {
contents,
from,
subject,
+ automation,
})
ctx.body = {
...response,
diff --git a/packages/worker/src/api/controllers/global/users.js b/packages/worker/src/api/controllers/global/users.js
index c8382d4189..1d3f38698b 100644
--- a/packages/worker/src/api/controllers/global/users.js
+++ b/packages/worker/src/api/controllers/global/users.js
@@ -1,8 +1,8 @@
const {
generateGlobalUserID,
getGlobalUserParams,
-
StaticDatabases,
+ generateNewUsageQuotaDoc,
} = require("@budibase/auth/db")
const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils
const { UserStatus, EmailTemplatePurpose } = require("../../../constants")
@@ -18,6 +18,7 @@ const {
tryAddTenant,
updateTenantId,
} = require("@budibase/auth/tenancy")
+const env = require("../../../environment")
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@@ -68,6 +69,7 @@ async function saveUser(
_id = _id || generateGlobalUserID()
user = {
+ createdAt: Date.now(),
...dbUser,
...user,
_id,
@@ -138,6 +140,11 @@ exports.adminUser = async ctx => {
})
)
+ // write usage quotas for cloud
+ if (!env.SELF_HOSTED) {
+ await db.post(generateNewUsageQuotaDoc())
+ }
+
if (response.rows.some(row => row.doc.admin)) {
ctx.throw(
403,
@@ -148,6 +155,7 @@ exports.adminUser = async ctx => {
const user = {
email: email,
password: password,
+ createdAt: Date.now(),
roles: {},
builder: {
global: true,
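
One subtlety in the `createdAt` additions above: in `saveUser` the timestamp is spread before `dbUser`, so it only acts as a default - an existing user's stored value wins. Illustrative values:

```js
// Spread order means the fresh timestamp is overridden for existing users.
const dbUser = { createdAt: 1600000000000 } // previously stored user
const updates = { email: "a@b.com" }
const user = { createdAt: Date.now(), ...dbUser, ...updates }
// user.createdAt === 1600000000000 - only brand new users get Date.now()
```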
diff --git a/packages/worker/src/api/controllers/global/workspaces.js b/packages/worker/src/api/controllers/global/workspaces.js
index 95a1ec296d..48a710c92d 100644
--- a/packages/worker/src/api/controllers/global/workspaces.js
+++ b/packages/worker/src/api/controllers/global/workspaces.js
@@ -11,7 +11,7 @@ exports.save = async function (ctx) {
}
try {
- const response = await db.post(workspaceDoc)
+ const response = await db.put(workspaceDoc)
ctx.body = {
_id: response.id,
_rev: response.rev,
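
The `post` to `put` swap here follows the CouchDB/PouchDB convention: `put` creates or updates a doc that already carries its own `_id`, while `post` asks the server to mint one. A sketch with a hypothetical doc:

```js
async function saveWorkspace(db) {
  // workspace docs arrive with a preset _id, so put (create/update by id)
  // is the right verb; post would generate a fresh server-side id instead
  return db.put({ _id: "workspace_123", name: "Sales" }) // hypothetical doc
}
```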
diff --git a/packages/worker/src/environment.js b/packages/worker/src/environment.js
index 646536f292..28ab4e2e69 100644
--- a/packages/worker/src/environment.js
+++ b/packages/worker/src/environment.js
@@ -33,6 +33,12 @@ module.exports = {
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
MULTI_TENANCY: process.env.MULTI_TENANCY,
ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
+ SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
+ SMTP_USER: process.env.SMTP_USER,
+ SMTP_PASSWORD: process.env.SMTP_PASSWORD,
+ SMTP_HOST: process.env.SMTP_HOST,
+ SMTP_PORT: process.env.SMTP_PORT,
+ SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
_set(key, value) {
process.env[key] = value
module.exports[key] = value
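
The six SMTP variables exposed above feed the fallback path in `utilities/email.js` (next hunk). A sketch of the transport they would configure - the `nodemailer.createTransport` options are the library's real ones, but the wiring here is illustrative:

```js
const nodemailer = require("nodemailer")
const env = require("./environment") // path as in the diff

function fallbackTransport() {
  if (!env.SMTP_FALLBACK_ENABLED) {
    return null // fallback disabled: rely on per-tenant SMTP config instead
  }
  return nodemailer.createTransport({
    host: env.SMTP_HOST,
    port: parseInt(env.SMTP_PORT, 10),
    auth: { user: env.SMTP_USER, pass: env.SMTP_PASSWORD },
  })
}
```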
diff --git a/packages/worker/src/utilities/email.js b/packages/worker/src/utilities/email.js
index d22933ef36..14c836952e 100644
--- a/packages/worker/src/utilities/email.js
+++ b/packages/worker/src/utilities/email.js
@@ -1,4 +1,5 @@
const nodemailer = require("nodemailer")
+const env = require("../environment")
const { getScopedConfig } = require("@budibase/auth/db")
const { EmailTemplatePurpose, TemplateTypes, Configs } = require("../constants")
const { getTemplateByPurpose } = require("../constants/templates")
@@ -101,16 +102,35 @@ async function buildEmail(purpose, email, context, { user, contents } = {}) {
* Utility function for finding most valid SMTP configuration.
* @param {object} db The CouchDB database which is to be looked up within.
* @param {string|null} workspaceId If using finer grain control of configs a workspace can be used.
+ * @param {boolean|null} automation Whether or not the configuration is being fetched for an email automation.
* @return {Promise