commit 399364e344

@@ -7,6 +7,7 @@ on:
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

@@ -13,6 +13,7 @@ on:
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

@@ -1,5 +1,5 @@
 {
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",

@@ -12,6 +12,7 @@ exports.StaticDatabases = {
     name: "global-info",
     docs: {
       tenants: "tenants",
+      usageQuota: "usage_quota",
     },
   },
 }

@@ -368,8 +368,33 @@ async function getScopedConfig(db, params) {
   return configDoc && configDoc.config ? configDoc.config : configDoc
 }
 
+function generateNewUsageQuotaDoc() {
+  return {
+    _id: StaticDatabases.PLATFORM_INFO.docs.usageQuota,
+    quotaReset: Date.now() + 2592000000,
+    usageQuota: {
+      automationRuns: 0,
+      rows: 0,
+      storage: 0,
+      apps: 0,
+      users: 0,
+      views: 0,
+      emails: 0,
+    },
+    usageLimits: {
+      automationRuns: 1000,
+      rows: 4000,
+      apps: 4,
+      storage: 1000,
+      users: 10,
+      emails: 50,
+    },
+  }
+}
+
 exports.Replication = Replication
 exports.getScopedConfig = getScopedConfig
 exports.generateConfigID = generateConfigID
 exports.getConfigParams = getConfigParams
 exports.getScopedFullConfig = getScopedFullConfig
+exports.generateNewUsageQuotaDoc = generateNewUsageQuotaDoc

@ -104,7 +104,7 @@ describe("third party common", () => {
|
|||
_id: id,
|
||||
email: email,
|
||||
}
|
||||
const response = await db.post(dbUser)
|
||||
const response = await db.put(dbUser)
|
||||
dbUser._rev = response.rev
|
||||
}
|
||||
|
||||
|
|
|
@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
     dbUser = await syncUser(dbUser, thirdPartyUser)
 
   // create or sync the user
-  const response = await db.post(dbUser)
+  const response = await db.put(dbUser)
   dbUser._rev = response.rev
 
   // authenticate

@@ -139,8 +139,7 @@ exports.doesHaveResourcePermission = (
   // set foundSub to not subResourceId, incase there is no subResource
   let foundMain = false,
     foundSub = false
-  for (let [resource, level] of Object.entries(permissions)) {
-    const levels = getAllowedLevels(level)
+  for (let [resource, levels] of Object.entries(permissions)) {
     if (resource === resourceId && levels.indexOf(permLevel) !== -1) {
       foundMain = true
     }

@@ -177,10 +176,6 @@ exports.doesHaveBasePermission = (permType, permLevel, permissionIds) => {
   return false
 }
 
-exports.higherPermission = (perm1, perm2) => {
-  return levelToNumber(perm1) > levelToNumber(perm2) ? perm1 : perm2
-}
-
 exports.isPermissionLevelHigherThanRead = level => {
   return levelToNumber(level) > 1
 }

@@ -1,6 +1,6 @@
 const { getDB } = require("../db")
 const { cloneDeep } = require("lodash/fp")
-const { BUILTIN_PERMISSION_IDS, higherPermission } = require("./permissions")
+const { BUILTIN_PERMISSION_IDS } = require("./permissions")
 const {
   generateRoleID,
   getRoleParams,

@@ -193,8 +193,17 @@ exports.getUserPermissions = async (appId, userRoleId) => {
   const permissions = {}
   for (let role of rolesHierarchy) {
     if (role.permissions) {
-      for (let [resource, level] of Object.entries(role.permissions)) {
-        permissions[resource] = higherPermission(permissions[resource], level)
+      for (let [resource, levels] of Object.entries(role.permissions)) {
+        if (!permissions[resource]) {
+          permissions[resource] = []
+        }
+        const permsSet = new Set(permissions[resource])
+        if (Array.isArray(levels)) {
+          levels.forEach(level => permsSet.add(level))
+        } else {
+          permsSet.add(levels)
+        }
+        permissions[resource] = [...permsSet]
       }
     }
   }

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {

@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.142",
-    "@budibase/client": "^0.9.142",
+    "@budibase/bbui": "^0.9.143-alpha.0",
+    "@budibase/client": "^0.9.143-alpha.0",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^0.9.142",
+    "@budibase/string-templates": "^0.9.143-alpha.0",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

@@ -1,16 +1,10 @@
 <script>
-  import { onMount } from "svelte"
   import { Router } from "@roxi/routify"
   import { routes } from "../.routify/routes"
-  import { initialise } from "builderStore"
   import { NotificationDisplay } from "@budibase/bbui"
   import { parse, stringify } from "qs"
   import HelpIcon from "components/common/HelpIcon.svelte"
 
-  onMount(async () => {
-    await initialise()
-  })
-
   const queryHandler = { parse, stringify }
 </script>
 

@@ -1,139 +0,0 @@
-import * as Sentry from "@sentry/browser"
-import posthog from "posthog-js"
-import api from "builderStore/api"
-
-let analyticsEnabled
-const posthogConfigured = process.env.POSTHOG_TOKEN && process.env.POSTHOG_URL
-const sentryConfigured = process.env.SENTRY_DSN
-
-const FEEDBACK_SUBMITTED_KEY = "budibase:feedback_submitted"
-const APP_FIRST_STARTED_KEY = "budibase:first_run"
-const feedbackHours = 12
-
-async function activate() {
-  if (analyticsEnabled === undefined) {
-    // only the server knows the true NODE_ENV
-    // this was an issue as NODE_ENV = 'cypress' on the server,
-    // but 'production' on the client
-    const response = await api.get("/api/analytics")
-    analyticsEnabled = (await response.json()).enabled === true
-  }
-  if (!analyticsEnabled) return
-  if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
-  if (posthogConfigured) {
-    posthog.init(process.env.POSTHOG_TOKEN, {
-      autocapture: false,
-      capture_pageview: false,
-      api_host: process.env.POSTHOG_URL,
-    })
-    posthog.set_config({ persistence: "cookie" })
-  }
-}
-
-function identify(id) {
-  if (!analyticsEnabled || !id) return
-  if (posthogConfigured) posthog.identify(id)
-  if (sentryConfigured)
-    Sentry.configureScope(scope => {
-      scope.setUser({ id: id })
-    })
-}
-
-async function identifyByApiKey(apiKey) {
-  if (!analyticsEnabled) return true
-  try {
-    const response = await fetch(
-      `https://03gaine137.execute-api.eu-west-1.amazonaws.com/prod/account/id?api_key=${apiKey.trim()}`
-    )
-    if (response.status === 200) {
-      const id = await response.json()
-
-      await api.put("/api/keys/userId", { value: id })
-      identify(id)
-      return true
-    }
-
-    return false
-  } catch (error) {
-    console.log(error)
-  }
-}
-
-function captureException(err) {
-  if (!analyticsEnabled) return
-  Sentry.captureException(err)
-  captureEvent("Error", { error: err.message ? err.message : err })
-}
-
-function captureEvent(eventName, props = {}) {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
-  props.sourceApp = "builder"
-  posthog.capture(eventName, props)
-}
-
-if (!localStorage.getItem(APP_FIRST_STARTED_KEY)) {
-  localStorage.setItem(APP_FIRST_STARTED_KEY, Date.now())
-}
-
-const isFeedbackTimeElapsed = sinceDateStr => {
-  const sinceDate = parseFloat(sinceDateStr)
-  const feedbackMilliseconds = feedbackHours * 60 * 60 * 1000
-  return Date.now() > sinceDate + feedbackMilliseconds
-}
-
-function submitFeedback(values) {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
-  localStorage.setItem(FEEDBACK_SUBMITTED_KEY, Date.now())
-
-  const prefixedValues = Object.entries(values).reduce((obj, [key, value]) => {
-    obj[`feedback_${key}`] = value
-    return obj
-  }, {})
-
-  posthog.capture("Feedback Submitted", prefixedValues)
-}
-
-function requestFeedbackOnDeploy() {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
-  const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
-  if (!lastSubmittedStr) return true
-  return isFeedbackTimeElapsed(lastSubmittedStr)
-}
-
-function highlightFeedbackIcon() {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
-  const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
-  if (lastSubmittedStr) return isFeedbackTimeElapsed(lastSubmittedStr)
-  const firstRunStr = localStorage.getItem(APP_FIRST_STARTED_KEY)
-  if (!firstRunStr) return false
-  return isFeedbackTimeElapsed(firstRunStr)
-}
-
-// Opt In/Out
-const ifAnalyticsEnabled = func => () => {
-  if (analyticsEnabled && process.env.POSTHOG_TOKEN) {
-    return func()
-  }
-}
-const disabled = () => posthog.has_opted_out_capturing()
-const optIn = () => posthog.opt_in_capturing()
-const optOut = () => posthog.opt_out_capturing()
-
-export default {
-  activate,
-  identify,
-  identifyByApiKey,
-  captureException,
-  captureEvent,
-  requestFeedbackOnDeploy,
-  submitFeedback,
-  highlightFeedbackIcon,
-  disabled: () => {
-    if (analyticsEnabled == null) {
-      return true
-    }
-    return ifAnalyticsEnabled(disabled)
-  },
-  optIn: ifAnalyticsEnabled(optIn),
-  optOut: ifAnalyticsEnabled(optOut),
-}

@@ -0,0 +1,94 @@
+export default class IntercomClient {
+  constructor(token) {
+    this.token = token
+  }
+
+  /**
+   * Instantiate intercom using their provided script.
+   */
+  init() {
+    if (!this.token) return
+
+    const token = this.token
+
+    var w = window
+    var ic = w.Intercom
+    if (typeof ic === "function") {
+      ic("reattach_activator")
+      ic("update", w.intercomSettings)
+    } else {
+      var d = document
+      var i = function () {
+        i.c(arguments)
+      }
+      i.q = []
+      i.c = function (args) {
+        i.q.push(args)
+      }
+      w.Intercom = i
+      var l = function () {
+        var s = d.createElement("script")
+        s.type = "text/javascript"
+        s.async = true
+        s.src = "https://widget.intercom.io/widget/" + token
+        var x = d.getElementsByTagName("script")[0]
+        x.parentNode.insertBefore(s, x)
+      }
+      if (document.readyState === "complete") {
+        l()
+      } else if (w.attachEvent) {
+        w.attachEvent("onload", l)
+      } else {
+        w.addEventListener("load", l, false)
+      }
+
+      this.initialised = true
+    }
+  }
+
+  /**
+   * Show the intercom chat bubble.
+   * @param {Object} user - user to identify
+   * @returns Intercom global object
+   */
+  show(user = {}) {
+    if (!this.initialised) return
+
+    return window.Intercom("boot", {
+      app_id: this.token,
+      ...user,
+    })
+  }
+
+  /**
+   * Update intercom user details and messages.
+   * @returns Intercom global object
+   */
+  update() {
+    if (!this.initialised) return
+
+    return window.Intercom("update")
+  }
+
+  /**
+   * Capture analytics events and send them to intercom.
+   * @param {String} event - event identifier
+   * @param {Object} props - properties for the event
+   * @returns Intercom global object
+   */
+  captureEvent(event, props = {}) {
+    if (!this.initialised) return
+
+    return window.Intercom("trackEvent", event, props)
+  }
+
+  /**
+   * Disassociate the user from the current session.
+   * @returns Intercom global object
+   */
+  logout() {
+    if (!this.initialised) return
+
+    return window.Intercom("shutdown")
+  }
+}

@@ -0,0 +1,80 @@
+import posthog from "posthog-js"
+import { Events } from "./constants"
+
+export default class PosthogClient {
+  constructor(token, url) {
+    this.token = token
+    this.url = url
+  }
+
+  init() {
+    if (!this.token || !this.url) return
+
+    posthog.init(this.token, {
+      autocapture: false,
+      capture_pageview: false,
+      api_host: this.url,
+    })
+    posthog.set_config({ persistence: "cookie" })
+
+    this.initialised = true
+  }
+
+  /**
+   * Set the posthog context to the current user
+   * @param {String} id - unique user id
+   */
+  identify(id) {
+    if (!this.initialised) return
+
+    posthog.identify(id)
+  }
+
+  /**
+   * Update user metadata associated with current user in posthog
+   * @param {Object} meta - user fields
+   */
+  updateUser(meta) {
+    if (!this.initialised) return
+
+    posthog.people.set(meta)
+  }
+
+  /**
+   * Capture analytics events and send them to posthog.
+   * @param {String} event - event identifier
+   * @param {Object} props - properties for the event
+   */
+  captureEvent(eventName, props) {
+    if (!this.initialised) return
+
+    props.sourceApp = "builder"
+    posthog.capture(eventName, props)
+  }
+
+  /**
+   * Submit NPS feedback to posthog.
+   * @param {Object} values - NPS Values
+   */
+  npsFeedback(values) {
+    if (!this.initialised) return
+
+    localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
+
+    const prefixedFeedback = {}
+    for (let key in values) {
+      prefixedFeedback[`feedback_${key}`] = values[key]
+    }
+
+    posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
+  }
+
+  /**
+   * Reset posthog user back to initial state on logout.
+   */
+  logout() {
+    if (!this.initialised) return
+
+    posthog.reset()
+  }
+}

@@ -0,0 +1,37 @@
+import * as Sentry from "@sentry/browser"
+
+export default class SentryClient {
+  constructor(dsn) {
+    this.dsn = dsn
+  }
+
+  init() {
+    if (this.dsn) {
+      Sentry.init({ dsn: this.dsn })
+
+      this.initalised = true
+    }
+  }
+
+  /**
+   * Capture an exception and send it to sentry.
+   * @param {Error} err - JS error object
+   */
+  captureException(err) {
+    if (!this.initalised) return
+
+    Sentry.captureException(err)
+  }
+
+  /**
+   * Identify user in sentry.
+   * @param {String} id - Unique user id
+   */
+  identify(id) {
+    if (!this.initalised) return
+
+    Sentry.configureScope(scope => {
+      scope.setUser({ id })
+    })
+  }
+}

@@ -0,0 +1,49 @@
+export const Events = {
+  BUILDER: {
+    STARTED: "Builder Started",
+  },
+  COMPONENT: {
+    CREATED: "Added Component",
+  },
+  DATASOURCE: {
+    CREATED: "Datasource Created",
+    UPDATED: "Datasource Updated",
+  },
+  TABLE: {
+    CREATED: "Table Created",
+  },
+  VIEW: {
+    CREATED: "View Created",
+    ADDED_FILTER: "Added View Filter",
+    ADDED_CALCULATE: "Added View Calculate",
+  },
+  SCREEN: {
+    CREATED: "Screen Created",
+  },
+  AUTOMATION: {
+    CREATED: "Automation Created",
+    SAVED: "Automation Saved",
+    BLOCK_ADDED: "Added Automation Block",
+  },
+  NPS: {
+    SUBMITTED: "budibase:feedback_submitted",
+  },
+  APP: {
+    CREATED: "budibase:app_created",
+    PUBLISHED: "budibase:app_published",
+    UNPUBLISHED: "budibase:app_unpublished",
+  },
+  ANALYTICS: {
+    OPT_IN: "budibase:analytics_opt_in",
+    OPT_OUT: "budibase:analytics_opt_out",
+  },
+  USER: {
+    INVITE: "budibase:portal_user_invite",
+  },
+  SMTP: {
+    SAVED: "budibase:smtp_saved",
+  },
+  SSO: {
+    SAVED: "budibase:sso_saved",
+  },
+}

@@ -0,0 +1,79 @@
+import api from "builderStore/api"
+import PosthogClient from "./PosthogClient"
+import IntercomClient from "./IntercomClient"
+import SentryClient from "./SentryClient"
+import { Events } from "./constants"
+import { auth } from "stores/portal"
+import { get } from "svelte/store"
+
+const posthog = new PosthogClient(
+  process.env.POSTHOG_TOKEN,
+  process.env.POSTHOG_URL
+)
+const sentry = new SentryClient(process.env.SENTRY_DSN)
+const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
+
+class AnalyticsHub {
+  constructor() {
+    this.clients = [posthog, sentry, intercom]
+  }
+
+  async activate() {
+    // Setting the analytics env var off in the backend overrides org/tenant settings
+    const analyticsStatus = await api.get("/api/analytics")
+    const json = await analyticsStatus.json()
+
+    // Multitenancy disabled on the backend
+    if (!json.enabled) return
+
+    const tenantId = get(auth).tenantId
+
+    if (tenantId) {
+      const res = await api.get(
+        `/api/global/configs/public?tenantId=${tenantId}`
+      )
+      const orgJson = await res.json()
+
+      // analytics opted out for the tenant
+      if (orgJson.config?.analytics === false) return
+    }
+
+    this.clients.forEach(client => client.init())
+    this.enabled = true
+  }
+
+  identify(id, metadata) {
+    posthog.identify(id)
+    if (metadata) {
+      posthog.updateUser(metadata)
+    }
+    sentry.identify(id)
+  }
+
+  captureException(err) {
+    sentry.captureException(err)
+  }
+
+  captureEvent(eventName, props = {}) {
+    posthog.captureEvent(eventName, props)
+    intercom.captureEvent(eventName, props)
+  }
+
+  showChat(user) {
+    intercom.show(user)
+  }
+
+  submitFeedback(values) {
+    posthog.npsFeedback(values)
+  }
+
+  async logout() {
+    posthog.logout()
+    intercom.logout()
+  }
+}
+
+const analytics = new AnalyticsHub()
+
+export { Events }
+export default analytics

@@ -443,10 +443,9 @@ function bindingReplacement(bindableProperties, textWithBindings, convertTo) {
   for (let from of convertFromProps) {
     if (shouldReplaceBinding(newBoundValue, from, convertTo)) {
       const binding = bindableProperties.find(el => el[convertFrom] === from)
-      newBoundValue = newBoundValue.replace(
-        new RegExp(from, "gi"),
-        binding[convertTo]
-      )
+      while (newBoundValue.includes(from)) {
+        newBoundValue = newBoundValue.replace(from, binding[convertTo])
+      }
     }
   }
   result = result.replace(boundValue, newBoundValue)

@ -3,7 +3,6 @@ import { getAutomationStore } from "./store/automation"
|
|||
import { getHostingStore } from "./store/hosting"
|
||||
import { getThemeStore } from "./store/theme"
|
||||
import { derived, writable } from "svelte/store"
|
||||
import analytics from "analytics"
|
||||
import { FrontendTypes, LAYOUT_NAMES } from "../constants"
|
||||
import { findComponent } from "./storeUtils"
|
||||
|
||||
|
@@ -55,13 +54,4 @@ export const mainLayout = derived(store, $store => {
 
 export const selectedAccessRole = writable("BASIC")
 
-export const initialise = async () => {
-  try {
-    await analytics.activate()
-    analytics.captureEvent("Builder Started")
-  } catch (err) {
-    console.log(err)
-  }
-}
-
 export const screenSearchString = writable(null)

@ -2,7 +2,7 @@ import { writable } from "svelte/store"
|
|||
import api from "../../api"
|
||||
import Automation from "./Automation"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
const automationActions = store => ({
|
||||
fetch: async () => {
|
||||
|
@@ -110,7 +110,7 @@ const automationActions = store => ({
       state.selectedBlock = newBlock
       return state
     })
-    analytics.captureEvent("Added Automation Block", {
+    analytics.captureEvent(Events.AUTOMATION.BLOCK_ADDED, {
       name: block.name,
    })
  },

@@ -19,7 +19,7 @@ import {
 import { fetchComponentLibDefinitions } from "../loadComponentLibraries"
 import api from "../api"
 import { FrontendTypes } from "constants"
-import analytics from "analytics"
+import analytics, { Events } from "analytics"
 import {
   findComponentType,
   findComponentParent,

@@ -215,6 +215,13 @@ export const getFrontendStore = () => {
         if (screenToDelete._id === state.selectedScreenId) {
           state.selectedScreenId = null
         }
+        //remove the link for this screen
+        screenDeletePromises.push(
+          store.actions.components.links.delete(
+            screenToDelete.routing.route,
+            screenToDelete.props._instanceName
+          )
+        )
       }
       return state
     })

@@ -443,7 +450,7 @@ export const getFrontendStore = () => {
       })
 
       // Log event
-      analytics.captureEvent("Added Component", {
+      analytics.captureEvent(Events.COMPONENT.CREATED, {
        name: componentInstance._component,
      })
 

@@ -646,6 +653,36 @@ export const getFrontendStore = () => {
         // Save layout
         await store.actions.layouts.save(layout)
       },
+      delete: async (url, title) => {
+        const layout = get(mainLayout)
+        if (!layout) {
+          return
+        }
+
+        // Add link setting to main layout
+        if (layout.props._component.endsWith("layout")) {
+          // If using a new SDK, add to the layout component settings
+          layout.props.links = layout.props.links.filter(
+            link => !(link.text === title && link.url === url)
+          )
+        } else {
+          // If using an old SDK, add to the navigation component
+          // TODO: remove this when we can assume everyone has updated
+          const nav = findComponentType(
+            layout.props,
+            "@budibase/standard-components/navigation"
+          )
+          if (!nav) {
+            return
+          }
+
+          nav._children = nav._children.filter(
+            child => !(child.url === url && child.text === title)
+          )
+        }
+        // Save layout
+        await store.actions.layouts.save(layout)
+      },
     },
   },
 }

@@ -4,7 +4,7 @@
   import { automationStore } from "builderStore"
   import { notifications } from "@budibase/bbui"
   import { Input, ModalContent, Layout, Body, Icon } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   let name
   let selectedTrigger

@@ -36,7 +36,7 @@
     notifications.success(`Automation ${name} created.`)
 
     $goto(`./${$automationStore.selectedAutomation.automation._id}`)
-    analytics.captureEvent("Automation Created", { name })
+    analytics.captureEvent(Events.AUTOMATION.CREATED, { name })
   }
   $: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
 

@@ -2,7 +2,7 @@
   import { automationStore } from "builderStore"
   import { notifications } from "@budibase/bbui"
   import { Icon, Input, ModalContent, Modal } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   let name
   let error = ""

@@ -26,7 +26,7 @@
     }
     await automationStore.actions.save(updatedAutomation)
     notifications.success(`Automation ${name} updated successfully.`)
-    analytics.captureEvent("Automation Saved", { name })
+    analytics.captureEvent(Events.AUTOMATION.SAVED, { name })
     hide()
   }
 

@@ -1,7 +1,7 @@
 <script>
   import { Select, Label, notifications, ModalContent } from "@budibase/bbui"
   import { tables, views } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { FIELDS } from "constants/backend"
 
   const CALCULATIONS = [

@@ -40,7 +40,7 @@
   function saveView() {
     views.save(view)
     notifications.success(`View ${view.name} saved.`)
-    analytics.captureEvent("Added View Calculate", { field: view.field })
+    analytics.captureEvent(Events.VIEW.ADDED_CALCULATE, { field: view.field })
   }
 </script>
 

@@ -3,7 +3,7 @@
   import { goto } from "@roxi/routify"
   import { views as viewsStore } from "stores/backend"
   import { tables } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   let name
   let field

@@ -21,7 +21,7 @@
       field,
     })
     notifications.success(`View ${name} created`)
-    analytics.captureEvent("View Created", { name })
+    analytics.captureEvent(Events.VIEW.CREATED, { name })
     $goto(`../../view/${name}`)
   }
 </script>

@@ -11,7 +11,7 @@
     Icon,
   } from "@budibase/bbui"
   import { tables, views } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   const CONDITIONS = [
     {

@@ -65,7 +65,7 @@
   function saveView() {
     views.save(view)
     notifications.success(`View ${view.name} saved.`)
-    analytics.captureEvent("Added View Filter", {
+    analytics.captureEvent(Events.VIEW.ADDED_FILTER, {
       filters: JSON.stringify(view.filters),
     })
   }

@@ -1,8 +1,9 @@
 <script>
   import { onMount } from "svelte"
+  import { get } from "svelte/store"
   import { goto } from "@roxi/routify"
   import { BUDIBASE_INTERNAL_DB } from "constants"
-  import { database, datasources, queries } from "stores/backend"
+  import { database, datasources, queries, tables } from "stores/backend"
   import EditDatasourcePopover from "./popovers/EditDatasourcePopover.svelte"
   import EditQueryPopover from "./popovers/EditQueryPopover.svelte"
   import NavItem from "components/common/NavItem.svelte"

@@ -10,6 +11,13 @@
   import ICONS from "./icons"
 
   let openDataSources = []
+  $: enrichedDataSources = $datasources.list.map(datasource => ({
+    ...datasource,
+    open:
+      openDataSources.includes(datasource._id) ||
+      containsActiveTable(datasource),
+    selected: $datasources.selected === datasource._id,
+  }))
 
   function selectDatasource(datasource) {
     toggleNode(datasource)

@@ -35,16 +43,28 @@
     datasources.fetch()
     queries.fetch()
   })
 
+  const containsActiveTable = datasource => {
+    const activeTableId = get(tables).selected?._id
+    if (!datasource.entities) {
+      return false
+    }
+    let tableOptions = datasource.entities
+    if (!Array.isArray(tableOptions)) {
+      tableOptions = Object.values(tableOptions)
+    }
+    return tableOptions.find(x => x._id === activeTableId) != null
+  }
 </script>
 
 {#if $database?._id}
   <div class="hierarchy-items-container">
-    {#each $datasources.list as datasource, idx}
+    {#each enrichedDataSources as datasource, idx}
       <NavItem
         border={idx > 0}
         text={datasource.name}
-        opened={openDataSources.includes(datasource._id)}
-        selected={$datasources.selected === datasource._id}
+        opened={datasource.open}
+        selected={datasource.selected}
         withArrow={true}
         on:click={() => selectDatasource(datasource)}
         on:iconClick={() => toggleNode(datasource)}

@@ -61,22 +81,21 @@
         {/if}
       </NavItem>
 
-      {#if openDataSources.includes(datasource._id)}
+      {#if datasource.open}
         <TableNavigator sourceId={datasource._id} />
-        {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
-          <NavItem
-            indentLevel={1}
-            icon="SQLQuery"
-            text={query.name}
-            opened={$queries.selected === query._id}
-            selected={$queries.selected === query._id}
-            on:click={() => onClickQuery(query)}
-          >
-            <EditQueryPopover {query} />
-          </NavItem>
-        {/each}
       {/if}
+
+      {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
+        <NavItem
+          indentLevel={1}
+          icon="SQLQuery"
+          text={query.name}
+          opened={$queries.selected === query._id}
+          selected={$queries.selected === query._id}
+          on:click={() => onClickQuery(query)}
+        >
+          <EditQueryPopover {query} />
+        </NavItem>
+      {/each}
     {/each}
   </div>
 {/if}

@@ -5,7 +5,7 @@
   import { Input, Label, ModalContent, Modal, Context } from "@budibase/bbui"
   import TableIntegrationMenu from "../TableIntegrationMenu/index.svelte"
   import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { getContext } from "svelte"
 
   const modalContext = getContext(Context.Modal)

@@ -45,7 +45,7 @@
       plus,
     })
     notifications.success(`Datasource ${name} created successfully.`)
-    analytics.captureEvent("Datasource Created", { name, type })
+    analytics.captureEvent(Events.DATASOURCE.CREATED, { name, type })
 
     // Navigate to new datasource
     $goto(`./datasource/${response._id}`)

@@ -2,7 +2,7 @@
   import { datasources } from "stores/backend"
   import { notifications } from "@budibase/bbui"
   import { Input, ModalContent, Modal } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   let error = ""
   let modal

@@ -35,7 +35,7 @@
     }
     await datasources.save(updatedDatasource)
     notifications.success(`Datasource ${name} updated successfully.`)
-    analytics.captureEvent("Datasource Updated", updatedDatasource)
+    analytics.captureEvent(Events.DATASOURCE.UPDATED, updatedDatasource)
     hide()
   }
 </script>

@@ -12,7 +12,7 @@
     Layout,
   } from "@budibase/bbui"
   import TableDataImport from "../TableDataImport.svelte"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import screenTemplates from "builderStore/store/screenTemplates"
   import { buildAutoColumn, getAutoColumnInformation } from "builderStore/utils"
   import { NEW_ROW_TEMPLATE } from "builderStore/store/screenTemplates/newRowScreen"

@@ -67,7 +67,7 @@
     // Create table
     const table = await tables.save(newTable)
     notifications.success(`Table ${name} created successfully.`)
-    analytics.captureEvent("Table Created", { name })
+    analytics.captureEvent(Events.TABLE.CREATED, { name })
 
     // Create auto screens
     if (createAutoscreens) {

@@ -2,7 +2,8 @@
   import { onMount, onDestroy } from "svelte"
   import { Button, Modal, notifications, ModalContent } from "@budibase/bbui"
   import api from "builderStore/api"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
+  import { store } from "builderStore"
 
   const DeploymentStatus = {
     SUCCESS: "SUCCESS",

@@ -23,6 +24,9 @@
       if (response.status !== 200) {
         throw new Error(`status ${response.status}`)
       } else {
+        analytics.captureEvent(Events.APP.PUBLISHED, {
+          appId: $store.appId,
+        })
         notifications.success(`Application published successfully`)
       }
     } catch (err) {

@@ -4,7 +4,7 @@
   import { roles } from "stores/backend"
   import { Input, Select, ModalContent, Toggle } from "@budibase/bbui"
   import getTemplates from "builderStore/store/screenTemplates"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
 
   const CONTAINER = "@budibase/standard-components/container"
 

@@ -66,7 +66,7 @@
 
     if (templateIndex !== undefined) {
       const template = templates[templateIndex]
-      analytics.captureEvent("Screen Created", {
+      analytics.captureEvent(Events.SCREEN.CREATED, {
        template: template.id || template.name,
      })
    }

@@ -0,0 +1,35 @@
+<script>
+  import { Select, Label } from "@budibase/bbui"
+  import { currentAsset, store } from "builderStore"
+  import { getActionProviderComponents } from "builderStore/dataBinding"
+
+  export let parameters
+
+  $: actionProviders = getActionProviderComponents(
+    $currentAsset,
+    $store.selectedComponentId,
+    "RefreshDataProvider"
+  )
+</script>
+
+<div class="root">
+  <Label small>Data Provider</Label>
+  <Select
+    bind:value={parameters.componentId}
+    options={actionProviders}
+    getOptionLabel={x => x._instanceName}
+    getOptionValue={x => x._id}
+  />
+</div>
+
+<style>
+  .root {
+    display: grid;
+    column-gap: var(--spacing-l);
+    row-gap: var(--spacing-s);
+    grid-template-columns: 70px 1fr;
+    align-items: center;
+    max-width: 400px;
+    margin: 0 auto;
+  }
+</style>

@ -12,6 +12,7 @@ import ClearForm from "./ClearForm.svelte"
|
|||
import CloseScreenModal from "./CloseScreenModal.svelte"
|
||||
import ChangeFormStep from "./ChangeFormStep.svelte"
|
||||
import UpdateStateStep from "./UpdateState.svelte"
|
||||
import RefreshDataProvider from "./RefreshDataProvider.svelte"
|
||||
|
||||
// Defines which actions are available to configure in the front end.
|
||||
// Unfortunately the "name" property is used as the identifier so please don't
|
||||
|
@@ -62,6 +63,10 @@ export const getAvailableActions = () => {
       name: "Change Form Step",
       component: ChangeFormStep,
     },
+    {
+      name: "Refresh Data Provider",
+      component: RefreshDataProvider,
+    },
   ]
 
   if (get(store).clientFeatures?.state) {

@@ -12,7 +12,7 @@
   import { admin } from "stores/portal"
   import { string, mixed, object } from "yup"
   import api, { get, post } from "builderStore/api"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { onMount } from "svelte"
   import { capitalise } from "helpers"
   import { goto } from "@roxi/routify"

@@ -98,9 +98,9 @@
       throw new Error(appJson.message)
     }
 
-    analytics.captureEvent("App Created", {
+    analytics.captureEvent(Events.APP.CREATED, {
       name: $values.name,
-      appId: appJson._id,
+      appId: appJson.instance._id,
       template,
     })
 

@@ -0,0 +1,7 @@
+<script>
+  import { datasources } from "stores/backend"
+
+  datasources.select("bb_internal")
+</script>
+
+<slot />

@@ -29,6 +29,7 @@
       username,
       password,
     })
+
     if ($auth?.user?.forceResetPassword) {
       $goto("./reset")
     } else {

@@ -15,8 +15,7 @@
   } from "@budibase/bbui"
   import CreateAppModal from "components/start/CreateAppModal.svelte"
   import UpdateAppModal from "components/start/UpdateAppModal.svelte"
-  import api, { del } from "builderStore/api"
-  import analytics from "analytics"
+  import { del } from "builderStore/api"
   import { onMount } from "svelte"
   import { apps, auth, admin } from "stores/portal"
   import download from "downloadjs"

@@ -66,14 +65,6 @@
     }
   }
 
-  const checkKeys = async () => {
-    const response = await api.get(`/api/keys/`)
-    const keys = await response.json()
-    if (keys.userId) {
-      analytics.identify(keys.userId)
-    }
-  }
-
   const initiateAppCreation = () => {
     creationModal.show()
     creatingApp = true

@@ -188,7 +179,6 @@
   }
 
   onMount(async () => {
-    checkKeys()
     await apps.load()
     loaded = true
   })

@@ -23,6 +23,7 @@
   import api from "builderStore/api"
   import { organisation, auth, admin } from "stores/portal"
   import { uuid } from "builderStore/uuid"
+  import analytics, { Events } from "analytics"
 
   $: tenantId = $auth.tenantId
   $: multiTenancyEnabled = $admin.multiTenancy

@@ -209,6 +210,7 @@
         providers[res.type]._id = res._id
       })
       notifications.success(`Settings saved.`)
+      analytics.captureEvent(Events.SSO.SAVED)
     })
     .catch(err => {
       notifications.error(`Failed to update auth settings. ${err}`)

@@ -16,6 +16,7 @@
   import { email } from "stores/portal"
   import api from "builderStore/api"
   import { cloneDeep } from "lodash/fp"
+  import analytics, { Events } from "analytics"
 
   const ConfigTypes = {
     SMTP: "smtp",

@@ -69,6 +70,7 @@
       smtpConfig._rev = json._rev
       smtpConfig._id = json._id
       notifications.success(`Settings saved.`)
+      analytics.captureEvent(Events.SMTP.SAVED)
     }
   }
 

@@ -10,6 +10,7 @@
   } from "@budibase/bbui"
   import { createValidationStore, emailValidator } from "helpers/validation"
   import { users } from "stores/portal"
+  import analytics, { Events } from "analytics"
 
   export let disabled
 

@@ -25,6 +26,7 @@
       notifications.error(res.message)
     } else {
       notifications.success(res.message)
+      analytics.captureEvent(Events.USER.INVITE, { type: selected })
     }
   }
 </script>

@@ -25,7 +25,7 @@
   }
 
   const values = writable({
-    analytics: !analytics.disabled(),
+    analytics: analytics.enabled,
     company: $organisation.company,
     platformUrl: $organisation.platformUrl,
     logo: $organisation.logoUrl

@@ -48,13 +48,6 @@
   async function saveConfig() {
     loading = true
 
-    // Set analytics preference
-    if ($values.analytics) {
-      analytics.optIn()
-    } else {
-      analytics.optOut()
-    }
-
     // Upload logo if required
     if ($values.logo && !$values.logo.url) {
       await uploadLogo($values.logo)

@@ -64,6 +57,7 @@
     const config = {
       company: $values.company ?? "",
       platformUrl: $values.platformUrl ?? "",
+      analytics: $values.analytics,
     }
     // remove logo if required
     if (!$values.logo) {

@@ -1,4 +1,4 @@
-import { writable } from "svelte/store"
+import { writable, get } from "svelte/store"
 import { queries, tables, views } from "./"
 import api from "../../builderStore/api"
 

@@ -8,7 +8,8 @@ export const INITIAL_DATASOURCE_VALUES = {
 }
 
 export function createDatasourcesStore() {
-  const { subscribe, update, set } = writable(INITIAL_DATASOURCE_VALUES)
+  const store = writable(INITIAL_DATASOURCE_VALUES)
+  const { subscribe, update, set } = store
 
   return {
     subscribe,

@@ -21,7 +22,15 @@ export function createDatasourcesStore() {
     fetch: async () => {
       const response = await api.get(`/api/datasources`)
       const json = await response.json()
-      update(state => ({ ...state, list: json, selected: null }))
+
+      // Clear selected if it no longer exists, otherwise keep it
+      const selected = get(store).selected
+      let nextSelected = null
+      if (selected && json.find(source => source._id === selected)) {
+        nextSelected = selected
+      }
+
+      update(state => ({ ...state, list: json, selected: nextSelected }))
       return json
     },
     select: async datasourceId => {

@@ -1,6 +1,7 @@
 import { derived, writable, get } from "svelte/store"
 import api from "../../builderStore/api"
 import { admin } from "stores/portal"
+import analytics from "analytics"
 
 export function createAuthStore() {
   const auth = writable({

@@ -49,6 +50,21 @@ export function createAuthStore() {
       }
       return store
     })
+
+    if (user) {
+      analytics.activate().then(() => {
+        analytics.identify(user._id, user)
+        if (user.size === "100+" || user.size === "10000+") {
+          analytics.showChat({
+            email: user.email,
+            created_at: user.createdAt || Date.now(),
+            name: user.name,
+            user_id: user._id,
+            tenant: user.tenantId,
+          })
+        }
+      })
+    }
   }
 
   async function setOrganisation(tenantId) {

@@ -22,6 +22,9 @@ export default ({ mode }) => {
         isProduction ? "production" : "development"
       ),
       "process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN),
+      "process.env.INTERCOM_TOKEN": JSON.stringify(
+        process.env.INTERCOM_TOKEN
+      ),
       "process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL),
       "process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
     }),

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

@@ -2389,6 +2389,7 @@
     "icon": "Data",
     "illegalChildren": ["section"],
     "hasChildren": true,
+    "actions": ["RefreshDatasource"],
     "settings": [
       {
         "type": "dataSource",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",

@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.142",
+    "@budibase/bbui": "^0.9.143-alpha.0",
     "@budibase/standard-components": "^0.9.139",
-    "@budibase/string-templates": "^0.9.142",
+    "@budibase/string-templates": "^0.9.143-alpha.0",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"

@@ -88,7 +88,7 @@ const validateFormHandler = async (action, context) => {
   )
 }
 
-const refreshDatasourceHandler = async (action, context) => {
+const refreshDataProviderHandler = async (action, context) => {
   return await executeActionHandler(
     context,
     action.parameters.componentId,

@@ -139,7 +139,7 @@ const handlerMap = {
   ["Execute Query"]: queryExecutionHandler,
   ["Trigger Automation"]: triggerAutomationHandler,
   ["Validate Form"]: validateFormHandler,
-  ["Refresh Datasource"]: refreshDatasourceHandler,
+  ["Refresh Data Provider"]: refreshDataProviderHandler,
   ["Log Out"]: logoutHandler,
   ["Clear Form"]: clearFormHandler,
   ["Close Screen Modal"]: closeScreenModalHandler,

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "0.9.142",
+  "version": "0.9.143-alpha.0",
   "description": "Budibase Web Server",
   "main": "src/index.js",
   "repository": {

@@ -23,10 +23,9 @@
     "format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
     "lint": "eslint --fix src/",
     "lint:fix": "yarn run format && yarn run lint",
     "initialise": "node scripts/initialise.js",
     "multi:enable": "node scripts/multiTenancy.js enable",
-    "multi:disable": "node scripts/multiTenancy.js disable",
-    "selfhost:enable": "node scripts/selfhost.js enable",
-    "selfhost:disable": "node scripts/selfhost.js disable"
+    "multi:disable": "node scripts/multiTenancy.js disable"
   },
   "jest": {
     "preset": "ts-jest",

@@ -49,8 +48,7 @@
       "!src/automations/tests/**/*",
       "!src/utilities/fileProcessor.js",
       "!src/utilities/fileSystem/**/*",
-      "!src/utilities/redis.js",
-      "!src/api/controllers/row/internalSearch.js"
+      "!src/utilities/redis.js"
     ],
     "coverageReporters": [
       "lcov",

@@ -64,9 +62,9 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.142",
-    "@budibase/client": "^0.9.142",
-    "@budibase/string-templates": "^0.9.142",
+    "@budibase/auth": "^0.9.143-alpha.0",
+    "@budibase/client": "^0.9.143-alpha.0",
+    "@budibase/string-templates": "^0.9.143-alpha.0",
     "@elastic/elasticsearch": "7.10.0",
     "@koa/router": "8.0.0",
     "@sendgrid/mail": "7.1.1",

@@ -98,12 +96,13 @@
     "lodash": "4.17.21",
     "mongodb": "3.6.3",
     "mssql": "6.2.3",
-    "mysql": "^2.18.1",
+    "mysql": "2.18.1",
     "node-fetch": "2.6.0",
     "open": "7.3.0",
     "pg": "8.5.1",
     "pino-pretty": "4.0.0",
     "pouchdb": "7.2.1",
+    "pouchdb-adapter-memory": "^7.2.1",
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",

@@ -133,7 +132,6 @@
     "express": "^4.17.1",
     "jest": "^27.0.5",
     "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
     "prettier": "^2.3.1",
     "rimraf": "^3.0.2",
     "supertest": "^4.0.2",

@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+  db:
+    container_name: postgres-json
+    image: postgres
+    restart: always
+    environment:
+      POSTGRES_USER: root
+      POSTGRES_PASSWORD: root
+      POSTGRES_DB: main
+    ports:
+      - "5432:5432"
+    volumes:
+      #- pg_data:/var/lib/postgresql/data/
+      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+  pgadmin:
+    container_name: pgadmin-json
+    image: dpage/pgadmin4
+    restart: always
+    environment:
+      PGADMIN_DEFAULT_EMAIL: root@root.com
+      PGADMIN_DEFAULT_PASSWORD: root
+    ports:
+      - "5050:80"
+
+#volumes:
+#  pg_data:

@@ -0,0 +1,22 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE jsonTable (
+  id character varying(32),
+  data jsonb,
+  text text
+);
+
+INSERT INTO jsonTable (id, data) VALUES ('1', '{"id": 1, "age": 1, "name": "Mike", "newline": "this is text with a\n newline in it"}');
+
+CREATE VIEW jsonView AS SELECT
+  x.id,
+  x.age,
+  x.name,
+  x.newline
+FROM
+  jsonTable c,
+  LATERAL jsonb_to_record(c.data) x (id character varying(32),
+    age BIGINT,
+    name TEXT,
+    newline TEXT
+  );

@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f

@@ -15,7 +15,7 @@ services:
       - ./init.sql:/docker-entrypoint-initdb.d/init.sql
 
   pgadmin:
-    container_name: pgadmin
+    container_name: pgadmin-pg
     image: dpage/pgadmin4
     restart: always
     environment:

@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+  db:
+    container_name: postgres-vehicle
+    image: postgres
+    restart: always
+    environment:
+      POSTGRES_USER: root
+      POSTGRES_PASSWORD: root
+      POSTGRES_DB: main
+    ports:
+      - "5432:5432"
+    volumes:
+      #- pg_data:/var/lib/postgresql/data/
+      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+  pgadmin:
+    container_name: pgadmin
+    image: dpage/pgadmin4
+    restart: always
+    environment:
+      PGADMIN_DEFAULT_EMAIL: root@root.com
+      PGADMIN_DEFAULT_PASSWORD: root
+    ports:
+      - "5050:80"
+
+#volumes:
+#  pg_data:

@@ -0,0 +1,52 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE Vehicles (
+  id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+  Registration text COLLATE pg_catalog."default",
+  Make text COLLATE pg_catalog."default",
+  Model text COLLATE pg_catalog."default",
+  Colour text COLLATE pg_catalog."default",
+  Year smallint,
+  CONSTRAINT Vehicles_pkey PRIMARY KEY (id)
+);
+
+CREATE TABLE ServiceLog (
+  id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+  Description text COLLATE pg_catalog."default",
+  VehicleId bigint,
+  ServiceDate timestamp without time zone,
+  Category text COLLATE pg_catalog."default",
+  Mileage bigint,
+  CONSTRAINT ServiceLog_pkey PRIMARY KEY (id),
+  CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId)
+    REFERENCES Vehicles (id) MATCH SIMPLE
+    ON UPDATE NO ACTION
+    ON DELETE NO ACTION
+);
+
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('FAZ 9837','Volkswagen','Polo','White',2002);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('JHI 8827','BMW','M3','Black',2013);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('D903PI','Volvo','XC40','Grey',2014);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('HGT5677','Skoda','Octavia','Graphite',2009);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('PPF9276','Skoda','Octavia','Graphite',2021);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('J893FT','Toyota','Corolla','Red',2015);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('MJK776','Honda','HR-V','Silver',2015);
+
+
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Change front brakes', 1, '2021-05-04', 'Brakes', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Tyres - full set', 1, '2021-05-04', 'Brakes', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Engine tune up', 2, '2021-07-14', 'Brakes', 50889);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002);

@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f

@ -2,6 +2,6 @@ const env = require("../../environment")
|
|||
|
||||
exports.isEnabled = async function (ctx) {
|
||||
ctx.body = {
|
||||
enabled: env.ENABLE_ANALYTICS === "true",
|
||||
enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true",
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -230,7 +230,12 @@ exports.create = async function (ctx) {
   const response = await db.put(newApplication, { force: true })
   newApplication._rev = response.rev
 
-  await createEmptyAppPackage(ctx, newApplication)
+  // Only create the default home screens and layout if we aren't importing
+  // an app
+  if (useTemplate !== "true") {
+    await createEmptyAppPackage(ctx, newApplication)
+  }
 
   /* istanbul ignore next */
   if (!env.isTest()) {
     await createApp(appId)

@@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
   await connector.buildSchema(datasource._id, datasource.entities)
   datasource.entities = connector.tables
 
-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
   datasource._rev = response.rev
 
   ctx.body = datasource

@@ -89,7 +89,7 @@ exports.save = async function (ctx) {
     ...ctx.request.body,
   }
 
-  const response = await db.post(datasource)
+  const response = await db.put(datasource)
   datasource._rev = response.rev
 
   // Drain connection pools when configuration is changed

@@ -1,9 +1,4 @@
-const {
-  getBuiltinPermissions,
-  PermissionLevels,
-  isPermissionLevelHigherThanRead,
-  higherPermission,
-} = require("@budibase/auth/permissions")
+const { getBuiltinPermissions } = require("@budibase/auth/permissions")
 const {
   isBuiltin,
   getDBRoleID,

@@ -16,6 +11,7 @@ const {
   CURRENTLY_SUPPORTED_LEVELS,
   getBasePermissions,
 } = require("../../utilities/security")
+const { removeFromArray } = require("../../utilities")
 
 const PermissionUpdateType = {
   REMOVE: "remove",

@@ -24,22 +20,6 @@ const PermissionUpdateType = {
 
 const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
 
-// quick function to perform a bit of weird logic, make sure fetch calls
-// always say a write role also has read permission
-function fetchLevelPerms(permissions, level, roleId) {
-  if (!permissions) {
-    permissions = {}
-  }
-  permissions[level] = roleId
-  if (
-    isPermissionLevelHigherThanRead(level) &&
-    !permissions[PermissionLevels.READ]
-  ) {
-    permissions[PermissionLevels.READ] = roleId
-  }
-  return permissions
-}
-
 // utility function to stop this repetition - permissions always stored under roles
 async function getAllDBRoles(db) {
   const body = await db.allDocs(

@@ -74,23 +54,31 @@ async function updatePermissionOnRole(
   for (let role of dbRoles) {
     let updated = false
     const rolePermissions = role.permissions ? role.permissions : {}
+    // make sure its an array, also handle migrating
+    if (
+      !rolePermissions[resourceId] ||
+      !Array.isArray(rolePermissions[resourceId])
+    ) {
+      rolePermissions[resourceId] =
+        typeof rolePermissions[resourceId] === "string"
+          ? [rolePermissions[resourceId]]
+          : []
+    }
     // handle the removal/updating the role which has this permission first
     // the updating (role._id !== dbRoleId) is required because a resource/level can
     // only be permitted in a single role (this reduces hierarchy confusion and simplifies
     // the general UI for this, rather than needing to show everywhere it is used)
     if (
       (role._id !== dbRoleId || remove) &&
-      rolePermissions[resourceId] === level
+      rolePermissions[resourceId].indexOf(level) !== -1
     ) {
-      delete rolePermissions[resourceId]
+      removeFromArray(rolePermissions[resourceId], level)
       updated = true
     }
     // handle the adding, we're on the correct role, at it to this
     if (!remove && role._id === dbRoleId) {
-      rolePermissions[resourceId] = higherPermission(
-        rolePermissions[resourceId],
-        level
-      )
+      const set = new Set(rolePermissions[resourceId])
+      rolePermissions[resourceId] = [...set.add(level)]
       updated = true
     }
     // handle the update, add it to bulk docs to perform at end

@ -127,12 +115,11 @@ exports.fetch = async function (ctx) {
|
|||
continue
|
||||
}
|
||||
const roleId = getExternalRoleID(role._id)
|
||||
for (let [resource, level] of Object.entries(role.permissions)) {
|
||||
permissions[resource] = fetchLevelPerms(
|
||||
permissions[resource],
|
||||
level,
|
||||
roleId
|
||||
)
|
||||
for (let [resource, levelArr] of Object.entries(role.permissions)) {
|
||||
const levels = Array.isArray(levelArr) ? [levelArr] : levelArr
|
||||
const perms = {}
|
||||
levels.forEach(level => (perms[level] = roleId))
|
||||
permissions[resource] = perms
|
||||
}
|
||||
}
|
||||
// apply the base permissions
|
||||
|
@ -157,12 +144,13 @@ exports.getResourcePerms = async function (ctx) {
|
|||
for (let level of SUPPORTED_LEVELS) {
|
||||
// update the various roleIds in the resource permissions
|
||||
for (let role of roles) {
|
||||
if (role.permissions && role.permissions[resourceId] === level) {
|
||||
permissions = fetchLevelPerms(
|
||||
permissions,
|
||||
level,
|
||||
getExternalRoleID(role._id)
|
||||
)
|
||||
const rolePerms = role.permissions
|
||||
if (
|
||||
rolePerms &&
|
||||
(rolePerms[resourceId] === level ||
|
||||
rolePerms[resourceId].indexOf(level) !== -1)
|
||||
) {
|
||||
permissions[level] = getExternalRoleID(role._id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -437,7 +437,11 @@ module External {
for (let [colName, { isMany, rows, tableId }] of Object.entries(
related
)) {
const table = this.getTable(tableId)
const table: Table = this.getTable(tableId)
// if its not the foreign key skip it, nothing to do
if (table.primary && table.primary.indexOf(colName) !== -1) {
continue
}
for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen
@@ -540,6 +544,9 @@ module External {
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
meta: {
table,
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)

@@ -5,17 +5,22 @@ const {
generateRowID,
DocumentTypes,
InternalTables,
generateMemoryViewID,
} = require("../../../db/utils")
const userController = require("../user")
const {
inputProcessing,
outputProcessing,
processAutoColumn,
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
const inMemoryViews = require("../../../db/inMemoryView")
const env = require("../../../environment")
const { migrateToInMemoryView } = require("../view/utils")

const CALCULATION_TYPES = {
SUM: "sum",
@@ -25,17 +30,84 @@ const CALCULATION_TYPES = {

async function storeResponse(ctx, db, row, oldTable, table) {
row.type = "row"
const response = await db.put(row)
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (!isEqual(oldTable, table)) {
await db.put(table)
try {
await db.put(table)
} catch (err) {
if (err.status === 409) {
const updatedTable = await db.get(table._id)
let response = processAutoColumn(null, updatedTable, row, {
reprocessing: true,
})
await db.put(response.table)
row = response.row
} else {
throw err
}
}
}
const response = await db.put(row)
row._rev = response.rev
// process the row before return, to include relationships
row = await outputProcessing(ctx, table, row, { squash: false })
return { row, table }
}

// doesn't do the outputProcessing
async function getRawTableData(ctx, db, tableId) {
let rows
if (tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
return rows
}

async function getView(db, viewName) {
let viewInfo
async function getFromDesignDoc() {
const designDoc = await db.get("_design/database")
viewInfo = designDoc.views[viewName]
return viewInfo
}
let migrate = false
if (env.SELF_HOSTED) {
viewInfo = await getFromDesignDoc()
} else {
try {
viewInfo = await db.get(generateMemoryViewID(viewName))
if (viewInfo) {
viewInfo = viewInfo.view
}
} catch (err) {
// check if it can be retrieved from design doc (needs migrated)
if (err.status !== 404) {
viewInfo = null
} else {
viewInfo = await getFromDesignDoc()
migrate = !!viewInfo
}
}
}
if (migrate) {
await migrateToInMemoryView(db, viewName)
}
if (!viewInfo) {
throw "View does not exist."
}
return viewInfo
}

exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
@@ -139,15 +211,18 @@ exports.fetchView = async ctx => {

const db = new CouchDB(appId)
const { calculation, group, field } = ctx.query
const designDoc = await db.get("_design/database")
const viewInfo = designDoc.views[viewName]
if (!viewInfo) {
throw "View does not exist."
const viewInfo = await getView(db, viewName)
let response
if (env.SELF_HOSTED) {
response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
} else {
const tableId = viewInfo.meta.tableId
const data = await getRawTableData(ctx, db, tableId)
response = await inMemoryViews.runView(viewInfo, calculation, group, data)
}
const response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})

let rows
if (!calculation) {
@@ -191,19 +266,9 @@ exports.fetch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)

let rows,
table = await db.get(ctx.params.tableId)
if (ctx.params.tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
const tableId = ctx.params.tableId
let table = await db.get(tableId)
let rows = await getRawTableData(ctx, db, tableId)
return outputProcessing(ctx, table, rows)
}

@@ -5,6 +5,7 @@ const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
const { processStringSync } = require("@budibase/string-templates")

validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
@@ -73,6 +74,11 @@ exports.validate = async ({ appId, tableId, row, table }) => {
errors[fieldName] = "Field not in list"
}
})
} else if (table.schema[fieldName].type === FieldTypes.FORMULA) {
res = validateJs.single(
processStringSync(table.schema[fieldName].formula, row),
constraints
)
} else {
res = validateJs.single(row[fieldName], constraints)
}

@@ -145,7 +145,7 @@ exports.save = async function (ctx) {
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.post(tableToSave)
const result = await db.put(tableToSave)
tableToSave._rev = result.rev

tableToSave = await tableSaveFunctions.after(tableToSave)

@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
// Populate the table with rows imported from CSV in a bulk update
const data = await csvParser.transform(dataImport)

let finalData = []
for (let i = 0; i < data.length; i++) {
let row = data[i]
row._id = generateRowID(table._id)
row.tableId = table._id
const processed = inputProcessing(user, table, row)
const processed = inputProcessing(user, table, row, {
noAutoRelationships: true,
})
table = processed.table
row = processed.row

// make sure link rows are up to date
row = await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_SAVE,
row,
tableId: row.tableId,
table,
})

for (let [fieldName, schema] of Object.entries(table.schema)) {
// check whether the options need to be updated for inclusion as part of the data import
if (
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
]
}
}
data[i] = row

// make sure link rows are up to date
finalData.push(
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_SAVE,
row,
tableId: row.tableId,
table,
})
)
}

await db.bulkDocs(data)
await db.bulkDocs(await Promise.all(finalData))
let response = await db.put(table)
table._rev = response._rev
}

@@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters")
const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row")
const { ViewNames } = require("../../../db/utils")

const controller = {
fetch: async ctx => {
const db = new CouchDB(ctx.appId)
const designDoc = await db.get("_design/database")
const response = []

for (let name of Object.keys(designDoc.views)) {
// Only return custom views, not built ins
if (Object.values(ViewNames).indexOf(name) !== -1) {
continue
}
response.push({
name,
...designDoc.views[name],
})
}

ctx.body = response
},
save: async ctx => {
const db = new CouchDB(ctx.appId)
const { originalName, ...viewToSave } = ctx.request.body
const designDoc = await db.get("_design/database")
const view = viewTemplate(viewToSave)

if (!viewToSave.name) {
ctx.throw(400, "Cannot create view without a name")
}

designDoc.views = {
...designDoc.views,
[viewToSave.name]: view,
}

// view has been renamed
if (originalName) {
delete designDoc.views[originalName]
}

await db.put(designDoc)

// add views to table document
const table = await db.get(ctx.request.body.tableId)
if (!table.views) table.views = {}
if (!view.meta.schema) {
view.meta.schema = table.schema
}
table.views[viewToSave.name] = view.meta

if (originalName) {
delete table.views[originalName]
}

await db.put(table)

ctx.body = {
...table.views[viewToSave.name],
name: viewToSave.name,
}
},
destroy: async ctx => {
const db = new CouchDB(ctx.appId)
const designDoc = await db.get("_design/database")
const viewName = decodeURI(ctx.params.viewName)
const view = designDoc.views[viewName]
delete designDoc.views[viewName]

await db.put(designDoc)

const table = await db.get(view.meta.tableId)
delete table.views[viewName]
await db.put(table)

ctx.body = view
},
exportView: async ctx => {
const db = new CouchDB(ctx.appId)
const designDoc = await db.get("_design/database")
const viewName = decodeURI(ctx.query.view)

const view = designDoc.views[viewName]
const format = ctx.query.format
if (!format) {
ctx.throw(400, "Format must be specified, either csv or json")
}

if (view) {
ctx.params.viewName = viewName
// Fetch view rows
ctx.query = {
group: view.meta.groupBy,
calculation: view.meta.calculation,
stats: !!view.meta.field,
field: view.meta.field,
}
} else {
// table all_ view
/* istanbul ignore next */
ctx.params.viewName = viewName
}

await fetchView(ctx)

let schema = view && view.meta && view.meta.schema
if (!schema) {
const tableId = ctx.params.tableId || view.meta.tableId
const table = await db.get(tableId)
schema = table.schema
}

// Export part
let headers = Object.keys(schema)
const exporter = exporters[format]
const filename = `${viewName}.${format}`
// send down the file
ctx.attachment(filename)
ctx.body = apiFileReturn(exporter(headers, ctx.body))
},
exports.fetch = async ctx => {
const db = new CouchDB(ctx.appId)
ctx.body = await getViews(db)
}

module.exports = controller
exports.save = async ctx => {
const db = new CouchDB(ctx.appId)
const { originalName, ...viewToSave } = ctx.request.body
const view = viewTemplate(viewToSave)

if (!viewToSave.name) {
ctx.throw(400, "Cannot create view without a name")
}

await saveView(db, originalName, viewToSave.name, view)

// add views to table document
const table = await db.get(ctx.request.body.tableId)
if (!table.views) table.views = {}
if (!view.meta.schema) {
view.meta.schema = table.schema
}
table.views[viewToSave.name] = view.meta
if (originalName) {
delete table.views[originalName]
}
await db.put(table)

ctx.body = {
...table.views[viewToSave.name],
name: viewToSave.name,
}
}

exports.destroy = async ctx => {
const db = new CouchDB(ctx.appId)
const viewName = decodeURI(ctx.params.viewName)
const view = await deleteView(db, viewName)
const table = await db.get(view.meta.tableId)
delete table.views[viewName]
await db.put(table)

ctx.body = view
}

exports.exportView = async ctx => {
const db = new CouchDB(ctx.appId)
const viewName = decodeURI(ctx.query.view)
const view = await getView(db, viewName)

const format = ctx.query.format
if (!format) {
ctx.throw(400, "Format must be specified, either csv or json")
}

if (view) {
ctx.params.viewName = viewName
// Fetch view rows
ctx.query = {
group: view.meta.groupBy,
calculation: view.meta.calculation,
stats: !!view.meta.field,
field: view.meta.field,
}
} else {
// table all_ view
/* istanbul ignore next */
ctx.params.viewName = viewName
}

await fetchView(ctx)

let schema = view && view.meta && view.meta.schema
if (!schema) {
const tableId = ctx.params.tableId || view.meta.tableId
const table = await db.get(tableId)
schema = table.schema
}

// Export part
let headers = Object.keys(schema)
const exporter = exporters[format]
const filename = `${viewName}.${format}`
// send down the file
ctx.attachment(filename)
ctx.body = apiFileReturn(exporter(headers, ctx.body))
}

@@ -0,0 +1,109 @@
const {
ViewNames,
generateMemoryViewID,
getMemoryViewParams,
} = require("../../../db/utils")
const env = require("../../../environment")

exports.getView = async (db, viewName) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
return designDoc.views[viewName]
} else {
const viewDoc = await db.get(generateMemoryViewID(viewName))
return viewDoc.view
}
}

exports.getViews = async db => {
const response = []
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
for (let name of Object.keys(designDoc.views)) {
// Only return custom views, not built ins
if (Object.values(ViewNames).indexOf(name) !== -1) {
continue
}
response.push({
name,
...designDoc.views[name],
})
}
} else {
const views = (
await db.allDocs(
getMemoryViewParams({
include_docs: true,
})
)
).rows.map(row => row.doc)
for (let viewDoc of views) {
response.push({
name: viewDoc.name,
...viewDoc.view,
})
}
}
return response
}

exports.saveView = async (db, originalName, viewName, viewTemplate) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
designDoc.views = {
...designDoc.views,
[viewName]: viewTemplate,
}
// view has been renamed
if (originalName) {
delete designDoc.views[originalName]
}
await db.put(designDoc)
} else {
const id = generateMemoryViewID(viewName)
const originalId = originalName ? generateMemoryViewID(originalName) : null
const viewDoc = {
_id: id,
view: viewTemplate,
name: viewName,
tableId: viewTemplate.meta.tableId,
}
try {
const old = await db.get(id)
if (originalId) {
const originalDoc = await db.get(originalId)
await db.remove(originalDoc._id, originalDoc._rev)
}
if (old && old._rev) {
viewDoc._rev = old._rev
}
} catch (err) {
// didn't exist, just skip
}
await db.put(viewDoc)
}
}

exports.deleteView = async (db, viewName) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
const view = designDoc.views[viewName]
delete designDoc.views[viewName]
await db.put(designDoc)
return view
} else {
const id = generateMemoryViewID(viewName)
const viewDoc = await db.get(id)
await db.remove(viewDoc._id, viewDoc._rev)
return viewDoc.view
}
}

exports.migrateToInMemoryView = async (db, viewName) => {
// delete the view initially
const designDoc = await db.get("_design/database")
const view = designDoc.views[viewName]
delete designDoc.views[viewName]
await db.put(designDoc)
await exports.saveView(db, null, viewName, view)
}

@@ -2,11 +2,12 @@ const Router = require("@koa/router")
const controller = require("../controllers/application")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/auth/permissions")
const usage = require("../../middleware/usageQuota")

const router = Router()

router
.post("/api/applications", authorized(BUILDER), controller.create)
.post("/api/applications", authorized(BUILDER), usage, controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@@ -21,6 +22,11 @@ router
authorized(BUILDER),
controller.revertClient
)
.delete("/api/applications/:appId", authorized(BUILDER), controller.delete)
.delete(
"/api/applications/:appId",
authorized(BUILDER),
usage,
controller.delete
)

module.exports = router

@@ -94,7 +94,8 @@ describe("/datasources", () => {
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
expect(pg.queryMock).toHaveBeenCalledWith(`select "users"."name" as "users.name", "users"."age" as "users.age" from "users" where "users"."name" ilike $1 limit $2`, ["John%", 5000])
const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
})
})

@@ -72,7 +72,7 @@ describe("/roles", () => {
.expect(200)
expect(res.body.length).toBeGreaterThan(0)
const power = res.body.find(role => role._id === BUILTIN_ROLE_IDS.POWER)
expect(power.permissions[table._id]).toEqual("read")
expect(power.permissions[table._id]).toEqual(["read"])
})
})

@@ -205,7 +205,7 @@ describe("/views", () => {
})

describe("exportView", () => {
it("should be able to delete a view", async () => {
it("should be able to export a view", async () => {
await config.createTable(priceTable())
await config.createRow()
const view = await config.createView()

@@ -5,7 +5,6 @@ const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/auth/permissions")
const usage = require("../../middleware/usageQuota")

const router = Router()

@@ -28,13 +27,11 @@ router
.post(
"/api/users/metadata/self",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
usage,
controller.updateSelfMetadata
)
.delete(
"/api/users/metadata/:id",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
usage,
controller.destroyMetadata
)

@@ -8,7 +8,6 @@ const {
PermissionTypes,
PermissionLevels,
} = require("@budibase/auth/permissions")
const usage = require("../../middleware/usageQuota")

const router = Router()

@@ -25,9 +24,8 @@ router
"/api/views/:viewName",
paramResource("viewName"),
authorized(BUILDER),
usage,
viewController.destroy
)
.post("/api/views", authorized(BUILDER), usage, viewController.save)
.post("/api/views", authorized(BUILDER), viewController.save)

module.exports = router

@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")

exports.definition = {
name: "Create Row",
@@ -59,7 +60,7 @@ exports.definition = {
},
}

exports.run = async function ({ inputs, appId, apiKey, emitter }) {
exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
}
}
// have to clean up the row, remove the table from it
const ctx = {
const ctx = buildCtx(appId, emitter, {
body: inputs.row,
params: {
tableId: inputs.row.tableId,
},
request: {
body: inputs.row,
},
appId,
eventEmitter: emitter,
}
})

try {
inputs.row = await automationUtils.cleanUpRow(
@@ -86,8 +83,8 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
inputs.row.tableId,
inputs.row
)
if (env.isProd()) {
await usage.update(apiKey, usage.Properties.ROW, 1)
if (env.USE_QUOTAS) {
await usage.update(usage.Properties.ROW, 1)
}
await rowController.save(ctx)
return {

@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")

exports.definition = {
description: "Delete a row from your database",
@@ -51,7 +52,7 @@ exports.definition = {
},
}

exports.run = async function ({ inputs, appId, apiKey, emitter }) {
exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.id == null || inputs.revision == null) {
return {
success: false,
@@ -60,23 +61,20 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
},
}
}
let ctx = {

let ctx = buildCtx(appId, emitter, {
body: {
_id: inputs.id,
_rev: inputs.revision,
},
params: {
tableId: inputs.tableId,
},
request: {
body: {
_id: inputs.id,
_rev: inputs.revision,
},
},
appId,
eventEmitter: emitter,
}
})

try {
if (env.isProd()) {
await usage.update(apiKey, usage.Properties.ROW, -1)
await usage.update(usage.Properties.ROW, -1)
}
await rowController.destroy(ctx)
return {

@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const tableController = require("../../api/controllers/table")
const { FieldTypes } = require("../../constants")
const { buildCtx } = require("./utils")

const SortOrders = {
ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
}

async function getTable(appId, tableId) {
const ctx = {
const ctx = buildCtx(appId, null, {
params: {
id: tableId,
},
appId,
}
})
await tableController.find(ctx)
return ctx.body
}
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
sortType =
fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
}
const ctx = {
const ctx = buildCtx(appId, null, {
params: {
tableId,
},
request: {
body: {
sortOrder,
sortType,
sort: sortColumn,
query: filters || {},
limit,
},
body: {
sortOrder,
sortType,
sort: sortColumn,
query: filters || {},
limit,
},
appId,
}
})
try {
await rowController.search(ctx)
return {

@@ -53,7 +53,7 @@ exports.run = async function ({ inputs }) {
contents = "<h1>No content</h1>"
}
try {
let response = await sendSmtpEmail(to, from, subject, contents)
let response = await sendSmtpEmail(to, from, subject, contents, true)
return {
success: true,
response,

@@ -1,5 +1,6 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const { buildCtx } = require("./utils")

exports.definition = {
name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
}

// have to clean up the row, remove the table from it
const ctx = {
const ctx = buildCtx(appId, emitter, {
body: {
...inputs.row,
_id: inputs.rowId,
},
params: {
rowId: inputs.rowId,
},
request: {
body: {
...inputs.row,
_id: inputs.rowId,
},
},
appId,
eventEmitter: emitter,
}
})

try {
inputs.row = await automationUtils.cleanUpRowById(

@@ -13,8 +13,6 @@ const { makePartial } = require("../../tests/utilities")
const { cleanInputValues } = require("../automationUtils")
const setup = require("./utilities")

usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })

describe("Run through some parts of the automations system", () => {
let config = setup.getConfig()

@@ -46,7 +46,7 @@ describe("test the create row action", () => {
await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row
})
expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
})
})

@@ -37,7 +37,7 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})

@@ -4,8 +4,10 @@ const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants
const CouchDB = require("../db")
const { DocumentTypes } = require("../db/utils")
const { DocumentTypes, isDevAppID } = require("../db/utils")
const { doInTenant } = require("@budibase/auth/tenancy")
const env = require("../environment")
const usage = require("../utilities/usageQuota")

const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId

@@ -80,7 +82,6 @@ class Orchestrator {
return stepFn({
inputs: step.inputs,
appId: this._appId,
apiKey: automation.apiKey,
emitter: this._emitter,
context: this._context,
})
@@ -95,6 +96,11 @@ class Orchestrator {
return err
}
}

// Increment quota for automation runs
if (!env.SELF_HOSTED && !isDevAppID(this._appId)) {
usage.update(usage.Properties.AUTOMATION, 1)
}
return this.executionOutput
}
}

@@ -0,0 +1,48 @@
const PouchDB = require("pouchdb")
const memory = require("pouchdb-adapter-memory")
const newid = require("./newid")

PouchDB.plugin(memory)
const Pouch = PouchDB.defaults({
prefix: undefined,
adapter: "memory",
})

exports.runView = async (view, calculation, group, data) => {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
const db = new Pouch(newid())
// write all the docs to the in memory Pouch (remove revs)
await db.bulkDocs(
data.map(row => ({
...row,
_rev: undefined,
}))
)
let fn = (doc, emit) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
const queryFns = {
meta: view.meta,
map: fn,
}
if (view.reduce) {
queryFns.reduce = view.reduce
}
const response = await db.query(queryFns, {
include_docs: !calculation,
group: !!group,
})
// need to fix the revs to be totally accurate
for (let row of response.rows) {
if (!row._rev || !row._id) {
continue
}
const found = data.find(possible => possible._id === row._id)
if (found) {
row._rev = found._rev
}
}
await db.destroy()
return response
}

@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
// create DBs
const db = new CouchDB(appId)
const linkedRowIds = links.map(link => link.id)
let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
const uniqueRowIds = [...new Set(linkedRowIds)]
let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
row => row.doc
)
// convert the unique db rows back to a full list of linked rows
const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
// need to handle users as specific cases
let [users, other] = partition(linked, linkRow =>
linkRow._id.startsWith(USER_METDATA_PREFIX)
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
let linkController = new LinkController(args)
try {
if (
!(await linkController.doesTableHaveLinkedFields()) &&
!(await linkController.doesTableHaveLinkedFields(table)) &&
(oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
) {

@@ -39,6 +39,7 @@ const DocumentTypes = {
QUERY: "query",
DEPLOYMENTS: "deployments",
METADATA: "metadata",
MEM_VIEW: "view",
}

const ViewNames = {
@@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
}

exports.generateMemoryViewID = viewName => {
return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
}

exports.getMemoryViewParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
}

/**
* This can be used with the db.allDocs to get a list of IDs
*/

@@ -1,3 +1,5 @@
import { Table } from "./common"

export enum Operation {
CREATE = "CREATE",
READ = "READ",
@@ -136,6 +138,9 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
meta?: {
table?: Table
}
extra?: {
idFilter?: SearchFilters
}

@@ -26,7 +26,7 @@ module.exports = {
COUCH_DB_URL: process.env.COUCH_DB_URL,
MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
SELF_HOSTED: process.env.SELF_HOSTED,
AWS_REGION: process.env.AWS_REGION,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -66,3 +66,10 @@ module.exports = {
return !isDev()
},
}

// convert any strings to numbers if required, like "0" would be true otherwise
for (let [key, value] of Object.entries(module.exports)) {
if (typeof value === "string" && !isNaN(parseInt(value))) {
module.exports[key] = parseInt(value)
}
}

@@ -1,7 +1,5 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
// if requesting a single row then need to up the limit for the sake of joins
const SINGLE_ROW_LIMIT = 100
import {
QueryJson,
SearchFilters,
@@ -146,46 +144,48 @@ function buildCreate(
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
const tableName = endpoint.entityId
let query: KnexQuery = knex(tableName)
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
let selectStatement: string | string[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
query = query.select(resource.fields.map(field => `${field} as ${field}`))
} else {
query = query.select("*")
selectStatement = resource.fields.map(field => `${field} as ${field}`)
}
let foundLimit = limit || BASE_LIMIT
// handle pagination
let foundOffset: number | null = null
if (paginate && paginate.page && paginate.limit) {
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
foundOffset = offset
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
}
// start building the query
let query: KnexQuery = knex(tableName).limit(foundLimit)
if (foundOffset) {
query = query.offset(foundOffset)
}
// handle where
query = addFilters(tableName, query, filters)
// handle join
query = addRelationships(query, tableName, relationships)
// handle sorting
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
}
}
let foundLimit = limit || BASE_LIMIT
// handle pagination
if (paginate && paginate.page && paginate.limit) {
query = addFilters(tableName, query, filters)
// @ts-ignore
let preQuery: KnexQuery = knex({
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
query = query.offset(offset)
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
}
if (foundLimit === 1) {
foundLimit = SINGLE_ROW_LIMIT
}
query = query.limit(foundLimit)
return query
[tableName]: query,
}).select(selectStatement)
// handle joins
return addRelationships(preQuery, tableName, relationships)
}

function buildUpdate(

@@ -12,7 +12,11 @@ import { getSqlQuery } from "./utils"
module MySQLModule {
const mysql = require("mysql")
const Sql = require("./base/sql")
const { buildExternalTableId, convertType } = require("./utils")
const {
buildExternalTableId,
convertType,
copyExistingPropsOver,
} = require("./utils")
const { FieldTypes } = require("../constants")

interface MySQLConfig {
@@ -104,7 +108,7 @@ module MySQLModule {
client: any,
query: SqlQuery,
connect: boolean = true
): Promise<any[]> {
): Promise<any[] | any> {
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
if (connect) {
@@ -194,18 +198,7 @@ module MySQLModule {
}
}

// add the existing relationships from the entities if they exist, to prevent them from being overridden
if (entities && entities[tableName]) {
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
copyExistingPropsOver(tableName, tables, entities)
}

this.client.end()
@@ -249,6 +242,23 @@ module MySQLModule {
return internalQuery(this.client, input, false)
}

// when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(results: any, json: QueryJson) {
if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
json.extra = {
idFilter: {
equal: {
[primaryKey]: results.insertId,
},
},
}
return json
}

async query(json: QueryJson) {
const operation = this._operation(json)
this.client.connect()
@@ -261,7 +271,7 @@ module MySQLModule {
const results = await internalQuery(this.client, input, false)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
row = this.getReturningRow(json)
row = this.getReturningRow(this.checkLookupKeys(results, json))
}
this.client.end()
if (operation !== Operation.READ) {

@@ -12,7 +12,14 @@ module PostgresModule {
const { Pool } = require("pg")
const Sql = require("./base/sql")
const { FieldTypes } = require("../constants")
const { buildExternalTableId, convertType } = require("./utils")
const {
buildExternalTableId,
convertType,
copyExistingPropsOver,
} = require("./utils")
const { escapeDangerousCharacters } = require("../utilities")

const JSON_REGEX = /'{.*}'::json/s

interface PostgresConfig {
host: string
@@ -84,13 +91,27 @@ module PostgresModule {
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
real: FieldTypes.NUMBER,
"double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
date: FieldTypes.DATETIME,
}

async function internalQuery(client: any, query: SqlQuery) {
// need to handle a specific issue with json data types in postgres,
// new lines inside the JSON data will break it
if (query && query.sql) {
const matches = query.sql.match(JSON_REGEX)
if (matches && matches.length > 0) {
for (let match of matches) {
const escaped = escapeDangerousCharacters(match)
query.sql = query.sql.replace(match, escaped)
}
}
}
try {
return await client.query(query.sql, query.bindings || [])
} catch (err) {
@@ -173,31 +194,30 @@ module PostgresModule {
name: tableName,
schema: {},
}

// add the existing relationships from the entities if they exist, to prevent them from being overridden
if (entities && entities[tableName]) {
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
}

const type: string = convertType(column.data_type, TYPE_MAP)
const isAuto: boolean =
const identity = !!(
column.identity_generation ||
column.identity_start ||
column.identity_increment
)
const hasDefault =
typeof column.column_default === "string" &&
column.column_default.startsWith("nextval")
const isGenerated =
column.is_generated && column.is_generated !== "NEVER"
const isAuto: boolean = hasDefault || identity || isGenerated
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,
type,
}
}

for (let tableName of Object.keys(tables)) {
copyExistingPropsOver(tableName, tables, entities)
}
this.tables = tables
}

@@ -57,7 +57,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateReadJson())
expect(query).toEqual({
bindings: [limit],
sql: `select * from "${TABLE_NAME}" limit $1`
sql: `select * from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -68,7 +68,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [limit],
sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from "${TABLE_NAME}" limit $1`
sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -82,7 +82,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: ["John%", limit],
sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2`
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2) as "${TABLE_NAME}"`
})
})
@@ -99,7 +99,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [2, 10, limit],
sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3`
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3) as "${TABLE_NAME}"`
})
})
@@ -115,7 +115,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [10, "John", limit],
sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3`
sql: `select * from (select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3) as "${TABLE_NAME}"`
})
})
@@ -160,7 +160,7 @@ describe("SQL query builder", () => {
const query = new Sql("mssql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select top (@p0) * from [${TABLE_NAME}]`
sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]`
})
})
@@ -168,7 +168,7 @@ describe("SQL query builder", () => {
const query = new Sql("mysql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from \`${TABLE_NAME}\` limit ?`
sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) as \`${TABLE_NAME}\``
})
})
})

@@ -82,3 +82,25 @@ export function isIsoDateString(str: string) {
let d = new Date(str)
return d.toISOString() === str
}

// add the existing relationships from the entities if they exist, to prevent them from being overridden
export function copyExistingPropsOver(
tableName: string,
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
if (entities && entities[tableName]) {
if (entities[tableName].primaryDisplay) {
tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
}
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
}

@@ -39,7 +39,7 @@ class TestConfiguration {
if (bool) {
env.isDev = () => false
env.isProd = () => true
this.ctx.auth = { apiKey: "test" }
this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
@@ -114,7 +114,7 @@ describe("usageQuota middleware", () => {

await config.executeMiddleware()

expect(usageQuota.update).toHaveBeenCalledWith("test", "rows", 1)
expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})

@@ -131,7 +131,7 @@ describe("usageQuota middleware", () => {
])
await config.executeMiddleware()

expect(usageQuota.update).toHaveBeenCalledWith("test", "storage", 10100)
expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
expect(config.next).toHaveBeenCalled()
})
})

@@ -13,6 +13,7 @@ const DOMAIN_MAP = {
upload: usageQuota.Properties.UPLOAD,
views: usageQuota.Properties.VIEW,
users: usageQuota.Properties.USER,
applications: usageQuota.Properties.APPS,
// this will not be updated by endpoint calls
// instead it will be updated by triggerInfo
automationRuns: usageQuota.Properties.AUTOMATION,
@@ -57,9 +58,9 @@ module.exports = async (ctx, next) => {
usage = files.map(file => file.size).reduce((total, size) => total + size)
}
try {
await usageQuota.update(ctx.auth.apiKey, property, usage)
await usageQuota.update(property, usage)
return next()
} catch (err) {
ctx.throw(403, err)
ctx.throw(400, err)
}
}

@@ -10,6 +10,14 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))

exports.isDev = env.isDev

exports.removeFromArray = (array, element) => {
const index = array.indexOf(element)
if (index !== -1) {
array.splice(index, 1)
}
return array
}

/**
* Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes.
@@ -106,3 +114,13 @@ exports.deleteEntityMetadata = async (appId, type, entityId) => {
await db.remove(id, rev)
}
}

exports.escapeDangerousCharacters = string => {
return string
.replace(/[\\]/g, "\\\\")
.replace(/[\b]/g, "\\b")
.replace(/[\f]/g, "\\f")
.replace(/[\n]/g, "\\n")
.replace(/[\r]/g, "\\r")
.replace(/[\t]/g, "\\t")
}

@@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param {Object} row The row which is to be updated with information for the auto columns.
* @param {Object} opts specific options for function to carry out optional features.
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
* for automatic ID purposes.
*/
function processAutoColumn(user, table, row) {
function processAutoColumn(
user,
table,
row,
opts = { reprocessing: false, noAutoRelationships: false }
) {
let now = new Date().toISOString()
// if a row doesn't have a revision then it doesn't exist yet
const creating = !row._rev
@@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
}
switch (schema.subtype) {
case AutoFieldSubTypes.CREATED_BY:
if (creating) {
if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId]
}
break
@@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
}
break
case AutoFieldSubTypes.UPDATED_BY:
row[key] = [user.userId]
if (!opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId]
}
break
case AutoFieldSubTypes.UPDATED_AT:
row[key] = now
@@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
}
return { table, row }
}
exports.processAutoColumn = processAutoColumn

/**
* This will coerce a value to the correct types based on the type transform map
@@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
* @param {object} user the user which is performing the input.
* @param {object} row the row which is being created/updated.
* @param {object} table the table which the row is being saved to.
* @param {object} opts some input processing options (like disabling auto-column relationships).
* @returns {object} the row which has been prepared to be written to the DB.
*/
exports.inputProcessing = (user = {}, table, row) => {
exports.inputProcessing = (
user = {},
table,
row,
opts = { noAutoRelationships: false }
) => {
let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out
const copiedTable = cloneDeep(table)
@@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
}
}
// handle auto columns - this returns an object like {table, row}
return processAutoColumn(user, copiedTable, clonedRow)
return processAutoColumn(user, copiedTable, clonedRow, opts)
}

/**

@@ -1,41 +1,9 @@
const env = require("../environment")
const { apiKeyTable } = require("../db/dynamoClient")

const DEFAULT_USAGE = {
rows: 0,
storage: 0,
views: 0,
automationRuns: 0,
users: 0,
}

const DEFAULT_PLAN = {
rows: 1000,
// 1 GB
storage: 8589934592,
views: 10,
automationRuns: 100,
users: 10000,
}

function buildUpdateParams(key, property, usage) {
return {
primary: key,
condition:
"attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
expression: "ADD #quota.#prop :usage",
names: {
"#quota": "usageQuota",
"#prop": property,
"#limits": "usageLimits",
"#quotaReset": "quotaReset",
},
values: {
":usage": usage,
":now": Date.now(),
},
}
}
const { getGlobalDB } = require("@budibase/auth/tenancy")
const {
StaticDatabases,
generateNewUsageQuotaDoc,
} = require("@budibase/auth/db")

function getNewQuotaReset() {
return Date.now() + 2592000000
@@ -47,59 +15,59 @@ exports.Properties = {
VIEW: "views",
USER: "users",
AUTOMATION: "automationRuns",
APPS: "apps",
EMAILS: "emails",
}

exports.getAPIKey = async appId => {
if (!env.USE_QUOTAS) {
return { apiKey: null }
async function getUsageQuotaDoc(db) {
let quota
try {
quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota)
} catch (err) {
// doc doesn't exist. Create it
quota = await db.post(generateNewUsageQuotaDoc())
}
return apiKeyTable.get({ primary: appId })

return quota
}

/**
* Given a specified API key this will add to the usage object for the specified property.
* @param {string} apiKey The API key which is to be updated.
* Given a specified tenantId this will add to the usage object for the specified property.
* @param {string} property The property which is to be added to (within the nested usageQuota object).
* @param {number} usage The amount (this can be negative) to adjust the number by.
* @returns {Promise<void>} When this completes the API key will now be up to date - the quota period may have
* also been reset after this call.
*/
exports.update = async (apiKey, property, usage) => {
exports.update = async (property, usage) => {
if (!env.USE_QUOTAS) {
return
}

try {
await apiKeyTable.update(buildUpdateParams(apiKey, property, usage))
} catch (err) {
// conditional check means the condition failed, need to check why
if (err.code === "ConditionalCheckFailedException") {
// get the API key so we can check it
const keyObj = await apiKeyTable.get({ primary: apiKey })
// the usage quota or usage limits didn't exist
if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) {
keyObj.usageQuota =
keyObj.usageQuota == null ? DEFAULT_USAGE : keyObj.usageQuota
keyObj.usageLimits =
keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits
keyObj.quotaReset = getNewQuotaReset()
await apiKeyTable.put({ item: keyObj })
return
}
// we have in fact breached the reset period
else if (keyObj && keyObj.quotaReset <= Date.now()) {
// update the quota reset period and reset the values for all properties
keyObj.quotaReset = getNewQuotaReset()
for (let prop of Object.keys(keyObj.usageQuota)) {
if (prop === property) {
keyObj.usageQuota[prop] = usage > 0 ? usage : 0
} else {
keyObj.usageQuota[prop] = 0
}
}
await apiKeyTable.put({ item: keyObj })
return
const db = getGlobalDB()
const quota = await getUsageQuotaDoc(db)

// Check if the quota needs reset
if (Date.now() >= quota.quotaReset) {
quota.quotaReset = getNewQuotaReset()
for (let prop of Object.keys(quota.usageQuota)) {
quota.usageQuota[prop] = 0
}
}

// increment the quota
quota.usageQuota[property] += usage

if (quota.usageQuota[property] >= quota.usageLimits[property]) {
throw new Error(
`You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
)
}

// update the usage quotas
await db.put(quota)
} catch (err) {
console.error(`Error updating usage quotas for ${property}`, err)
throw err
}
}