Merge branch 'develop' of github.com:Budibase/budibase into ak-fixes

Andrew Kingston 2021-09-23 09:18:52 +01:00
commit ec2f727af3
88 changed files with 1879 additions and 568 deletions

View File

@@ -7,6 +7,7 @@ on:
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

View File

@@ -4,9 +4,16 @@ on:
   push:
     branches:
       - master
+  workflow_dispatch:
+    inputs:
+      release_self_host:
+        description: 'Release to self hosters? (Y/N)'
+        required: true
+        default: 'N'
 env:
   POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
+  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
   POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
   SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
@@ -47,7 +54,19 @@ jobs:
         uses: "WyriHaximus/github-action-get-previous-tag@v1"
       - name: Build/release Docker images
-        run: |
+        if: ${{ github.event.inputs.release_self_host != 'Y' }}
+        run: |
+          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
+          yarn build
+          yarn build:docker
+        env:
+          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
+          BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
+      - name: Build/release Docker images (Self Host)
+        if: ${{ github.event.inputs.release_self_host == 'Y' }}
+        run: |
           docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
           yarn build
           yarn build:docker

View File

@@ -1,5 +1,5 @@
 {
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View File

@@ -42,7 +42,8 @@
     "lint:fix": "yarn run lint:fix:ts && yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "test:e2e": "lerna run cy:test",
     "test:e2e:ci": "lerna run cy:ci",
-    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
+    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
+    "build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "release:helm": "./scripts/release_helm_chart.sh",
     "multi:enable": "lerna run multi:enable",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",

View File

@@ -16,6 +16,7 @@ module.exports = {
   REDIS_PASSWORD: process.env.REDIS_PASSWORD,
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+  AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
   MULTI_TENANCY: process.env.MULTI_TENANCY,

View File

@@ -12,6 +12,7 @@ const {
   auditLog,
   tenancy,
   appTenancy,
+  authError,
 } = require("./middleware")
 const { setDB } = require("./db")
 const userCache = require("./cache/user")
@@ -60,6 +61,7 @@
     buildTenancyMiddleware: tenancy,
     buildAppTenancyMiddleware: appTenancy,
     auditLog,
+    authError,
   },
   cache: {
     user: userCache,

View File

@@ -2,6 +2,7 @@ const jwt = require("./passport/jwt")
 const local = require("./passport/local")
 const google = require("./passport/google")
 const oidc = require("./passport/oidc")
+const { authError } = require("./passport/utils")
 const authenticated = require("./authenticated")
 const auditLog = require("./auditLog")
 const tenancy = require("./tenancy")
@@ -16,4 +17,5 @@ module.exports = {
   auditLog,
   tenancy,
   appTenancy,
+  authError,
 }

View File

@@ -27,7 +27,11 @@ async function authenticate(accessToken, refreshToken, profile, done) {
  * from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
  * @returns Dynamically configured Passport Google Strategy
  */
-exports.strategyFactory = async function (config, callbackUrl) {
+exports.strategyFactory = async function (
+  config,
+  callbackUrl,
+  verify = authenticate
+) {
   try {
     const { clientID, clientSecret } = config
@@ -43,7 +47,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
         clientSecret: config.clientSecret,
         callbackURL: callbackUrl,
       },
-      authenticate
+      verify
     )
   } catch (err) {
     console.error(err)
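
The hunk above makes the strategy's verify callback injectable while keeping the built-in `authenticate` handler as the default. A minimal usage sketch, assuming placeholder credentials and a hypothetical callback URL (neither is taken from this commit):

```js
const { strategyFactory } = require("./middleware/passport/google")

// Hypothetical custom verify callback; tenant-aware user syncing
// could be swapped in here instead of the default handler.
async function customVerify(accessToken, refreshToken, profile, done) {
  done(null, { id: profile.id, email: profile._json?.email })
}

async function buildStrategy() {
  // Placeholder config values for illustration only.
  return strategyFactory(
    { clientID: "<client-id>", clientSecret: "<client-secret>" },
    "https://example.com/auth/google/callback",
    customVerify // omit to fall back to the default authenticate
  )
}
```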

View File

@@ -104,7 +104,7 @@ describe("third party common", () => {
       _id: id,
       email: email,
     }
-    const response = await db.post(dbUser)
+    const response = await db.put(dbUser)
     dbUser._rev = response.rev
   }

View File

@@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
     dbUser = await syncUser(dbUser, thirdPartyUser)
   // create or sync the user
-  const response = await db.post(dbUser)
+  const response = await db.put(dbUser)
   dbUser._rev = response.rev
   // authenticate
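
The `post` to `put` switch here (and in the test above) follows the PouchDB/CouchDB convention: `post()` is for letting the database generate an `_id`, while `put()` is the create-or-update call for documents that already carry their own `_id` (plus `_rev` on updates). A minimal sketch of that pattern:

```js
const PouchDB = require("pouchdb")
const db = new PouchDB("users")

// Upsert a document that already has a fixed _id.
async function upsertUser(user) {
  try {
    const existing = await db.get(user._id)
    user._rev = existing._rev // _rev is required to update an existing doc
  } catch (err) {
    if (err.status !== 404) throw err // 404 simply means first write
  }
  const response = await db.put(user)
  user._rev = response.rev
  return user
}
```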

View File

@@ -73,6 +73,7 @@ exports.ObjectStore = bucket => {
   AWS.config.update({
     accessKeyId: env.MINIO_ACCESS_KEY,
     secretAccessKey: env.MINIO_SECRET_KEY,
+    region: env.AWS_REGION,
   })
   const config = {
     s3ForcePathStyle: true,

View File

@@ -30,6 +30,10 @@ exports.invalidateSessions = async (userId, sessionId = null) => {
     sessions.push({ key: makeSessionID(userId, sessionId) })
   } else {
     sessions = await getSessionsForUser(userId)
+    sessions.forEach(
+      session =>
+        (session.key = makeSessionID(session.userId, session.sessionId))
+    )
   }
   const client = await redis.getSessionClient()
   const promises = []
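
The added `forEach` rebuilds the deletable Redis key for every session in the bulk-invalidation branch; previously only the single-session branch set `key`. A small sketch of the pattern, with an assumed key format for illustration only:

```js
// Assumed key shape, not taken from this commit.
const makeSessionID = (userId, sessionId) => `${userId}/${sessionId}`

const sessions = [
  { userId: "us_a", sessionId: "s1" },
  { userId: "us_a", sessionId: "s2" },
]

// Mirror of the added logic: derive a key for each stored session.
sessions.forEach(
  session => (session.key = makeSessionID(session.userId, session.sessionId))
)
// sessions now carry keys like "us_a/s1", ready to be deleted in bulk.
```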

View File

@@ -4470,9 +4470,9 @@ tmp@^0.0.33:
     os-tmpdir "~1.0.2"

 tmpl@1.0.x:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
-  integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+  version "1.0.5"
+  resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+  integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==

 to-fast-properties@^2.0.0:
   version "2.0.0"

View File

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.125-alpha.19",
-    "@budibase/client": "^0.9.125-alpha.19",
+    "@budibase/bbui": "^0.9.140-alpha.5",
+    "@budibase/client": "^0.9.140-alpha.5",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^0.9.125-alpha.19",
+    "@budibase/string-templates": "^0.9.140-alpha.5",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",

View File

@@ -1,16 +1,10 @@
 <script>
-  import { onMount } from "svelte"
   import { Router } from "@roxi/routify"
   import { routes } from "../.routify/routes"
-  import { initialise } from "builderStore"
   import { NotificationDisplay } from "@budibase/bbui"
   import { parse, stringify } from "qs"
   import HelpIcon from "components/common/HelpIcon.svelte"

-  onMount(async () => {
-    await initialise()
-  })
-
   const queryHandler = { parse, stringify }
 </script>

View File

@@ -1,139 +0,0 @@
-import * as Sentry from "@sentry/browser"
-import posthog from "posthog-js"
-import api from "builderStore/api"
-
-let analyticsEnabled
-const posthogConfigured = process.env.POSTHOG_TOKEN && process.env.POSTHOG_URL
-const sentryConfigured = process.env.SENTRY_DSN
-
-const FEEDBACK_SUBMITTED_KEY = "budibase:feedback_submitted"
-const APP_FIRST_STARTED_KEY = "budibase:first_run"
-const feedbackHours = 12
-
-async function activate() {
-  if (analyticsEnabled === undefined) {
-    // only the server knows the true NODE_ENV
-    // this was an issue as NODE_ENV = 'cypress' on the server,
-    // but 'production' on the client
-    const response = await api.get("/api/analytics")
-    analyticsEnabled = (await response.json()).enabled === true
-  }
-  if (!analyticsEnabled) return
-  if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
-  if (posthogConfigured) {
-    posthog.init(process.env.POSTHOG_TOKEN, {
-      autocapture: false,
-      capture_pageview: false,
-      api_host: process.env.POSTHOG_URL,
-    })
-    posthog.set_config({ persistence: "cookie" })
-  }
-}
-
-function identify(id) {
-  if (!analyticsEnabled || !id) return
-  if (posthogConfigured) posthog.identify(id)
-  if (sentryConfigured)
-    Sentry.configureScope(scope => {
-      scope.setUser({ id: id })
-    })
-}
-
-async function identifyByApiKey(apiKey) {
-  if (!analyticsEnabled) return true
-  try {
-    const response = await fetch(
-      `https://03gaine137.execute-api.eu-west-1.amazonaws.com/prod/account/id?api_key=${apiKey.trim()}`
-    )
-    if (response.status === 200) {
-      const id = await response.json()
-      await api.put("/api/keys/userId", { value: id })
-      identify(id)
-      return true
-    }
-    return false
-  } catch (error) {
-    console.log(error)
-  }
-}
-
-function captureException(err) {
-  if (!analyticsEnabled) return
-  Sentry.captureException(err)
-  captureEvent("Error", { error: err.message ? err.message : err })
-}
-
-function captureEvent(eventName, props = {}) {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
-  props.sourceApp = "builder"
-  posthog.capture(eventName, props)
-}
-
-if (!localStorage.getItem(APP_FIRST_STARTED_KEY)) {
-  localStorage.setItem(APP_FIRST_STARTED_KEY, Date.now())
-}
-
-const isFeedbackTimeElapsed = sinceDateStr => {
-  const sinceDate = parseFloat(sinceDateStr)
-  const feedbackMilliseconds = feedbackHours * 60 * 60 * 1000
-  return Date.now() > sinceDate + feedbackMilliseconds
-}
-
-function submitFeedback(values) {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
-  localStorage.setItem(FEEDBACK_SUBMITTED_KEY, Date.now())
-  const prefixedValues = Object.entries(values).reduce((obj, [key, value]) => {
-    obj[`feedback_${key}`] = value
-    return obj
-  }, {})
-  posthog.capture("Feedback Submitted", prefixedValues)
-}
-
-function requestFeedbackOnDeploy() {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
-  const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
-  if (!lastSubmittedStr) return true
-  return isFeedbackTimeElapsed(lastSubmittedStr)
-}
-
-function highlightFeedbackIcon() {
-  if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
-  const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
-  if (lastSubmittedStr) return isFeedbackTimeElapsed(lastSubmittedStr)
-  const firstRunStr = localStorage.getItem(APP_FIRST_STARTED_KEY)
-  if (!firstRunStr) return false
-  return isFeedbackTimeElapsed(firstRunStr)
-}
-
-// Opt In/Out
-const ifAnalyticsEnabled = func => () => {
-  if (analyticsEnabled && process.env.POSTHOG_TOKEN) {
-    return func()
-  }
-}
-
-const disabled = () => posthog.has_opted_out_capturing()
-const optIn = () => posthog.opt_in_capturing()
-const optOut = () => posthog.opt_out_capturing()
-
-export default {
-  activate,
-  identify,
-  identifyByApiKey,
-  captureException,
-  captureEvent,
-  requestFeedbackOnDeploy,
-  submitFeedback,
-  highlightFeedbackIcon,
-  disabled: () => {
-    if (analyticsEnabled == null) {
-      return true
-    }
-    return ifAnalyticsEnabled(disabled)
-  },
-  optIn: ifAnalyticsEnabled(optIn),
-  optOut: ifAnalyticsEnabled(optOut),
-}

View File

@@ -0,0 +1,94 @@
+export default class IntercomClient {
+  constructor(token) {
+    this.token = token
+  }
+
+  /**
+   * Instantiate intercom using their provided script.
+   */
+  init() {
+    if (!this.token) return
+
+    const token = this.token
+
+    var w = window
+    var ic = w.Intercom
+    if (typeof ic === "function") {
+      ic("reattach_activator")
+      ic("update", w.intercomSettings)
+    } else {
+      var d = document
+      var i = function () {
+        i.c(arguments)
+      }
+      i.q = []
+      i.c = function (args) {
+        i.q.push(args)
+      }
+      w.Intercom = i
+      var l = function () {
+        var s = d.createElement("script")
+        s.type = "text/javascript"
+        s.async = true
+        s.src = "https://widget.intercom.io/widget/" + token
+        var x = d.getElementsByTagName("script")[0]
+        x.parentNode.insertBefore(s, x)
+      }
+      if (document.readyState === "complete") {
+        l()
+      } else if (w.attachEvent) {
+        w.attachEvent("onload", l)
+      } else {
+        w.addEventListener("load", l, false)
+      }
+
+      this.initialised = true
+    }
+  }
+
+  /**
+   * Show the intercom chat bubble.
+   * @param {Object} user - user to identify
+   * @returns Intercom global object
+   */
+  show(user = {}) {
+    if (!this.initialised) return
+
+    return window.Intercom("boot", {
+      app_id: this.token,
+      ...user,
+    })
+  }
+
+  /**
+   * Update intercom user details and messages.
+   * @returns Intercom global object
+   */
+  update() {
+    if (!this.initialised) return
+
+    return window.Intercom("update")
+  }
+
+  /**
+   * Capture analytics events and send them to intercom.
+   * @param {String} event - event identifier
+   * @param {Object} props - properties for the event
+   * @returns Intercom global object
+   */
+  captureEvent(event, props = {}) {
+    if (!this.initialised) return
+
+    return window.Intercom("trackEvent", event, props)
+  }
+
+  /**
+   * Disassociate the user from the current session.
+   * @returns Intercom global object
+   */
+  logout() {
+    if (!this.initialised) return
+
+    return window.Intercom("shutdown")
+  }
+}
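
For orientation, a plausible usage sketch of the client added above (the email value is invented); `init()` and the `this.initialised` guard make every later call a safe no-op when no token is configured:

```js
import IntercomClient from "./IntercomClient"

const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)

intercom.init() // injects the widget script only when a token exists
intercom.show({ email: "user@example.com" }) // boots the chat bubble
intercom.captureEvent("Builder Started", { sourceApp: "builder" })
intercom.logout() // shuts the session down
```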

View File

@@ -0,0 +1,80 @@
+import posthog from "posthog-js"
+import { Events } from "./constants"
+
+export default class PosthogClient {
+  constructor(token, url) {
+    this.token = token
+    this.url = url
+  }
+
+  init() {
+    if (!this.token || !this.url) return
+
+    posthog.init(this.token, {
+      autocapture: false,
+      capture_pageview: false,
+      api_host: this.url,
+    })
+    posthog.set_config({ persistence: "cookie" })
+
+    this.initialised = true
+  }
+
+  /**
+   * Set the posthog context to the current user
+   * @param {String} id - unique user id
+   */
+  identify(id) {
+    if (!this.initialised) return
+
+    posthog.identify(id)
+  }
+
+  /**
+   * Update user metadata associated with current user in posthog
+   * @param {Object} meta - user fields
+   */
+  updateUser(meta) {
+    if (!this.initialised) return
+
+    posthog.people.set(meta)
+  }
+
+  /**
+   * Capture analytics events and send them to posthog.
+   * @param {String} event - event identifier
+   * @param {Object} props - properties for the event
+   */
+  captureEvent(eventName, props) {
+    if (!this.initialised) return
+
+    props.sourceApp = "builder"
+    posthog.capture(eventName, props)
+  }
+
+  /**
+   * Submit NPS feedback to posthog.
+   * @param {Object} values - NPS Values
+   */
+  npsFeedback(values) {
+    if (!this.initialised) return
+
+    localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
+
+    const prefixedFeedback = {}
+    for (let key in values) {
+      prefixedFeedback[`feedback_${key}`] = values[key]
+    }
+
+    posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
+  }
+
+  /**
+   * Reset posthog user back to initial state on logout.
+   */
+  logout() {
+    if (!this.initialised) return
+
+    posthog.reset()
+  }
+}

View File

@@ -0,0 +1,37 @@
+import * as Sentry from "@sentry/browser"
+
+export default class SentryClient {
+  constructor(dsn) {
+    this.dsn = dsn
+  }
+
+  init() {
+    if (this.dsn) {
+      Sentry.init({ dsn: this.dsn })
+
+      this.initalised = true
+    }
+  }
+
+  /**
+   * Capture an exception and send it to sentry.
+   * @param {Error} err - JS error object
+   */
+  captureException(err) {
+    if (!this.initalised) return
+
+    Sentry.captureException(err)
+  }
+
+  /**
+   * Identify user in sentry.
+   * @param {String} id - Unique user id
+   */
+  identify(id) {
+    if (!this.initalised) return
+
+    Sentry.configureScope(scope => {
+      scope.setUser({ id })
+    })
+  }
+}

View File

@@ -0,0 +1,49 @@
+export const Events = {
+  BUILDER: {
+    STARTED: "Builder Started",
+  },
+  COMPONENT: {
+    CREATED: "Added Component",
+  },
+  DATASOURCE: {
+    CREATED: "Datasource Created",
+    UPDATED: "Datasource Updated",
+  },
+  TABLE: {
+    CREATED: "Table Created",
+  },
+  VIEW: {
+    CREATED: "View Created",
+    ADDED_FILTER: "Added View Filter",
+    ADDED_CALCULATE: "Added View Calculate",
+  },
+  SCREEN: {
+    CREATED: "Screen Created",
+  },
+  AUTOMATION: {
+    CREATED: "Automation Created",
+    SAVED: "Automation Saved",
+    BLOCK_ADDED: "Added Automation Block",
+  },
+  NPS: {
+    SUBMITTED: "budibase:feedback_submitted",
+  },
+  APP: {
+    CREATED: "budibase:app_created",
+    PUBLISHED: "budibase:app_published",
+    UNPUBLISHED: "budibase:app_unpublished",
+  },
+  ANALYTICS: {
+    OPT_IN: "budibase:analytics_opt_in",
+    OPT_OUT: "budibase:analytics_opt_out",
+  },
+  USER: {
+    INVITE: "budibase:portal_user_invite",
+  },
+  SMTP: {
+    SAVED: "budibase:smtp_saved",
+  },
+  SSO: {
+    SAVED: "budibase:sso_saved",
+  },
+}

View File

@@ -0,0 +1,79 @@
+import api from "builderStore/api"
+import PosthogClient from "./PosthogClient"
+import IntercomClient from "./IntercomClient"
+import SentryClient from "./SentryClient"
+import { Events } from "./constants"
+import { auth } from "stores/portal"
+import { get } from "svelte/store"
+
+const posthog = new PosthogClient(
+  process.env.POSTHOG_TOKEN,
+  process.env.POSTHOG_URL
+)
+const sentry = new SentryClient(process.env.SENTRY_DSN)
+const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
+
+class AnalyticsHub {
+  constructor() {
+    this.clients = [posthog, sentry, intercom]
+  }
+
+  async activate() {
+    // Setting the analytics env var off in the backend overrides org/tenant settings
+    const analyticsStatus = await api.get("/api/analytics")
+    const json = await analyticsStatus.json()
+
+    // Multitenancy disabled on the backend
+    if (!json.enabled) return
+
+    const tenantId = get(auth).tenantId
+
+    if (tenantId) {
+      const res = await api.get(
+        `/api/global/configs/public?tenantId=${tenantId}`
+      )
+      const orgJson = await res.json()
+
+      // analytics opted out for the tenant
+      if (orgJson.config?.analytics === false) return
+    }
+
+    this.clients.forEach(client => client.init())
+    this.enabled = true
+  }
+
+  identify(id, metadata) {
+    posthog.identify(id)
+    if (metadata) {
+      posthog.updateUser(metadata)
+    }
+    sentry.identify(id)
+  }
+
+  captureException(err) {
+    sentry.captureException(err)
+  }
+
+  captureEvent(eventName, props = {}) {
+    posthog.captureEvent(eventName, props)
+    intercom.captureEvent(eventName, props)
+  }
+
+  showChat(user) {
+    intercom.show(user)
+  }
+
+  submitFeedback(values) {
+    posthog.npsFeedback(values)
+  }
+
+  async logout() {
+    posthog.logout()
+    intercom.logout()
+  }
+}
+
+const analytics = new AnalyticsHub()
+
+export { Events }
+export default analytics
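
A usage sketch of the hub, mirroring how the builder code elsewhere in this commit calls it (the table name is invented):

```js
import analytics, { Events } from "analytics"

// activate() checks the backend flag and tenant config before
// initialising any client, so calling it unconditionally is safe.
await analytics.activate()

// Events fan out to PostHog and Intercom; exceptions go to Sentry.
analytics.captureEvent(Events.TABLE.CREATED, { name: "clients" })
analytics.captureException(new Error("example"))
```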

View File

@@ -443,7 +443,10 @@ function bindingReplacement(bindableProperties, textWithBindings, convertTo) {
     for (let from of convertFromProps) {
       if (shouldReplaceBinding(newBoundValue, from, convertTo)) {
         const binding = bindableProperties.find(el => el[convertFrom] === from)
-        newBoundValue = newBoundValue.replace(from, binding[convertTo])
+        newBoundValue = newBoundValue.replace(
+          new RegExp(from, "gi"),
+          binding[convertTo]
+        )
       }
     }
     result = result.replace(boundValue, newBoundValue)
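
The switch to a RegExp matters because `String.prototype.replace` with a string pattern replaces only the first, exact-case occurrence; the `g` and `i` flags make the swap global and case-insensitive. A small illustration with invented values:

```js
const text = "{{ Name }} is {{ name }}"

// String pattern: first, case-sensitive match only.
text.replace("{{ name }}", "[binding]")
// -> "{{ Name }} is [binding]"

// RegExp with "gi": every occurrence, regardless of case.
text.replace(new RegExp("{{ name }}", "gi"), "[binding]")
// -> "[binding] is [binding]"
```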

View File

@@ -3,7 +3,6 @@ import { getAutomationStore } from "./store/automation"
 import { getHostingStore } from "./store/hosting"
 import { getThemeStore } from "./store/theme"
 import { derived, writable } from "svelte/store"
-import analytics from "analytics"
 import { FrontendTypes, LAYOUT_NAMES } from "../constants"
 import { findComponent } from "./storeUtils"
@@ -55,13 +54,4 @@ export const mainLayout = derived(store, $store => {
 export const selectedAccessRole = writable("BASIC")

-export const initialise = async () => {
-  try {
-    await analytics.activate()
-    analytics.captureEvent("Builder Started")
-  } catch (err) {
-    console.log(err)
-  }
-}
-
 export const screenSearchString = writable(null)

View File

@@ -2,7 +2,7 @@ import { writable } from "svelte/store"
 import api from "../../api"
 import Automation from "./Automation"
 import { cloneDeep } from "lodash/fp"
-import analytics from "analytics"
+import analytics, { Events } from "analytics"

 const automationActions = store => ({
   fetch: async () => {
@@ -110,7 +110,7 @@ const automationActions = store => ({
       state.selectedBlock = newBlock
       return state
     })
-    analytics.captureEvent("Added Automation Block", {
+    analytics.captureEvent(Events.AUTOMATION.BLOCK_ADDED, {
       name: block.name,
     })
   },

View File

@@ -19,7 +19,7 @@ import {
 import { fetchComponentLibDefinitions } from "../loadComponentLibraries"
 import api from "../api"
 import { FrontendTypes } from "constants"
-import analytics from "analytics"
+import analytics, { Events } from "analytics"
 import {
   findComponentType,
   findComponentParent,
@@ -443,7 +443,7 @@ export const getFrontendStore = () => {
     })

     // Log event
-    analytics.captureEvent("Added Component", {
+    analytics.captureEvent(Events.COMPONENT.CREATED, {
       name: componentInstance._component,
     })

View File

@@ -123,7 +123,7 @@
     padding: var(--spectrum-alias-item-padding-s);
     background: var(--spectrum-alias-background-color-secondary);
     transition: 0.3s all;
-    border: solid #3b3d3c;
+    border: solid var(--spectrum-alias-border-color);
     border-radius: 5px;
     box-sizing: border-box;
     border-width: 2px;

View File

@@ -1,9 +1,8 @@
 <script>
   import { automationStore } from "builderStore"
+  import ConfirmDialog from "components/common/ConfirmDialog.svelte"
   import FlowItem from "./FlowItem.svelte"
   import TestDataModal from "./TestDataModal.svelte"
   import { flip } from "svelte/animate"
   import { fade, fly } from "svelte/transition"
   import {
@@ -13,13 +12,12 @@
     notifications,
     Modal,
   } from "@budibase/bbui"
-  import { database } from "stores/backend"

   export let automation
   export let onSelect
   let testDataModal
   let blocks
-  $: instanceId = $database._id
+  let confirmDeleteDialog

   $: {
     blocks = []
@@ -35,6 +33,7 @@
     await automationStore.actions.delete(
       $automationStore.selectedAutomation?.automation
     )
+    notifications.success("Automation deleted.")
   }

   async function testAutomation() {
@@ -63,8 +62,14 @@
     style="display:flex;
     color: var(--spectrum-global-color-gray-400);"
   >
-    <span on:click={() => deleteAutomation()} class="iconPadding">
-      <Icon name="DeleteOutline" />
+    <span class="iconPadding">
+      <div class="icon">
+        <Icon
+          on:click={confirmDeleteDialog.show}
+          hoverable
+          name="DeleteOutline"
+        />
+      </div>
     </span>
     <ActionButton
       on:click={() => {
@@ -92,6 +97,17 @@
     </div>
   {/each}
 </div>

+<ConfirmDialog
+  bind:this={confirmDeleteDialog}
+  okText="Delete Automation"
+  onOk={deleteAutomation}
+  title="Confirm Deletion"
+>
+  Are you sure you wish to delete the automation
+  <i>{automation.name}?</i>
+  This action cannot be undone.
+</ConfirmDialog>
+
 <Modal bind:this={testDataModal} width="30%">
   <TestDataModal {testAutomation} />
 </Modal>
@@ -139,7 +155,7 @@
     justify-content: space-between;
   }

-  .iconPadding {
+  .icon {
     cursor: pointer;
     display: flex;
     padding-right: var(--spacing-m);

View File

@@ -4,7 +4,7 @@
   import { automationStore } from "builderStore"
   import { notifications } from "@budibase/bbui"
   import { Input, ModalContent, Layout, Body, Icon } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   let name
   let selectedTrigger
@@ -36,7 +36,7 @@
     notifications.success(`Automation ${name} created.`)
     $goto(`./${$automationStore.selectedAutomation.automation._id}`)
-    analytics.captureEvent("Automation Created", { name })
+    analytics.captureEvent(Events.AUTOMATION.CREATED, { name })
   }

   $: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
@@ -102,7 +102,7 @@
     padding: var(--spectrum-alias-item-padding-s);
     background: var(--spectrum-alias-background-color-secondary);
     transition: 0.3s all;
-    border: solid #3b3d3c;
+    border: solid var(--spectrum-alias-border-color);
     border-radius: 5px;
     box-sizing: border-box;
     border-width: 2px;

View File

@@ -2,7 +2,7 @@
   import { automationStore } from "builderStore"
   import { notifications } from "@budibase/bbui"
   import { Icon, Input, ModalContent, Modal } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   let name
   let error = ""
@@ -26,7 +26,7 @@
     }
     await automationStore.actions.save(updatedAutomation)
     notifications.success(`Automation ${name} updated successfully.`)
-    analytics.captureEvent("Automation Saved", { name })
+    analytics.captureEvent(Events.AUTOMATION.SAVED, { name })
     hide()
   }

View File

@@ -20,7 +20,6 @@
   import QueryParamSelector from "./QueryParamSelector.svelte"
   import CronBuilder from "./CronBuilder.svelte"
   import Editor from "components/integration/QueryEditor.svelte"
-  import { database } from "stores/backend"
   import { debounce } from "lodash"
   import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
   import FilterDrawer from "components/design/PropertiesPanel/PropertyControls/FilterEditor/FilterDrawer.svelte"
@@ -35,13 +34,11 @@
   let drawer
   let tempFilters = lookForFilters(schemaProperties) || []
   let fillWidth = true

   $: stepId = block.stepId
   $: bindings = getAvailableBindings(
     block || $automationStore.selectedBlock,
     $automationStore.selectedAutomation?.automation?.definition
   )
-  $: instanceId = $database._id
   $: inputData = testData ? testData : block.inputs
   $: tableId = inputData ? inputData.tableId : null
@@ -210,7 +207,7 @@
   {:else if value.customType === "webhookUrl"}
     <WebhookDisplay value={inputData[key]} />
   {:else if value.customType === "triggerSchema"}
-    <SchemaSetup on:change={e => onChange(e, key)} value={value[key]} />
+    <SchemaSetup on:change={e => onChange(e, key)} value={inputData[key]} />
   {:else if value.customType === "code"}
     <CodeEditorModal>
       <pre>{JSON.stringify(bindings, null, 2)}</pre>

View File

@@ -1,6 +1,6 @@
 <script>
   import { tables } from "stores/backend"
-  import { Select } from "@budibase/bbui"
+  import { Select, Toggle, DatePicker, Multiselect } from "@budibase/bbui"
   import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
   import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
   import { createEventDispatcher } from "svelte"
@@ -44,13 +44,31 @@
 <div class="schema-fields">
   {#each schemaFields as [field, schema]}
     {#if !schema.autocolumn}
-      {#if schemaHasOptions(schema)}
+      {#if schemaHasOptions(schema) && schema.type !== "array"}
         <Select
           on:change={e => onChange(e, field)}
           label={field}
           value={value[field]}
           options={schema.constraints.inclusion}
         />
+      {:else if schema.type === "datetime"}
+        <DatePicker
+          label={field}
+          value={value[field]}
+          on:change={e => onChange(e, field)}
+        />
+      {:else if schema.type === "boolean"}
+        <Toggle
+          text={field}
+          value={value[field]}
+          on:change={e => onChange(e, field)}
+        />
+      {:else if schema.type === "array"}
+        <Multiselect
+          bind:value={value[field]}
+          label={field}
+          options={schema.constraints.inclusion}
+        />
       {:else if schema.type === "string" || schema.type === "number"}
         {#if $automationStore.selectedAutomation.automation.testData}
           <ModalBindableInput

View File

@@ -5,10 +5,14 @@
   const dispatch = createEventDispatcher()

   export let value = {}

-  $: fieldsArray = Object.entries(value).map(([name, type]) => ({
-    name,
-    type,
-  }))
+  $: fieldsArray = value
+    ? Object.entries(value).map(([name, type]) => ({
+        name,
+        type,
+      }))
+    : []

   const typeOptions = [
     {
       label: "Text",
@@ -73,7 +77,7 @@
         <Select
           value={field.type}
           on:change={e => {
-            value[field.name] = e.target.value
+            value[field.name] = e.detail
             dispatch("change", value)
           }}
           options={typeOptions}
@@ -88,9 +92,7 @@
 <style>
   .root {
-    position: relative;
     max-width: 100%;
-    overflow-x: auto;
     /* so we can show the "+" button beside the "fields" label*/
     top: -26px;
   }
@@ -110,7 +112,6 @@
     /*grid-template-rows: auto auto;
     grid-template-columns: auto;*/
     position: relative;
-    overflow: hidden;
   }

   .field :global(select) {

View File

@@ -1,7 +1,7 @@
 <script>
   import { Select, Label, notifications, ModalContent } from "@budibase/bbui"
   import { tables, views } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { FIELDS } from "constants/backend"

   const CALCULATIONS = [
@@ -40,7 +40,7 @@
   function saveView() {
     views.save(view)
     notifications.success(`View ${view.name} saved.`)
-    analytics.captureEvent("Added View Calculate", { field: view.field })
+    analytics.captureEvent(Events.VIEW.ADDED_CALCULATE, { field: view.field })
   }
 </script>

View File

@@ -3,7 +3,7 @@
   import { goto } from "@roxi/routify"
   import { views as viewsStore } from "stores/backend"
   import { tables } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   let name
   let field
@@ -21,7 +21,7 @@
       field,
     })
     notifications.success(`View ${name} created`)
-    analytics.captureEvent("View Created", { name })
+    analytics.captureEvent(Events.VIEW.CREATED, { name })
     $goto(`../../view/${name}`)
   }
 </script>

View File

@@ -11,7 +11,7 @@
     Icon,
   } from "@budibase/bbui"
   import { tables, views } from "stores/backend"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   const CONDITIONS = [
     {
@@ -65,7 +65,7 @@
   function saveView() {
     views.save(view)
     notifications.success(`View ${view.name} saved.`)
-    analytics.captureEvent("Added View Filter", {
+    analytics.captureEvent(Events.VIEW.ADDED_FILTER, {
       filters: JSON.stringify(view.filters),
     })
   }

View File

@@ -5,7 +5,7 @@
   import { Input, Label, ModalContent, Modal, Context } from "@budibase/bbui"
   import TableIntegrationMenu from "../TableIntegrationMenu/index.svelte"
   import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { getContext } from "svelte"

   const modalContext = getContext(Context.Modal)
@@ -45,7 +45,7 @@
       plus,
     })
     notifications.success(`Datasource ${name} created successfully.`)
-    analytics.captureEvent("Datasource Created", { name, type })
+    analytics.captureEvent(Events.DATASOURCE.CREATED, { name, type })

     // Navigate to new datasource
     $goto(`./datasource/${response._id}`)

View File

@@ -2,7 +2,7 @@
   import { datasources } from "stores/backend"
   import { notifications } from "@budibase/bbui"
   import { Input, ModalContent, Modal } from "@budibase/bbui"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   let error = ""
   let modal
@@ -35,7 +35,7 @@
     }
     await datasources.save(updatedDatasource)
     notifications.success(`Datasource ${name} updated successfully.`)
-    analytics.captureEvent("Datasource Updated", updatedDatasource)
+    analytics.captureEvent(Events.DATASOURCE.UPDATED, updatedDatasource)
     hide()
   }
 </script>

View File

@@ -12,7 +12,7 @@
     Layout,
   } from "@budibase/bbui"
   import TableDataImport from "../TableDataImport.svelte"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import screenTemplates from "builderStore/store/screenTemplates"
   import { buildAutoColumn, getAutoColumnInformation } from "builderStore/utils"
   import { NEW_ROW_TEMPLATE } from "builderStore/store/screenTemplates/newRowScreen"
@@ -67,7 +67,7 @@
     // Create table
     const table = await tables.save(newTable)
     notifications.success(`Table ${name} created successfully.`)
-    analytics.captureEvent("Table Created", { name })
+    analytics.captureEvent(Events.TABLE.CREATED, { name })

     // Create auto screens
     if (createAutoscreens) {
View File

@@ -2,7 +2,8 @@
   import { onMount, onDestroy } from "svelte"
   import { Button, Modal, notifications, ModalContent } from "@budibase/bbui"
   import api from "builderStore/api"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
+  import { store } from "builderStore"

   const DeploymentStatus = {
     SUCCESS: "SUCCESS",
@@ -23,6 +24,9 @@
       if (response.status !== 200) {
         throw new Error(`status ${response.status}`)
       } else {
+        analytics.captureEvent(Events.APP.PUBLISHED, {
+          appId: $store.appId,
+        })
         notifications.success(`Application published successfully`)
       }
     } catch (err) {

View File

@@ -4,7 +4,7 @@
   import { roles } from "stores/backend"
   import { Input, Select, ModalContent, Toggle } from "@budibase/bbui"
   import getTemplates from "builderStore/store/screenTemplates"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"

   const CONTAINER = "@budibase/standard-components/container"
@@ -66,7 +66,7 @@
     if (templateIndex !== undefined) {
       const template = templates[templateIndex]
-      analytics.captureEvent("Screen Created", {
+      analytics.captureEvent(Events.SCREEN.CREATED, {
         template: template.id || template.name,
       })
     }

View File

@@ -12,7 +12,7 @@
   import { admin } from "stores/portal"
   import { string, mixed, object } from "yup"
   import api, { get, post } from "builderStore/api"
-  import analytics from "analytics"
+  import analytics, { Events } from "analytics"
   import { onMount } from "svelte"
   import { capitalise } from "helpers"
   import { goto } from "@roxi/routify"
@@ -98,9 +98,9 @@
       throw new Error(appJson.message)
     }

-    analytics.captureEvent("App Created", {
+    analytics.captureEvent(Events.APP.CREATED, {
       name: $values.name,
-      appId: appJson._id,
+      appId: appJson.instance._id,
       template,
     })

View File

@@ -29,6 +29,7 @@
       username,
       password,
     })
+
     if ($auth?.user?.forceResetPassword) {
       $goto("./reset")
     } else {

View File

@@ -15,8 +15,7 @@
   } from "@budibase/bbui"
   import CreateAppModal from "components/start/CreateAppModal.svelte"
   import UpdateAppModal from "components/start/UpdateAppModal.svelte"
-  import api, { del } from "builderStore/api"
-  import analytics from "analytics"
+  import { del } from "builderStore/api"
   import { onMount } from "svelte"
   import { apps, auth, admin } from "stores/portal"
   import download from "downloadjs"
@@ -66,14 +65,6 @@
     }
   }

-  const checkKeys = async () => {
-    const response = await api.get(`/api/keys/`)
-    const keys = await response.json()
-    if (keys.userId) {
-      analytics.identify(keys.userId)
-    }
-  }
-
   const initiateAppCreation = () => {
     creationModal.show()
     creatingApp = true
@@ -188,7 +179,6 @@
   }

   onMount(async () => {
-    checkKeys()
     await apps.load()
     loaded = true
   })

View File

@@ -23,6 +23,7 @@
   import api from "builderStore/api"
   import { organisation, auth, admin } from "stores/portal"
   import { uuid } from "builderStore/uuid"
+  import analytics, { Events } from "analytics"

   $: tenantId = $auth.tenantId
   $: multiTenancyEnabled = $admin.multiTenancy
@@ -209,6 +210,7 @@
         providers[res.type]._id = res._id
       })
       notifications.success(`Settings saved.`)
+      analytics.captureEvent(Events.SSO.SAVED)
     })
     .catch(err => {
       notifications.error(`Failed to update auth settings. ${err}`)

View File

@@ -16,6 +16,7 @@
   import { email } from "stores/portal"
   import api from "builderStore/api"
   import { cloneDeep } from "lodash/fp"
+  import analytics, { Events } from "analytics"

   const ConfigTypes = {
     SMTP: "smtp",
@@ -69,6 +70,7 @@
       smtpConfig._rev = json._rev
       smtpConfig._id = json._id
       notifications.success(`Settings saved.`)
+      analytics.captureEvent(Events.SMTP.SAVED)
     }
   }

View File

@@ -10,6 +10,7 @@
   } from "@budibase/bbui"
   import { createValidationStore, emailValidator } from "helpers/validation"
   import { users } from "stores/portal"
+  import analytics, { Events } from "analytics"

   export let disabled
@@ -25,6 +26,7 @@
       notifications.error(res.message)
     } else {
       notifications.success(res.message)
+      analytics.captureEvent(Events.USER.INVITE, { type: selected })
     }
   }
 </script>

View File

@@ -25,7 +25,7 @@
   }

   const values = writable({
-    analytics: !analytics.disabled(),
+    analytics: analytics.enabled,
     company: $organisation.company,
     platformUrl: $organisation.platformUrl,
     logo: $organisation.logoUrl
@@ -48,13 +48,6 @@
   async function saveConfig() {
     loading = true

-    // Set analytics preference
-    if ($values.analytics) {
-      analytics.optIn()
-    } else {
-      analytics.optOut()
-    }
-
     // Upload logo if required
     if ($values.logo && !$values.logo.url) {
       await uploadLogo($values.logo)
@@ -64,6 +57,7 @@
     const config = {
       company: $values.company ?? "",
       platformUrl: $values.platformUrl ?? "",
+      analytics: $values.analytics,
     }

     // remove logo if required
     if (!$values.logo) {

View File

@@ -1,6 +1,7 @@
 import { derived, writable, get } from "svelte/store"
 import api from "../../builderStore/api"
 import { admin } from "stores/portal"
+import analytics from "analytics"

 export function createAuthStore() {
   const auth = writable({
@@ -49,6 +50,21 @@ export function createAuthStore() {
       }
       return store
     })
+
+    if (user) {
+      analytics.activate().then(() => {
+        analytics.identify(user._id, user)
+
+        if (user.size === "100+" || user.size === "10000+") {
+          analytics.showChat({
+            email: user.email,
+            created_at: user.createdAt || Date.now(),
+            name: user.name,
+            user_id: user._id,
+            tenant: user.tenantId,
+          })
+        }
+      })
+    }
   }

   async function setOrganisation(tenantId) {

View File

@@ -22,6 +22,9 @@ export default ({ mode }) => {
         isProduction ? "production" : "development"
       ),
       "process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN),
+      "process.env.INTERCOM_TOKEN": JSON.stringify(
+        process.env.INTERCOM_TOKEN
+      ),
       "process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL),
       "process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
     }),

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

View File

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,8 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.125-alpha.19",
-    "@budibase/string-templates": "^0.9.125-alpha.19",
+    "@budibase/bbui": "^0.9.140-alpha.5",
+    "@budibase/standard-components": "^0.9.139",
+    "@budibase/string-templates": "^0.9.140-alpha.5",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"

View File

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "0.9.125-alpha.19",
+  "version": "0.9.140-alpha.5",
   "description": "Budibase Web Server",
   "main": "src/index.js",
   "repository": {
@@ -13,7 +13,7 @@
     "postbuild": "copyfiles -u 1 src/**/*.svelte dist/ && copyfiles -u 1 src/**/*.hbs dist/ && copyfiles -u 1 src/**/*.json dist/",
     "test": "jest --coverage --maxWorkers=2",
     "test:watch": "jest --watch",
-    "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
+    "predocker": "copyfiles -f ../client/dist/budibase-client.js ../standard-components/manifest.json client",
     "build:docker": "yarn run predocker && docker build . -t app-service",
     "run:docker": "node dist/index.js",
     "dev:stack:up": "node scripts/dev/manage.js up",
@@ -23,10 +23,9 @@
     "format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
     "lint": "eslint --fix src/",
     "lint:fix": "yarn run format && yarn run lint",
-    "initialise": "node scripts/initialise.js",
     "multi:enable": "node scripts/multiTenancy.js enable",
-    "multi:disable": "node scripts/multiTenancy.js disable",
-    "selfhost:enable": "node scripts/selfhost.js enable",
-    "selfhost:disable": "node scripts/selfhost.js disable"
+    "multi:disable": "node scripts/multiTenancy.js disable"
   },
   "jest": {
     "preset": "ts-jest",
@@ -49,8 +48,7 @@
     "!src/automations/tests/**/*",
     "!src/utilities/fileProcessor.js",
     "!src/utilities/fileSystem/**/*",
-    "!src/utilities/redis.js",
-    "!src/api/controllers/row/internalSearch.js"
+    "!src/utilities/redis.js"
   ],
   "coverageReporters": [
     "lcov",
@@ -64,9 +62,9 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.125-alpha.19",
-    "@budibase/client": "^0.9.125-alpha.19",
-    "@budibase/string-templates": "^0.9.125-alpha.19",
+    "@budibase/auth": "^0.9.140-alpha.5",
+    "@budibase/client": "^0.9.140-alpha.5",
+    "@budibase/string-templates": "^0.9.140-alpha.5",
     "@elastic/elasticsearch": "7.10.0",
     "@koa/router": "8.0.0",
     "@sendgrid/mail": "7.1.1",
@@ -98,12 +96,13 @@
     "lodash": "4.17.21",
     "mongodb": "3.6.3",
     "mssql": "6.2.3",
-    "mysql": "^2.18.1",
+    "mysql": "2.18.1",
     "node-fetch": "2.6.0",
     "open": "7.3.0",
     "pg": "8.5.1",
     "pino-pretty": "4.0.0",
     "pouchdb": "7.2.1",
+    "pouchdb-adapter-memory": "^7.2.1",
     "pouchdb-all-dbs": "1.0.2",
     "pouchdb-find": "^7.2.2",
     "pouchdb-replication-stream": "1.2.9",
@@ -118,6 +117,7 @@
   "devDependencies": {
     "@babel/core": "^7.14.3",
     "@babel/preset-env": "^7.14.4",
+    "@budibase/standard-components": "^0.9.139",
     "@jest/test-sequencer": "^24.8.0",
     "@types/bull": "^3.15.1",
     "@types/jest": "^26.0.23",
@@ -132,7 +132,6 @@
     "express": "^4.17.1",
     "jest": "^27.0.5",
     "nodemon": "^2.0.4",
-    "pouchdb-adapter-memory": "^7.2.1",
     "prettier": "^2.3.1",
     "rimraf": "^3.0.2",
     "supertest": "^4.0.2",

View File

@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+  db:
+    container_name: postgres-vehicle
+    image: postgres
+    restart: always
+    environment:
+      POSTGRES_USER: root
+      POSTGRES_PASSWORD: root
+      POSTGRES_DB: main
+    ports:
+      - "5432:5432"
+    volumes:
+      #- pg_data:/var/lib/postgresql/data/
+      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+  pgadmin:
+    container_name: pgadmin
+    image: dpage/pgadmin4
+    restart: always
+    environment:
+      PGADMIN_DEFAULT_EMAIL: root@root.com
+      PGADMIN_DEFAULT_PASSWORD: root
+    ports:
+      - "5050:80"
+
+#volumes:
+#  pg_data:

View File

@ -0,0 +1,52 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
CREATE TABLE Vehicles (
id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
Registration text COLLATE pg_catalog."default",
Make text COLLATE pg_catalog."default",
Model text COLLATE pg_catalog."default",
Colour text COLLATE pg_catalog."default",
Year smallint,
CONSTRAINT Vehicles_pkey PRIMARY KEY (id)
);
CREATE TABLE ServiceLog (
id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
Description text COLLATE pg_catalog."default",
VehicleId bigint,
ServiceDate timestamp without time zone,
Category text COLLATE pg_catalog."default",
Mileage bigint,
CONSTRAINT ServiceLog_pkey PRIMARY KEY (id),
CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId)
REFERENCES Vehicles (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('FAZ 9837','Volkswagen','Polo','White',2002);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('JHI 8827','BMW','M3','Black',2013);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('D903PI','Volvo','XC40','Grey',2014);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('HGT5677','Skoda','Octavia','Graphite',2009);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('PPF9276','Skoda','Octavia','Graphite',2021);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('J893FT','Toyota','Corolla','Red',2015);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('MJK776','Honda','HR-V','Silver',2015);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Change front brakes', 1, '2021-05-04', 'Brakes', 20667);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Tyres - full set', 1, '2021-05-04', 'Tyres', 20667);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Engine tune up', 2, '2021-07-14', 'Engine', 50889);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002);
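
Together with the compose file above (brought up with docker-compose up -d), this seed script provides a disposable Postgres datasource to point Budibase at. As a quick sanity check, here is a minimal sketch using the same pg driver the server depends on; host, port and credentials are the ones defined in the compose file:

const { Pool } = require("pg")

// connection details match the docker-compose service above
const pool = new Pool({
  host: "localhost",
  port: 5432,
  user: "root",
  password: "root",
  database: "main",
})

async function listServiceHistory() {
  // join the seeded tables across the vehicle foreign key
  const { rows } = await pool.query(
    `SELECT v.Registration, s.Description, s.ServiceDate
       FROM ServiceLog s JOIN Vehicles v ON v.id = s.VehicleId`
  )
  console.log(rows)
  await pool.end()
}

listServiceHistory()
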

View File

@ -0,0 +1,3 @@
#!/bin/bash
docker-compose down
docker volume prune -f

View File

@ -2,6 +2,6 @@ const env = require("../../environment")
exports.isEnabled = async function (ctx) { exports.isEnabled = async function (ctx) {
ctx.body = { ctx.body = {
enabled: env.ENABLE_ANALYTICS === "true", enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true",
} }
} }

View File

@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
await connector.buildSchema(datasource._id, datasource.entities) await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables datasource.entities = connector.tables
const response = await db.post(datasource) const response = await db.put(datasource)
datasource._rev = response.rev datasource._rev = response.rev
ctx.body = datasource ctx.body = datasource
@ -89,7 +89,7 @@ exports.save = async function (ctx) {
...ctx.request.body, ...ctx.request.body,
} }
const response = await db.post(datasource) const response = await db.put(datasource)
datasource._rev = response.rev datasource._rev = response.rev
// Drain connection pools when configuration is changed // Drain connection pools when configuration is changed
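
The post-to-put swaps here (and in the table and workspace controllers later in this diff) matter because these documents already carry a deterministic _id: CouchDB's post() mints a fresh ID on every call, so re-saving an existing datasource could fork it into a duplicate document, whereas put() writes to the supplied _id and simply bumps the revision. A minimal PouchDB-style sketch of the difference (the IDs are illustrative):

const PouchDB = require("pouchdb")
const db = new PouchDB("example")

async function demo() {
  const datasource = { _id: "datasource_abc123", source: "POSTGRES" }
  // put() honours the supplied _id, creating or updating that exact doc
  const first = await db.put(datasource)
  // a second put() with the returned _rev updates the doc in place...
  await db.put({ ...datasource, _rev: first.rev, name: "renamed" })
  // ...while post() mints a brand new _id, leaving the original behind
  const forked = await db.post({ source: "POSTGRES" })
  console.log(first.id, forked.id) // same doc vs freshly generated ID
}

demo()
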

View File

@ -437,7 +437,11 @@ module External {
for (let [colName, { isMany, rows, tableId }] of Object.entries( for (let [colName, { isMany, rows, tableId }] of Object.entries(
related related
)) { )) {
const table = this.getTable(tableId) const table: Table = this.getTable(tableId)
// if it's the primary key rather than the foreign key, skip it, nothing to do
if (table.primary && table.primary.indexOf(colName) !== -1) {
continue
}
for (let row of rows) { for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table) const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen // safety check, if there are no filters on deletion bad things happen

View File

@ -5,17 +5,22 @@ const {
generateRowID, generateRowID,
DocumentTypes, DocumentTypes,
InternalTables, InternalTables,
generateMemoryViewID,
} = require("../../../db/utils") } = require("../../../db/utils")
const userController = require("../user") const userController = require("../user")
const { const {
inputProcessing, inputProcessing,
outputProcessing, outputProcessing,
processAutoColumn,
} = require("../../../utilities/rowProcessor") } = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants") const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash") const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils") const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch") const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global") const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
const inMemoryViews = require("../../../db/inMemoryView")
const env = require("../../../environment")
const { migrateToInMemoryView } = require("../view/utils")
const CALCULATION_TYPES = { const CALCULATION_TYPES = {
SUM: "sum", SUM: "sum",
@ -25,17 +30,84 @@ const CALCULATION_TYPES = {
async function storeResponse(ctx, db, row, oldTable, table) { async function storeResponse(ctx, db, row, oldTable, table) {
row.type = "row" row.type = "row"
const response = await db.put(row)
// don't worry about rev, tables handle rev/lastID updates // don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (!isEqual(oldTable, table)) { if (!isEqual(oldTable, table)) {
await db.put(table) try {
await db.put(table)
} catch (err) {
if (err.status === 409) {
const updatedTable = await db.get(table._id)
let response = processAutoColumn(null, updatedTable, row, {
reprocessing: true,
})
await db.put(response.table)
row = response.row
} else {
throw err
}
}
} }
const response = await db.put(row)
row._rev = response.rev row._rev = response.rev
// process the row before return, to include relationships // process the row before return, to include relationships
row = await outputProcessing(ctx, table, row, { squash: false }) row = await outputProcessing(ctx, table, row, { squash: false })
return { row, table } return { row, table }
} }
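
The try/catch above exists because the auto-column bookkeeping (for example the next auto ID) lives on the table document itself, so two rows saved in quick succession can both try to bump it, and the later table put will fail with a 409. On conflict the latest table revision is re-read and processAutoColumn is re-run with reprocessing: true, so user-link auto columns are not stamped a second time. The same fetch-and-retry shape in isolation, as a sketch:

// generic CouchDB conflict retry: on 409, reload the latest revision,
// reapply the change and write again
async function putWithConflictRetry(db, doc, reapply) {
  try {
    return await db.put(doc)
  } catch (err) {
    if (err.status !== 409) {
      throw err
    }
    const latest = await db.get(doc._id)
    return db.put(reapply(latest))
  }
}
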
// fetches the raw rows without running outputProcessing
async function getRawTableData(ctx, db, tableId) {
let rows
if (tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
return rows
}
async function getView(db, viewName) {
let viewInfo
async function getFromDesignDoc() {
const designDoc = await db.get("_design/database")
viewInfo = designDoc.views[viewName]
return viewInfo
}
let migrate = false
if (env.SELF_HOSTED) {
viewInfo = await getFromDesignDoc()
} else {
try {
viewInfo = await db.get(generateMemoryViewID(viewName))
if (viewInfo) {
viewInfo = viewInfo.view
}
} catch (err) {
// check if it can be retrieved from the design doc (needs migrating)
if (err.status !== 404) {
viewInfo = null
} else {
viewInfo = await getFromDesignDoc()
migrate = !!viewInfo
}
}
}
if (migrate) {
await migrateToInMemoryView(db, viewName)
}
if (!viewInfo) {
throw "View does not exist."
}
return viewInfo
}
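
Self-hosted installs keep views where they have always lived, on the _design/database document, so CouchDB can run them server-side. The hosted platform instead stores each view as an ordinary document and executes it through the in-memory module introduced later in this diff. The 404 branch doubles as a lazy migration: a view found only on the design doc is copied into a memory-view document on first read. Per the saveView utility further down, a migrated document looks roughly like this (the names are illustrative):

const { generateMemoryViewID } = require("../../../db/utils")

const viewDoc = {
  // "view_active-vehicles", assuming an underscore separator
  _id: generateMemoryViewID("active-vehicles"),
  name: "active-vehicles",
  tableId: "ta_abc123",
  view: {
    map: "function (doc) { emit(doc._id) }",
    meta: { tableId: "ta_abc123" },
  },
}
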
exports.patch = async ctx => { exports.patch = async ctx => {
const appId = ctx.appId const appId = ctx.appId
const db = new CouchDB(appId) const db = new CouchDB(appId)
@ -139,15 +211,18 @@ exports.fetchView = async ctx => {
const db = new CouchDB(appId) const db = new CouchDB(appId)
const { calculation, group, field } = ctx.query const { calculation, group, field } = ctx.query
const designDoc = await db.get("_design/database") const viewInfo = await getView(db, viewName)
const viewInfo = designDoc.views[viewName] let response
if (!viewInfo) { if (env.SELF_HOSTED) {
throw "View does not exist." response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
} else {
const tableId = viewInfo.meta.tableId
const data = await getRawTableData(ctx, db, tableId)
response = await inMemoryViews.runView(viewInfo, calculation, group, data)
} }
const response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
let rows let rows
if (!calculation) { if (!calculation) {
@ -191,19 +266,9 @@ exports.fetch = async ctx => {
const appId = ctx.appId const appId = ctx.appId
const db = new CouchDB(appId) const db = new CouchDB(appId)
let rows, const tableId = ctx.params.tableId
table = await db.get(ctx.params.tableId) let table = await db.get(tableId)
if (ctx.params.tableId === InternalTables.USER_METADATA) { let rows = await getRawTableData(ctx, db, tableId)
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
return outputProcessing(ctx, table, rows) return outputProcessing(ctx, table, rows)
} }

View File

@ -145,7 +145,7 @@ exports.save = async function (ctx) {
if (updatedRows && updatedRows.length !== 0) { if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows) await db.bulkDocs(updatedRows)
} }
const result = await db.post(tableToSave) const result = await db.put(tableToSave)
tableToSave._rev = result.rev tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave) tableToSave = await tableSaveFunctions.after(tableToSave)

View File

@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
// Populate the table with rows imported from CSV in a bulk update // Populate the table with rows imported from CSV in a bulk update
const data = await csvParser.transform(dataImport) const data = await csvParser.transform(dataImport)
let finalData = []
for (let i = 0; i < data.length; i++) { for (let i = 0; i < data.length; i++) {
let row = data[i] let row = data[i]
row._id = generateRowID(table._id) row._id = generateRowID(table._id)
row.tableId = table._id row.tableId = table._id
const processed = inputProcessing(user, table, row) const processed = inputProcessing(user, table, row, {
noAutoRelationships: true,
})
table = processed.table table = processed.table
row = processed.row row = processed.row
// make sure link rows are up to date
row = await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_SAVE,
row,
tableId: row.tableId,
table,
})
for (let [fieldName, schema] of Object.entries(table.schema)) { for (let [fieldName, schema] of Object.entries(table.schema)) {
// check whether the options need to be updated for inclusion as part of the data import // check whether the options need to be updated for inclusion as part of the data import
if ( if (
@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
] ]
} }
} }
data[i] = row
// make sure link rows are up to date
finalData.push(
linkRows.updateLinks({
appId,
eventType: linkRows.EventType.ROW_SAVE,
row,
tableId: row.tableId,
table,
})
)
} }
await db.bulkDocs(data) await db.bulkDocs(await Promise.all(finalData))
let response = await db.put(table) let response = await db.put(table)
table._rev = response._rev table._rev = response._rev
} }
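
Two import-path changes land here: inputProcessing is told not to stamp user-relationship auto columns on imported rows (noAutoRelationships), and the per-row updateLinks calls are no longer awaited one at a time; their promises are collected in finalData and resolved with a single Promise.all before the bulk write. The batching pattern on its own, sketched with a stand-in async task:

// start every async task first, then await them together rather than
// serially, as the import loop above now does with updateLinks
async function processAll(items, task) {
  const pending = items.map(item => task(item))
  return Promise.all(pending)
}

// usage sketch with a stand-in task
processAll([1, 2, 3], async n => n * 2).then(console.log) // [ 2, 4, 6 ]
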

View File

@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder") const viewTemplate = require("./viewBuilder")
const { apiFileReturn } = require("../../../utilities/fileSystem") const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters") const exporters = require("./exporters")
const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row") const { fetchView } = require("../row")
const { ViewNames } = require("../../../db/utils")
const controller = { exports.fetch = async ctx => {
fetch: async ctx => { const db = new CouchDB(ctx.appId)
const db = new CouchDB(ctx.appId) ctx.body = await getViews(db)
const designDoc = await db.get("_design/database")
const response = []
for (let name of Object.keys(designDoc.views)) {
// Only return custom views, not built ins
if (Object.values(ViewNames).indexOf(name) !== -1) {
continue
}
response.push({
name,
...designDoc.views[name],
})
}
ctx.body = response
},
save: async ctx => {
const db = new CouchDB(ctx.appId)
const { originalName, ...viewToSave } = ctx.request.body
const designDoc = await db.get("_design/database")
const view = viewTemplate(viewToSave)
if (!viewToSave.name) {
ctx.throw(400, "Cannot create view without a name")
}
designDoc.views = {
...designDoc.views,
[viewToSave.name]: view,
}
// view has been renamed
if (originalName) {
delete designDoc.views[originalName]
}
await db.put(designDoc)
// add views to table document
const table = await db.get(ctx.request.body.tableId)
if (!table.views) table.views = {}
if (!view.meta.schema) {
view.meta.schema = table.schema
}
table.views[viewToSave.name] = view.meta
if (originalName) {
delete table.views[originalName]
}
await db.put(table)
ctx.body = {
...table.views[viewToSave.name],
name: viewToSave.name,
}
},
destroy: async ctx => {
const db = new CouchDB(ctx.appId)
const designDoc = await db.get("_design/database")
const viewName = decodeURI(ctx.params.viewName)
const view = designDoc.views[viewName]
delete designDoc.views[viewName]
await db.put(designDoc)
const table = await db.get(view.meta.tableId)
delete table.views[viewName]
await db.put(table)
ctx.body = view
},
exportView: async ctx => {
const db = new CouchDB(ctx.appId)
const designDoc = await db.get("_design/database")
const viewName = decodeURI(ctx.query.view)
const view = designDoc.views[viewName]
const format = ctx.query.format
if (!format) {
ctx.throw(400, "Format must be specified, either csv or json")
}
if (view) {
ctx.params.viewName = viewName
// Fetch view rows
ctx.query = {
group: view.meta.groupBy,
calculation: view.meta.calculation,
stats: !!view.meta.field,
field: view.meta.field,
}
} else {
// table all_ view
/* istanbul ignore next */
ctx.params.viewName = viewName
}
await fetchView(ctx)
let schema = view && view.meta && view.meta.schema
if (!schema) {
const tableId = ctx.params.tableId || view.meta.tableId
const table = await db.get(tableId)
schema = table.schema
}
// Export part
let headers = Object.keys(schema)
const exporter = exporters[format]
const filename = `${viewName}.${format}`
// send down the file
ctx.attachment(filename)
ctx.body = apiFileReturn(exporter(headers, ctx.body))
},
} }
module.exports = controller exports.save = async ctx => {
const db = new CouchDB(ctx.appId)
const { originalName, ...viewToSave } = ctx.request.body
const view = viewTemplate(viewToSave)
if (!viewToSave.name) {
ctx.throw(400, "Cannot create view without a name")
}
await saveView(db, originalName, viewToSave.name, view)
// add views to table document
const table = await db.get(ctx.request.body.tableId)
if (!table.views) table.views = {}
if (!view.meta.schema) {
view.meta.schema = table.schema
}
table.views[viewToSave.name] = view.meta
if (originalName) {
delete table.views[originalName]
}
await db.put(table)
ctx.body = {
...table.views[viewToSave.name],
name: viewToSave.name,
}
}
exports.destroy = async ctx => {
const db = new CouchDB(ctx.appId)
const viewName = decodeURI(ctx.params.viewName)
const view = await deleteView(db, viewName)
const table = await db.get(view.meta.tableId)
delete table.views[viewName]
await db.put(table)
ctx.body = view
}
exports.exportView = async ctx => {
const db = new CouchDB(ctx.appId)
const viewName = decodeURI(ctx.query.view)
const view = await getView(db, viewName)
const format = ctx.query.format
if (!format) {
ctx.throw(400, "Format must be specified, either csv or json")
}
if (view) {
ctx.params.viewName = viewName
// Fetch view rows
ctx.query = {
group: view.meta.groupBy,
calculation: view.meta.calculation,
stats: !!view.meta.field,
field: view.meta.field,
}
} else {
// table all_ view
/* istanbul ignore next */
ctx.params.viewName = viewName
}
await fetchView(ctx)
let schema = view && view.meta && view.meta.schema
if (!schema) {
const tableId = ctx.params.tableId || view.meta.tableId
const table = await db.get(tableId)
schema = table.schema
}
// Export part
let headers = Object.keys(schema)
const exporter = exporters[format]
const filename = `${viewName}.${format}`
// send down the file
ctx.attachment(filename)
ctx.body = apiFileReturn(exporter(headers, ctx.body))
}

View File

@ -0,0 +1,109 @@
const {
ViewNames,
generateMemoryViewID,
getMemoryViewParams,
} = require("../../../db/utils")
const env = require("../../../environment")
exports.getView = async (db, viewName) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
return designDoc.views[viewName]
} else {
const viewDoc = await db.get(generateMemoryViewID(viewName))
return viewDoc.view
}
}
exports.getViews = async db => {
const response = []
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
for (let name of Object.keys(designDoc.views)) {
// Only return custom views, not built-ins
if (Object.values(ViewNames).indexOf(name) !== -1) {
continue
}
response.push({
name,
...designDoc.views[name],
})
}
} else {
const views = (
await db.allDocs(
getMemoryViewParams({
include_docs: true,
})
)
).rows.map(row => row.doc)
for (let viewDoc of views) {
response.push({
name: viewDoc.name,
...viewDoc.view,
})
}
}
return response
}
exports.saveView = async (db, originalName, viewName, viewTemplate) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
designDoc.views = {
...designDoc.views,
[viewName]: viewTemplate,
}
// view has been renamed
if (originalName) {
delete designDoc.views[originalName]
}
await db.put(designDoc)
} else {
const id = generateMemoryViewID(viewName)
const originalId = originalName ? generateMemoryViewID(originalName) : null
const viewDoc = {
_id: id,
view: viewTemplate,
name: viewName,
tableId: viewTemplate.meta.tableId,
}
try {
const old = await db.get(id)
if (originalId) {
const originalDoc = await db.get(originalId)
await db.remove(originalDoc._id, originalDoc._rev)
}
if (old && old._rev) {
viewDoc._rev = old._rev
}
} catch (err) {
// didn't exist, just skip
}
await db.put(viewDoc)
}
}
exports.deleteView = async (db, viewName) => {
if (env.SELF_HOSTED) {
const designDoc = await db.get("_design/database")
const view = designDoc.views[viewName]
delete designDoc.views[viewName]
await db.put(designDoc)
return view
} else {
const id = generateMemoryViewID(viewName)
const viewDoc = await db.get(id)
await db.remove(viewDoc._id, viewDoc._rev)
return viewDoc.view
}
}
exports.migrateToInMemoryView = async (db, viewName) => {
// delete the view initially
const designDoc = await db.get("_design/database")
const view = designDoc.views[viewName]
delete designDoc.views[viewName]
await db.put(designDoc)
await exports.saveView(db, null, viewName, view)
}
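
With these helpers the controller makes one call per operation and never mentions design documents; the self-hosted/hosted storage split is contained entirely in this file. A sketch of the rename flow the controller drives through saveView (the names are illustrative):

const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const { saveView } = require("./utils")

// renaming a view: saveView drops the entry under the old name
// (design-doc key or memory-view doc) and writes the new one
async function renameView(appId, body) {
  const db = new CouchDB(appId)
  const view = viewTemplate(body)
  await saveView(db, "old-name", body.name, view)
}
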

View File

@ -205,7 +205,7 @@ describe("/views", () => {
}) })
describe("exportView", () => { describe("exportView", () => {
it("should be able to delete a view", async () => { it("should be able to export a view", async () => {
await config.createTable(priceTable()) await config.createTable(priceTable())
await config.createRow() await config.createRow()
const view = await config.createView() const view = await config.createView()

View File

@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils") const automationUtils = require("../automationUtils")
const env = require("../../environment") const env = require("../../environment")
const usage = require("../../utilities/usageQuota") const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
exports.definition = { exports.definition = {
name: "Create Row", name: "Create Row",
@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
} }
} }
// have to clean up the row, remove the table from it // have to clean up the row, remove the table from it
const ctx = { const ctx = buildCtx(appId, emitter, {
body: inputs.row,
params: { params: {
tableId: inputs.row.tableId, tableId: inputs.row.tableId,
}, },
request: { })
body: inputs.row,
},
appId,
eventEmitter: emitter,
}
try { try {
inputs.row = await automationUtils.cleanUpRow( inputs.row = await automationUtils.cleanUpRow(
@ -86,7 +83,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
inputs.row.tableId, inputs.row.tableId,
inputs.row inputs.row
) )
if (env.isProd()) { if (env.USE_QUOTAS) {
await usage.update(apiKey, usage.Properties.ROW, 1) await usage.update(apiKey, usage.Properties.ROW, 1)
} }
await rowController.save(ctx) await rowController.save(ctx)
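
This step, and the delete/query/update steps below, all trade their hand-rolled context objects for a shared buildCtx helper imported from ./utils. That utils file is not among the hunks shown here, so the following is only a plausible reconstruction inferred from the call sites: an appId, an optional event emitter, and a { body, params } bag.

// hypothetical sketch of steps/utils.js, inferred from usage in this
// diff rather than taken from it
exports.buildCtx = (appId, emitter, { body, params } = {}) => {
  return {
    appId,
    params: params || {},
    request: { body },
    eventEmitter: emitter,
  }
}
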

View File

@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row") const rowController = require("../../api/controllers/row")
const env = require("../../environment") const env = require("../../environment")
const usage = require("../../utilities/usageQuota") const usage = require("../../utilities/usageQuota")
const { buildCtx } = require("./utils")
exports.definition = { exports.definition = {
description: "Delete a row from your database", description: "Delete a row from your database",
@ -60,19 +61,16 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
}, },
} }
} }
let ctx = {
let ctx = buildCtx(appId, emitter, {
body: {
_id: inputs.id,
_rev: inputs.revision,
},
params: { params: {
tableId: inputs.tableId, tableId: inputs.tableId,
}, },
request: { })
body: {
_id: inputs.id,
_rev: inputs.revision,
},
},
appId,
eventEmitter: emitter,
}
try { try {
if (env.isProd()) { if (env.isProd()) {

View File

@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row") const rowController = require("../../api/controllers/row")
const tableController = require("../../api/controllers/table") const tableController = require("../../api/controllers/table")
const { FieldTypes } = require("../../constants") const { FieldTypes } = require("../../constants")
const { buildCtx } = require("./utils")
const SortOrders = { const SortOrders = {
ASCENDING: "ascending", ASCENDING: "ascending",
@ -70,12 +71,11 @@ exports.definition = {
} }
async function getTable(appId, tableId) { async function getTable(appId, tableId) {
const ctx = { const ctx = buildCtx(appId, null, {
params: { params: {
id: tableId, id: tableId,
}, },
appId, })
}
await tableController.find(ctx) await tableController.find(ctx)
return ctx.body return ctx.body
} }
@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
sortType = sortType =
fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
} }
const ctx = { const ctx = buildCtx(appId, null, {
params: { params: {
tableId, tableId,
}, },
request: { body: {
body: { sortOrder,
sortOrder, sortType,
sortType, sort: sortColumn,
sort: sortColumn, query: filters || {},
query: filters || {}, limit,
limit,
},
}, },
appId, })
}
try { try {
await rowController.search(ctx) await rowController.search(ctx)
return { return {

View File

@ -1,5 +1,6 @@
const rowController = require("../../api/controllers/row") const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils") const automationUtils = require("../automationUtils")
const { buildCtx } = require("./utils")
exports.definition = { exports.definition = {
name: "Update Row", name: "Update Row",
@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
} }
// have to clean up the row, remove the table from it // have to clean up the row, remove the table from it
const ctx = { const ctx = buildCtx(appId, emitter, {
body: {
...inputs.row,
_id: inputs.rowId,
},
params: { params: {
rowId: inputs.rowId, rowId: inputs.rowId,
}, },
request: { })
body: {
...inputs.row,
_id: inputs.rowId,
},
},
appId,
eventEmitter: emitter,
}
try { try {
inputs.row = await automationUtils.cleanUpRowById( inputs.row = await automationUtils.cleanUpRowById(

View File

@ -0,0 +1,48 @@
const PouchDB = require("pouchdb")
const memory = require("pouchdb-adapter-memory")
const newid = require("./newid")
PouchDB.plugin(memory)
const Pouch = PouchDB.defaults({
prefix: undefined,
adapter: "memory",
})
exports.runView = async (view, calculation, group, data) => {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
const db = new Pouch(newid())
// write all the docs to the in memory Pouch (remove revs)
await db.bulkDocs(
data.map(row => ({
...row,
_rev: undefined,
}))
)
let fn = (doc, emit) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
const queryFns = {
meta: view.meta,
map: fn,
}
if (view.reduce) {
queryFns.reduce = view.reduce
}
const response = await db.query(queryFns, {
include_docs: !calculation,
group: !!group,
})
// need to fix the revs to be totally accurate
for (let row of response.rows) {
if (!row._rev || !row._id) {
continue
}
const found = data.find(possible => possible._id === row._id)
if (found) {
row._rev = found._rev
}
}
await db.destroy()
return response
}
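
runView rehydrates the stored string map (and optional reduce) into a throwaway in-memory PouchDB, loads the raw rows into it, runs a temporary query, then destroys the database; the fresh name per call keeps concurrent queries from colliding. A minimal usage sketch with an inline view definition:

const inMemoryViews = require("./inMemoryView")

async function demo() {
  const view = {
    meta: { tableId: "ta_abc123" },
    map: "function (doc) { emit(doc.Colour) }",
  }
  const data = [
    { _id: "ro_1", _rev: "1-a", Colour: "White" },
    { _id: "ro_2", _rev: "1-b", Colour: "Black" },
  ]
  // no calculation and no grouping: include_docs kicks in and the
  // matching docs come back in response.rows
  const response = await inMemoryViews.runView(view, null, false, data)
  console.log(response.rows.length) // 2
}

demo()
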

View File

@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
// create DBs // create DBs
const db = new CouchDB(appId) const db = new CouchDB(appId)
const linkedRowIds = links.map(link => link.id) const linkedRowIds = links.map(link => link.id)
let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map( const uniqueRowIds = [...new Set(linkedRowIds)]
let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
row => row.doc row => row.doc
) )
// convert the unique db rows back to a full list of linked rows
const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
// need to handle users as specific cases // need to handle users as specific cases
let [users, other] = partition(linked, linkRow => let [users, other] = partition(linked, linkRow =>
linkRow._id.startsWith(USER_METDATA_PREFIX) linkRow._id.startsWith(USER_METDATA_PREFIX)
@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
let linkController = new LinkController(args) let linkController = new LinkController(args)
try { try {
if ( if (
!(await linkController.doesTableHaveLinkedFields()) && !(await linkController.doesTableHaveLinkedFields(table)) &&
(oldTable == null || (oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable))) !(await linkController.doesTableHaveLinkedFields(oldTable)))
) { ) {
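
The first hunk above fixes a fan-out problem: several links can point at the same row, and allDocs returns one doc per requested key, duplicates included. Deduplicating the IDs shrinks the CouchDB round trip, and the map-over-find afterwards restores the original multiplicity, so callers still get one entry per link. The round trip in miniature:

const linkedRowIds = ["ro_1", "ro_2", "ro_1"]
const uniqueRowIds = [...new Set(linkedRowIds)] // ["ro_1", "ro_2"]

// stand-in for the docs returned by db.allDocs(getMultiIDParams(...))
const dbRows = [{ _id: "ro_1" }, { _id: "ro_2" }]

// expand back out: one entry per original link, duplicates restored
const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
console.log(linked.length) // 3
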

View File

@ -39,6 +39,7 @@ const DocumentTypes = {
QUERY: "query", QUERY: "query",
DEPLOYMENTS: "deployments", DEPLOYMENTS: "deployments",
METADATA: "metadata", METADATA: "metadata",
MEM_VIEW: "view",
} }
const ViewNames = { const ViewNames = {
@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.METADATA, docId, otherProps) return getDocParams(DocumentTypes.METADATA, docId, otherProps)
} }
exports.generateMemoryViewID = viewName => {
return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
}
exports.getMemoryViewParams = (otherProps = {}) => {
return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
}
/** /**
* This can be used with the db.allDocs to get a list of IDs * This can be used with the db.allDocs to get a list of IDs
*/ */
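
Registering MEM_VIEW as a document type gives memory views predictable IDs that the usual getDocParams machinery can range-scan. Assuming SEPARATOR in this file is the underscore used by the other document types:

const { generateMemoryViewID } = require("./utils")

console.log(generateMemoryViewID("active-vehicles")) // "view_active-vehicles"
// getMemoryViewParams({ include_docs: true }) would then scan every
// document whose _id starts with "view_"
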

View File

@ -26,7 +26,7 @@ module.exports = {
COUCH_DB_URL: process.env.COUCH_DB_URL, COUCH_DB_URL: process.env.COUCH_DB_URL,
MINIO_URL: process.env.MINIO_URL, MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL, WORKER_URL: process.env.WORKER_URL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED), SELF_HOSTED: process.env.SELF_HOSTED,
AWS_REGION: process.env.AWS_REGION, AWS_REGION: process.env.AWS_REGION,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@ -66,3 +66,10 @@ module.exports = {
return !isDev() return !isDev()
}, },
} }
// coerce numeric strings to numbers, otherwise a value like "0" would be truthy
for (let [key, value] of Object.entries(module.exports)) {
if (typeof value === "string" && !isNaN(parseInt(value))) {
module.exports[key] = parseInt(value)
}
}
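
SELF_HOSTED was previously parsed eagerly with parseInt at load time; exporting it raw and normalising every numeric-looking variable in this loop fixes the same truthiness trap for all of them at once. The trap, spelled out:

// any non-empty string is truthy, including a "disabled" flag
console.log(Boolean("0")) // true
console.log(Boolean(parseInt("0", 10))) // false

// after the loop above, SELF_HOSTED === "0" is exported as the number 0,
// so `if (env.SELF_HOSTED)` behaves as intended
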

View File

@ -12,7 +12,7 @@ import { getSqlQuery } from "./utils"
module MySQLModule { module MySQLModule {
const mysql = require("mysql") const mysql = require("mysql")
const Sql = require("./base/sql") const Sql = require("./base/sql")
const { buildExternalTableId, convertType } = require("./utils") const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
const { FieldTypes } = require("../constants") const { FieldTypes } = require("../constants")
interface MySQLConfig { interface MySQLConfig {
@ -194,18 +194,7 @@ module MySQLModule {
} }
} }
// add the existing relationships from the entities if they exist, to prevent them from being overridden copyExistingPropsOver(tableName, tables, entities)
if (entities && entities[tableName]) {
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
} }
this.client.end() this.client.end()

View File

@ -12,7 +12,7 @@ module PostgresModule {
const { Pool } = require("pg") const { Pool } = require("pg")
const Sql = require("./base/sql") const Sql = require("./base/sql")
const { FieldTypes } = require("../constants") const { FieldTypes } = require("../constants")
const { buildExternalTableId, convertType } = require("./utils") const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
interface PostgresConfig { interface PostgresConfig {
host: string host: string
@ -173,31 +173,24 @@ module PostgresModule {
name: tableName, name: tableName,
schema: {}, schema: {},
} }
// add the existing relationships from the entities if they exist, to prevent them from being overridden
if (entities && entities[tableName]) {
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
} }
const type: string = convertType(column.data_type, TYPE_MAP) const type: string = convertType(column.data_type, TYPE_MAP)
const isAuto: boolean = const identity = !!(column.identity_generation || column.identity_start || column.identity_increment)
typeof column.column_default === "string" && const hasDefault = typeof column.column_default === "string" &&
column.column_default.startsWith("nextval") column.column_default.startsWith("nextval")
const isGenerated = column.is_generated && column.is_generated !== "NEVER"
const isAuto: boolean = hasDefault || identity || isGenerated
tables[tableName].schema[columnName] = { tables[tableName].schema[columnName] = {
autocolumn: isAuto, autocolumn: isAuto,
name: columnName, name: columnName,
type, type,
} }
} }
for (let tableName of Object.keys(tables)) {
copyExistingPropsOver(tableName, tables, entities)
}
this.tables = tables this.tables = tables
} }
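
Auto-column detection previously only recognised serial columns (a nextval(...) default); it now also catches declared identity columns and generated columns, which Postgres reports through separate information_schema fields. The predicate in isolation, against a sample column row (field names as in information_schema.columns):

// sample row shape from information_schema.columns
const column = {
  column_default: null,
  identity_generation: "ALWAYS", // set for GENERATED ALWAYS AS IDENTITY
  identity_start: "1",
  identity_increment: "1",
  is_generated: "NEVER",
}

const identity = !!(
  column.identity_generation ||
  column.identity_start ||
  column.identity_increment
)
const hasDefault =
  typeof column.column_default === "string" &&
  column.column_default.startsWith("nextval")
const isGenerated = column.is_generated && column.is_generated !== "NEVER"
const isAuto = hasDefault || identity || !!isGenerated

console.log(isAuto) // true: the seeded Vehicles.id column is an identity
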

View File

@ -82,3 +82,21 @@ export function isIsoDateString(str: string) {
let d = new Date(str) let d = new Date(str)
return d.toISOString() === str return d.toISOString() === str
} }
// add the existing relationships from the entities if they exist, to prevent them from being overridden
export function copyExistingPropsOver(
tableName: string,
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
if (entities && entities[tableName]) {
if (entities[tableName].primaryDisplay) {
tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
}
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
if (!existingTableSchema.hasOwnProperty(key)) {
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
}
}
}
}
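
Both SQL connectors now funnel through this helper after rebuilding a table's schema from the database, so user-defined link columns and the chosen primary display survive a schema refresh instead of being wiped. In miniature:

const { copyExistingPropsOver } = require("./utils")

// freshly rebuilt schema, straight from the database
const tables = {
  Vehicles: { name: "Vehicles", schema: { id: { type: "number" } } },
}
// previously saved entities, including a user-defined link column
const entities = {
  Vehicles: {
    primaryDisplay: "Registration",
    schema: { ServiceLogs: { type: "link", tableId: "ServiceLog" } },
  },
}

copyExistingPropsOver("Vehicles", tables, entities)
// tables.Vehicles keeps primaryDisplay and regains the ServiceLogs link
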

View File

@ -124,11 +124,13 @@ exports.performBackup = async (appId, backupName) => {
), ),
}) })
// write the file to the object store // write the file to the object store
await streamUpload( if (env.SELF_HOSTED) {
ObjectStoreBuckets.BACKUPS, await streamUpload(
join(appId, backupName), ObjectStoreBuckets.BACKUPS,
fs.createReadStream(path) join(appId, backupName),
) fs.createReadStream(path)
)
}
return fs.createReadStream(path) return fs.createReadStream(path)
} }

View File

@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields. * @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing. * @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param {Object} row The row which is to be updated with information for the auto columns. * @param {Object} row The row which is to be updated with information for the auto columns.
* @param {Object} opts options controlling optional behaviour, such as reprocessing and noAutoRelationships.
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated * @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
* for automatic ID purposes. * for automatic ID purposes.
*/ */
function processAutoColumn(user, table, row) { function processAutoColumn(
user,
table,
row,
opts = { reprocessing: false, noAutoRelationships: false }
) {
let now = new Date().toISOString() let now = new Date().toISOString()
// if a row doesn't have a revision then it doesn't exist yet // if a row doesn't have a revision then it doesn't exist yet
const creating = !row._rev const creating = !row._rev
@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
} }
switch (schema.subtype) { switch (schema.subtype) {
case AutoFieldSubTypes.CREATED_BY: case AutoFieldSubTypes.CREATED_BY:
if (creating) { if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId] row[key] = [user.userId]
} }
break break
@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
} }
break break
case AutoFieldSubTypes.UPDATED_BY: case AutoFieldSubTypes.UPDATED_BY:
row[key] = [user.userId] if (!opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId]
}
break break
case AutoFieldSubTypes.UPDATED_AT: case AutoFieldSubTypes.UPDATED_AT:
row[key] = now row[key] = now
@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
} }
return { table, row } return { table, row }
} }
exports.processAutoColumn = processAutoColumn
/** /**
* This will coerce a value to the correct types based on the type transform map * This will coerce a value to the correct types based on the type transform map
@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
* @param {object} user the user which is performing the input. * @param {object} user the user which is performing the input.
* @param {object} row the row which is being created/updated. * @param {object} row the row which is being created/updated.
* @param {object} table the table which the row is being saved to. * @param {object} table the table which the row is being saved to.
* @param {object} opts some input processing options (like disabling auto-column relationships).
* @returns {object} the row which has been prepared to be written to the DB. * @returns {object} the row which has been prepared to be written to the DB.
*/ */
exports.inputProcessing = (user = {}, table, row) => { exports.inputProcessing = (
user = {},
table,
row,
opts = { noAutoRelationships: false }
) => {
let clonedRow = cloneDeep(row) let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out // need to copy the table so it can be differenced on way out
const copiedTable = cloneDeep(table) const copiedTable = cloneDeep(table)
@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
} }
} }
// handle auto columns - this returns an object like {table, row} // handle auto columns - this returns an object like {table, row}
return processAutoColumn(user, copiedTable, clonedRow) return processAutoColumn(user, copiedTable, clonedRow, opts)
} }
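
The opts bag threads through from the two call sites added elsewhere in this diff: CSV import passes noAutoRelationships so imported rows are not stamped as created or updated by the importing user, and the 409 recovery path in the row controller passes reprocessing so a retried row does not gain a second CREATED_BY/UPDATED_BY entry. The calls, echoing those sites:

// CSV import: skip the user-relationship auto columns entirely
const processed = inputProcessing(user, table, row, {
  noAutoRelationships: true,
})

// 409 retry: recompute auto columns without re-stamping user links
const retried = processAutoColumn(null, updatedTable, row, {
  reprocessing: true,
})
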
/** /**

View File

@ -943,10 +943,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/auth@^0.9.125-alpha.17": "@budibase/auth@^0.9.139":
version "0.9.133" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.133.tgz#280d581820c9069b6bc021f88178c215ee48ad08" resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.139.tgz#0610582800df062372582f9139c7aa99606af3e1"
integrity sha512-DL7zIYRXE6xSKE/qbHMf/SX3+bceGxM4xzUmLTk4OHtEOP/vaUJr35tkhznAZF7VpUR9Yh20D6/Zw8z/3sxj/A== integrity sha512-2JUAKC3AA74O3TXHjoGCoXkDxXqUS1K8KGFrJtrUQQrVq1YeQGSjD6Km+Ho8PqUaNdpEfZinBS1/3qFUqaQbuQ==
dependencies: dependencies:
"@techpass/passport-openidconnect" "^0.3.0" "@techpass/passport-openidconnect" "^0.3.0"
aws-sdk "^2.901.0" aws-sdk "^2.901.0"
@ -966,10 +966,10 @@
uuid "^8.3.2" uuid "^8.3.2"
zlib "^1.0.5" zlib "^1.0.5"
"@budibase/bbui@^0.9.133": "@budibase/bbui@^0.9.139":
version "0.9.133" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad" resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.139.tgz#e6cfc90e8f6c2aa3526fc6a7bef251bccdaf51bb"
integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ== integrity sha512-HllzXwfCnxqlV/ifdOR4Got6yrvK2rUFwKUWQIcYU0wk8h6hwYmLehP7HqgBa6l8+bvO1Ep9g+rjP2xJPJG21w==
dependencies: dependencies:
"@adobe/spectrum-css-workflow-icons" "^1.2.1" "@adobe/spectrum-css-workflow-icons" "^1.2.1"
"@spectrum-css/actionbutton" "^1.0.1" "@spectrum-css/actionbutton" "^1.0.1"
@ -1015,14 +1015,14 @@
svelte-flatpickr "^3.1.0" svelte-flatpickr "^3.1.0"
svelte-portal "^1.0.0" svelte-portal "^1.0.0"
"@budibase/client@^0.9.125-alpha.17": "@budibase/client@^0.9.139":
version "0.9.133" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.133.tgz#43748e189e9b92d99d1281ab62bd2c5ebed5dbab" resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.139.tgz#acec8dc746295f7793b188f4950ab2268170366c"
integrity sha512-JrduL9iVMGalZyIUQ+1UN/dhrOZNRJwXU8B4r/eWhVoJf3f3bCuNfpMoT2LN3HY4ooyu37VehD+J5bdDsvlNPw== integrity sha512-PSSSaWjUrY/C4kG8r46aOVfq0aCEZGuI2Uv4jkqmk1zgt0GTXiJ+iQBkg7WZqTDBm7JIUzYUzV1T102tN4L1Jg==
dependencies: dependencies:
"@budibase/bbui" "^0.9.133" "@budibase/bbui" "^0.9.139"
"@budibase/standard-components" "^0.9.133" "@budibase/standard-components" "^0.9.139"
"@budibase/string-templates" "^0.9.133" "@budibase/string-templates" "^0.9.139"
regexparam "^1.3.0" regexparam "^1.3.0"
shortid "^2.2.15" shortid "^2.2.15"
svelte-spa-router "^3.0.5" svelte-spa-router "^3.0.5"
@ -1055,12 +1055,12 @@
to-gfm-code-block "^0.1.1" to-gfm-code-block "^0.1.1"
year "^0.2.1" year "^0.2.1"
"@budibase/standard-components@^0.9.133": "@budibase/standard-components@^0.9.139":
version "0.9.133" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.133.tgz#789c02b45dc3853b003822c09e18ce7ece4dfa29" resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3"
integrity sha512-xcuwTxsqk1J/YmM4YjThO/Fm0eJ+aZWm0kbFgfN+dNN9fuPlsPOLmlVEWeOUPmBa5XfRyDbx6lDYj0PPEK8CvA== integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw==
dependencies: dependencies:
"@budibase/bbui" "^0.9.133" "@budibase/bbui" "^0.9.139"
"@spectrum-css/button" "^3.0.3" "@spectrum-css/button" "^3.0.3"
"@spectrum-css/card" "^3.0.3" "@spectrum-css/card" "^3.0.3"
"@spectrum-css/divider" "^1.0.3" "@spectrum-css/divider" "^1.0.3"
@ -1073,10 +1073,10 @@
svelte-apexcharts "^1.0.2" svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0" svelte-flatpickr "^3.1.0"
"@budibase/string-templates@^0.9.125-alpha.17", "@budibase/string-templates@^0.9.133": "@budibase/string-templates@^0.9.139":
version "0.9.133" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055" resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.139.tgz#f87de1d7382a81164bb734ef62ba552839805134"
integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w== integrity sha512-T7FR3GSmc/3vs6bynYrL/POjGP/z4pjlwjI4P6b2u10Fg2HWtI0QPZ+ifnOUf53Ry2r/PvDELATqkElpKh9Spg==
dependencies: dependencies:
"@budibase/handlebars-helpers" "^0.11.4" "@budibase/handlebars-helpers" "^0.11.4"
dayjs "^1.10.4" dayjs "^1.10.4"
@ -11110,9 +11110,9 @@ tmp@^0.0.33:
os-tmpdir "~1.0.2" os-tmpdir "~1.0.2"
tmpl@1.0.x: tmpl@1.0.x:
version "1.0.4" version "1.0.5"
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-buffer@^1.1.1: to-buffer@^1.1.1:
version "1.1.1" version "1.1.1"

View File

@ -1,6 +1,6 @@
{ {
"name": "@budibase/string-templates", "name": "@budibase/string-templates",
"version": "0.9.125-alpha.19", "version": "0.9.140-alpha.5",
"description": "Handlebars wrapper for Budibase templating.", "description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs", "main": "src/index.cjs",
"module": "dist/bundle.mjs", "module": "dist/bundle.mjs",

View File

@ -4633,9 +4633,9 @@ time-stamp@^1.0.1:
integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM= integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM=
tmpl@1.0.x: tmpl@1.0.x:
version "1.0.4" version "1.0.5"
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-fast-properties@^2.0.0: to-fast-properties@^2.0.0:
version "2.0.0" version "2.0.0"

View File

@ -1,7 +1,7 @@
{ {
"name": "@budibase/worker", "name": "@budibase/worker",
"email": "hi@budibase.com", "email": "hi@budibase.com",
"version": "0.9.125-alpha.19", "version": "0.9.140-alpha.5",
"description": "Budibase background service", "description": "Budibase background service",
"main": "src/index.js", "main": "src/index.js",
"repository": { "repository": {
@ -25,8 +25,8 @@
"author": "Budibase", "author": "Budibase",
"license": "AGPL-3.0-or-later", "license": "AGPL-3.0-or-later",
"dependencies": { "dependencies": {
"@budibase/auth": "^0.9.125-alpha.19", "@budibase/auth": "^0.9.140-alpha.5",
"@budibase/string-templates": "^0.9.125-alpha.19", "@budibase/string-templates": "^0.9.140-alpha.5",
"@koa/router": "^8.0.0", "@koa/router": "^8.0.0",
"@techpass/passport-openidconnect": "^0.3.0", "@techpass/passport-openidconnect": "^0.3.0",
"aws-sdk": "^2.811.0", "aws-sdk": "^2.811.0",

View File

@ -31,7 +31,12 @@ async function allUsers() {
return response.rows.map(row => row.doc) return response.rows.map(row => row.doc)
} }
async function saveUser(user, tenantId, hashPassword = true) { async function saveUser(
user,
tenantId,
hashPassword = true,
requirePassword = true
) {
if (!tenantId) { if (!tenantId) {
throw "No tenancy specified." throw "No tenancy specified."
} }
@ -57,12 +62,13 @@ async function saveUser(user, tenantId, hashPassword = true) {
hashedPassword = hashPassword ? await hash(password) : password hashedPassword = hashPassword ? await hash(password) : password
} else if (dbUser) { } else if (dbUser) {
hashedPassword = dbUser.password hashedPassword = dbUser.password
} else { } else if (requirePassword) {
throw "Password must be specified." throw "Password must be specified."
} }
_id = _id || generateGlobalUserID() _id = _id || generateGlobalUserID()
user = { user = {
createdAt: Date.now(),
...dbUser, ...dbUser,
...user, ...user,
_id, _id,
@ -106,16 +112,21 @@ exports.save = async ctx => {
} }
} }
const parseBooleanParam = param => {
if (param && param == "false") {
return false
} else {
return true
}
}
exports.adminUser = async ctx => { exports.adminUser = async ctx => {
const { email, password, tenantId } = ctx.request.body const { email, password, tenantId } = ctx.request.body
// account portal sends a pre-hashed password - honour param to prevent double hashing // account portal sends a pre-hashed password - honour param to prevent double hashing
let hashPassword = ctx.request.query.hashPassword const hashPassword = parseBooleanParam(ctx.request.query.hashPassword)
if (hashPassword && hashPassword == "false") { // account portal sends no password for SSO users
hashPassword = false const requirePassword = parseBooleanParam(ctx.request.query.requirePassword)
} else {
hashPassword = true
}
if (await doesTenantExist(tenantId)) { if (await doesTenantExist(tenantId)) {
ctx.throw(403, "Organisation already exists.") ctx.throw(403, "Organisation already exists.")
@ -138,6 +149,7 @@ exports.adminUser = async ctx => {
const user = { const user = {
email: email, email: email,
password: password, password: password,
createdAt: Date.now(),
roles: {}, roles: {},
builder: { builder: {
global: true, global: true,
@ -148,7 +160,7 @@ exports.adminUser = async ctx => {
tenantId, tenantId,
} }
try { try {
ctx.body = await saveUser(user, tenantId, hashPassword) ctx.body = await saveUser(user, tenantId, hashPassword, requirePassword)
} catch (err) { } catch (err) {
ctx.throw(err.status || 400, err) ctx.throw(err.status || 400, err)
} }
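
Taken together, these changes let the account portal create an SSO admin with no password at all: requirePassword=false skips the "Password must be specified" check, the Joi schema in the next file stops requiring the field, and createdAt is stamped on first write. parseBooleanParam deliberately defaults to true, so only an explicit "false" opts out:

const parseBooleanParam = param => {
  if (param && param == "false") {
    return false
  } else {
    return true
  }
}

console.log(parseBooleanParam(undefined)) // true (param omitted)
console.log(parseBooleanParam("true")) // true
console.log(parseBooleanParam("false")) // false (the only opt-out)
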

View File

@ -11,7 +11,7 @@ exports.save = async function (ctx) {
} }
try { try {
const response = await db.post(workspaceDoc) const response = await db.put(workspaceDoc)
ctx.body = { ctx.body = {
_id: response.id, _id: response.id,
_rev: response.rev, _rev: response.rev,

View File

@ -10,7 +10,7 @@ function buildAdminInitValidation() {
return joiValidator.body( return joiValidator.body(
Joi.object({ Joi.object({
email: Joi.string().required(), email: Joi.string().required(),
password: Joi.string().required(), password: Joi.string(),
tenantId: Joi.string().required(), tenantId: Joi.string().required(),
}) })
.required() .required()

View File

@ -287,10 +287,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/auth@^0.9.128": "@budibase/auth@^0.9.139":
version "0.9.128" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.128.tgz#6bb6c716b6647b7e9362e3faf12b191650ea0ad4" resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.139.tgz#0610582800df062372582f9139c7aa99606af3e1"
integrity sha512-WCcrtAXilT/4++7PdzyTYgrdVqZcKhUev3NcGrFQf7WbDhkVCuigWbb8Q01KXODjbs0BZC0RshVv/PxrgLbBQA== integrity sha512-2JUAKC3AA74O3TXHjoGCoXkDxXqUS1K8KGFrJtrUQQrVq1YeQGSjD6Km+Ho8PqUaNdpEfZinBS1/3qFUqaQbuQ==
dependencies: dependencies:
"@techpass/passport-openidconnect" "^0.3.0" "@techpass/passport-openidconnect" "^0.3.0"
aws-sdk "^2.901.0" aws-sdk "^2.901.0"
@ -338,10 +338,10 @@
to-gfm-code-block "^0.1.1" to-gfm-code-block "^0.1.1"
year "^0.2.1" year "^0.2.1"
"@budibase/string-templates@^0.9.128": "@budibase/string-templates@^0.9.139":
version "0.9.128" version "0.9.139"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.128.tgz#50ee46dc0d726d481bd5139cd0b38364649a8463" resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.139.tgz#f87de1d7382a81164bb734ef62ba552839805134"
integrity sha512-4TzmnX2o5S2cts08ukB86El4wYm7cHuV2t6a7yDMGPe1mWeKP1WEtVF6rKhXEdbPTiotW8oYondOlgOP7DT9lA== integrity sha512-T7FR3GSmc/3vs6bynYrL/POjGP/z4pjlwjI4P6b2u10Fg2HWtI0QPZ+ifnOUf53Ry2r/PvDELATqkElpKh9Spg==
dependencies: dependencies:
"@budibase/handlebars-helpers" "^0.11.4" "@budibase/handlebars-helpers" "^0.11.4"
dayjs "^1.10.4" dayjs "^1.10.4"
@ -6184,9 +6184,9 @@ tiny-queue@^0.2.0:
integrity sha1-JaZ/LG4lOyypQZd7XvdELvl6YEY= integrity sha1-JaZ/LG4lOyypQZd7XvdELvl6YEY=
tmpl@1.0.x: tmpl@1.0.x:
version "1.0.4" version "1.0.5"
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-fast-properties@^2.0.0: to-fast-properties@^2.0.0:
version "2.0.0" version "2.0.0"