Merge remote-tracking branch 'origin/develop' into feature/datasource-wizard
commit c1829223db

@@ -7,6 +7,7 @@ on:
env:
  POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

@@ -4,9 +4,16 @@ on:
  push:
    branches:
      - master
  workflow_dispatch:
    inputs:
      release_self_host:
        description: 'Release to self hosters? (Y/N)'
        required: true
        default: 'N'

env:
  POSTHOG_TOKEN: ${{ secrets.POSTHOG_TOKEN }}
  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
  POSTHOG_URL: ${{ secrets.POSTHOG_URL }}
  SENTRY_DSN: ${{ secrets.SENTRY_DSN }}

@@ -47,7 +54,19 @@ jobs:
        uses: "WyriHaximus/github-action-get-previous-tag@v1"

      - name: Build/release Docker images
        run: |
        if: ${{ github.event.inputs.release_self_host != 'Y' }}
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build
          yarn build:docker
        env:
          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
          BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}

      - name: Build/release Docker images (Self Host)
        if: ${{ github.event.inputs.release_self_host == 'Y' }}
        run: |
          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
          yarn build
          yarn build:docker
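Note (not part of the diff): the new release_self_host input can be supplied when dispatching the workflow manually, either from the Actions UI or with the GitHub CLI; the workflow file name below is an assumption, substitute the real release workflow file.

    # hypothetical invocation of the release workflow with self-host publishing enabled
    gh workflow run release.yml --ref master -f release_self_host=Y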
@@ -1,5 +1,5 @@
{
  "version": "0.9.125-alpha.18",
  "version": "0.9.140-alpha.8",
  "npmClient": "yarn",
  "packages": [
    "packages/*"

@@ -42,7 +42,8 @@
    "lint:fix": "yarn run lint:fix:ts && yarn run lint:fix:prettier && yarn run lint:fix:eslint",
    "test:e2e": "lerna run cy:test",
    "test:e2e:ci": "lerna run cy:ci",
    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
    "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
    "build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
    "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
    "release:helm": "./scripts/release_helm_chart.sh",
    "multi:enable": "lerna run multi:enable",
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/auth",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"description": "Authentication middlewares for budibase builder and apps",
|
||||
"main": "src/index.js",
|
||||
"author": "Budibase",
|
||||
|
|
|
@@ -1,5 +1,6 @@
const env = require("../src/environment")

env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")

@@ -1,5 +1,7 @@
const redis = require("../redis/authRedis")
const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")

const EXPIRY_SECONDS = 3600

@@ -9,6 +11,15 @@ const EXPIRY_SECONDS = 3600
const populateFromDB = async (userId, tenantId) => {
  const user = await getGlobalDB(tenantId).get(userId)
  user.budibaseAccess = true

  if (!env.SELF_HOSTED) {
    const account = await accounts.getAccount(user.email)
    if (account) {
      user.account = account
      user.accountPortalAccess = true
    }
  }

  return user
}
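Example (outside the diff): the hunk above only shows populateFromDB; a minimal read-through sketch of the cache that wraps it might look like the following, where the client helper name and key scheme are assumptions rather than the file's actual code.

    exports.getUser = async (userId, tenantId = null) => {
      if (!tenantId) {
        tenantId = getTenantId()
      }
      const client = await redis.getUserClient() // assumed helper
      let user = await client.get(userId)
      if (!user) {
        // cache miss - load from CouchDB (and the account portal) and cache for an hour
        user = await populateFromDB(userId, tenantId)
        await client.store(userId, user, EXPIRY_SECONDS)
      }
      return user
    }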
@@ -0,0 +1,22 @@
const API = require("./api")
const env = require("../environment")

const api = new API(env.ACCOUNT_PORTAL_URL)

// TODO: Authorization

exports.getAccount = async email => {
  const payload = {
    email,
  }
  const response = await api.post(`/api/accounts/search`, {
    body: payload,
  })
  const json = await response.json()

  if (response.status !== 200) {
    throw Error(`Error getting account by email ${email}`, json)
  }

  return json[0]
}

@@ -0,0 +1,44 @@
const fetch = require("node-fetch")

class API {
  constructor(host) {
    this.host = host
  }

  apiCall =
    method =>
    async (url = "", options = {}) => {
      if (!options.headers) {
        options.headers = {}
      }

      if (!options.headers["Content-Type"]) {
        options.headers = {
          "Content-Type": "application/json",
          Accept: "application/json",
          ...options.headers,
        }
      }

      let json = options.headers["Content-Type"] === "application/json"

      const requestOptions = {
        method: method,
        body: json ? JSON.stringify(options.body) : options.body,
        headers: options.headers,
        // TODO: See if this is necessary
        credentials: "include",
      }

      const resp = await fetch(`${this.host}${url}`, requestOptions)

      return resp
    }

  post = this.apiCall("POST")
  get = this.apiCall("GET")
  patch = this.apiCall("PATCH")
  del = this.apiCall("DELETE")
  put = this.apiCall("PUT")
}

module.exports = API
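Example (outside the diff): a sketch of how the new accounts client could be called; the wrapper function is illustrative.

    const accounts = require("../cloud/accounts")

    // getAccount returns json[0], so an empty search yields undefined;
    // it throws when the account portal responds with a non-200 status.
    async function hasPortalAccess(email) {
      try {
        const account = await accounts.getAccount(email)
        return !!account
      } catch (err) {
        return false
      }
    }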
@@ -16,9 +16,12 @@ module.exports = {
  REDIS_PASSWORD: process.env.REDIS_PASSWORD,
  MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
  MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
  AWS_REGION: process.env.AWS_REGION,
  MINIO_URL: process.env.MINIO_URL,
  INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
  MULTI_TENANCY: process.env.MULTI_TENANCY,
  ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
  SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
  isTest,
  _set(key, value) {
    process.env[key] = value
@ -12,6 +12,7 @@ const {
|
|||
auditLog,
|
||||
tenancy,
|
||||
appTenancy,
|
||||
authError,
|
||||
} = require("./middleware")
|
||||
const { setDB } = require("./db")
|
||||
const userCache = require("./cache/user")
|
||||
|
@ -60,6 +61,7 @@ module.exports = {
|
|||
buildTenancyMiddleware: tenancy,
|
||||
buildAppTenancyMiddleware: appTenancy,
|
||||
auditLog,
|
||||
authError,
|
||||
},
|
||||
cache: {
|
||||
user: userCache,
|
||||
|
|
|
@ -2,6 +2,7 @@ const jwt = require("./passport/jwt")
|
|||
const local = require("./passport/local")
|
||||
const google = require("./passport/google")
|
||||
const oidc = require("./passport/oidc")
|
||||
const { authError } = require("./passport/utils")
|
||||
const authenticated = require("./authenticated")
|
||||
const auditLog = require("./auditLog")
|
||||
const tenancy = require("./tenancy")
|
||||
|
@ -16,4 +17,5 @@ module.exports = {
|
|||
auditLog,
|
||||
tenancy,
|
||||
appTenancy,
|
||||
authError,
|
||||
}
|
||||
|
|
|
@ -27,7 +27,11 @@ async function authenticate(accessToken, refreshToken, profile, done) {
|
|||
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
|
||||
* @returns Dynamically configured Passport Google Strategy
|
||||
*/
|
||||
exports.strategyFactory = async function (config, callbackUrl) {
|
||||
exports.strategyFactory = async function (
|
||||
config,
|
||||
callbackUrl,
|
||||
verify = authenticate
|
||||
) {
|
||||
try {
|
||||
const { clientID, clientSecret } = config
|
||||
|
||||
|
@ -43,7 +47,7 @@ exports.strategyFactory = async function (config, callbackUrl) {
|
|||
clientSecret: config.clientSecret,
|
||||
callbackURL: callbackUrl,
|
||||
},
|
||||
authenticate
|
||||
verify
|
||||
)
|
||||
} catch (err) {
|
||||
console.error(err)
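Example (outside the diff): with the new optional verify parameter, a caller can swap in its own verification logic instead of the default authenticate; a hedged sketch follows.

    // customVerify follows the signature shown above:
    // (accessToken, refreshToken, profile, done)
    const customVerify = async (accessToken, refreshToken, profile, done) => {
      // custom user lookup / provisioning would go here
      done(null, { id: profile.id })
    }

    const strategy = await strategyFactory(config, callbackUrl, customVerify)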
@ -104,7 +104,7 @@ describe("third party common", () => {
|
|||
_id: id,
|
||||
email: email,
|
||||
}
|
||||
const response = await db.post(dbUser)
|
||||
const response = await db.put(dbUser)
|
||||
dbUser._rev = response.rev
|
||||
}
|
||||
|
||||
|
|
|
@ -71,7 +71,7 @@ exports.authenticateThirdParty = async function (
|
|||
dbUser = await syncUser(dbUser, thirdPartyUser)
|
||||
|
||||
// create or sync the user
|
||||
const response = await db.post(dbUser)
|
||||
const response = await db.put(dbUser)
|
||||
dbUser._rev = response.rev
|
||||
|
||||
// authenticate
|
||||
|
|
|
@ -73,6 +73,7 @@ exports.ObjectStore = bucket => {
|
|||
AWS.config.update({
|
||||
accessKeyId: env.MINIO_ACCESS_KEY,
|
||||
secretAccessKey: env.MINIO_SECRET_KEY,
|
||||
region: env.AWS_REGION,
|
||||
})
|
||||
const config = {
|
||||
s3ForcePathStyle: true,
|
||||
|
|
|
@@ -30,6 +30,10 @@ exports.invalidateSessions = async (userId, sessionId = null) => {
    sessions.push({ key: makeSessionID(userId, sessionId) })
  } else {
    sessions = await getSessionsForUser(userId)
    sessions.forEach(
      session =>
        (session.key = makeSessionID(session.userId, session.sessionId))
    )
  }
  const client = await redis.getSessionClient()
  const promises = []
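Example (outside the diff): invalidateSessions(userId, sessionId = null) now covers both cases shown above.

    // invalidate a single session, e.g. an explicit logout
    await invalidateSessions(userId, sessionId)

    // invalidate every session the user holds, e.g. after a password reset;
    // this is the branch that walks getSessionsForUser(userId)
    await invalidateSessions(userId)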
@ -4470,9 +4470,9 @@ tmp@^0.0.33:
|
|||
os-tmpdir "~1.0.2"
|
||||
|
||||
tmpl@1.0.x:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
|
||||
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
|
||||
integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
|
||||
|
||||
to-fast-properties@^2.0.0:
|
||||
version "2.0.0"
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/bbui",
|
||||
"description": "A UI solution used in the different Budibase projects.",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"license": "AGPL-3.0",
|
||||
"svelte": "src/index.js",
|
||||
"module": "dist/bbui.es.js",
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/builder",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
@ -65,10 +65,10 @@
|
|||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "^0.9.125-alpha.18",
|
||||
"@budibase/client": "^0.9.125-alpha.18",
|
||||
"@budibase/bbui": "^0.9.140-alpha.8",
|
||||
"@budibase/client": "^0.9.140-alpha.8",
|
||||
"@budibase/colorpicker": "1.1.2",
|
||||
"@budibase/string-templates": "^0.9.125-alpha.18",
|
||||
"@budibase/string-templates": "^0.9.140-alpha.8",
|
||||
"@sentry/browser": "5.19.1",
|
||||
"@spectrum-css/page": "^3.0.1",
|
||||
"@spectrum-css/vars": "^3.0.1",
|
||||
|
|
|
@ -1,16 +1,10 @@
|
|||
<script>
|
||||
import { onMount } from "svelte"
|
||||
import { Router } from "@roxi/routify"
|
||||
import { routes } from "../.routify/routes"
|
||||
import { initialise } from "builderStore"
|
||||
import { NotificationDisplay } from "@budibase/bbui"
|
||||
import { parse, stringify } from "qs"
|
||||
import HelpIcon from "components/common/HelpIcon.svelte"
|
||||
|
||||
onMount(async () => {
|
||||
await initialise()
|
||||
})
|
||||
|
||||
const queryHandler = { parse, stringify }
|
||||
</script>
|
||||
|
||||
|
|
|
@ -1,139 +0,0 @@
|
|||
import * as Sentry from "@sentry/browser"
|
||||
import posthog from "posthog-js"
|
||||
import api from "builderStore/api"
|
||||
|
||||
let analyticsEnabled
|
||||
const posthogConfigured = process.env.POSTHOG_TOKEN && process.env.POSTHOG_URL
|
||||
const sentryConfigured = process.env.SENTRY_DSN
|
||||
|
||||
const FEEDBACK_SUBMITTED_KEY = "budibase:feedback_submitted"
|
||||
const APP_FIRST_STARTED_KEY = "budibase:first_run"
|
||||
const feedbackHours = 12
|
||||
|
||||
async function activate() {
|
||||
if (analyticsEnabled === undefined) {
|
||||
// only the server knows the true NODE_ENV
|
||||
// this was an issue as NODE_ENV = 'cypress' on the server,
|
||||
// but 'production' on the client
|
||||
const response = await api.get("/api/analytics")
|
||||
analyticsEnabled = (await response.json()).enabled === true
|
||||
}
|
||||
if (!analyticsEnabled) return
|
||||
if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
|
||||
if (posthogConfigured) {
|
||||
posthog.init(process.env.POSTHOG_TOKEN, {
|
||||
autocapture: false,
|
||||
capture_pageview: false,
|
||||
api_host: process.env.POSTHOG_URL,
|
||||
})
|
||||
posthog.set_config({ persistence: "cookie" })
|
||||
}
|
||||
}
|
||||
|
||||
function identify(id) {
|
||||
if (!analyticsEnabled || !id) return
|
||||
if (posthogConfigured) posthog.identify(id)
|
||||
if (sentryConfigured)
|
||||
Sentry.configureScope(scope => {
|
||||
scope.setUser({ id: id })
|
||||
})
|
||||
}
|
||||
|
||||
async function identifyByApiKey(apiKey) {
|
||||
if (!analyticsEnabled) return true
|
||||
try {
|
||||
const response = await fetch(
|
||||
`https://03gaine137.execute-api.eu-west-1.amazonaws.com/prod/account/id?api_key=${apiKey.trim()}`
|
||||
)
|
||||
if (response.status === 200) {
|
||||
const id = await response.json()
|
||||
|
||||
await api.put("/api/keys/userId", { value: id })
|
||||
identify(id)
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
} catch (error) {
|
||||
console.log(error)
|
||||
}
|
||||
}
|
||||
|
||||
function captureException(err) {
|
||||
if (!analyticsEnabled) return
|
||||
Sentry.captureException(err)
|
||||
captureEvent("Error", { error: err.message ? err.message : err })
|
||||
}
|
||||
|
||||
function captureEvent(eventName, props = {}) {
|
||||
if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
|
||||
props.sourceApp = "builder"
|
||||
posthog.capture(eventName, props)
|
||||
}
|
||||
|
||||
if (!localStorage.getItem(APP_FIRST_STARTED_KEY)) {
|
||||
localStorage.setItem(APP_FIRST_STARTED_KEY, Date.now())
|
||||
}
|
||||
|
||||
const isFeedbackTimeElapsed = sinceDateStr => {
|
||||
const sinceDate = parseFloat(sinceDateStr)
|
||||
const feedbackMilliseconds = feedbackHours * 60 * 60 * 1000
|
||||
return Date.now() > sinceDate + feedbackMilliseconds
|
||||
}
|
||||
|
||||
function submitFeedback(values) {
|
||||
if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return
|
||||
localStorage.setItem(FEEDBACK_SUBMITTED_KEY, Date.now())
|
||||
|
||||
const prefixedValues = Object.entries(values).reduce((obj, [key, value]) => {
|
||||
obj[`feedback_${key}`] = value
|
||||
return obj
|
||||
}, {})
|
||||
|
||||
posthog.capture("Feedback Submitted", prefixedValues)
|
||||
}
|
||||
|
||||
function requestFeedbackOnDeploy() {
|
||||
if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
|
||||
const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
|
||||
if (!lastSubmittedStr) return true
|
||||
return isFeedbackTimeElapsed(lastSubmittedStr)
|
||||
}
|
||||
|
||||
function highlightFeedbackIcon() {
|
||||
if (!analyticsEnabled || !process.env.POSTHOG_TOKEN) return false
|
||||
const lastSubmittedStr = localStorage.getItem(FEEDBACK_SUBMITTED_KEY)
|
||||
if (lastSubmittedStr) return isFeedbackTimeElapsed(lastSubmittedStr)
|
||||
const firstRunStr = localStorage.getItem(APP_FIRST_STARTED_KEY)
|
||||
if (!firstRunStr) return false
|
||||
return isFeedbackTimeElapsed(firstRunStr)
|
||||
}
|
||||
|
||||
// Opt In/Out
|
||||
const ifAnalyticsEnabled = func => () => {
|
||||
if (analyticsEnabled && process.env.POSTHOG_TOKEN) {
|
||||
return func()
|
||||
}
|
||||
}
|
||||
const disabled = () => posthog.has_opted_out_capturing()
|
||||
const optIn = () => posthog.opt_in_capturing()
|
||||
const optOut = () => posthog.opt_out_capturing()
|
||||
|
||||
export default {
|
||||
activate,
|
||||
identify,
|
||||
identifyByApiKey,
|
||||
captureException,
|
||||
captureEvent,
|
||||
requestFeedbackOnDeploy,
|
||||
submitFeedback,
|
||||
highlightFeedbackIcon,
|
||||
disabled: () => {
|
||||
if (analyticsEnabled == null) {
|
||||
return true
|
||||
}
|
||||
return ifAnalyticsEnabled(disabled)
|
||||
},
|
||||
optIn: ifAnalyticsEnabled(optIn),
|
||||
optOut: ifAnalyticsEnabled(optOut),
|
||||
}
|
|
@ -0,0 +1,94 @@
|
|||
export default class IntercomClient {
|
||||
constructor(token) {
|
||||
this.token = token
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiate intercom using their provided script.
|
||||
*/
|
||||
init() {
|
||||
if (!this.token) return
|
||||
|
||||
const token = this.token
|
||||
|
||||
var w = window
|
||||
var ic = w.Intercom
|
||||
if (typeof ic === "function") {
|
||||
ic("reattach_activator")
|
||||
ic("update", w.intercomSettings)
|
||||
} else {
|
||||
var d = document
|
||||
var i = function () {
|
||||
i.c(arguments)
|
||||
}
|
||||
i.q = []
|
||||
i.c = function (args) {
|
||||
i.q.push(args)
|
||||
}
|
||||
w.Intercom = i
|
||||
var l = function () {
|
||||
var s = d.createElement("script")
|
||||
s.type = "text/javascript"
|
||||
s.async = true
|
||||
s.src = "https://widget.intercom.io/widget/" + token
|
||||
var x = d.getElementsByTagName("script")[0]
|
||||
x.parentNode.insertBefore(s, x)
|
||||
}
|
||||
if (document.readyState === "complete") {
|
||||
l()
|
||||
} else if (w.attachEvent) {
|
||||
w.attachEvent("onload", l)
|
||||
} else {
|
||||
w.addEventListener("load", l, false)
|
||||
}
|
||||
|
||||
this.initialised = true
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Show the intercom chat bubble.
|
||||
* @param {Object} user - user to identify
|
||||
* @returns Intercom global object
|
||||
*/
|
||||
show(user = {}) {
|
||||
if (!this.initialised) return
|
||||
|
||||
return window.Intercom("boot", {
|
||||
app_id: this.token,
|
||||
...user,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Update intercom user details and messages.
|
||||
* @returns Intercom global object
|
||||
*/
|
||||
update() {
|
||||
if (!this.initialised) return
|
||||
|
||||
return window.Intercom("update")
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture analytics events and send them to intercom.
|
||||
* @param {String} event - event identifier
|
||||
* @param {Object} props - properties for the event
|
||||
* @returns Intercom global object
|
||||
*/
|
||||
captureEvent(event, props = {}) {
|
||||
if (!this.initialised) return
|
||||
|
||||
return window.Intercom("trackEvent", event, props)
|
||||
}
|
||||
|
||||
/**
|
||||
* Disassociate the user from the current session.
|
||||
* @returns Intercom global object
|
||||
*/
|
||||
logout() {
|
||||
if (!this.initialised) return
|
||||
|
||||
return window.Intercom("shutdown")
|
||||
}
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
import posthog from "posthog-js"
|
||||
import { Events } from "./constants"
|
||||
|
||||
export default class PosthogClient {
|
||||
constructor(token, url) {
|
||||
this.token = token
|
||||
this.url = url
|
||||
}
|
||||
|
||||
init() {
|
||||
if (!this.token || !this.url) return
|
||||
|
||||
posthog.init(this.token, {
|
||||
autocapture: false,
|
||||
capture_pageview: false,
|
||||
api_host: this.url,
|
||||
})
|
||||
posthog.set_config({ persistence: "cookie" })
|
||||
|
||||
this.initialised = true
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the posthog context to the current user
|
||||
* @param {String} id - unique user id
|
||||
*/
|
||||
identify(id) {
|
||||
if (!this.initialised) return
|
||||
|
||||
posthog.identify(id)
|
||||
}
|
||||
|
||||
/**
|
||||
* Update user metadata associated with current user in posthog
|
||||
* @param {Object} meta - user fields
|
||||
*/
|
||||
updateUser(meta) {
|
||||
if (!this.initialised) return
|
||||
|
||||
posthog.people.set(meta)
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture analytics events and send them to posthog.
|
||||
* @param {String} event - event identifier
|
||||
* @param {Object} props - properties for the event
|
||||
*/
|
||||
captureEvent(eventName, props) {
|
||||
if (!this.initialised) return
|
||||
|
||||
props.sourceApp = "builder"
|
||||
posthog.capture(eventName, props)
|
||||
}
|
||||
|
||||
/**
|
||||
* Submit NPS feedback to posthog.
|
||||
* @param {Object} values - NPS Values
|
||||
*/
|
||||
npsFeedback(values) {
|
||||
if (!this.initialised) return
|
||||
|
||||
localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
|
||||
|
||||
const prefixedFeedback = {}
|
||||
for (let key in values) {
|
||||
prefixedFeedback[`feedback_${key}`] = values[key]
|
||||
}
|
||||
|
||||
posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset posthog user back to initial state on logout.
|
||||
*/
|
||||
logout() {
|
||||
if (!this.initialised) return
|
||||
|
||||
posthog.reset()
|
||||
}
|
||||
}
|
|
@ -0,0 +1,37 @@
|
|||
import * as Sentry from "@sentry/browser"
|
||||
|
||||
export default class SentryClient {
|
||||
constructor(dsn) {
|
||||
this.dsn = dsn
|
||||
}
|
||||
|
||||
init() {
|
||||
if (this.dsn) {
|
||||
Sentry.init({ dsn: this.dsn })
|
||||
|
||||
this.initalised = true
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Capture an exception and send it to sentry.
|
||||
* @param {Error} err - JS error object
|
||||
*/
|
||||
captureException(err) {
|
||||
if (!this.initalised) return
|
||||
|
||||
Sentry.captureException(err)
|
||||
}
|
||||
|
||||
/**
|
||||
* Identify user in sentry.
|
||||
* @param {String} id - Unique user id
|
||||
*/
|
||||
identify(id) {
|
||||
if (!this.initalised) return
|
||||
|
||||
Sentry.configureScope(scope => {
|
||||
scope.setUser({ id })
|
||||
})
|
||||
}
|
||||
}
|
|
@ -0,0 +1,49 @@
|
|||
export const Events = {
|
||||
BUILDER: {
|
||||
STARTED: "Builder Started",
|
||||
},
|
||||
COMPONENT: {
|
||||
CREATED: "Added Component",
|
||||
},
|
||||
DATASOURCE: {
|
||||
CREATED: "Datasource Created",
|
||||
UPDATED: "Datasource Updated",
|
||||
},
|
||||
TABLE: {
|
||||
CREATED: "Table Created",
|
||||
},
|
||||
VIEW: {
|
||||
CREATED: "View Created",
|
||||
ADDED_FILTER: "Added View Filter",
|
||||
ADDED_CALCULATE: "Added View Calculate",
|
||||
},
|
||||
SCREEN: {
|
||||
CREATED: "Screen Created",
|
||||
},
|
||||
AUTOMATION: {
|
||||
CREATED: "Automation Created",
|
||||
SAVED: "Automation Saved",
|
||||
BLOCK_ADDED: "Added Automation Block",
|
||||
},
|
||||
NPS: {
|
||||
SUBMITTED: "budibase:feedback_submitted",
|
||||
},
|
||||
APP: {
|
||||
CREATED: "budibase:app_created",
|
||||
PUBLISHED: "budibase:app_published",
|
||||
UNPUBLISHED: "budibase:app_unpublished",
|
||||
},
|
||||
ANALYTICS: {
|
||||
OPT_IN: "budibase:analytics_opt_in",
|
||||
OPT_OUT: "budibase:analytics_opt_out",
|
||||
},
|
||||
USER: {
|
||||
INVITE: "budibase:portal_user_invite",
|
||||
},
|
||||
SMTP: {
|
||||
SAVED: "budibase:smtp_saved",
|
||||
},
|
||||
SSO: {
|
||||
SAVED: "budibase:sso_saved",
|
||||
},
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
import api from "builderStore/api"
|
||||
import PosthogClient from "./PosthogClient"
|
||||
import IntercomClient from "./IntercomClient"
|
||||
import SentryClient from "./SentryClient"
|
||||
import { Events } from "./constants"
|
||||
import { auth } from "stores/portal"
|
||||
import { get } from "svelte/store"
|
||||
|
||||
const posthog = new PosthogClient(
|
||||
process.env.POSTHOG_TOKEN,
|
||||
process.env.POSTHOG_URL
|
||||
)
|
||||
const sentry = new SentryClient(process.env.SENTRY_DSN)
|
||||
const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
|
||||
|
||||
class AnalyticsHub {
|
||||
constructor() {
|
||||
this.clients = [posthog, sentry, intercom]
|
||||
}
|
||||
|
||||
async activate() {
|
||||
// Setting the analytics env var off in the backend overrides org/tenant settings
|
||||
const analyticsStatus = await api.get("/api/analytics")
|
||||
const json = await analyticsStatus.json()
|
||||
|
||||
// Multitenancy disabled on the backend
|
||||
if (!json.enabled) return
|
||||
|
||||
const tenantId = get(auth).tenantId
|
||||
|
||||
if (tenantId) {
|
||||
const res = await api.get(
|
||||
`/api/global/configs/public?tenantId=${tenantId}`
|
||||
)
|
||||
const orgJson = await res.json()
|
||||
|
||||
// analytics opted out for the tenant
|
||||
if (orgJson.config?.analytics === false) return
|
||||
}
|
||||
|
||||
this.clients.forEach(client => client.init())
|
||||
this.enabled = true
|
||||
}
|
||||
|
||||
identify(id, metadata) {
|
||||
posthog.identify(id)
|
||||
if (metadata) {
|
||||
posthog.updateUser(metadata)
|
||||
}
|
||||
sentry.identify(id)
|
||||
}
|
||||
|
||||
captureException(err) {
|
||||
sentry.captureException(err)
|
||||
}
|
||||
|
||||
captureEvent(eventName, props = {}) {
|
||||
posthog.captureEvent(eventName, props)
|
||||
intercom.captureEvent(eventName, props)
|
||||
}
|
||||
|
||||
showChat(user) {
|
||||
intercom.show(user)
|
||||
}
|
||||
|
||||
submitFeedback(values) {
|
||||
posthog.npsFeedback(values)
|
||||
}
|
||||
|
||||
async logout() {
|
||||
posthog.logout()
|
||||
intercom.logout()
|
||||
}
|
||||
}
|
||||
|
||||
const analytics = new AnalyticsHub()
|
||||
|
||||
export { Events }
|
||||
export default analytics
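Example (outside the diff): the clients above are only reached through this hub, so a typical builder call site looks roughly like the following; the event properties are illustrative.

    import analytics, { Events } from "analytics"

    // activate() checks the backend /api/analytics flag and the tenant's
    // public config before initialising Posthog, Sentry and Intercom
    await analytics.activate()

    analytics.identify(user._id, user)
    analytics.captureEvent(Events.DATASOURCE.CREATED, { source: "POSTGRES" })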
@@ -443,7 +443,10 @@ function bindingReplacement(bindableProperties, textWithBindings, convertTo) {
  for (let from of convertFromProps) {
    if (shouldReplaceBinding(newBoundValue, from, convertTo)) {
      const binding = bindableProperties.find(el => el[convertFrom] === from)
      newBoundValue = newBoundValue.replace(from, binding[convertTo])
      newBoundValue = newBoundValue.replace(
        new RegExp(from, "gi"),
        binding[convertTo]
      )
    }
  }
  result = result.replace(boundValue, newBoundValue)
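Example (outside the diff): the change swaps a single String.replace for a RegExp with the g and i flags, so every occurrence of the binding is rewritten regardless of case; the sample values below are made up.

    const from = "Binding.Value"
    const to = "{{ binding.value }}"

    // old: only the first, exact-case occurrence is replaced
    "Binding.Value and binding.value".replace(from, to)
    // -> "{{ binding.value }} and binding.value"

    // new: global, case-insensitive replacement
    "Binding.Value and binding.value".replace(new RegExp(from, "gi"), to)
    // -> "{{ binding.value }} and {{ binding.value }}"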
@ -3,7 +3,6 @@ import { getAutomationStore } from "./store/automation"
|
|||
import { getHostingStore } from "./store/hosting"
|
||||
import { getThemeStore } from "./store/theme"
|
||||
import { derived, writable } from "svelte/store"
|
||||
import analytics from "analytics"
|
||||
import { FrontendTypes, LAYOUT_NAMES } from "../constants"
|
||||
import { findComponent } from "./storeUtils"
|
||||
|
||||
|
@ -55,13 +54,4 @@ export const mainLayout = derived(store, $store => {
|
|||
|
||||
export const selectedAccessRole = writable("BASIC")
|
||||
|
||||
export const initialise = async () => {
|
||||
try {
|
||||
await analytics.activate()
|
||||
analytics.captureEvent("Builder Started")
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
}
|
||||
}
|
||||
|
||||
export const screenSearchString = writable(null)
|
||||
|
|
|
@ -2,7 +2,7 @@ import { writable } from "svelte/store"
|
|||
import api from "../../api"
|
||||
import Automation from "./Automation"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
const automationActions = store => ({
|
||||
fetch: async () => {
|
||||
|
@ -110,7 +110,7 @@ const automationActions = store => ({
|
|||
state.selectedBlock = newBlock
|
||||
return state
|
||||
})
|
||||
analytics.captureEvent("Added Automation Block", {
|
||||
analytics.captureEvent(Events.AUTOMATION.BLOCK_ADDED, {
|
||||
name: block.name,
|
||||
})
|
||||
},
|
||||
|
|
|
@ -19,7 +19,7 @@ import {
|
|||
import { fetchComponentLibDefinitions } from "../loadComponentLibraries"
|
||||
import api from "../api"
|
||||
import { FrontendTypes } from "constants"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
import {
|
||||
findComponentType,
|
||||
findComponentParent,
|
||||
|
@ -215,6 +215,13 @@ export const getFrontendStore = () => {
|
|||
if (screenToDelete._id === state.selectedScreenId) {
|
||||
state.selectedScreenId = null
|
||||
}
|
||||
//remove the link for this screen
|
||||
screenDeletePromises.push(
|
||||
store.actions.components.links.delete(
|
||||
screenToDelete.routing.route,
|
||||
screenToDelete.props._instanceName
|
||||
)
|
||||
)
|
||||
}
|
||||
return state
|
||||
})
|
||||
|
@ -443,7 +450,7 @@ export const getFrontendStore = () => {
|
|||
})
|
||||
|
||||
// Log event
|
||||
analytics.captureEvent("Added Component", {
|
||||
analytics.captureEvent(Events.COMPONENT.CREATED, {
|
||||
name: componentInstance._component,
|
||||
})
|
||||
|
||||
|
@ -646,6 +653,36 @@ export const getFrontendStore = () => {
|
|||
// Save layout
|
||||
await store.actions.layouts.save(layout)
|
||||
},
|
||||
delete: async (url, title) => {
|
||||
const layout = get(mainLayout)
|
||||
if (!layout) {
|
||||
return
|
||||
}
|
||||
|
||||
// Add link setting to main layout
|
||||
if (layout.props._component.endsWith("layout")) {
|
||||
// If using a new SDK, add to the layout component settings
|
||||
layout.props.links = layout.props.links.filter(
|
||||
link => !(link.text === title && link.url === url)
|
||||
)
|
||||
} else {
|
||||
// If using an old SDK, add to the navigation component
|
||||
// TODO: remove this when we can assume everyone has updated
|
||||
const nav = findComponentType(
|
||||
layout.props,
|
||||
"@budibase/standard-components/navigation"
|
||||
)
|
||||
if (!nav) {
|
||||
return
|
||||
}
|
||||
|
||||
nav._children = nav._children.filter(
|
||||
child => !(child.url === url && child.text === title)
|
||||
)
|
||||
}
|
||||
// Save layout
|
||||
await store.actions.layouts.save(layout)
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -123,7 +123,7 @@
|
|||
padding: var(--spectrum-alias-item-padding-s);
|
||||
background: var(--spectrum-alias-background-color-secondary);
|
||||
transition: 0.3s all;
|
||||
border: solid #3b3d3c;
|
||||
border: solid var(--spectrum-alias-border-color);
|
||||
border-radius: 5px;
|
||||
box-sizing: border-box;
|
||||
border-width: 2px;
|
||||
|
|
|
@ -1,9 +1,8 @@
|
|||
<script>
|
||||
import { automationStore } from "builderStore"
|
||||
|
||||
import ConfirmDialog from "components/common/ConfirmDialog.svelte"
|
||||
import FlowItem from "./FlowItem.svelte"
|
||||
import TestDataModal from "./TestDataModal.svelte"
|
||||
|
||||
import { flip } from "svelte/animate"
|
||||
import { fade, fly } from "svelte/transition"
|
||||
import {
|
||||
|
@ -13,13 +12,12 @@
|
|||
notifications,
|
||||
Modal,
|
||||
} from "@budibase/bbui"
|
||||
import { database } from "stores/backend"
|
||||
|
||||
export let automation
|
||||
export let onSelect
|
||||
let testDataModal
|
||||
let blocks
|
||||
$: instanceId = $database._id
|
||||
let confirmDeleteDialog
|
||||
|
||||
$: {
|
||||
blocks = []
|
||||
|
@ -35,6 +33,7 @@
|
|||
await automationStore.actions.delete(
|
||||
$automationStore.selectedAutomation?.automation
|
||||
)
|
||||
notifications.success("Automation deleted.")
|
||||
}
|
||||
|
||||
async function testAutomation() {
|
||||
|
@ -63,8 +62,14 @@
|
|||
style="display:flex;
|
||||
color: var(--spectrum-global-color-gray-400);"
|
||||
>
|
||||
<span on:click={() => deleteAutomation()} class="iconPadding">
|
||||
<Icon name="DeleteOutline" />
|
||||
<span class="iconPadding">
|
||||
<div class="icon">
|
||||
<Icon
|
||||
on:click={confirmDeleteDialog.show}
|
||||
hoverable
|
||||
name="DeleteOutline"
|
||||
/>
|
||||
</div>
|
||||
</span>
|
||||
<ActionButton
|
||||
on:click={() => {
|
||||
|
@ -92,6 +97,17 @@
|
|||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
<ConfirmDialog
|
||||
bind:this={confirmDeleteDialog}
|
||||
okText="Delete Automation"
|
||||
onOk={deleteAutomation}
|
||||
title="Confirm Deletion"
|
||||
>
|
||||
Are you sure you wish to delete the automation
|
||||
<i>{automation.name}?</i>
|
||||
This action cannot be undone.
|
||||
</ConfirmDialog>
|
||||
|
||||
<Modal bind:this={testDataModal} width="30%">
|
||||
<TestDataModal {testAutomation} />
|
||||
</Modal>
|
||||
|
@ -139,7 +155,7 @@
|
|||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.iconPadding {
|
||||
.icon {
|
||||
cursor: pointer;
|
||||
display: flex;
|
||||
padding-right: var(--spacing-m);
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
import { automationStore } from "builderStore"
|
||||
import { notifications } from "@budibase/bbui"
|
||||
import { Input, ModalContent, Layout, Body, Icon } from "@budibase/bbui"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
let name
|
||||
let selectedTrigger
|
||||
|
@ -36,7 +36,7 @@
|
|||
notifications.success(`Automation ${name} created.`)
|
||||
|
||||
$goto(`./${$automationStore.selectedAutomation.automation._id}`)
|
||||
analytics.captureEvent("Automation Created", { name })
|
||||
analytics.captureEvent(Events.AUTOMATION.CREATED, { name })
|
||||
}
|
||||
$: triggers = Object.entries($automationStore.blockDefinitions.TRIGGER)
|
||||
|
||||
|
@ -102,7 +102,7 @@
|
|||
padding: var(--spectrum-alias-item-padding-s);
|
||||
background: var(--spectrum-alias-background-color-secondary);
|
||||
transition: 0.3s all;
|
||||
border: solid #3b3d3c;
|
||||
border: solid var(--spectrum-alias-border-color);
|
||||
border-radius: 5px;
|
||||
box-sizing: border-box;
|
||||
border-width: 2px;
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import { automationStore } from "builderStore"
|
||||
import { notifications } from "@budibase/bbui"
|
||||
import { Icon, Input, ModalContent, Modal } from "@budibase/bbui"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
let name
|
||||
let error = ""
|
||||
|
@ -26,7 +26,7 @@
|
|||
}
|
||||
await automationStore.actions.save(updatedAutomation)
|
||||
notifications.success(`Automation ${name} updated successfully.`)
|
||||
analytics.captureEvent("Automation Saved", { name })
|
||||
analytics.captureEvent(Events.AUTOMATION.SAVED, { name })
|
||||
hide()
|
||||
}
|
||||
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
import QueryParamSelector from "./QueryParamSelector.svelte"
|
||||
import CronBuilder from "./CronBuilder.svelte"
|
||||
import Editor from "components/integration/QueryEditor.svelte"
|
||||
import { database } from "stores/backend"
|
||||
import { debounce } from "lodash"
|
||||
import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
|
||||
import FilterDrawer from "components/design/PropertiesPanel/PropertyControls/FilterEditor/FilterDrawer.svelte"
|
||||
|
@ -35,13 +34,11 @@
|
|||
let drawer
|
||||
let tempFilters = lookForFilters(schemaProperties) || []
|
||||
let fillWidth = true
|
||||
|
||||
$: stepId = block.stepId
|
||||
$: bindings = getAvailableBindings(
|
||||
block || $automationStore.selectedBlock,
|
||||
$automationStore.selectedAutomation?.automation?.definition
|
||||
)
|
||||
$: instanceId = $database._id
|
||||
|
||||
$: inputData = testData ? testData : block.inputs
|
||||
$: tableId = inputData ? inputData.tableId : null
|
||||
|
@ -210,7 +207,7 @@
|
|||
{:else if value.customType === "webhookUrl"}
|
||||
<WebhookDisplay value={inputData[key]} />
|
||||
{:else if value.customType === "triggerSchema"}
|
||||
<SchemaSetup on:change={e => onChange(e, key)} value={value[key]} />
|
||||
<SchemaSetup on:change={e => onChange(e, key)} value={inputData[key]} />
|
||||
{:else if value.customType === "code"}
|
||||
<CodeEditorModal>
|
||||
<pre>{JSON.stringify(bindings, null, 2)}</pre>
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
<script>
|
||||
import { tables } from "stores/backend"
|
||||
import { Select } from "@budibase/bbui"
|
||||
import { Select, Toggle, DatePicker, Multiselect } from "@budibase/bbui"
|
||||
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
|
||||
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
|
||||
import { createEventDispatcher } from "svelte"
|
||||
|
@ -44,13 +44,31 @@
|
|||
<div class="schema-fields">
|
||||
{#each schemaFields as [field, schema]}
|
||||
{#if !schema.autocolumn}
|
||||
{#if schemaHasOptions(schema)}
|
||||
{#if schemaHasOptions(schema) && schema.type !== "array"}
|
||||
<Select
|
||||
on:change={e => onChange(e, field)}
|
||||
label={field}
|
||||
value={value[field]}
|
||||
options={schema.constraints.inclusion}
|
||||
/>
|
||||
{:else if schema.type === "datetime"}
|
||||
<DatePicker
|
||||
label={field}
|
||||
value={value[field]}
|
||||
on:change={e => onChange(e, field)}
|
||||
/>
|
||||
{:else if schema.type === "boolean"}
|
||||
<Toggle
|
||||
text={field}
|
||||
value={value[field]}
|
||||
on:change={e => onChange(e, field)}
|
||||
/>
|
||||
{:else if schema.type === "array"}
|
||||
<Multiselect
|
||||
bind:value={value[field]}
|
||||
label={field}
|
||||
options={schema.constraints.inclusion}
|
||||
/>
|
||||
{:else if schema.type === "string" || schema.type === "number"}
|
||||
{#if $automationStore.selectedAutomation.automation.testData}
|
||||
<ModalBindableInput
|
||||
|
|
|
@ -5,10 +5,14 @@
|
|||
const dispatch = createEventDispatcher()
|
||||
|
||||
export let value = {}
|
||||
$: fieldsArray = Object.entries(value).map(([name, type]) => ({
|
||||
name,
|
||||
type,
|
||||
}))
|
||||
|
||||
$: fieldsArray = value
|
||||
? Object.entries(value).map(([name, type]) => ({
|
||||
name,
|
||||
type,
|
||||
}))
|
||||
: []
|
||||
|
||||
const typeOptions = [
|
||||
{
|
||||
label: "Text",
|
||||
|
@ -73,7 +77,7 @@
|
|||
<Select
|
||||
value={field.type}
|
||||
on:change={e => {
|
||||
value[field.name] = e.target.value
|
||||
value[field.name] = e.detail
|
||||
dispatch("change", value)
|
||||
}}
|
||||
options={typeOptions}
|
||||
|
@ -88,9 +92,7 @@
|
|||
|
||||
<style>
|
||||
.root {
|
||||
position: relative;
|
||||
max-width: 100%;
|
||||
overflow-x: auto;
|
||||
/* so we can show the "+" button beside the "fields" label*/
|
||||
top: -26px;
|
||||
}
|
||||
|
@ -110,7 +112,6 @@
|
|||
/*grid-template-rows: auto auto;
|
||||
grid-template-columns: auto;*/
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.field :global(select) {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
<script>
|
||||
import { Select, Label, notifications, ModalContent } from "@budibase/bbui"
|
||||
import { tables, views } from "stores/backend"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
import { FIELDS } from "constants/backend"
|
||||
|
||||
const CALCULATIONS = [
|
||||
|
@ -40,7 +40,7 @@
|
|||
function saveView() {
|
||||
views.save(view)
|
||||
notifications.success(`View ${view.name} saved.`)
|
||||
analytics.captureEvent("Added View Calculate", { field: view.field })
|
||||
analytics.captureEvent(Events.VIEW.ADDED_CALCULATE, { field: view.field })
|
||||
}
|
||||
</script>
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
import { goto } from "@roxi/routify"
|
||||
import { views as viewsStore } from "stores/backend"
|
||||
import { tables } from "stores/backend"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
let name
|
||||
let field
|
||||
|
@ -21,7 +21,7 @@
|
|||
field,
|
||||
})
|
||||
notifications.success(`View ${name} created`)
|
||||
analytics.captureEvent("View Created", { name })
|
||||
analytics.captureEvent(Events.VIEW.CREATED, { name })
|
||||
$goto(`../../view/${name}`)
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
Icon,
|
||||
} from "@budibase/bbui"
|
||||
import { tables, views } from "stores/backend"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
const CONDITIONS = [
|
||||
{
|
||||
|
@ -65,7 +65,7 @@
|
|||
function saveView() {
|
||||
views.save(view)
|
||||
notifications.success(`View ${view.name} saved.`)
|
||||
analytics.captureEvent("Added View Filter", {
|
||||
analytics.captureEvent(Events.VIEW.ADDED_FILTER, {
|
||||
filters: JSON.stringify(view.filters),
|
||||
})
|
||||
}
|
||||
|
|
|
@ -63,20 +63,19 @@
|
|||
|
||||
{#if openDataSources.includes(datasource._id)}
|
||||
<TableNavigator sourceId={datasource._id} />
|
||||
{#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
|
||||
<NavItem
|
||||
indentLevel={1}
|
||||
icon="SQLQuery"
|
||||
text={query.name}
|
||||
opened={$queries.selected === query._id}
|
||||
selected={$queries.selected === query._id}
|
||||
on:click={() => onClickQuery(query)}
|
||||
>
|
||||
<EditQueryPopover {query} />
|
||||
</NavItem>
|
||||
{/each}
|
||||
{/if}
|
||||
|
||||
{#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
|
||||
<NavItem
|
||||
indentLevel={1}
|
||||
icon="SQLQuery"
|
||||
text={query.name}
|
||||
opened={$queries.selected === query._id}
|
||||
selected={$queries.selected === query._id}
|
||||
on:click={() => onClickQuery(query)}
|
||||
>
|
||||
<EditQueryPopover {query} />
|
||||
</NavItem>
|
||||
{/each}
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
import { datasources } from "stores/backend"
|
||||
import { notifications } from "@budibase/bbui"
|
||||
import { Input, ModalContent, Modal } from "@budibase/bbui"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
let error = ""
|
||||
let modal
|
||||
|
@ -35,7 +35,7 @@
|
|||
}
|
||||
await datasources.save(updatedDatasource)
|
||||
notifications.success(`Datasource ${name} updated successfully.`)
|
||||
analytics.captureEvent("Datasource Updated", updatedDatasource)
|
||||
analytics.captureEvent(Events.DATASOURCE.UPDATED, updatedDatasource)
|
||||
hide()
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
Layout,
|
||||
} from "@budibase/bbui"
|
||||
import TableDataImport from "../TableDataImport.svelte"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
import screenTemplates from "builderStore/store/screenTemplates"
|
||||
import { buildAutoColumn, getAutoColumnInformation } from "builderStore/utils"
|
||||
import { NEW_ROW_TEMPLATE } from "builderStore/store/screenTemplates/newRowScreen"
|
||||
|
@ -67,7 +67,7 @@
|
|||
// Create table
|
||||
const table = await tables.save(newTable)
|
||||
notifications.success(`Table ${name} created successfully.`)
|
||||
analytics.captureEvent("Table Created", { name })
|
||||
analytics.captureEvent(Events.TABLE.CREATED, { name })
|
||||
|
||||
// Create auto screens
|
||||
if (createAutoscreens) {
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
import { onMount, onDestroy } from "svelte"
|
||||
import { Button, Modal, notifications, ModalContent } from "@budibase/bbui"
|
||||
import api from "builderStore/api"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
import { store } from "builderStore"
|
||||
|
||||
const DeploymentStatus = {
|
||||
SUCCESS: "SUCCESS",
|
||||
|
@ -23,6 +24,9 @@
|
|||
if (response.status !== 200) {
|
||||
throw new Error(`status ${response.status}`)
|
||||
} else {
|
||||
analytics.captureEvent(Events.APP.PUBLISHED, {
|
||||
appId: $store.appId,
|
||||
})
|
||||
notifications.success(`Application published successfully`)
|
||||
}
|
||||
} catch (err) {
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
import { roles } from "stores/backend"
|
||||
import { Input, Select, ModalContent, Toggle } from "@budibase/bbui"
|
||||
import getTemplates from "builderStore/store/screenTemplates"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
const CONTAINER = "@budibase/standard-components/container"
|
||||
|
||||
|
@ -66,7 +66,7 @@
|
|||
|
||||
if (templateIndex !== undefined) {
|
||||
const template = templates[templateIndex]
|
||||
analytics.captureEvent("Screen Created", {
|
||||
analytics.captureEvent(Events.SCREEN.CREATED, {
|
||||
template: template.id || template.name,
|
||||
})
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
import { admin } from "stores/portal"
|
||||
import { string, mixed, object } from "yup"
|
||||
import api, { get, post } from "builderStore/api"
|
||||
import analytics from "analytics"
|
||||
import analytics, { Events } from "analytics"
|
||||
import { onMount } from "svelte"
|
||||
import { capitalise } from "helpers"
|
||||
import { goto } from "@roxi/routify"
|
||||
|
@ -98,9 +98,9 @@
|
|||
throw new Error(appJson.message)
|
||||
}
|
||||
|
||||
analytics.captureEvent("App Created", {
|
||||
analytics.captureEvent(Events.APP.CREATED, {
|
||||
name: $values.name,
|
||||
appId: appJson._id,
|
||||
appId: appJson.instance._id,
|
||||
template,
|
||||
})
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@
|
|||
username,
|
||||
password,
|
||||
})
|
||||
|
||||
if ($auth?.user?.forceResetPassword) {
|
||||
$goto("./reset")
|
||||
} else {
|
||||
|
|
|
@ -15,8 +15,7 @@
|
|||
} from "@budibase/bbui"
|
||||
import CreateAppModal from "components/start/CreateAppModal.svelte"
|
||||
import UpdateAppModal from "components/start/UpdateAppModal.svelte"
|
||||
import api, { del } from "builderStore/api"
|
||||
import analytics from "analytics"
|
||||
import { del } from "builderStore/api"
|
||||
import { onMount } from "svelte"
|
||||
import { apps, auth, admin } from "stores/portal"
|
||||
import download from "downloadjs"
|
||||
|
@ -66,14 +65,6 @@
|
|||
}
|
||||
}
|
||||
|
||||
const checkKeys = async () => {
|
||||
const response = await api.get(`/api/keys/`)
|
||||
const keys = await response.json()
|
||||
if (keys.userId) {
|
||||
analytics.identify(keys.userId)
|
||||
}
|
||||
}
|
||||
|
||||
const initiateAppCreation = () => {
|
||||
creationModal.show()
|
||||
creatingApp = true
|
||||
|
@ -188,7 +179,6 @@
|
|||
}
|
||||
|
||||
onMount(async () => {
|
||||
checkKeys()
|
||||
await apps.load()
|
||||
loaded = true
|
||||
})
|
||||
|
|
|
@ -23,6 +23,7 @@
|
|||
import api from "builderStore/api"
|
||||
import { organisation, auth, admin } from "stores/portal"
|
||||
import { uuid } from "builderStore/uuid"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
$: tenantId = $auth.tenantId
|
||||
$: multiTenancyEnabled = $admin.multiTenancy
|
||||
|
@ -209,6 +210,7 @@
|
|||
providers[res.type]._id = res._id
|
||||
})
|
||||
notifications.success(`Settings saved.`)
|
||||
analytics.captureEvent(Events.SSO.SAVED)
|
||||
})
|
||||
.catch(err => {
|
||||
notifications.error(`Failed to update auth settings. ${err}`)
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
import { email } from "stores/portal"
|
||||
import api from "builderStore/api"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
const ConfigTypes = {
|
||||
SMTP: "smtp",
|
||||
|
@ -69,6 +70,7 @@
|
|||
smtpConfig._rev = json._rev
|
||||
smtpConfig._id = json._id
|
||||
notifications.success(`Settings saved.`)
|
||||
analytics.captureEvent(Events.SMTP.SAVED)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
} from "@budibase/bbui"
|
||||
import { createValidationStore, emailValidator } from "helpers/validation"
|
||||
import { users } from "stores/portal"
|
||||
import analytics, { Events } from "analytics"
|
||||
|
||||
export let disabled
|
||||
|
||||
|
@ -25,6 +26,7 @@
|
|||
notifications.error(res.message)
|
||||
} else {
|
||||
notifications.success(res.message)
|
||||
analytics.captureEvent(Events.USER.INVITE, { type: selected })
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
|
|
@ -25,7 +25,7 @@
|
|||
}
|
||||
|
||||
const values = writable({
|
||||
analytics: !analytics.disabled(),
|
||||
analytics: analytics.enabled,
|
||||
company: $organisation.company,
|
||||
platformUrl: $organisation.platformUrl,
|
||||
logo: $organisation.logoUrl
|
||||
|
@ -48,13 +48,6 @@
|
|||
async function saveConfig() {
|
||||
loading = true
|
||||
|
||||
// Set analytics preference
|
||||
if ($values.analytics) {
|
||||
analytics.optIn()
|
||||
} else {
|
||||
analytics.optOut()
|
||||
}
|
||||
|
||||
// Upload logo if required
|
||||
if ($values.logo && !$values.logo.url) {
|
||||
await uploadLogo($values.logo)
|
||||
|
@ -64,6 +57,7 @@
|
|||
const config = {
|
||||
company: $values.company ?? "",
|
||||
platformUrl: $values.platformUrl ?? "",
|
||||
analytics: $values.analytics,
|
||||
}
|
||||
// remove logo if required
|
||||
if (!$values.logo) {
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import { derived, writable, get } from "svelte/store"
|
||||
import api from "../../builderStore/api"
|
||||
import { admin } from "stores/portal"
|
||||
import analytics from "analytics"
|
||||
|
||||
export function createAuthStore() {
|
||||
const auth = writable({
|
||||
|
@ -49,6 +50,21 @@ export function createAuthStore() {
|
|||
}
|
||||
return store
|
||||
})
|
||||
|
||||
if (user) {
|
||||
analytics.activate().then(() => {
|
||||
analytics.identify(user._id, user)
|
||||
if (user.size === "100+" || user.size === "10000+") {
|
||||
analytics.showChat({
|
||||
email: user.email,
|
||||
created_at: user.createdAt || Date.now(),
|
||||
name: user.name,
|
||||
user_id: user._id,
|
||||
tenant: user.tenantId,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function setOrganisation(tenantId) {
|
||||
|
|
|
@ -22,6 +22,9 @@ export default ({ mode }) => {
|
|||
isProduction ? "production" : "development"
|
||||
),
|
||||
"process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN),
|
||||
"process.env.INTERCOM_TOKEN": JSON.stringify(
|
||||
process.env.INTERCOM_TOKEN
|
||||
),
|
||||
"process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL),
|
||||
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
|
||||
}),
|
||||
|
|
File diff suppressed because it is too large
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/cli",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"description": "Budibase CLI, for developers, self hosting and migrations.",
|
||||
"main": "src/index.js",
|
||||
"bin": {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/client",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"license": "MPL-2.0",
|
||||
"module": "dist/budibase-client.js",
|
||||
"main": "dist/budibase-client.js",
|
||||
|
@ -19,8 +19,9 @@
|
|||
"dev:builder": "rollup -cw"
|
||||
},
|
||||
"dependencies": {
|
||||
"@budibase/bbui": "^0.9.125-alpha.18",
|
||||
"@budibase/string-templates": "^0.9.125-alpha.18",
|
||||
"@budibase/bbui": "^0.9.140-alpha.8",
|
||||
"@budibase/standard-components": "^0.9.139",
|
||||
"@budibase/string-templates": "^0.9.140-alpha.8",
|
||||
"regexparam": "^1.3.0",
|
||||
"shortid": "^2.2.15",
|
||||
"svelte-spa-router": "^3.0.5"
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/server",
|
||||
"email": "hi@budibase.com",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"description": "Budibase Web Server",
|
||||
"main": "src/index.js",
|
||||
"repository": {
|
||||
|
@ -23,10 +23,9 @@
|
|||
"format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
|
||||
"lint": "eslint --fix src/",
|
||||
"lint:fix": "yarn run format && yarn run lint",
|
||||
"initialise": "node scripts/initialise.js",
|
||||
"multi:enable": "node scripts/multiTenancy.js enable",
|
||||
"multi:disable": "node scripts/multiTenancy.js disable",
|
||||
"selfhost:enable": "node scripts/selfhost.js enable",
|
||||
"selfhost:disable": "node scripts/selfhost.js disable"
|
||||
"multi:disable": "node scripts/multiTenancy.js disable"
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
|
@ -49,8 +48,7 @@
|
|||
"!src/automations/tests/**/*",
|
||||
"!src/utilities/fileProcessor.js",
|
||||
"!src/utilities/fileSystem/**/*",
|
||||
"!src/utilities/redis.js",
|
||||
"!src/api/controllers/row/internalSearch.js"
|
||||
"!src/utilities/redis.js"
|
||||
],
|
||||
"coverageReporters": [
|
||||
"lcov",
|
||||
|
@ -64,9 +62,9 @@
|
|||
"author": "Budibase",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@budibase/auth": "^0.9.125-alpha.18",
|
||||
"@budibase/client": "^0.9.125-alpha.18",
|
||||
"@budibase/string-templates": "^0.9.125-alpha.18",
|
||||
"@budibase/auth": "^0.9.140-alpha.8",
|
||||
"@budibase/client": "^0.9.140-alpha.8",
|
||||
"@budibase/string-templates": "^0.9.140-alpha.8",
|
||||
"@elastic/elasticsearch": "7.10.0",
|
||||
"@koa/router": "8.0.0",
|
||||
"@sendgrid/mail": "7.1.1",
|
||||
|
@ -98,12 +96,13 @@
|
|||
"lodash": "4.17.21",
|
||||
"mongodb": "3.6.3",
|
||||
"mssql": "6.2.3",
|
||||
"mysql": "^2.18.1",
|
||||
"mysql": "2.18.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"open": "7.3.0",
|
||||
"pg": "8.5.1",
|
||||
"pino-pretty": "4.0.0",
|
||||
"pouchdb": "7.2.1",
|
||||
"pouchdb-adapter-memory": "^7.2.1",
|
||||
"pouchdb-all-dbs": "1.0.2",
|
||||
"pouchdb-find": "^7.2.2",
|
||||
"pouchdb-replication-stream": "1.2.9",
|
||||
|
@ -118,6 +117,7 @@
|
|||
"devDependencies": {
|
||||
"@babel/core": "^7.14.3",
|
||||
"@babel/preset-env": "^7.14.4",
|
||||
"@budibase/standard-components": "^0.9.139",
|
||||
"@jest/test-sequencer": "^24.8.0",
|
||||
"@types/bull": "^3.15.1",
|
||||
"@types/jest": "^26.0.23",
|
||||
|
@ -132,7 +132,6 @@
|
|||
"express": "^4.17.1",
|
||||
"jest": "^27.0.5",
|
||||
"nodemon": "^2.0.4",
|
||||
"pouchdb-adapter-memory": "^7.2.1",
|
||||
"prettier": "^2.3.1",
|
||||
"rimraf": "^3.0.2",
|
||||
"supertest": "^4.0.2",
|
||||
|
|
|
@@ -0,0 +1,28 @@
version: "3.8"
services:
  db:
    container_name: postgres-vehicle
    image: postgres
    restart: always
    environment:
      POSTGRES_USER: root
      POSTGRES_PASSWORD: root
      POSTGRES_DB: main
    ports:
      - "5432:5432"
    volumes:
      #- pg_data:/var/lib/postgresql/data/
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql

  pgadmin:
    container_name: pgadmin
    image: dpage/pgadmin4
    restart: always
    environment:
      PGADMIN_DEFAULT_EMAIL: root@root.com
      PGADMIN_DEFAULT_PASSWORD: root
    ports:
      - "5050:80"

#volumes:
#  pg_data:
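Usage note (outside the diff): the compose file is presumably started from its own directory, and Postgres is seeded from init.sql on first boot.

    docker-compose up -d
    # Postgres: localhost:5432 (root / root, database "main")
    # pgAdmin:  http://localhost:5050 (root@root.com / root)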
@@ -0,0 +1,52 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec

CREATE TABLE Vehicles (
  id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
  Registration text COLLATE pg_catalog."default",
  Make text COLLATE pg_catalog."default",
  Model text COLLATE pg_catalog."default",
  Colour text COLLATE pg_catalog."default",
  Year smallint,
  CONSTRAINT Vehicles_pkey PRIMARY KEY (id)
);

CREATE TABLE ServiceLog (
  id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
  Description text COLLATE pg_catalog."default",
  VehicleId bigint,
  ServiceDate timestamp without time zone,
  Category text COLLATE pg_catalog."default",
  Mileage bigint,
  CONSTRAINT ServiceLog_pkey PRIMARY KEY (id),
  CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId)
    REFERENCES Vehicles (id) MATCH SIMPLE
    ON UPDATE NO ACTION
    ON DELETE NO ACTION
);

INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('FAZ 9837','Volkswagen','Polo','White',2002);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('JHI 8827','BMW','M3','Black',2013);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('D903PI','Volvo','XC40','Grey',2014);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('HGT5677','Skoda','Octavia','Graphite',2009);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('PPF9276','Skoda','Octavia','Graphite',2021);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('J893FT','Toyota','Corolla','Red',2015);
INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
VALUES ('MJK776','Honda','HR-V','Silver',2015);

INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Change front brakes', 1, '2021-05-04', 'Brakes', 20667);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Tyres - full set', 1, '2021-05-04', 'Brakes', 20667);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Engine tune up', 2, '2021-07-14', 'Brakes', 50889);
INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002);
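Verification example (outside the diff): the seeded data can be checked directly against the container defined in docker-compose.yml.

    docker exec -it postgres-vehicle psql -U root -d main \
      -c "SELECT v.Registration, s.Description, s.ServiceDate
          FROM ServiceLog s JOIN Vehicles v ON v.id = s.VehicleId;"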
@@ -0,0 +1,3 @@
#!/bin/bash
docker-compose down
docker volume prune -f
@ -1,6 +1,7 @@
const { tmpdir } = require("os")
const env = require("../src/environment")

env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("CLIENT_ID", "test-client-id")
@ -2,6 +2,6 @@ const env = require("../../environment")

exports.isEnabled = async function (ctx) {
ctx.body = {
enabled: env.ENABLE_ANALYTICS === "true",
enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true",
}
}
@ -51,7 +51,7 @@ exports.buildSchemaFromDb = async function (ctx) {
await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables

const response = await db.post(datasource)
const response = await db.put(datasource)
datasource._rev = response.rev

ctx.body = datasource
@ -89,7 +89,7 @@ exports.save = async function (ctx) {
...ctx.request.body,
}

const response = await db.post(datasource)
const response = await db.put(datasource)
datasource._rev = response.rev

// Drain connection pools when configuration is changed
@ -437,7 +437,11 @@ module External {
for (let [colName, { isMany, rows, tableId }] of Object.entries(
related
)) {
const table = this.getTable(tableId)
const table: Table = this.getTable(tableId)
// if its not the foreign key skip it, nothing to do
if (table.primary && table.primary.indexOf(colName) !== -1) {
continue
}
for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen
@ -5,17 +5,22 @@ const {
generateRowID,
DocumentTypes,
InternalTables,
generateMemoryViewID,
} = require("../../../db/utils")
const userController = require("../user")
const {
inputProcessing,
outputProcessing,
processAutoColumn,
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
const inMemoryViews = require("../../../db/inMemoryView")
const env = require("../../../environment")
const { migrateToInMemoryView } = require("../view/utils")

const CALCULATION_TYPES = {
SUM: "sum",
@ -25,17 +30,84 @@ CALCULATION_TYPES {

async function storeResponse(ctx, db, row, oldTable, table) {
row.type = "row"
const response = await db.put(row)
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (!isEqual(oldTable, table)) {
await db.put(table)
try {
await db.put(table)
} catch (err) {
if (err.status === 409) {
const updatedTable = await db.get(table._id)
let response = processAutoColumn(null, updatedTable, row, {
reprocessing: true,
})
await db.put(response.table)
row = response.row
} else {
throw err
}
}
}
const response = await db.put(row)
row._rev = response.rev
// process the row before return, to include relationships
row = await outputProcessing(ctx, table, row, { squash: false })
return { row, table }
}

// doesn't do the outputProcessing
async function getRawTableData(ctx, db, tableId) {
let rows
if (tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
return rows
}

async function getView(db, viewName) {
let viewInfo
async function getFromDesignDoc() {
const designDoc = await db.get("_design/database")
viewInfo = designDoc.views[viewName]
return viewInfo
}
let migrate = false
if (env.SELF_HOSTED) {
viewInfo = await getFromDesignDoc()
} else {
try {
viewInfo = await db.get(generateMemoryViewID(viewName))
if (viewInfo) {
viewInfo = viewInfo.view
}
} catch (err) {
// check if it can be retrieved from design doc (needs migrated)
if (err.status !== 404) {
viewInfo = null
} else {
viewInfo = await getFromDesignDoc()
migrate = !!viewInfo
}
}
}
if (migrate) {
await migrateToInMemoryView(db, viewName)
}
if (!viewInfo) {
throw "View does not exist."
}
return viewInfo
}

exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
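The storeResponse change above guards the table write against concurrent auto ID updates. A simplified sketch of that conflict-handling idea (illustrative only, not the exact Budibase implementation; the helper name and signature here are made up):

// If the table doc was bumped by a concurrent write (e.g. its auto ID
// counter), a 409 comes back: fetch the latest revision, reapply the
// auto-column processing to the row, and save the refreshed table.
async function putTableWithRetry(db, table, row, processAutoColumn) {
  try {
    await db.put(table)
    return { table, row }
  } catch (err) {
    if (err.status !== 409) {
      throw err
    }
    const latest = await db.get(table._id)
    const reprocessed = processAutoColumn(null, latest, row, {
      reprocessing: true,
    })
    await db.put(reprocessed.table)
    return reprocessed
  }
}
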
@ -139,15 +211,18 @@ exports.fetchView = async ctx =>

const db = new CouchDB(appId)
const { calculation, group, field } = ctx.query
const designDoc = await db.get("_design/database")
const viewInfo = designDoc.views[viewName]
if (!viewInfo) {
throw "View does not exist."
const viewInfo = await getView(db, viewName)
let response
if (env.SELF_HOSTED) {
response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})
} else {
const tableId = viewInfo.meta.tableId
const data = await getRawTableData(ctx, db, tableId)
response = await inMemoryViews.runView(viewInfo, calculation, group, data)
}
const response = await db.query(`database/${viewName}`, {
include_docs: !calculation,
group: !!group,
})

let rows
if (!calculation) {
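The fetchView branch above is the heart of the change: self-hosted installs keep querying the CouchDB design doc, while cloud tenants fetch the raw table rows and evaluate the stored view definition in memory. A rough sketch of that split, assuming env, getRawTableData and inMemoryViews are in scope as in this file:

async function runViewQuery(ctx, db, viewName, viewInfo, calculation, group) {
  if (env.SELF_HOSTED) {
    // CouchDB evaluates the design-doc view server side
    return db.query(`database/${viewName}`, {
      include_docs: !calculation,
      group: !!group,
    })
  }
  // cloud: pull the rows and evaluate the stored view definition in memory
  const data = await getRawTableData(ctx, db, viewInfo.meta.tableId)
  return inMemoryViews.runView(viewInfo, calculation, group, data)
}
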
@ -191,19 +266,9 @@ exports.fetch = async ctx =>
const appId = ctx.appId
const db = new CouchDB(appId)

let rows,
table = await db.get(ctx.params.tableId)
if (ctx.params.tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
rows = ctx.body
} else {
const response = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
rows = response.rows.map(row => row.doc)
}
const tableId = ctx.params.tableId
let table = await db.get(tableId)
let rows = await getRawTableData(ctx, db, tableId)
return outputProcessing(ctx, table, rows)
}

@ -5,6 +5,7 @@ const { InternalTables } = require("../../../db/utils")
|
|||
const userController = require("../user")
|
||||
const { FieldTypes } = require("../../../constants")
|
||||
const { integrations } = require("../../../integrations")
|
||||
const { processStringSync } = require("@budibase/string-templates")
|
||||
|
||||
validateJs.extend(validateJs.validators.datetime, {
|
||||
parse: function (value) {
|
||||
|
@ -73,6 +74,11 @@ exports.validate = async ({ appId, tableId, row, table }) => {
|
|||
errors[fieldName] = "Field not in list"
|
||||
}
|
||||
})
|
||||
} else if (table.schema[fieldName].type === FieldTypes.FORMULA) {
|
||||
res = validateJs.single(
|
||||
processStringSync(table.schema[fieldName].formula, row),
|
||||
constraints
|
||||
)
|
||||
} else {
|
||||
res = validateJs.single(row[fieldName], constraints)
|
||||
}
|
||||
|
|
|
@ -145,7 +145,7 @@ exports.save = async function (ctx) {
|
|||
if (updatedRows && updatedRows.length !== 0) {
|
||||
await db.bulkDocs(updatedRows)
|
||||
}
|
||||
const result = await db.post(tableToSave)
|
||||
const result = await db.put(tableToSave)
|
||||
tableToSave._rev = result.rev
|
||||
|
||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
||||
|
|
|
@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
|
|||
// Populate the table with rows imported from CSV in a bulk update
|
||||
const data = await csvParser.transform(dataImport)
|
||||
|
||||
let finalData = []
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
let row = data[i]
|
||||
row._id = generateRowID(table._id)
|
||||
row.tableId = table._id
|
||||
const processed = inputProcessing(user, table, row)
|
||||
const processed = inputProcessing(user, table, row, {
|
||||
noAutoRelationships: true,
|
||||
})
|
||||
table = processed.table
|
||||
row = processed.row
|
||||
|
||||
// make sure link rows are up to date
|
||||
row = await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_SAVE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})
|
||||
|
||||
for (let [fieldName, schema] of Object.entries(table.schema)) {
|
||||
// check whether the options need to be updated for inclusion as part of the data import
|
||||
if (
|
||||
|
@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
|
|||
]
|
||||
}
|
||||
}
|
||||
data[i] = row
|
||||
|
||||
// make sure link rows are up to date
|
||||
finalData.push(
|
||||
linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_SAVE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
table,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
await db.bulkDocs(data)
|
||||
await db.bulkDocs(await Promise.all(finalData))
|
||||
let response = await db.put(table)
|
||||
table._rev = response._rev
|
||||
}
|
||||
|
|
|
@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
|
|||
const viewTemplate = require("./viewBuilder")
|
||||
const { apiFileReturn } = require("../../../utilities/fileSystem")
|
||||
const exporters = require("./exporters")
|
||||
const { saveView, getView, getViews, deleteView } = require("./utils")
|
||||
const { fetchView } = require("../row")
|
||||
const { ViewNames } = require("../../../db/utils")
|
||||
|
||||
const controller = {
|
||||
fetch: async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const designDoc = await db.get("_design/database")
|
||||
const response = []
|
||||
|
||||
for (let name of Object.keys(designDoc.views)) {
|
||||
// Only return custom views, not built ins
|
||||
if (Object.values(ViewNames).indexOf(name) !== -1) {
|
||||
continue
|
||||
}
|
||||
response.push({
|
||||
name,
|
||||
...designDoc.views[name],
|
||||
})
|
||||
}
|
||||
|
||||
ctx.body = response
|
||||
},
|
||||
save: async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const { originalName, ...viewToSave } = ctx.request.body
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = viewTemplate(viewToSave)
|
||||
|
||||
if (!viewToSave.name) {
|
||||
ctx.throw(400, "Cannot create view without a name")
|
||||
}
|
||||
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[viewToSave.name]: view,
|
||||
}
|
||||
|
||||
// view has been renamed
|
||||
if (originalName) {
|
||||
delete designDoc.views[originalName]
|
||||
}
|
||||
|
||||
await db.put(designDoc)
|
||||
|
||||
// add views to table document
|
||||
const table = await db.get(ctx.request.body.tableId)
|
||||
if (!table.views) table.views = {}
|
||||
if (!view.meta.schema) {
|
||||
view.meta.schema = table.schema
|
||||
}
|
||||
table.views[viewToSave.name] = view.meta
|
||||
|
||||
if (originalName) {
|
||||
delete table.views[originalName]
|
||||
}
|
||||
|
||||
await db.put(table)
|
||||
|
||||
ctx.body = {
|
||||
...table.views[viewToSave.name],
|
||||
name: viewToSave.name,
|
||||
}
|
||||
},
|
||||
destroy: async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const designDoc = await db.get("_design/database")
|
||||
const viewName = decodeURI(ctx.params.viewName)
|
||||
const view = designDoc.views[viewName]
|
||||
delete designDoc.views[viewName]
|
||||
|
||||
await db.put(designDoc)
|
||||
|
||||
const table = await db.get(view.meta.tableId)
|
||||
delete table.views[viewName]
|
||||
await db.put(table)
|
||||
|
||||
ctx.body = view
|
||||
},
|
||||
exportView: async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const designDoc = await db.get("_design/database")
|
||||
const viewName = decodeURI(ctx.query.view)
|
||||
|
||||
const view = designDoc.views[viewName]
|
||||
const format = ctx.query.format
|
||||
if (!format) {
|
||||
ctx.throw(400, "Format must be specified, either csv or json")
|
||||
}
|
||||
|
||||
if (view) {
|
||||
ctx.params.viewName = viewName
|
||||
// Fetch view rows
|
||||
ctx.query = {
|
||||
group: view.meta.groupBy,
|
||||
calculation: view.meta.calculation,
|
||||
stats: !!view.meta.field,
|
||||
field: view.meta.field,
|
||||
}
|
||||
} else {
|
||||
// table all_ view
|
||||
/* istanbul ignore next */
|
||||
ctx.params.viewName = viewName
|
||||
}
|
||||
|
||||
await fetchView(ctx)
|
||||
|
||||
let schema = view && view.meta && view.meta.schema
|
||||
if (!schema) {
|
||||
const tableId = ctx.params.tableId || view.meta.tableId
|
||||
const table = await db.get(tableId)
|
||||
schema = table.schema
|
||||
}
|
||||
|
||||
// Export part
|
||||
let headers = Object.keys(schema)
|
||||
const exporter = exporters[format]
|
||||
const filename = `${viewName}.${format}`
|
||||
// send down the file
|
||||
ctx.attachment(filename)
|
||||
ctx.body = apiFileReturn(exporter(headers, ctx.body))
|
||||
},
|
||||
exports.fetch = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
ctx.body = await getViews(db)
|
||||
}
|
||||
|
||||
module.exports = controller
|
||||
exports.save = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const { originalName, ...viewToSave } = ctx.request.body
|
||||
const view = viewTemplate(viewToSave)
|
||||
|
||||
if (!viewToSave.name) {
|
||||
ctx.throw(400, "Cannot create view without a name")
|
||||
}
|
||||
|
||||
await saveView(db, originalName, viewToSave.name, view)
|
||||
|
||||
// add views to table document
|
||||
const table = await db.get(ctx.request.body.tableId)
|
||||
if (!table.views) table.views = {}
|
||||
if (!view.meta.schema) {
|
||||
view.meta.schema = table.schema
|
||||
}
|
||||
table.views[viewToSave.name] = view.meta
|
||||
if (originalName) {
|
||||
delete table.views[originalName]
|
||||
}
|
||||
await db.put(table)
|
||||
|
||||
ctx.body = {
|
||||
...table.views[viewToSave.name],
|
||||
name: viewToSave.name,
|
||||
}
|
||||
}
|
||||
|
||||
exports.destroy = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const viewName = decodeURI(ctx.params.viewName)
|
||||
const view = await deleteView(db, viewName)
|
||||
const table = await db.get(view.meta.tableId)
|
||||
delete table.views[viewName]
|
||||
await db.put(table)
|
||||
|
||||
ctx.body = view
|
||||
}
|
||||
|
||||
exports.exportView = async ctx => {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const viewName = decodeURI(ctx.query.view)
|
||||
const view = await getView(db, viewName)
|
||||
|
||||
const format = ctx.query.format
|
||||
if (!format) {
|
||||
ctx.throw(400, "Format must be specified, either csv or json")
|
||||
}
|
||||
|
||||
if (view) {
|
||||
ctx.params.viewName = viewName
|
||||
// Fetch view rows
|
||||
ctx.query = {
|
||||
group: view.meta.groupBy,
|
||||
calculation: view.meta.calculation,
|
||||
stats: !!view.meta.field,
|
||||
field: view.meta.field,
|
||||
}
|
||||
} else {
|
||||
// table all_ view
|
||||
/* istanbul ignore next */
|
||||
ctx.params.viewName = viewName
|
||||
}
|
||||
|
||||
await fetchView(ctx)
|
||||
|
||||
let schema = view && view.meta && view.meta.schema
|
||||
if (!schema) {
|
||||
const tableId = ctx.params.tableId || view.meta.tableId
|
||||
const table = await db.get(tableId)
|
||||
schema = table.schema
|
||||
}
|
||||
|
||||
// Export part
|
||||
let headers = Object.keys(schema)
|
||||
const exporter = exporters[format]
|
||||
const filename = `${viewName}.${format}`
|
||||
// send down the file
|
||||
ctx.attachment(filename)
|
||||
ctx.body = apiFileReturn(exporter(headers, ctx.body))
|
||||
}
|
||||
|
|
|
@ -0,0 +1,109 @@
|
|||
const {
|
||||
ViewNames,
|
||||
generateMemoryViewID,
|
||||
getMemoryViewParams,
|
||||
} = require("../../../db/utils")
|
||||
const env = require("../../../environment")
|
||||
|
||||
exports.getView = async (db, viewName) => {
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
return designDoc.views[viewName]
|
||||
} else {
|
||||
const viewDoc = await db.get(generateMemoryViewID(viewName))
|
||||
return viewDoc.view
|
||||
}
|
||||
}
|
||||
|
||||
exports.getViews = async db => {
|
||||
const response = []
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
for (let name of Object.keys(designDoc.views)) {
|
||||
// Only return custom views, not built ins
|
||||
if (Object.values(ViewNames).indexOf(name) !== -1) {
|
||||
continue
|
||||
}
|
||||
response.push({
|
||||
name,
|
||||
...designDoc.views[name],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
const views = (
|
||||
await db.allDocs(
|
||||
getMemoryViewParams({
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
).rows.map(row => row.doc)
|
||||
for (let viewDoc of views) {
|
||||
response.push({
|
||||
name: viewDoc.name,
|
||||
...viewDoc.view,
|
||||
})
|
||||
}
|
||||
}
|
||||
return response
|
||||
}
|
||||
|
||||
exports.saveView = async (db, originalName, viewName, viewTemplate) => {
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
designDoc.views = {
|
||||
...designDoc.views,
|
||||
[viewName]: viewTemplate,
|
||||
}
|
||||
// view has been renamed
|
||||
if (originalName) {
|
||||
delete designDoc.views[originalName]
|
||||
}
|
||||
await db.put(designDoc)
|
||||
} else {
|
||||
const id = generateMemoryViewID(viewName)
|
||||
const originalId = originalName ? generateMemoryViewID(originalName) : null
|
||||
const viewDoc = {
|
||||
_id: id,
|
||||
view: viewTemplate,
|
||||
name: viewName,
|
||||
tableId: viewTemplate.meta.tableId,
|
||||
}
|
||||
try {
|
||||
const old = await db.get(id)
|
||||
if (originalId) {
|
||||
const originalDoc = await db.get(originalId)
|
||||
await db.remove(originalDoc._id, originalDoc._rev)
|
||||
}
|
||||
if (old && old._rev) {
|
||||
viewDoc._rev = old._rev
|
||||
}
|
||||
} catch (err) {
|
||||
// didn't exist, just skip
|
||||
}
|
||||
await db.put(viewDoc)
|
||||
}
|
||||
}
|
||||
|
||||
exports.deleteView = async (db, viewName) => {
|
||||
if (env.SELF_HOSTED) {
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = designDoc.views[viewName]
|
||||
delete designDoc.views[viewName]
|
||||
await db.put(designDoc)
|
||||
return view
|
||||
} else {
|
||||
const id = generateMemoryViewID(viewName)
|
||||
const viewDoc = await db.get(id)
|
||||
await db.remove(viewDoc._id, viewDoc._rev)
|
||||
return viewDoc.view
|
||||
}
|
||||
}
|
||||
|
||||
exports.migrateToInMemoryView = async (db, viewName) => {
|
||||
// delete the view initially
|
||||
const designDoc = await db.get("_design/database")
|
||||
const view = designDoc.views[viewName]
|
||||
delete designDoc.views[viewName]
|
||||
await db.put(designDoc)
|
||||
await exports.saveView(db, null, viewName, view)
|
||||
}
|
|
@ -205,7 +205,7 @@ describe("/views", () => {
|
|||
})
|
||||
|
||||
describe("exportView", () => {
|
||||
it("should be able to delete a view", async () => {
|
||||
it("should be able to export a view", async () => {
|
||||
await config.createTable(priceTable())
|
||||
await config.createRow()
|
||||
const view = await config.createView()
|
||||
|
|
|
@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
|
|||
const automationUtils = require("../automationUtils")
|
||||
const env = require("../../environment")
|
||||
const usage = require("../../utilities/usageQuota")
|
||||
const { buildCtx } = require("./utils")
|
||||
|
||||
exports.definition = {
|
||||
name: "Create Row",
|
||||
|
@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
|||
}
|
||||
}
|
||||
// have to clean up the row, remove the table from it
|
||||
const ctx = {
|
||||
const ctx = buildCtx(appId, emitter, {
|
||||
body: inputs.row,
|
||||
params: {
|
||||
tableId: inputs.row.tableId,
|
||||
},
|
||||
request: {
|
||||
body: inputs.row,
|
||||
},
|
||||
appId,
|
||||
eventEmitter: emitter,
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
inputs.row = await automationUtils.cleanUpRow(
|
||||
|
@ -86,7 +83,7 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
|||
inputs.row.tableId,
|
||||
inputs.row
|
||||
)
|
||||
if (env.isProd()) {
|
||||
if (env.USE_QUOTAS) {
|
||||
await usage.update(apiKey, usage.Properties.ROW, 1)
|
||||
}
|
||||
await rowController.save(ctx)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
const rowController = require("../../api/controllers/row")
|
||||
const env = require("../../environment")
|
||||
const usage = require("../../utilities/usageQuota")
|
||||
const { buildCtx } = require("./utils")
|
||||
|
||||
exports.definition = {
|
||||
description: "Delete a row from your database",
|
||||
|
@ -60,19 +61,16 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
|||
},
|
||||
}
|
||||
}
|
||||
let ctx = {
|
||||
|
||||
let ctx = buildCtx(appId, emitter, {
|
||||
body: {
|
||||
_id: inputs.id,
|
||||
_rev: inputs.revision,
|
||||
},
|
||||
params: {
|
||||
tableId: inputs.tableId,
|
||||
},
|
||||
request: {
|
||||
body: {
|
||||
_id: inputs.id,
|
||||
_rev: inputs.revision,
|
||||
},
|
||||
},
|
||||
appId,
|
||||
eventEmitter: emitter,
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
if (env.isProd()) {
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
const rowController = require("../../api/controllers/row")
|
||||
const tableController = require("../../api/controllers/table")
|
||||
const { FieldTypes } = require("../../constants")
|
||||
const { buildCtx } = require("./utils")
|
||||
|
||||
const SortOrders = {
|
||||
ASCENDING: "ascending",
|
||||
|
@ -70,12 +71,11 @@ exports.definition = {
|
|||
}
|
||||
|
||||
async function getTable(appId, tableId) {
|
||||
const ctx = {
|
||||
const ctx = buildCtx(appId, null, {
|
||||
params: {
|
||||
id: tableId,
|
||||
},
|
||||
appId,
|
||||
}
|
||||
})
|
||||
await tableController.find(ctx)
|
||||
return ctx.body
|
||||
}
|
||||
|
@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
|
|||
sortType =
|
||||
fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
|
||||
}
|
||||
const ctx = {
|
||||
const ctx = buildCtx(appId, null, {
|
||||
params: {
|
||||
tableId,
|
||||
},
|
||||
request: {
|
||||
body: {
|
||||
sortOrder,
|
||||
sortType,
|
||||
sort: sortColumn,
|
||||
query: filters || {},
|
||||
limit,
|
||||
},
|
||||
body: {
|
||||
sortOrder,
|
||||
sortType,
|
||||
sort: sortColumn,
|
||||
query: filters || {},
|
||||
limit,
|
||||
},
|
||||
appId,
|
||||
}
|
||||
})
|
||||
try {
|
||||
await rowController.search(ctx)
|
||||
return {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
const rowController = require("../../api/controllers/row")
|
||||
const automationUtils = require("../automationUtils")
|
||||
const { buildCtx } = require("./utils")
|
||||
|
||||
exports.definition = {
|
||||
name: "Update Row",
|
||||
|
@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
|
|||
}
|
||||
|
||||
// have to clean up the row, remove the table from it
|
||||
const ctx = {
|
||||
const ctx = buildCtx(appId, emitter, {
|
||||
body: {
|
||||
...inputs.row,
|
||||
_id: inputs.rowId,
|
||||
},
|
||||
params: {
|
||||
rowId: inputs.rowId,
|
||||
},
|
||||
request: {
|
||||
body: {
|
||||
...inputs.row,
|
||||
_id: inputs.rowId,
|
||||
},
|
||||
},
|
||||
appId,
|
||||
eventEmitter: emitter,
|
||||
}
|
||||
})
|
||||
|
||||
try {
|
||||
inputs.row = await automationUtils.cleanUpRowById(
|
||||
|
|
|
@ -0,0 +1,48 @@
const PouchDB = require("pouchdb")
const memory = require("pouchdb-adapter-memory")
const newid = require("./newid")

PouchDB.plugin(memory)
const Pouch = PouchDB.defaults({
prefix: undefined,
adapter: "memory",
})

exports.runView = async (view, calculation, group, data) => {
// use a different ID each time for the DB, make sure they
// are always unique for each query, don't want overlap
// which could cause 409s
const db = new Pouch(newid())
// write all the docs to the in memory Pouch (remove revs)
await db.bulkDocs(
data.map(row => ({
...row,
_rev: undefined,
}))
)
let fn = (doc, emit) => emit(doc._id)
eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
const queryFns = {
meta: view.meta,
map: fn,
}
if (view.reduce) {
queryFns.reduce = view.reduce
}
const response = await db.query(queryFns, {
include_docs: !calculation,
group: !!group,
})
// need to fix the revs to be totally accurate
for (let row of response.rows) {
if (!row._rev || !row._id) {
continue
}
const found = data.find(possible => possible._id === row._id)
if (found) {
row._rev = found._rev
}
}
await db.destroy()
return response
}
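For context on the new module above, a hypothetical call to runView: the view shape ({ map, reduce, meta }) matches what the design doc stores, while the require path, table id and rows below are invented for the example:

const inMemoryViews = require("./inMemoryView") // path assumed relative to src/db

async function countByCategory(rows) {
  const view = {
    meta: { tableId: "ta_example" }, // illustrative id only
    map: `function (doc) { emit(doc.Category) }`,
    reduce: "_count",
  }
  // a truthy calculation drops include_docs; group gives one row per key
  const response = await inMemoryViews.runView(view, "count", true, rows)
  return response.rows // e.g. [{ key: "Brakes", value: 2 }, ...]
}
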
@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
|
|||
// create DBs
|
||||
const db = new CouchDB(appId)
|
||||
const linkedRowIds = links.map(link => link.id)
|
||||
let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
|
||||
const uniqueRowIds = [...new Set(linkedRowIds)]
|
||||
let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
|
||||
row => row.doc
|
||||
)
|
||||
// convert the unique db rows back to a full list of linked rows
|
||||
const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
|
||||
// need to handle users as specific cases
|
||||
let [users, other] = partition(linked, linkRow =>
|
||||
linkRow._id.startsWith(USER_METDATA_PREFIX)
|
||||
|
@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
|
|||
let linkController = new LinkController(args)
|
||||
try {
|
||||
if (
|
||||
!(await linkController.doesTableHaveLinkedFields()) &&
|
||||
!(await linkController.doesTableHaveLinkedFields(table)) &&
|
||||
(oldTable == null ||
|
||||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
|
||||
) {
|
||||
|
|
|
@ -39,6 +39,7 @@ const DocumentTypes = {
|
|||
QUERY: "query",
|
||||
DEPLOYMENTS: "deployments",
|
||||
METADATA: "metadata",
|
||||
MEM_VIEW: "view",
|
||||
}
|
||||
|
||||
const ViewNames = {
|
||||
|
@ -348,6 +349,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
|
|||
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
|
||||
}
|
||||
|
||||
exports.generateMemoryViewID = viewName => {
|
||||
return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
|
||||
}
|
||||
|
||||
exports.getMemoryViewParams = (otherProps = {}) => {
|
||||
return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
|
||||
}
|
||||
|
||||
/**
|
||||
* This can be used with the db.allDocs to get a list of IDs
|
||||
*/
|
||||
|
|
|
@ -66,3 +66,10 @@ module.exports = {
return !isDev()
},
}

// convert any strings to numbers if required, like "0" would be true otherwise
for (let [key, value] of Object.entries(module.exports)) {
if (typeof value === "string" && !isNaN(parseInt(value))) {
module.exports[key] = parseInt(value)
}
}
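The numeric coercion added above matters because environment values arrive as strings, and the string "0" is truthy in JS, so SELF_HOSTED=0 would otherwise read as enabled. A small stand-alone illustration of the same loop (sample values are made up):

const raw = { SELF_HOSTED: "0", PORT: "4001", NODE_ENV: "jest" }

for (let [key, value] of Object.entries(raw)) {
  if (typeof value === "string" && !isNaN(parseInt(value))) {
    raw[key] = parseInt(value)
  }
}

console.log(!!"0")             // true, the untreated string
console.log(!!raw.SELF_HOSTED) // false, after coercion to the number 0
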
@ -12,7 +12,7 @@ import { getSqlQuery } from "./utils"
|
|||
module MySQLModule {
|
||||
const mysql = require("mysql")
|
||||
const Sql = require("./base/sql")
|
||||
const { buildExternalTableId, convertType } = require("./utils")
|
||||
const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
|
||||
const { FieldTypes } = require("../constants")
|
||||
|
||||
interface MySQLConfig {
|
||||
|
@ -194,18 +194,7 @@ module MySQLModule {
|
|||
}
|
||||
}
|
||||
|
||||
// add the existing relationships from the entities if they exist, to prevent them from being overridden
|
||||
if (entities && entities[tableName]) {
|
||||
const existingTableSchema = entities[tableName].schema
|
||||
for (let key in existingTableSchema) {
|
||||
if (!existingTableSchema.hasOwnProperty(key)) {
|
||||
continue
|
||||
}
|
||||
if (existingTableSchema[key].type === "link") {
|
||||
tables[tableName].schema[key] = existingTableSchema[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
copyExistingPropsOver(tableName, tables, entities)
|
||||
}
|
||||
|
||||
this.client.end()
|
||||
|
|
|
@ -12,7 +12,7 @@ module PostgresModule {
|
|||
const { Pool } = require("pg")
|
||||
const Sql = require("./base/sql")
|
||||
const { FieldTypes } = require("../constants")
|
||||
const { buildExternalTableId, convertType } = require("./utils")
|
||||
const { buildExternalTableId, convertType, copyExistingPropsOver } = require("./utils")
|
||||
|
||||
interface PostgresConfig {
|
||||
host: string
|
||||
|
@ -84,10 +84,13 @@ module PostgresModule {
|
|||
bigint: FieldTypes.NUMBER,
|
||||
decimal: FieldTypes.NUMBER,
|
||||
smallint: FieldTypes.NUMBER,
|
||||
real: FieldTypes.NUMBER,
|
||||
"double precision": FieldTypes.NUMBER,
|
||||
timestamp: FieldTypes.DATETIME,
|
||||
time: FieldTypes.DATETIME,
|
||||
boolean: FieldTypes.BOOLEAN,
|
||||
json: FieldTypes.JSON,
|
||||
date: FieldTypes.DATETIME,
|
||||
}
|
||||
|
||||
async function internalQuery(client: any, query: SqlQuery) {
|
||||
|
@ -173,31 +176,24 @@ module PostgresModule {
|
|||
name: tableName,
|
||||
schema: {},
|
||||
}
|
||||
|
||||
// add the existing relationships from the entities if they exist, to prevent them from being overridden
|
||||
if (entities && entities[tableName]) {
|
||||
const existingTableSchema = entities[tableName].schema
|
||||
for (let key in existingTableSchema) {
|
||||
if (!existingTableSchema.hasOwnProperty(key)) {
|
||||
continue
|
||||
}
|
||||
if (existingTableSchema[key].type === "link") {
|
||||
tables[tableName].schema[key] = existingTableSchema[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const type: string = convertType(column.data_type, TYPE_MAP)
|
||||
const isAuto: boolean =
|
||||
typeof column.column_default === "string" &&
|
||||
const identity = !!(column.identity_generation || column.identity_start || column.identity_increment)
|
||||
const hasDefault = typeof column.column_default === "string" &&
|
||||
column.column_default.startsWith("nextval")
|
||||
const isGenerated = column.is_generated && column.is_generated !== "NEVER"
|
||||
const isAuto: boolean = hasDefault || identity || isGenerated
|
||||
tables[tableName].schema[columnName] = {
|
||||
autocolumn: isAuto,
|
||||
name: columnName,
|
||||
type,
|
||||
}
|
||||
}
|
||||
|
||||
for (let tableName of Object.keys(tables)) {
|
||||
copyExistingPropsOver(tableName, tables, entities)
|
||||
}
|
||||
this.tables = tables
|
||||
}
|
||||
|
||||
|
|
|
@ -82,3 +82,21 @@ export function isIsoDateString(str: string) {
|
|||
let d = new Date(str)
|
||||
return d.toISOString() === str
|
||||
}
|
||||
|
||||
// add the existing relationships from the entities if they exist, to prevent them from being overridden
|
||||
export function copyExistingPropsOver(tableName: string, tables: { [key: string]: any }, entities: { [key: string]: any }) {
|
||||
if (entities && entities[tableName]) {
|
||||
if (entities[tableName].primaryDisplay) {
|
||||
tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
|
||||
}
|
||||
const existingTableSchema = entities[tableName].schema
|
||||
for (let key in existingTableSchema) {
|
||||
if (!existingTableSchema.hasOwnProperty(key)) {
|
||||
continue
|
||||
}
|
||||
if (existingTableSchema[key].type === "link") {
|
||||
tables[tableName].schema[key] = existingTableSchema[key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -124,11 +124,13 @@ exports.performBackup = async (appId, backupName) => {
|
|||
),
|
||||
})
|
||||
// write the file to the object store
|
||||
await streamUpload(
|
||||
ObjectStoreBuckets.BACKUPS,
|
||||
join(appId, backupName),
|
||||
fs.createReadStream(path)
|
||||
)
|
||||
if (env.SELF_HOSTED) {
|
||||
await streamUpload(
|
||||
ObjectStoreBuckets.BACKUPS,
|
||||
join(appId, backupName),
|
||||
fs.createReadStream(path)
|
||||
)
|
||||
}
|
||||
return fs.createReadStream(path)
|
||||
}
|
||||
|
||||
|
|
|
@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
|
|||
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
|
||||
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
|
||||
* @param {Object} row The row which is to be updated with information for the auto columns.
|
||||
* @param {Object} opts specific options for function to carry out optional features.
|
||||
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
|
||||
* for automatic ID purposes.
|
||||
*/
|
||||
function processAutoColumn(user, table, row) {
|
||||
function processAutoColumn(
|
||||
user,
|
||||
table,
|
||||
row,
|
||||
opts = { reprocessing: false, noAutoRelationships: false }
|
||||
) {
|
||||
let now = new Date().toISOString()
|
||||
// if a row doesn't have a revision then it doesn't exist yet
|
||||
const creating = !row._rev
|
||||
|
@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
|
|||
}
|
||||
switch (schema.subtype) {
|
||||
case AutoFieldSubTypes.CREATED_BY:
|
||||
if (creating) {
|
||||
if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
|
||||
row[key] = [user.userId]
|
||||
}
|
||||
break
|
||||
|
@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
|
|||
}
|
||||
break
|
||||
case AutoFieldSubTypes.UPDATED_BY:
|
||||
row[key] = [user.userId]
|
||||
if (!opts.reprocessing && !opts.noAutoRelationships) {
|
||||
row[key] = [user.userId]
|
||||
}
|
||||
break
|
||||
case AutoFieldSubTypes.UPDATED_AT:
|
||||
row[key] = now
|
||||
|
@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
|
|||
}
|
||||
return { table, row }
|
||||
}
|
||||
exports.processAutoColumn = processAutoColumn
|
||||
|
||||
/**
|
||||
* This will coerce a value to the correct types based on the type transform map
|
||||
|
@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
|
|||
* @param {object} user the user which is performing the input.
|
||||
* @param {object} row the row which is being created/updated.
|
||||
* @param {object} table the table which the row is being saved to.
|
||||
* @param {object} opts some input processing options (like disabling auto-column relationships).
|
||||
* @returns {object} the row which has been prepared to be written to the DB.
|
||||
*/
|
||||
exports.inputProcessing = (user = {}, table, row) => {
|
||||
exports.inputProcessing = (
|
||||
user = {},
|
||||
table,
|
||||
row,
|
||||
opts = { noAutoRelationships: false }
|
||||
) => {
|
||||
let clonedRow = cloneDeep(row)
|
||||
// need to copy the table so it can be differenced on way out
|
||||
const copiedTable = cloneDeep(table)
|
||||
|
@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
|
|||
}
|
||||
}
|
||||
// handle auto columns - this returns an object like {table, row}
|
||||
return processAutoColumn(user, copiedTable, clonedRow)
|
||||
return processAutoColumn(user, copiedTable, clonedRow, opts)
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -943,10 +943,10 @@
|
|||
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
|
||||
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
|
||||
|
||||
"@budibase/auth@^0.9.125-alpha.17":
|
||||
version "0.9.133"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.133.tgz#280d581820c9069b6bc021f88178c215ee48ad08"
|
||||
integrity sha512-DL7zIYRXE6xSKE/qbHMf/SX3+bceGxM4xzUmLTk4OHtEOP/vaUJr35tkhznAZF7VpUR9Yh20D6/Zw8z/3sxj/A==
|
||||
"@budibase/auth@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.139.tgz#0610582800df062372582f9139c7aa99606af3e1"
|
||||
integrity sha512-2JUAKC3AA74O3TXHjoGCoXkDxXqUS1K8KGFrJtrUQQrVq1YeQGSjD6Km+Ho8PqUaNdpEfZinBS1/3qFUqaQbuQ==
|
||||
dependencies:
|
||||
"@techpass/passport-openidconnect" "^0.3.0"
|
||||
aws-sdk "^2.901.0"
|
||||
|
@ -966,10 +966,10 @@
|
|||
uuid "^8.3.2"
|
||||
zlib "^1.0.5"
|
||||
|
||||
"@budibase/bbui@^0.9.133":
|
||||
version "0.9.133"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad"
|
||||
integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ==
|
||||
"@budibase/bbui@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.139.tgz#e6cfc90e8f6c2aa3526fc6a7bef251bccdaf51bb"
|
||||
integrity sha512-HllzXwfCnxqlV/ifdOR4Got6yrvK2rUFwKUWQIcYU0wk8h6hwYmLehP7HqgBa6l8+bvO1Ep9g+rjP2xJPJG21w==
|
||||
dependencies:
|
||||
"@adobe/spectrum-css-workflow-icons" "^1.2.1"
|
||||
"@spectrum-css/actionbutton" "^1.0.1"
|
||||
|
@ -1015,14 +1015,14 @@
|
|||
svelte-flatpickr "^3.1.0"
|
||||
svelte-portal "^1.0.0"
|
||||
|
||||
"@budibase/client@^0.9.125-alpha.17":
|
||||
version "0.9.133"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.133.tgz#43748e189e9b92d99d1281ab62bd2c5ebed5dbab"
|
||||
integrity sha512-JrduL9iVMGalZyIUQ+1UN/dhrOZNRJwXU8B4r/eWhVoJf3f3bCuNfpMoT2LN3HY4ooyu37VehD+J5bdDsvlNPw==
|
||||
"@budibase/client@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.139.tgz#acec8dc746295f7793b188f4950ab2268170366c"
|
||||
integrity sha512-PSSSaWjUrY/C4kG8r46aOVfq0aCEZGuI2Uv4jkqmk1zgt0GTXiJ+iQBkg7WZqTDBm7JIUzYUzV1T102tN4L1Jg==
|
||||
dependencies:
|
||||
"@budibase/bbui" "^0.9.133"
|
||||
"@budibase/standard-components" "^0.9.133"
|
||||
"@budibase/string-templates" "^0.9.133"
|
||||
"@budibase/bbui" "^0.9.139"
|
||||
"@budibase/standard-components" "^0.9.139"
|
||||
"@budibase/string-templates" "^0.9.139"
|
||||
regexparam "^1.3.0"
|
||||
shortid "^2.2.15"
|
||||
svelte-spa-router "^3.0.5"
|
||||
|
@ -1055,12 +1055,12 @@
|
|||
to-gfm-code-block "^0.1.1"
|
||||
year "^0.2.1"
|
||||
|
||||
"@budibase/standard-components@^0.9.133":
|
||||
version "0.9.133"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.133.tgz#789c02b45dc3853b003822c09e18ce7ece4dfa29"
|
||||
integrity sha512-xcuwTxsqk1J/YmM4YjThO/Fm0eJ+aZWm0kbFgfN+dNN9fuPlsPOLmlVEWeOUPmBa5XfRyDbx6lDYj0PPEK8CvA==
|
||||
"@budibase/standard-components@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3"
|
||||
integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw==
|
||||
dependencies:
|
||||
"@budibase/bbui" "^0.9.133"
|
||||
"@budibase/bbui" "^0.9.139"
|
||||
"@spectrum-css/button" "^3.0.3"
|
||||
"@spectrum-css/card" "^3.0.3"
|
||||
"@spectrum-css/divider" "^1.0.3"
|
||||
|
@ -1073,10 +1073,10 @@
|
|||
svelte-apexcharts "^1.0.2"
|
||||
svelte-flatpickr "^3.1.0"
|
||||
|
||||
"@budibase/string-templates@^0.9.125-alpha.17", "@budibase/string-templates@^0.9.133":
|
||||
version "0.9.133"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055"
|
||||
integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w==
|
||||
"@budibase/string-templates@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.139.tgz#f87de1d7382a81164bb734ef62ba552839805134"
|
||||
integrity sha512-T7FR3GSmc/3vs6bynYrL/POjGP/z4pjlwjI4P6b2u10Fg2HWtI0QPZ+ifnOUf53Ry2r/PvDELATqkElpKh9Spg==
|
||||
dependencies:
|
||||
"@budibase/handlebars-helpers" "^0.11.4"
|
||||
dayjs "^1.10.4"
|
||||
|
@ -11110,9 +11110,9 @@ tmp@^0.0.33:
|
|||
os-tmpdir "~1.0.2"
|
||||
|
||||
tmpl@1.0.x:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
|
||||
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
|
||||
integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
|
||||
|
||||
to-buffer@^1.1.1:
|
||||
version "1.1.1"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "@budibase/string-templates",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"description": "Handlebars wrapper for Budibase templating.",
|
||||
"main": "src/index.cjs",
|
||||
"module": "dist/bundle.mjs",
|
||||
|
|
|
@ -4633,9 +4633,9 @@ time-stamp@^1.0.1:
|
|||
integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM=
|
||||
|
||||
tmpl@1.0.x:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
|
||||
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
|
||||
integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
|
||||
|
||||
to-fast-properties@^2.0.0:
|
||||
version "2.0.0"
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{
|
||||
"name": "@budibase/worker",
|
||||
"email": "hi@budibase.com",
|
||||
"version": "0.9.125-alpha.18",
|
||||
"version": "0.9.140-alpha.8",
|
||||
"description": "Budibase background service",
|
||||
"main": "src/index.js",
|
||||
"repository": {
|
||||
|
@ -25,8 +25,8 @@
|
|||
"author": "Budibase",
|
||||
"license": "AGPL-3.0-or-later",
|
||||
"dependencies": {
|
||||
"@budibase/auth": "^0.9.125-alpha.18",
|
||||
"@budibase/string-templates": "^0.9.125-alpha.18",
|
||||
"@budibase/auth": "^0.9.140-alpha.8",
|
||||
"@budibase/string-templates": "^0.9.140-alpha.8",
|
||||
"@koa/router": "^8.0.0",
|
||||
"@techpass/passport-openidconnect": "^0.3.0",
|
||||
"aws-sdk": "^2.811.0",
|
||||
|
|
|
@ -21,7 +21,7 @@ async function init() {
|
|||
COUCH_DB_PASSWORD: "budibase",
|
||||
// empty string is false
|
||||
MULTI_TENANCY: "",
|
||||
ACCOUNT_PORTAL_URL: "http://localhost:3001",
|
||||
ACCOUNT_PORTAL_URL: "http://localhost:10001",
|
||||
}
|
||||
let envFile = ""
|
||||
Object.keys(envFileJson).forEach(key => {
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
const env = require("../src/environment")
|
||||
|
||||
env._set("SELF_HOSTED", "1")
|
||||
env._set("NODE_ENV", "jest")
|
||||
env._set("JWT_SECRET", "test-jwtsecret")
|
||||
env._set("LOG_LEVEL", "silent")
|
||||
|
|
|
@ -31,7 +31,12 @@ async function allUsers() {
|
|||
return response.rows.map(row => row.doc)
|
||||
}
|
||||
|
||||
async function saveUser(user, tenantId, hashPassword = true) {
|
||||
async function saveUser(
|
||||
user,
|
||||
tenantId,
|
||||
hashPassword = true,
|
||||
requirePassword = true
|
||||
) {
|
||||
if (!tenantId) {
|
||||
throw "No tenancy specified."
|
||||
}
|
||||
|
@ -57,12 +62,13 @@ async function saveUser(user, tenantId, hashPassword = true) {
|
|||
hashedPassword = hashPassword ? await hash(password) : password
|
||||
} else if (dbUser) {
|
||||
hashedPassword = dbUser.password
|
||||
} else {
|
||||
} else if (requirePassword) {
|
||||
throw "Password must be specified."
|
||||
}
|
||||
|
||||
_id = _id || generateGlobalUserID()
|
||||
user = {
|
||||
createdAt: Date.now(),
|
||||
...dbUser,
|
||||
...user,
|
||||
_id,
|
||||
|
@ -106,16 +112,21 @@ exports.save = async ctx => {
|
|||
}
|
||||
}
|
||||
|
||||
const parseBooleanParam = param => {
|
||||
if (param && param == "false") {
|
||||
return false
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
exports.adminUser = async ctx => {
|
||||
const { email, password, tenantId } = ctx.request.body
|
||||
|
||||
// account portal sends a pre-hashed password - honour param to prevent double hashing
|
||||
let hashPassword = ctx.request.query.hashPassword
|
||||
if (hashPassword && hashPassword == "false") {
|
||||
hashPassword = false
|
||||
} else {
|
||||
hashPassword = true
|
||||
}
|
||||
const hashPassword = parseBooleanParam(ctx.request.query.hashPassword)
|
||||
// account portal sends no password for SSO users
|
||||
const requirePassword = parseBooleanParam(ctx.request.query.requirePassword)
|
||||
|
||||
if (await doesTenantExist(tenantId)) {
|
||||
ctx.throw(403, "Organisation already exists.")
|
||||
|
@ -138,6 +149,7 @@ exports.adminUser = async ctx => {
|
|||
const user = {
|
||||
email: email,
|
||||
password: password,
|
||||
createdAt: Date.now(),
|
||||
roles: {},
|
||||
builder: {
|
||||
global: true,
|
||||
|
@ -148,7 +160,7 @@ exports.adminUser = async ctx => {
|
|||
tenantId,
|
||||
}
|
||||
try {
|
||||
ctx.body = await saveUser(user, tenantId, hashPassword)
|
||||
ctx.body = await saveUser(user, tenantId, hashPassword, requirePassword)
|
||||
} catch (err) {
|
||||
ctx.throw(err.status || 400, err)
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ exports.save = async function (ctx) {
|
|||
}
|
||||
|
||||
try {
|
||||
const response = await db.post(workspaceDoc)
|
||||
const response = await db.put(workspaceDoc)
|
||||
ctx.body = {
|
||||
_id: response.id,
|
||||
_rev: response.rev,
|
||||
|
|
|
@ -3,7 +3,7 @@ const env = require("../../../environment")
|
|||
exports.fetch = async ctx => {
|
||||
ctx.body = {
|
||||
multiTenancy: !!env.MULTI_TENANCY,
|
||||
cloud: !(env.SELF_HOSTED === "1"),
|
||||
cloud: !env.SELF_HOSTED,
|
||||
accountPortalUrl: env.ACCOUNT_PORTAL_URL,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ function buildAdminInitValidation() {
|
|||
return joiValidator.body(
|
||||
Joi.object({
|
||||
email: Joi.string().required(),
|
||||
password: Joi.string().required(),
|
||||
password: Joi.string(),
|
||||
tenantId: Joi.string().required(),
|
||||
})
|
||||
.required()
|
||||
|
|
|
@ -18,7 +18,7 @@ if (!LOADED && isDev() && !isTest()) {
|
|||
|
||||
module.exports = {
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
SELF_HOSTED: process.env.SELF_HOSTED,
|
||||
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
|
||||
PORT: process.env.PORT,
|
||||
CLUSTER_PORT: process.env.CLUSTER_PORT,
|
||||
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
|
||||
|
|
|
@ -287,10 +287,10 @@
|
|||
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
|
||||
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
|
||||
|
||||
"@budibase/auth@^0.9.128":
|
||||
version "0.9.128"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.128.tgz#6bb6c716b6647b7e9362e3faf12b191650ea0ad4"
|
||||
integrity sha512-WCcrtAXilT/4++7PdzyTYgrdVqZcKhUev3NcGrFQf7WbDhkVCuigWbb8Q01KXODjbs0BZC0RshVv/PxrgLbBQA==
|
||||
"@budibase/auth@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.139.tgz#0610582800df062372582f9139c7aa99606af3e1"
|
||||
integrity sha512-2JUAKC3AA74O3TXHjoGCoXkDxXqUS1K8KGFrJtrUQQrVq1YeQGSjD6Km+Ho8PqUaNdpEfZinBS1/3qFUqaQbuQ==
|
||||
dependencies:
|
||||
"@techpass/passport-openidconnect" "^0.3.0"
|
||||
aws-sdk "^2.901.0"
|
||||
|
@ -338,10 +338,10 @@
|
|||
to-gfm-code-block "^0.1.1"
|
||||
year "^0.2.1"
|
||||
|
||||
"@budibase/string-templates@^0.9.128":
|
||||
version "0.9.128"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.128.tgz#50ee46dc0d726d481bd5139cd0b38364649a8463"
|
||||
integrity sha512-4TzmnX2o5S2cts08ukB86El4wYm7cHuV2t6a7yDMGPe1mWeKP1WEtVF6rKhXEdbPTiotW8oYondOlgOP7DT9lA==
|
||||
"@budibase/string-templates@^0.9.139":
|
||||
version "0.9.139"
|
||||
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.139.tgz#f87de1d7382a81164bb734ef62ba552839805134"
|
||||
integrity sha512-T7FR3GSmc/3vs6bynYrL/POjGP/z4pjlwjI4P6b2u10Fg2HWtI0QPZ+ifnOUf53Ry2r/PvDELATqkElpKh9Spg==
|
||||
dependencies:
|
||||
"@budibase/handlebars-helpers" "^0.11.4"
|
||||
dayjs "^1.10.4"
|
||||
|
@ -6184,9 +6184,9 @@ tiny-queue@^0.2.0:
|
|||
integrity sha1-JaZ/LG4lOyypQZd7XvdELvl6YEY=
|
||||
|
||||
tmpl@1.0.x:
|
||||
version "1.0.4"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
|
||||
integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
|
||||
integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
|
||||
|
||||
to-fast-properties@^2.0.0:
|
||||
version "2.0.0"
|
||||
|
|