-
+
-
+ {/if}
+
+
+
{/if}
diff --git a/packages/builder/src/stores/backend/datasources.js b/packages/builder/src/stores/backend/datasources.js
index 5c6ed3f2cb..7c74074e03 100644
--- a/packages/builder/src/stores/backend/datasources.js
+++ b/packages/builder/src/stores/backend/datasources.js
@@ -1,4 +1,4 @@
-import { writable } from "svelte/store"
+import { writable, get } from "svelte/store"
import { queries, tables, views } from "./"
import api from "../../builderStore/api"
@@ -8,7 +8,8 @@ export const INITIAL_DATASOURCE_VALUES = {
}
export function createDatasourcesStore() {
- const { subscribe, update, set } = writable(INITIAL_DATASOURCE_VALUES)
+ const store = writable(INITIAL_DATASOURCE_VALUES)
+ const { subscribe, update, set } = store
return {
subscribe,
@@ -21,7 +22,15 @@ export function createDatasourcesStore() {
fetch: async () => {
const response = await api.get(`/api/datasources`)
const json = await response.json()
- update(state => ({ ...state, list: json, selected: null }))
+
+ // Clear selected if it no longer exists, otherwise keep it
+ const selected = get(store).selected
+ let nextSelected = null
+ if (selected && json.find(source => source._id === selected)) {
+ nextSelected = selected
+ }
+
+ update(state => ({ ...state, list: json, selected: nextSelected }))
return json
},
select: async datasourceId => {
@@ -58,7 +67,7 @@ export function createDatasourcesStore() {
})
return json
},
- save: async datasource => {
+ save: async (datasource, fetchSchema = false) => {
let response
if (datasource._id) {
response = await api.put(
@@ -66,7 +75,10 @@ export function createDatasourcesStore() {
datasource
)
} else {
- response = await api.post("/api/datasources", datasource)
+ response = await api.post("/api/datasources", {
+ datasource: datasource,
+ fetchSchema,
+ })
}
const json = await response.json()
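The new `fetch()` logic above is an instance of a general Svelte store pattern: keep a reference to the raw writable so the current value can be read synchronously with `get()` while deriving the next state. A minimal standalone sketch of the pattern, with sample data:

```js
import { writable, get } from "svelte/store"

const store = writable({ list: [], selected: null })

// Refresh the list while keeping the current selection, but only if the
// selected item still exists in the fresh data.
function refresh(newList) {
  const selected = get(store).selected // synchronous read of current state
  const stillExists = selected && newList.some(item => item._id === selected)
  store.update(state => ({
    ...state,
    list: newList,
    selected: stillExists ? selected : null,
  }))
}

refresh([{ _id: "ds_1" }]) // selection cleared unless "ds_1" was selected
```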
diff --git a/packages/builder/src/stores/backend/queries.js b/packages/builder/src/stores/backend/queries.js
index 2eeae29b9d..020a0c9420 100644
--- a/packages/builder/src/stores/backend/queries.js
+++ b/packages/builder/src/stores/backend/queries.js
@@ -1,5 +1,5 @@
import { writable, get } from "svelte/store"
-import { datasources, integrations, tables } from "./"
+import { datasources, integrations, tables, views } from "./"
import api from "builderStore/api"
export function createQueriesStore() {
@@ -55,10 +55,9 @@ export function createQueriesStore() {
},
select: query => {
update(state => ({ ...state, selected: query._id }))
- tables.update(state => ({
- ...state,
- selected: null,
- }))
+ views.unselect()
+ tables.unselect()
+ datasources.unselect()
},
unselect: () => {
update(state => ({ ...state, selected: null }))
diff --git a/packages/builder/src/stores/backend/tables.js b/packages/builder/src/stores/backend/tables.js
index e0b614a63e..161877f660 100644
--- a/packages/builder/src/stores/backend/tables.js
+++ b/packages/builder/src/stores/backend/tables.js
@@ -95,7 +95,13 @@ export function createTablesStore() {
selected: {},
}))
},
- saveField: ({ originalName, field, primaryDisplay = false, indexes }) => {
+ saveField: async ({
+ originalName,
+ field,
+ primaryDisplay = false,
+ indexes,
+ }) => {
+ let promise
update(state => {
// delete the original if renaming
// need to handle if the column had no name, empty string
@@ -126,9 +132,12 @@ export function createTablesStore() {
...state.draft.schema,
[field.name]: cloneDeep(field),
}
- save(state.draft)
+ promise = save(state.draft)
return state
})
+ if (promise) {
+ await promise
+ }
},
deleteField: field => {
update(state => {
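The `saveField()` change above surfaces the save's promise to its caller even though Svelte's `update()` callback must return state synchronously. A minimal sketch of the capture-and-await-outside pattern (helper names illustrative):

```js
import { writable } from "svelte/store"

// The update() callback must return state synchronously, so the async save is
// captured as a promise inside the callback and awaited once update() returns.
async function saveDraft(store, save) {
  let promise
  store.update(state => {
    promise = save(state.draft) // start the async work without awaiting here
    return state
  })
  if (promise) {
    await promise // the caller can now await real persistence
  }
}

// usage: await saveDraft(tablesStore, draft => api.post("/api/tables", draft))
const tablesStore = writable({ draft: { name: "users", schema: {} } })
```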
diff --git a/packages/builder/src/stores/backend/views.js b/packages/builder/src/stores/backend/views.js
index 0b15d18fa5..14c7bf92a4 100644
--- a/packages/builder/src/stores/backend/views.js
+++ b/packages/builder/src/stores/backend/views.js
@@ -16,6 +16,7 @@ export function createViewsStore() {
...state,
selected: view,
}))
+ tables.unselect()
queries.unselect()
datasources.unselect()
},
diff --git a/packages/builder/src/stores/portal/admin.js b/packages/builder/src/stores/portal/admin.js
index 44ff63a082..ebe8294060 100644
--- a/packages/builder/src/stores/portal/admin.js
+++ b/packages/builder/src/stores/portal/admin.js
@@ -7,6 +7,7 @@ export function createAdminStore() {
loaded: false,
multiTenancy: false,
cloud: false,
+ disableAccountPortal: false,
accountPortalUrl: "",
onboardingProgress: 0,
checklist: {
@@ -47,12 +48,14 @@ export function createAdminStore() {
async function getEnvironment() {
let multiTenancyEnabled = false
let cloud = false
+ let disableAccountPortal = false
let accountPortalUrl = ""
try {
const response = await api.get(`/api/system/environment`)
const json = await response.json()
multiTenancyEnabled = json.multiTenancy
cloud = json.cloud
+ disableAccountPortal = json.disableAccountPortal
accountPortalUrl = json.accountPortalUrl
} catch (err) {
// just let it stay disabled
@@ -60,6 +63,7 @@ export function createAdminStore() {
admin.update(store => {
store.multiTenancy = multiTenancyEnabled
store.cloud = cloud
+ store.disableAccountPortal = disableAccountPortal
store.accountPortalUrl = accountPortalUrl
return store
})
diff --git a/packages/builder/src/stores/portal/auth.js b/packages/builder/src/stores/portal/auth.js
index fe8f87cfb2..f522095473 100644
--- a/packages/builder/src/stores/portal/auth.js
+++ b/packages/builder/src/stores/portal/auth.js
@@ -1,6 +1,7 @@
import { derived, writable, get } from "svelte/store"
import api from "../../builderStore/api"
import { admin } from "stores/portal"
+import analytics from "analytics"
export function createAuthStore() {
const auth = writable({
@@ -49,6 +50,19 @@ export function createAuthStore() {
}
return store
})
+
+ if (user) {
+ analytics.activate().then(() => {
+ analytics.identify(user._id, user)
+ analytics.showChat({
+ email: user.email,
+ created_at: user.createdAt || Date.now(),
+ name: user.name,
+ user_id: user._id,
+ tenant: user.tenantId,
+ })
+ })
+ }
}
async function setOrganisation(tenantId) {
@@ -66,6 +80,7 @@ export function createAuthStore() {
return {
subscribe: store.subscribe,
+ setOrganisation: setOrganisation,
checkQueryString: async () => {
const urlParams = new URLSearchParams(window.location.search)
if (urlParams.has("tenantId")) {
diff --git a/packages/builder/vite.config.js b/packages/builder/vite.config.js
index d8b8dbba1d..12b45e7cf8 100644
--- a/packages/builder/vite.config.js
+++ b/packages/builder/vite.config.js
@@ -22,6 +22,9 @@ export default ({ mode }) => {
isProduction ? "production" : "development"
),
"process.env.POSTHOG_TOKEN": JSON.stringify(process.env.POSTHOG_TOKEN),
+ "process.env.INTERCOM_TOKEN": JSON.stringify(
+ process.env.INTERCOM_TOKEN
+ ),
"process.env.POSTHOG_URL": JSON.stringify(process.env.POSTHOG_URL),
"process.env.SENTRY_DSN": JSON.stringify(process.env.SENTRY_DSN),
}),
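For context on the `define` entry added here: Vite performs a compile-time text substitution, so the token is baked into the bundle rather than read at runtime. A minimal sketch of the effect (token value illustrative):

```js
// Builder source can branch on the token as if it were a runtime lookup:
if (process.env.INTERCOM_TOKEN) {
  // enable the Intercom chat widget
}
// With INTERCOM_TOKEN=abc123 set when vite builds, the emitted bundle
// contains the JSON.stringify'd literal in place of the expression:
//   if ("abc123") { ... }
```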
diff --git a/packages/builder/yarn.lock b/packages/builder/yarn.lock
index 5257ba0c37..443d00680b 100644
--- a/packages/builder/yarn.lock
+++ b/packages/builder/yarn.lock
@@ -5582,9 +5582,9 @@ tmp@~0.2.1:
rimraf "^3.0.0"
tmpl@1.0.x:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
- integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+ integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-fast-properties@^2.0.0:
version "2.0.0"
diff --git a/packages/cli/package.json b/packages/cli/package.json
index b956d1d27b..55bc2bb3de 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
- "version": "0.9.125-alpha.17",
+ "version": "0.9.146-alpha.5",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
diff --git a/packages/client/manifest.json b/packages/client/manifest.json
index 7bef9c2e4b..2e64b1fb4c 100644
--- a/packages/client/manifest.json
+++ b/packages/client/manifest.json
@@ -2389,6 +2389,7 @@
"icon": "Data",
"illegalChildren": ["section"],
"hasChildren": true,
+ "actions": ["RefreshDatasource"],
"settings": [
{
"type": "dataSource",
diff --git a/packages/client/package.json b/packages/client/package.json
index 217797bf56..aebaf3f903 100644
--- a/packages/client/package.json
+++ b/packages/client/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
- "version": "0.9.125-alpha.17",
+ "version": "0.9.146-alpha.5",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -19,8 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
- "@budibase/bbui": "^0.9.125-alpha.17",
- "@budibase/string-templates": "^0.9.125-alpha.17",
+ "@budibase/bbui": "^0.9.146-alpha.5",
+ "@budibase/standard-components": "^0.9.139",
+ "@budibase/string-templates": "^0.9.146-alpha.5",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"
diff --git a/packages/client/rollup.config.js b/packages/client/rollup.config.js
index f404f93c4c..a814303069 100644
--- a/packages/client/rollup.config.js
+++ b/packages/client/rollup.config.js
@@ -58,6 +58,10 @@ export default {
find: "sdk",
replacement: path.resolve("./src/sdk"),
},
+ {
+ find: "builder",
+ replacement: path.resolve("../builder"),
+ },
],
}),
svelte({
diff --git a/packages/client/src/components/ClientApp.svelte b/packages/client/src/components/ClientApp.svelte
index fb9117832f..7d231b3762 100644
--- a/packages/client/src/components/ClientApp.svelte
+++ b/packages/client/src/components/ClientApp.svelte
@@ -24,7 +24,7 @@
import HoverIndicator from "components/preview/HoverIndicator.svelte"
import CustomThemeWrapper from "./CustomThemeWrapper.svelte"
import DNDHandler from "components/preview/DNDHandler.svelte"
- import ErrorSVG from "../../../builder/assets/error.svg"
+ import ErrorSVG from "builder/assets/error.svg"
// Provide contexts
setContext("sdk", SDK)
diff --git a/packages/client/src/components/app/DataProvider.svelte b/packages/client/src/components/app/DataProvider.svelte
index bdc9001445..991c41b77d 100644
--- a/packages/client/src/components/app/DataProvider.svelte
+++ b/packages/client/src/components/app/DataProvider.svelte
@@ -6,7 +6,7 @@
luceneQuery,
luceneSort,
luceneLimit,
- } from "utils/lucene"
+ } from "builder/src/helpers/lucene"
import Placeholder from "./Placeholder.svelte"
export let dataSource
diff --git a/packages/client/src/stores/state.js b/packages/client/src/stores/state.js
index cb20149de8..ce977c4333 100644
--- a/packages/client/src/stores/state.js
+++ b/packages/client/src/stores/state.js
@@ -1,5 +1,5 @@
import { writable, get, derived } from "svelte/store"
-import { localStorageStore } from "../../../builder/src/builderStore/store/localStorage"
+import { localStorageStore } from "builder/src/builderStore/store/localStorage"
import { appStore } from "./app"
const createStateStore = () => {
diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js
index aeefe6163c..11aa033c1d 100644
--- a/packages/client/src/utils/buttonActions.js
+++ b/packages/client/src/utils/buttonActions.js
@@ -88,7 +88,7 @@ const validateFormHandler = async (action, context) => {
)
}
-const refreshDatasourceHandler = async (action, context) => {
+const refreshDataProviderHandler = async (action, context) => {
return await executeActionHandler(
context,
action.parameters.componentId,
@@ -139,7 +139,7 @@ const handlerMap = {
["Execute Query"]: queryExecutionHandler,
["Trigger Automation"]: triggerAutomationHandler,
["Validate Form"]: validateFormHandler,
- ["Refresh Datasource"]: refreshDatasourceHandler,
+ ["Refresh Data Provider"]: refreshDataProviderHandler,
["Log Out"]: logoutHandler,
["Clear Form"]: clearFormHandler,
["Close Screen Modal"]: closeScreenModalHandler,
diff --git a/packages/client/src/utils/conditions.js b/packages/client/src/utils/conditions.js
index 964a63d3fd..2791fa169e 100644
--- a/packages/client/src/utils/conditions.js
+++ b/packages/client/src/utils/conditions.js
@@ -1,4 +1,4 @@
-import { buildLuceneQuery, luceneQuery } from "./lucene"
+import { buildLuceneQuery, luceneQuery } from "builder/src/helpers/lucene"
export const getActiveConditions = conditions => {
if (!conditions?.length) {
diff --git a/packages/client/src/utils/lucene.js b/packages/client/src/utils/lucene.js
deleted file mode 100644
index 03baa751cc..0000000000
--- a/packages/client/src/utils/lucene.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * Builds a lucene JSON query from the filter structure generated in the builder
- * @param filter the builder filter structure
- */
-export const buildLuceneQuery = filter => {
- let query = {
- string: {},
- fuzzy: {},
- range: {},
- equal: {},
- notEqual: {},
- empty: {},
- notEmpty: {},
- contains: {},
- notContains: {},
- }
- if (Array.isArray(filter)) {
- filter.forEach(expression => {
- let { operator, field, type, value } = expression
- // Parse all values into correct types
- if (type === "datetime" && value) {
- value = new Date(value).toISOString()
- }
- if (type === "number") {
- value = parseFloat(value)
- }
- if (type === "boolean") {
- value = `${value}`?.toLowerCase() === "true"
- }
- if (operator.startsWith("range")) {
- if (!query.range[field]) {
- query.range[field] = {
- low:
- type === "number"
- ? Number.MIN_SAFE_INTEGER
- : "0000-00-00T00:00:00.000Z",
- high:
- type === "number"
- ? Number.MAX_SAFE_INTEGER
- : "9999-00-00T00:00:00.000Z",
- }
- }
- if (operator === "rangeLow" && value != null && value !== "") {
- query.range[field].low = value
- } else if (operator === "rangeHigh" && value != null && value !== "") {
- query.range[field].high = value
- }
- } else if (query[operator]) {
- if (type === "boolean") {
- // Transform boolean filters to cope with null.
- // "equals false" needs to be "not equals true"
- // "not equals false" needs to be "equals true"
- if (operator === "equal" && value === false) {
- query.notEqual[field] = true
- } else if (operator === "notEqual" && value === false) {
- query.equal[field] = true
- } else {
- query[operator][field] = value
- }
- } else {
- query[operator][field] = value
- }
- }
- })
- }
-
- return query
-}
-
-/**
- * Performs a client-side lucene search on an array of data
- * @param docs the data
- * @param query the JSON lucene query
- */
-export const luceneQuery = (docs, query) => {
- if (!query) {
- return docs
- }
-
- // Iterates over a set of filters and evaluates a fail function against a doc
- const match = (type, failFn) => doc => {
- const filters = Object.entries(query[type] || {})
- for (let i = 0; i < filters.length; i++) {
- if (failFn(filters[i][0], filters[i][1], doc)) {
- return false
- }
- }
- return true
- }
-
- // Process a string match (fails if the value does not start with the string)
- const stringMatch = match("string", (key, value, doc) => {
- return !doc[key] || !doc[key].startsWith(value)
- })
-
- // Process a fuzzy match (treat the same as starts with when running locally)
- const fuzzyMatch = match("fuzzy", (key, value, doc) => {
- return !doc[key] || !doc[key].startsWith(value)
- })
-
- // Process a range match
- const rangeMatch = match("range", (key, value, doc) => {
- return !doc[key] || doc[key] < value.low || doc[key] > value.high
- })
-
- // Process an equal match (fails if the value is different)
- const equalMatch = match("equal", (key, value, doc) => {
- return value != null && value !== "" && doc[key] !== value
- })
-
- // Process a not-equal match (fails if the value is the same)
- const notEqualMatch = match("notEqual", (key, value, doc) => {
- return value != null && value !== "" && doc[key] === value
- })
-
- // Process an empty match (fails if the value is not empty)
- const emptyMatch = match("empty", (key, value, doc) => {
- return doc[key] != null && doc[key] !== ""
- })
-
- // Process a not-empty match (fails is the value is empty)
- const notEmptyMatch = match("notEmpty", (key, value, doc) => {
- return doc[key] == null || doc[key] === ""
- })
-
- // Match a document against all criteria
- const docMatch = doc => {
- return (
- stringMatch(doc) &&
- fuzzyMatch(doc) &&
- rangeMatch(doc) &&
- equalMatch(doc) &&
- notEqualMatch(doc) &&
- emptyMatch(doc) &&
- notEmptyMatch(doc)
- )
- }
-
- // Process all docs
- return docs.filter(docMatch)
-}
-
-/**
- * Performs a client-side sort from the equivalent server-side lucene sort
- * parameters.
- * @param docs the data
- * @param sort the sort column
- * @param sortOrder the sort order ("ascending" or "descending")
- * @param sortType the type of sort ("string" or "number")
- */
-export const luceneSort = (docs, sort, sortOrder, sortType = "string") => {
- if (!sort || !sortOrder || !sortType) {
- return docs
- }
- const parse = sortType === "string" ? x => `${x}` : x => parseFloat(x)
- return docs.slice().sort((a, b) => {
- const colA = parse(a[sort])
- const colB = parse(b[sort])
- if (sortOrder === "Descending") {
- return colA > colB ? -1 : 1
- } else {
- return colA > colB ? 1 : -1
- }
- })
-}
-
-/**
- * Limits the specified docs to the specified number of rows from the equivalent
- * server-side lucene limit parameters.
- * @param docs the data
- * @param limit the number of docs to limit to
- */
-export const luceneLimit = (docs, limit) => {
- const numLimit = parseFloat(limit)
- if (isNaN(numLimit)) {
- return docs
- }
- return docs.slice(0, numLimit)
-}
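This file isn't gone, it has moved: per the import changes earlier in this diff, the same helpers are now shared from builder/src/helpers/lucene so builder and client use one implementation. A minimal usage sketch against the API above (sample data ours):

```js
import {
  buildLuceneQuery,
  luceneQuery,
  luceneSort,
  luceneLimit,
} from "builder/src/helpers/lucene"

const docs = [
  { name: "Mike", age: 30 },
  { name: "Mel", age: 25 },
]

// Builder-style filter expressions -> lucene JSON query
const query = buildLuceneQuery([
  { operator: "string", field: "name", type: "string", value: "M" },
  { operator: "rangeLow", field: "age", type: "number", value: 26 },
])

const results = luceneLimit(
  luceneSort(luceneQuery(docs, query), "age", "Descending", "number"),
  10
)
// -> [{ name: "Mike", age: 30 }]
```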
diff --git a/packages/client/yarn.lock b/packages/client/yarn.lock
index 1e2c654b21..bdfb4fb699 100644
--- a/packages/client/yarn.lock
+++ b/packages/client/yarn.lock
@@ -28,10 +28,59 @@
chalk "^2.0.0"
js-tokens "^4.0.0"
-"@budibase/bbui@^0.9.125-alpha.17":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad"
- integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ==
+"@budibase/bbui@^0.9.139":
+ version "0.9.142"
+ resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.142.tgz#7edbda7967c9e5dfc96e5be5231656e5aab8d0e3"
+ integrity sha512-m2YlqqH87T4RwqD/oGhH6twHIgvFv4oUMEhKpkgLsbxjXVLVD0OOF7WqjpDnSa4khVQaixjdkI/Jiw2qhBUSaA==
+ dependencies:
+ "@adobe/spectrum-css-workflow-icons" "^1.2.1"
+ "@spectrum-css/actionbutton" "^1.0.1"
+ "@spectrum-css/actiongroup" "^1.0.1"
+ "@spectrum-css/avatar" "^3.0.2"
+ "@spectrum-css/button" "^3.0.1"
+ "@spectrum-css/buttongroup" "^3.0.2"
+ "@spectrum-css/checkbox" "^3.0.2"
+ "@spectrum-css/dialog" "^3.0.1"
+ "@spectrum-css/divider" "^1.0.3"
+ "@spectrum-css/dropzone" "^3.0.2"
+ "@spectrum-css/fieldgroup" "^3.0.2"
+ "@spectrum-css/fieldlabel" "^3.0.1"
+ "@spectrum-css/icon" "^3.0.1"
+ "@spectrum-css/illustratedmessage" "^3.0.2"
+ "@spectrum-css/inputgroup" "^3.0.2"
+ "@spectrum-css/label" "^2.0.10"
+ "@spectrum-css/link" "^3.1.1"
+ "@spectrum-css/menu" "^3.0.1"
+ "@spectrum-css/modal" "^3.0.1"
+ "@spectrum-css/pagination" "^3.0.3"
+ "@spectrum-css/picker" "^1.0.1"
+ "@spectrum-css/popover" "^3.0.1"
+ "@spectrum-css/progressbar" "^1.0.2"
+ "@spectrum-css/progresscircle" "^1.0.2"
+ "@spectrum-css/radio" "^3.0.2"
+ "@spectrum-css/search" "^3.0.2"
+ "@spectrum-css/sidenav" "^3.0.2"
+ "@spectrum-css/statuslight" "^3.0.2"
+ "@spectrum-css/stepper" "^3.0.3"
+ "@spectrum-css/switch" "^1.0.2"
+ "@spectrum-css/table" "^3.0.1"
+ "@spectrum-css/tabs" "^3.0.1"
+ "@spectrum-css/tags" "^3.0.2"
+ "@spectrum-css/textfield" "^3.0.1"
+ "@spectrum-css/toast" "^3.0.1"
+ "@spectrum-css/tooltip" "^3.0.3"
+ "@spectrum-css/treeview" "^3.0.2"
+ "@spectrum-css/typography" "^3.0.1"
+ "@spectrum-css/underlay" "^2.0.9"
+ "@spectrum-css/vars" "^3.0.1"
+ dayjs "^1.10.4"
+ svelte-flatpickr "^3.1.0"
+ svelte-portal "^1.0.0"
+
+"@budibase/bbui@^0.9.146-alpha.3":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.146.tgz#7689b2c0f148321e62969181e3f6549f03dd3e78"
+ integrity sha512-Mq0oMyaN18Dg5e0IPtPXSGmu/TS4B74gW+l2ypJDNTzSRm934DOAPghDgkb53rFNZhsovCYjixJZmesUcv2o3g==
dependencies:
"@adobe/spectrum-css-workflow-icons" "^1.2.1"
"@spectrum-css/actionbutton" "^1.0.1"
@@ -105,10 +154,28 @@
to-gfm-code-block "^0.1.1"
year "^0.2.1"
-"@budibase/string-templates@^0.9.125-alpha.17":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055"
- integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w==
+"@budibase/standard-components@^0.9.139":
+ version "0.9.139"
+ resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3"
+ integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw==
+ dependencies:
+ "@budibase/bbui" "^0.9.139"
+ "@spectrum-css/button" "^3.0.3"
+ "@spectrum-css/card" "^3.0.3"
+ "@spectrum-css/divider" "^1.0.3"
+ "@spectrum-css/link" "^3.1.3"
+ "@spectrum-css/page" "^3.0.1"
+ "@spectrum-css/typography" "^3.0.2"
+ "@spectrum-css/vars" "^3.0.1"
+ apexcharts "^3.22.1"
+ dayjs "^1.10.5"
+ svelte-apexcharts "^1.0.2"
+ svelte-flatpickr "^3.1.0"
+
+"@budibase/string-templates@^0.9.146-alpha.3":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.146.tgz#85249c7a8777a5f0c280af6f6d0e3d3ff0bf20b5"
+ integrity sha512-4f91SVUaTKseB+j7ycWbP54XiqiFZ6bZvcKgzsg1mLF+VVJ1/ALUsLvCRaj6SlcSHrhhALiGVR1z18KOyBWoKw==
dependencies:
"@budibase/handlebars-helpers" "^0.11.4"
dayjs "^1.10.4"
@@ -2169,9 +2236,9 @@ is-extglob@^2.1.1:
integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
is-glob@^4.0.1:
- version "4.0.1"
- resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
- integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
+ version "4.0.3"
+ resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
+ integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
dependencies:
is-extglob "^2.1.1"
diff --git a/packages/server/package.json b/packages/server/package.json
index 57d1391d5d..ac62454ba9 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
- "version": "0.9.125-alpha.17",
+ "version": "0.9.146-alpha.5",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@@ -23,6 +23,7 @@
"format": "prettier --config ../../.prettierrc.json 'src/**/*.ts' --write",
"lint": "eslint --fix src/",
"lint:fix": "yarn run format && yarn run lint",
+ "initialise": "node scripts/initialise.js",
"multi:enable": "node scripts/multiTenancy.js enable",
"multi:disable": "node scripts/multiTenancy.js disable",
"selfhost:enable": "node scripts/selfhost.js enable",
@@ -49,8 +50,7 @@
"!src/automations/tests/**/*",
"!src/utilities/fileProcessor.js",
"!src/utilities/fileSystem/**/*",
- "!src/utilities/redis.js",
- "!src/api/controllers/row/internalSearch.js"
+ "!src/utilities/redis.js"
],
"coverageReporters": [
"lcov",
@@ -64,9 +64,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.125-alpha.17",
- "@budibase/client": "^0.9.125-alpha.17",
- "@budibase/string-templates": "^0.9.125-alpha.17",
+ "@budibase/auth": "^0.9.146-alpha.5",
+ "@budibase/client": "^0.9.146-alpha.5",
+ "@budibase/string-templates": "^0.9.146-alpha.5",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
@@ -96,14 +96,16 @@
"koa-session": "5.12.0",
"koa-static": "5.0.0",
"lodash": "4.17.21",
+ "memorystream": "^0.3.1",
"mongodb": "3.6.3",
"mssql": "6.2.3",
- "mysql": "^2.18.1",
+ "mysql": "2.18.1",
"node-fetch": "2.6.0",
"open": "7.3.0",
"pg": "8.5.1",
"pino-pretty": "4.0.0",
"pouchdb": "7.2.1",
+ "pouchdb-adapter-memory": "^7.2.1",
"pouchdb-all-dbs": "1.0.2",
"pouchdb-find": "^7.2.2",
"pouchdb-replication-stream": "1.2.9",
@@ -118,6 +120,7 @@
"devDependencies": {
"@babel/core": "^7.14.3",
"@babel/preset-env": "^7.14.4",
+ "@budibase/standard-components": "^0.9.139",
"@jest/test-sequencer": "^24.8.0",
"@types/bull": "^3.15.1",
"@types/jest": "^26.0.23",
@@ -132,7 +135,6 @@
"express": "^4.17.1",
"jest": "^27.0.5",
"nodemon": "^2.0.4",
- "pouchdb-adapter-memory": "^7.2.1",
"prettier": "^2.3.1",
"rimraf": "^3.0.2",
"supertest": "^4.0.2",
diff --git a/packages/server/scripts/dev/manage.js b/packages/server/scripts/dev/manage.js
index 2557f88adf..bd91056f84 100644
--- a/packages/server/scripts/dev/manage.js
+++ b/packages/server/scripts/dev/manage.js
@@ -37,7 +37,7 @@ async function init() {
const envFileJson = {
PORT: 4001,
MINIO_URL: "http://localhost:10000/",
- COUCH_DB_URL: "http://@localhost:10000/db/",
+ COUCH_DB_URL: "http://budibase:budibase@localhost:10000/db/",
REDIS_URL: "localhost:6379",
WORKER_URL: "http://localhost:4002",
INTERNAL_API_KEY: "budibase",
@@ -48,6 +48,7 @@ async function init() {
COUCH_DB_PASSWORD: "budibase",
COUCH_DB_USER: "budibase",
SELF_HOSTED: 1,
+ DISABLE_ACCOUNT_PORTAL: "",
MULTI_TENANCY: "",
}
let envFile = ""
diff --git a/packages/server/scripts/integrations/pg-json/docker-compose.yml b/packages/server/scripts/integrations/pg-json/docker-compose.yml
new file mode 100644
index 0000000000..6bc307a86d
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/docker-compose.yml
@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+ db:
+ container_name: postgres-json
+ image: postgres
+ restart: always
+ environment:
+ POSTGRES_USER: root
+ POSTGRES_PASSWORD: root
+ POSTGRES_DB: main
+ ports:
+ - "5432:5432"
+ volumes:
+ #- pg_data:/var/lib/postgresql/data/
+ - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+ pgadmin:
+ container_name: pgadmin-json
+ image: dpage/pgadmin4
+ restart: always
+ environment:
+ PGADMIN_DEFAULT_EMAIL: root@root.com
+ PGADMIN_DEFAULT_PASSWORD: root
+ ports:
+ - "5050:80"
+
+#volumes:
+# pg_data:
diff --git a/packages/server/scripts/integrations/pg-json/init.sql b/packages/server/scripts/integrations/pg-json/init.sql
new file mode 100644
index 0000000000..06a5b4901d
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/init.sql
@@ -0,0 +1,22 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE jsonTable (
+ id character varying(32),
+ data jsonb,
+ text text
+);
+
+INSERT INTO jsonTable (id, data) VALUES ('1', '{"id": 1, "age": 1, "name": "Mike", "newline": "this is text with a\n newline in it"}');
+
+CREATE VIEW jsonView AS SELECT
+ x.id,
+ x.age,
+ x.name,
+ x.newline
+FROM
+ jsonTable c,
+ LATERAL jsonb_to_record(c.data) x (id character varying(32),
+ age BIGINT,
+ name TEXT,
+ newline TEXT
+ );
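A quick way to exercise the view above from Node, using the `pg` client the server package already depends on; connection details mirror the docker-compose file in this directory:

```js
const { Client } = require("pg")

async function main() {
  // credentials and port mirror docker-compose.yml above
  const client = new Client({
    host: "localhost",
    port: 5432,
    user: "root",
    password: "root",
    database: "main",
  })
  await client.connect()
  // Postgres folds unquoted identifiers to lowercase, so jsonView is stored
  // as jsonview; BIGINT columns come back from pg as strings.
  const { rows } = await client.query("SELECT id, age, name FROM jsonView")
  console.log(rows) // e.g. [{ id: '1', age: '1', name: 'Mike' }]
  await client.end()
}

main().catch(console.error)
```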
diff --git a/packages/server/scripts/integrations/pg-json/reset.sh b/packages/server/scripts/integrations/pg-json/reset.sh
new file mode 100755
index 0000000000..32778bd11f
--- /dev/null
+++ b/packages/server/scripts/integrations/pg-json/reset.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
diff --git a/packages/server/scripts/integrations/postgres/docker-compose.yml b/packages/server/scripts/integrations/postgres/docker-compose.yml
index e2bba9f38e..4dfcb0e1ad 100644
--- a/packages/server/scripts/integrations/postgres/docker-compose.yml
+++ b/packages/server/scripts/integrations/postgres/docker-compose.yml
@@ -15,7 +15,7 @@ services:
- ./init.sql:/docker-entrypoint-initdb.d/init.sql
pgadmin:
- container_name: pgadmin
+ container_name: pgadmin-pg
image: dpage/pgadmin4
restart: always
environment:
diff --git a/packages/server/scripts/integrations/service-vehicles/docker-compose.yml b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml
new file mode 100644
index 0000000000..7473e540db
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/docker-compose.yml
@@ -0,0 +1,28 @@
+version: "3.8"
+services:
+ db:
+ container_name: postgres-vehicle
+ image: postgres
+ restart: always
+ environment:
+ POSTGRES_USER: root
+ POSTGRES_PASSWORD: root
+ POSTGRES_DB: main
+ ports:
+ - "5432:5432"
+ volumes:
+ #- pg_data:/var/lib/postgresql/data/
+ - ./init.sql:/docker-entrypoint-initdb.d/init.sql
+
+ pgadmin:
+ container_name: pgadmin
+ image: dpage/pgadmin4
+ restart: always
+ environment:
+ PGADMIN_DEFAULT_EMAIL: root@root.com
+ PGADMIN_DEFAULT_PASSWORD: root
+ ports:
+ - "5050:80"
+
+#volumes:
+# pg_data:
diff --git a/packages/server/scripts/integrations/service-vehicles/init.sql b/packages/server/scripts/integrations/service-vehicles/init.sql
new file mode 100644
index 0000000000..3e0485313e
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/init.sql
@@ -0,0 +1,52 @@
+SELECT 'CREATE DATABASE main'
+WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
+CREATE TABLE Vehicles (
+ id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+ Registration text COLLATE pg_catalog."default",
+ Make text COLLATE pg_catalog."default",
+ Model text COLLATE pg_catalog."default",
+ Colour text COLLATE pg_catalog."default",
+ Year smallint,
+ CONSTRAINT Vehicles_pkey PRIMARY KEY (id)
+);
+
+CREATE TABLE ServiceLog (
+ id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 MINVALUE 1 MAXVALUE 9223372036854775807 CACHE 1 ),
+ Description text COLLATE pg_catalog."default",
+ VehicleId bigint,
+ ServiceDate timestamp without time zone,
+ Category text COLLATE pg_catalog."default",
+ Mileage bigint,
+ CONSTRAINT ServiceLog_pkey PRIMARY KEY (id),
+ CONSTRAINT vehicle_foreign_key FOREIGN KEY (VehicleId)
+ REFERENCES Vehicles (id) MATCH SIMPLE
+ ON UPDATE NO ACTION
+ ON DELETE NO ACTION
+);
+
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('FAZ 9837','Volkswagen','Polo','White',2002);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('JHI 8827','BMW','M3','Black',2013);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('D903PI','Volvo','XC40','Grey',2014);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('YFI002','Volkswagen','Golf','Dark Blue',2018);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('HGT5677','Skoda','Octavia','Graphite',2009);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('PPF9276','Skoda','Octavia','Graphite',2021);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('J893FT','Toyota','Corolla','Red',2015);
+INSERT INTO Vehicles (Registration, Make, Model, Colour, Year)
+VALUES ('MJK776','Honda','HR-V','Silver',2015);
+
+
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Change front brakes', 1, '2021-05-04', 'Brakes', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Tyres - full set', 1, '2021-05-04', 'Brakes', 20667);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Engine tune up', 2, '2021-07-14', 'Brakes', 50889);
+INSERT INTO ServiceLog (Description, VehicleId, ServiceDate, Category, Mileage)
+VALUES ('Replace transmission', 3, '2021-09-26', 'Transmission', 98002);
diff --git a/packages/server/scripts/integrations/service-vehicles/reset.sh b/packages/server/scripts/integrations/service-vehicles/reset.sh
new file mode 100755
index 0000000000..32778bd11f
--- /dev/null
+++ b/packages/server/scripts/integrations/service-vehicles/reset.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
diff --git a/packages/server/scripts/jestSetup.js b/packages/server/scripts/jestSetup.js
index 0cff339fc2..1f3551bf5f 100644
--- a/packages/server/scripts/jestSetup.js
+++ b/packages/server/scripts/jestSetup.js
@@ -1,6 +1,7 @@
const { tmpdir } = require("os")
const env = require("../src/environment")
+env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("CLIENT_ID", "test-client-id")
diff --git a/packages/server/src/api/controllers/analytics.js b/packages/server/src/api/controllers/analytics.js
index d6e1a9ce5b..eb64bc87b9 100644
--- a/packages/server/src/api/controllers/analytics.js
+++ b/packages/server/src/api/controllers/analytics.js
@@ -2,6 +2,6 @@ const env = require("../../environment")
exports.isEnabled = async function (ctx) {
ctx.body = {
- enabled: env.ENABLE_ANALYTICS === "true",
+ enabled: !env.SELF_HOSTED && env.ENABLE_ANALYTICS === "true",
}
}
diff --git a/packages/server/src/api/controllers/application.js b/packages/server/src/api/controllers/application.js
index da0014c5f8..3a0b0f8ed8 100644
--- a/packages/server/src/api/controllers/application.js
+++ b/packages/server/src/api/controllers/application.js
@@ -31,7 +31,7 @@ const {
getDeployedApps,
removeAppFromUserRoles,
} = require("../../utilities/workerRequests")
-const { clientLibraryPath } = require("../../utilities")
+const { clientLibraryPath, stringToReadStream } = require("../../utilities")
const { getAllLocks } = require("../../utilities/redis")
const {
updateClientLibrary,
@@ -114,8 +114,13 @@ async function createInstance(template) {
// replicate the template data to the instance DB
// this is currently very hard to test, downloading and importing template files
- /* istanbul ignore next */
- if (template && template.useTemplate === "true") {
+ if (template && template.templateString) {
+ const { ok } = await db.load(stringToReadStream(template.templateString))
+ if (!ok) {
+ throw "Error loading database dump from memory."
+ }
+ } else if (template && template.useTemplate === "true") {
+ /* istanbul ignore next */
const { ok } = await db.load(await getTemplateStream(template))
if (!ok) {
throw "Error loading database dump from template."
@@ -191,10 +196,11 @@ exports.fetchAppPackage = async function (ctx) {
}
exports.create = async function (ctx) {
- const { useTemplate, templateKey } = ctx.request.body
+ const { useTemplate, templateKey, templateString } = ctx.request.body
const instanceConfig = {
useTemplate,
key: templateKey,
+ templateString,
}
if (ctx.request.files && ctx.request.files.templateFile) {
instanceConfig.file = ctx.request.files.templateFile
@@ -230,7 +236,12 @@ exports.create = async function (ctx) {
const response = await db.put(newApplication, { force: true })
newApplication._rev = response.rev
- await createEmptyAppPackage(ctx, newApplication)
+ // Only create the default home screens and layout if we aren't importing
+ // an app
+ if (useTemplate !== "true") {
+ await createEmptyAppPackage(ctx, newApplication)
+ }
+
/* istanbul ignore next */
if (!env.isTest()) {
await createApp(appId)
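The `stringToReadStream` helper imported here lives in the shared utilities module, which isn't part of this diff; `db.load()` consumes a stream, so the in-memory dump string has to be wrapped. A plausible minimal implementation, for context:

```js
const { Readable } = require("stream")

// Wrap an in-memory dump string in a readable stream so it can be piped
// into db.load() the same way a file stream would be.
const stringToReadStream = string =>
  new Readable({
    read() {
      this.push(string) // emit the whole dump as one chunk
      this.push(null) // then signal end-of-stream
    },
  })
```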
diff --git a/packages/server/src/api/controllers/cloud.js b/packages/server/src/api/controllers/cloud.js
new file mode 100644
index 0000000000..aac79bb9dd
--- /dev/null
+++ b/packages/server/src/api/controllers/cloud.js
@@ -0,0 +1,92 @@
+const env = require("../../environment")
+const { getAllApps } = require("@budibase/auth/db")
+const CouchDB = require("../../db")
+const {
+ exportDB,
+ sendTempFile,
+ readFileSync,
+} = require("../../utilities/fileSystem")
+const { stringToReadStream } = require("../../utilities")
+const { getGlobalDBName, getGlobalDB } = require("@budibase/auth/tenancy")
+const { create } = require("./application")
+const { getDocParams, DocumentTypes, isDevAppID } = require("../../db/utils")
+
+async function createApp(appName, appImport) {
+ const ctx = {
+ request: {
+ body: {
+ templateString: appImport,
+ name: appName,
+ },
+ },
+ }
+ return create(ctx)
+}
+
+exports.exportApps = async ctx => {
+ if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
+ ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
+ }
+ const apps = await getAllApps(CouchDB, { all: true })
+ const globalDBString = await exportDB(getGlobalDBName())
+ let allDBs = {
+ global: globalDBString,
+ }
+ for (let app of apps) {
+ // only export the dev apps as they will be the latest, the user can republish the apps
+ // in their self hosted environment
+ if (isDevAppID(app._id)) {
+ allDBs[app.name] = await exportDB(app._id)
+ }
+ }
+ const filename = `cloud-export-${new Date().getTime()}.txt`
+ ctx.attachment(filename)
+ ctx.body = sendTempFile(JSON.stringify(allDBs))
+}
+
+async function getAllDocType(db, docType) {
+ const response = await db.allDocs(
+ getDocParams(docType, null, {
+ include_docs: true,
+ })
+ )
+ return response.rows.map(row => row.doc)
+}
+
+exports.importApps = async ctx => {
+ if (!env.SELF_HOSTED || env.MULTI_TENANCY) {
+ ctx.throw(400, "Importing only allowed in self hosted environments.")
+ }
+ const apps = await getAllApps(CouchDB, { all: true })
+ if (
+ apps.length !== 0 ||
+ !ctx.request.files ||
+ !ctx.request.files.importFile
+ ) {
+ ctx.throw(
+ 400,
+ "Import file is required and environment must be fresh to import apps."
+ )
+ }
+ const importFile = ctx.request.files.importFile
+ const importString = readFileSync(importFile.path)
+ const dbs = JSON.parse(importString)
+ const globalDbImport = dbs.global
+ // remove from the list of apps
+ delete dbs.global
+ const globalDb = getGlobalDB()
+ // load the global db first
+ await globalDb.load(stringToReadStream(globalDbImport))
+ for (let [appName, appImport] of Object.entries(dbs)) {
+ await createApp(appName, appImport)
+ }
+ // once apps are created clean up the global db
+ let users = await getAllDocType(globalDb, DocumentTypes.USER)
+ for (let user of users) {
+ delete user.tenantId
+ }
+ await globalDb.bulkDocs(users)
+ ctx.body = {
+ message: "Apps successfully imported.",
+ }
+}
diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js
index 38b6e68932..2ff7c7f9b8 100644
--- a/packages/server/src/api/controllers/datasource.js
+++ b/packages/server/src/api/controllers/datasource.js
@@ -41,17 +41,12 @@ exports.fetch = async function (ctx) {
exports.buildSchemaFromDb = async function (ctx) {
const db = new CouchDB(ctx.appId)
- const datasourceId = ctx.params.datasourceId
- const datasource = await db.get(datasourceId)
+ const datasource = await db.get(ctx.params.datasourceId)
- const Connector = integrations[datasource.source]
+ const tables = await buildSchemaHelper(datasource)
+ datasource.entities = tables
- // Connect to the DB and build the schema
- const connector = new Connector(datasource.config)
- await connector.buildSchema(datasource._id, datasource.entities)
- datasource.entities = connector.tables
-
- const response = await db.post(datasource)
+ const response = await db.put(datasource)
datasource._rev = response.rev
ctx.body = datasource
@@ -81,15 +76,21 @@ exports.update = async function (ctx) {
exports.save = async function (ctx) {
const db = new CouchDB(ctx.appId)
- const plus = ctx.request.body.plus
+ const plus = ctx.request.body.datasource.plus
+ const fetchSchema = ctx.request.body.fetchSchema
const datasource = {
_id: generateDatasourceID({ plus }),
type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
- ...ctx.request.body,
+ ...ctx.request.body.datasource,
}
- const response = await db.post(datasource)
+ if (fetchSchema) {
+ let tables = await buildSchemaHelper(datasource)
+ datasource.entities = tables
+ }
+
+ const response = await db.put(datasource)
datasource._rev = response.rev
// Drain connection pools when configuration is changed
@@ -133,3 +134,14 @@ exports.query = async function (ctx) {
ctx.throw(400, err)
}
}
+
+const buildSchemaHelper = async datasource => {
+ const Connector = integrations[datasource.source]
+
+ // Connect to the DB and build the schema
+ const connector = new Connector(datasource.config)
+ await connector.buildSchema(datasource._id, datasource.entities)
+ datasource.entities = connector.tables
+
+ return connector.tables
+}
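`buildSchemaHelper` relies on the integration contract used throughout this file: each entry in `integrations` is a class that takes the datasource config and exposes `buildSchema(datasourceId, entities)` plus a `tables` property. A sketch of a conforming connector (names and schema illustrative):

```js
class ExampleConnector {
  constructor(config) {
    this.config = config
    this.tables = {}
  }

  // Introspect the remote datasource and populate this.tables with
  // table definitions keyed by table name.
  async buildSchema(datasourceId, existingEntities) {
    this.tables = {
      users: { name: "users", schema: { id: { type: "number" } } },
    }
  }
}

// Mirrors buildSchemaHelper above:
//   const connector = new ExampleConnector(datasource.config)
//   await connector.buildSchema(datasource._id, datasource.entities)
//   datasource.entities = connector.tables
```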
diff --git a/packages/server/src/api/controllers/permission.js b/packages/server/src/api/controllers/permission.js
index e269f8c41d..6c02663649 100644
--- a/packages/server/src/api/controllers/permission.js
+++ b/packages/server/src/api/controllers/permission.js
@@ -1,9 +1,4 @@
-const {
- getBuiltinPermissions,
- PermissionLevels,
- isPermissionLevelHigherThanRead,
- higherPermission,
-} = require("@budibase/auth/permissions")
+const { getBuiltinPermissions } = require("@budibase/auth/permissions")
const {
isBuiltin,
getDBRoleID,
@@ -16,6 +11,7 @@ const {
CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions,
} = require("../../utilities/security")
+const { removeFromArray } = require("../../utilities")
const PermissionUpdateType = {
REMOVE: "remove",
@@ -24,22 +20,6 @@ const PermissionUpdateType = {
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
-// quick function to perform a bit of weird logic, make sure fetch calls
-// always say a write role also has read permission
-function fetchLevelPerms(permissions, level, roleId) {
- if (!permissions) {
- permissions = {}
- }
- permissions[level] = roleId
- if (
- isPermissionLevelHigherThanRead(level) &&
- !permissions[PermissionLevels.READ]
- ) {
- permissions[PermissionLevels.READ] = roleId
- }
- return permissions
-}
-
// utility function to stop this repetition - permissions always stored under roles
async function getAllDBRoles(db) {
const body = await db.allDocs(
@@ -74,23 +54,31 @@ async function updatePermissionOnRole(
for (let role of dbRoles) {
let updated = false
const rolePermissions = role.permissions ? role.permissions : {}
+      // make sure it's an array, and handle migrating the old string format
+ if (
+ !rolePermissions[resourceId] ||
+ !Array.isArray(rolePermissions[resourceId])
+ ) {
+ rolePermissions[resourceId] =
+ typeof rolePermissions[resourceId] === "string"
+ ? [rolePermissions[resourceId]]
+ : []
+ }
// handle the removal/updating the role which has this permission first
// the updating (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
- rolePermissions[resourceId] === level
+ rolePermissions[resourceId].indexOf(level) !== -1
) {
- delete rolePermissions[resourceId]
+ removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the adding, we're on the correct role, at it to this
if (!remove && role._id === dbRoleId) {
- rolePermissions[resourceId] = higherPermission(
- rolePermissions[resourceId],
- level
- )
+ const set = new Set(rolePermissions[resourceId])
+ rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
@@ -127,12 +115,11 @@ exports.fetch = async function (ctx) {
continue
}
const roleId = getExternalRoleID(role._id)
- for (let [resource, level] of Object.entries(role.permissions)) {
- permissions[resource] = fetchLevelPerms(
- permissions[resource],
- level,
- roleId
- )
+ for (let [resource, levelArr] of Object.entries(role.permissions)) {
+        const levels = Array.isArray(levelArr) ? levelArr : [levelArr]
+ const perms = {}
+ levels.forEach(level => (perms[level] = roleId))
+ permissions[resource] = perms
}
}
// apply the base permissions
@@ -157,12 +144,13 @@ exports.getResourcePerms = async function (ctx) {
for (let level of SUPPORTED_LEVELS) {
// update the various roleIds in the resource permissions
for (let role of roles) {
- if (role.permissions && role.permissions[resourceId] === level) {
- permissions = fetchLevelPerms(
- permissions,
- level,
- getExternalRoleID(role._id)
- )
+        const rolePerms = role.permissions
+        if (
+          rolePerms &&
+          (rolePerms[resourceId] === level ||
+            (rolePerms[resourceId] || []).indexOf(level) !== -1)
+        ) {
+          permissions[level] = getExternalRoleID(role._id)
}
}
}
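For reference, the document shape this migration handles: a role's permissions previously stored one level string per resource, and now store an array of levels (values illustrative):

```js
// Before: one permission level per resource (legacy string format)
const legacyRole = {
  permissions: { table_abc123: "write" },
}
// After: an array of levels per resource, built via the Set logic above
const migratedRole = {
  permissions: { table_abc123: ["write", "read"] },
}
```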
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index eced518604..75c3e9b492 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -437,7 +437,11 @@ module External {
for (let [colName, { isMany, rows, tableId }] of Object.entries(
related
)) {
- const table = this.getTable(tableId)
+ const table: Table = this.getTable(tableId)
+      // if the column is part of the primary key (not a foreign key) skip it, nothing to do
+ if (table.primary && table.primary.indexOf(colName) !== -1) {
+ continue
+ }
for (let row of rows) {
const filters = buildFilters(generateIdForRow(row, table), {}, table)
// safety check, if there are no filters on deletion bad things happen
@@ -540,6 +544,9 @@ module External {
extra: {
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
},
+ meta: {
+ table,
+ },
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
diff --git a/packages/server/src/api/controllers/row/internal.js b/packages/server/src/api/controllers/row/internal.js
index 2299a20580..76c8188523 100644
--- a/packages/server/src/api/controllers/row/internal.js
+++ b/packages/server/src/api/controllers/row/internal.js
@@ -10,12 +10,21 @@ const userController = require("../user")
const {
inputProcessing,
outputProcessing,
+ processAutoColumn,
} = require("../../../utilities/rowProcessor")
const { FieldTypes } = require("../../../constants")
const { isEqual } = require("lodash")
const { validate, findRow } = require("./utils")
const { fullSearch, paginatedSearch } = require("./internalSearch")
const { getGlobalUsersFromMetadata } = require("../../../utilities/global")
+const inMemoryViews = require("../../../db/inMemoryView")
+const env = require("../../../environment")
+const {
+ migrateToInMemoryView,
+ migrateToDesignView,
+ getFromDesignDoc,
+ getFromMemoryDoc,
+} = require("../view/utils")
const CALCULATION_TYPES = {
SUM: "sum",
@@ -25,17 +34,75 @@ const CALCULATION_TYPES = {
async function storeResponse(ctx, db, row, oldTable, table) {
row.type = "row"
- const response = await db.put(row)
// don't worry about rev, tables handle rev/lastID updates
+ // if another row has been written since processing this will
+ // handle the auto ID clash
if (!isEqual(oldTable, table)) {
- await db.put(table)
+ try {
+ await db.put(table)
+ } catch (err) {
+ if (err.status === 409) {
+ const updatedTable = await db.get(table._id)
+ let response = processAutoColumn(null, updatedTable, row, {
+ reprocessing: true,
+ })
+ await db.put(response.table)
+ row = response.row
+ } else {
+ throw err
+ }
+ }
}
+ const response = await db.put(row)
row._rev = response.rev
// process the row before return, to include relationships
row = await outputProcessing(ctx, table, row, { squash: false })
return { row, table }
}
+// doesn't do the outputProcessing
+async function getRawTableData(ctx, db, tableId) {
+ let rows
+ if (tableId === InternalTables.USER_METADATA) {
+ await userController.fetchMetadata(ctx)
+ rows = ctx.body
+ } else {
+ const response = await db.allDocs(
+ getRowParams(tableId, null, {
+ include_docs: true,
+ })
+ )
+ rows = response.rows.map(row => row.doc)
+ }
+ return rows
+}
+
+async function getView(db, viewName) {
+ let mainGetter = env.SELF_HOSTED ? getFromDesignDoc : getFromMemoryDoc
+ let secondaryGetter = env.SELF_HOSTED ? getFromMemoryDoc : getFromDesignDoc
+ let migration = env.SELF_HOSTED ? migrateToDesignView : migrateToInMemoryView
+ let viewInfo,
+ migrate = false
+ try {
+ viewInfo = await mainGetter(db, viewName)
+ } catch (err) {
+    // check if it can be retrieved from the other location (may need migrating)
+ if (err.status !== 404) {
+ viewInfo = null
+ } else {
+ viewInfo = await secondaryGetter(db, viewName)
+ migrate = !!viewInfo
+ }
+ }
+ if (migrate) {
+ await migration(db, viewName)
+ }
+ if (!viewInfo) {
+ throw "View does not exist."
+ }
+ return viewInfo
+}
+
exports.patch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
@@ -139,15 +206,18 @@ exports.fetchView = async ctx => {
const db = new CouchDB(appId)
const { calculation, group, field } = ctx.query
- const designDoc = await db.get("_design/database")
- const viewInfo = designDoc.views[viewName]
- if (!viewInfo) {
- throw "View does not exist."
+ const viewInfo = await getView(db, viewName)
+ let response
+ if (env.SELF_HOSTED) {
+ response = await db.query(`database/${viewName}`, {
+ include_docs: !calculation,
+ group: !!group,
+ })
+ } else {
+ const tableId = viewInfo.meta.tableId
+ const data = await getRawTableData(ctx, db, tableId)
+ response = await inMemoryViews.runView(viewInfo, calculation, group, data)
}
- const response = await db.query(`database/${viewName}`, {
- include_docs: !calculation,
- group: !!group,
- })
let rows
if (!calculation) {
@@ -191,19 +261,9 @@ exports.fetch = async ctx => {
const appId = ctx.appId
const db = new CouchDB(appId)
- let rows,
-    table = await db.get(ctx.params.tableId)
- if (ctx.params.tableId === InternalTables.USER_METADATA) {
- await userController.fetchMetadata(ctx)
- rows = ctx.body
- } else {
- const response = await db.allDocs(
- getRowParams(ctx.params.tableId, null, {
- include_docs: true,
- })
- )
- rows = response.rows.map(row => row.doc)
- }
+ const tableId = ctx.params.tableId
+ let table = await db.get(tableId)
+ let rows = await getRawTableData(ctx, db, tableId)
return outputProcessing(ctx, table, rows)
}
@@ -286,6 +346,11 @@ exports.bulkDestroy = async ctx => {
}
exports.search = async ctx => {
+ // Fetch the whole table when running in cypress, as search doesn't work
+ if (env.isCypress()) {
+ return { rows: await exports.fetch(ctx) }
+ }
+
const appId = ctx.appId
const { tableId } = ctx.params
const db = new CouchDB(appId)
diff --git a/packages/server/src/api/controllers/row/utils.js b/packages/server/src/api/controllers/row/utils.js
index cb9a5e166c..ca6c782713 100644
--- a/packages/server/src/api/controllers/row/utils.js
+++ b/packages/server/src/api/controllers/row/utils.js
@@ -5,6 +5,7 @@ const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
+const { processStringSync } = require("@budibase/string-templates")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
@@ -73,6 +74,11 @@ exports.validate = async ({ appId, tableId, row, table }) => {
errors[fieldName] = "Field not in list"
}
})
+ } else if (table.schema[fieldName].type === FieldTypes.FORMULA) {
+ res = validateJs.single(
+ processStringSync(table.schema[fieldName].formula, row),
+ constraints
+ )
} else {
res = validateJs.single(row[fieldName], constraints)
}
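The formula branch above evaluates the field's template against the row before constraint checking, so it is the evaluated value, not the raw template, that gets validated. `processStringSync` is the synchronous entry point of @budibase/string-templates:

```js
const { processStringSync } = require("@budibase/string-templates")

// A formula field stores a handlebars-style template string
const row = { firstName: "Mike", lastName: "S" }
const formula = "{{ firstName }} {{ lastName }}"
const value = processStringSync(formula, row) // -> "Mike S"
// validateJs.single(value, constraints) is then run on the evaluated value
```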
diff --git a/packages/server/src/api/controllers/table/index.js b/packages/server/src/api/controllers/table/index.js
index 60b5167f66..c7b72cf1c8 100644
--- a/packages/server/src/api/controllers/table/index.js
+++ b/packages/server/src/api/controllers/table/index.js
@@ -145,7 +145,7 @@ exports.save = async function (ctx) {
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
- const result = await db.post(tableToSave)
+ const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js
index 154a9ba8f5..d263002da6 100644
--- a/packages/server/src/api/controllers/table/utils.js
+++ b/packages/server/src/api/controllers/table/utils.js
@@ -68,23 +68,17 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
// Populate the table with rows imported from CSV in a bulk update
const data = await csvParser.transform(dataImport)
+ let finalData = []
for (let i = 0; i < data.length; i++) {
let row = data[i]
row._id = generateRowID(table._id)
row.tableId = table._id
- const processed = inputProcessing(user, table, row)
+ const processed = inputProcessing(user, table, row, {
+ noAutoRelationships: true,
+ })
table = processed.table
row = processed.row
- // make sure link rows are up to date
- row = await linkRows.updateLinks({
- appId,
- eventType: linkRows.EventType.ROW_SAVE,
- row,
- tableId: row.tableId,
- table,
- })
-
for (let [fieldName, schema] of Object.entries(table.schema)) {
// check whether the options need to be updated for inclusion as part of the data import
if (
@@ -98,10 +92,20 @@ exports.handleDataImport = async (appId, user, table, dataImport) => {
]
}
}
- data[i] = row
+
+ // make sure link rows are up to date
+ finalData.push(
+ linkRows.updateLinks({
+ appId,
+ eventType: linkRows.EventType.ROW_SAVE,
+ row,
+ tableId: row.tableId,
+ table,
+ })
+ )
}
- await db.bulkDocs(data)
+ await db.bulkDocs(await Promise.all(finalData))
let response = await db.put(table)
table._rev = response._rev
}
diff --git a/packages/server/src/api/controllers/view/index.js b/packages/server/src/api/controllers/view/index.js
index 3d0f236fce..ecaee0f32f 100644
--- a/packages/server/src/api/controllers/view/index.js
+++ b/packages/server/src/api/controllers/view/index.js
@@ -2,127 +2,93 @@ const CouchDB = require("../../../db")
const viewTemplate = require("./viewBuilder")
const { apiFileReturn } = require("../../../utilities/fileSystem")
const exporters = require("./exporters")
+const { saveView, getView, getViews, deleteView } = require("./utils")
const { fetchView } = require("../row")
-const { ViewNames } = require("../../../db/utils")
-const controller = {
- fetch: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const response = []
-
- for (let name of Object.keys(designDoc.views)) {
- // Only return custom views, not built ins
- if (Object.values(ViewNames).indexOf(name) !== -1) {
- continue
- }
- response.push({
- name,
- ...designDoc.views[name],
- })
- }
-
- ctx.body = response
- },
- save: async ctx => {
- const db = new CouchDB(ctx.appId)
- const { originalName, ...viewToSave } = ctx.request.body
- const designDoc = await db.get("_design/database")
- const view = viewTemplate(viewToSave)
-
- if (!viewToSave.name) {
- ctx.throw(400, "Cannot create view without a name")
- }
-
- designDoc.views = {
- ...designDoc.views,
- [viewToSave.name]: view,
- }
-
- // view has been renamed
- if (originalName) {
- delete designDoc.views[originalName]
- }
-
- await db.put(designDoc)
-
- // add views to table document
- const table = await db.get(ctx.request.body.tableId)
- if (!table.views) table.views = {}
- if (!view.meta.schema) {
- view.meta.schema = table.schema
- }
- table.views[viewToSave.name] = view.meta
-
- if (originalName) {
- delete table.views[originalName]
- }
-
- await db.put(table)
-
- ctx.body = {
- ...table.views[viewToSave.name],
- name: viewToSave.name,
- }
- },
- destroy: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const viewName = decodeURI(ctx.params.viewName)
- const view = designDoc.views[viewName]
- delete designDoc.views[viewName]
-
- await db.put(designDoc)
-
- const table = await db.get(view.meta.tableId)
- delete table.views[viewName]
- await db.put(table)
-
- ctx.body = view
- },
- exportView: async ctx => {
- const db = new CouchDB(ctx.appId)
- const designDoc = await db.get("_design/database")
- const viewName = decodeURI(ctx.query.view)
-
- const view = designDoc.views[viewName]
- const format = ctx.query.format
- if (!format) {
- ctx.throw(400, "Format must be specified, either csv or json")
- }
-
- if (view) {
- ctx.params.viewName = viewName
- // Fetch view rows
- ctx.query = {
- group: view.meta.groupBy,
- calculation: view.meta.calculation,
- stats: !!view.meta.field,
- field: view.meta.field,
- }
- } else {
- // table all_ view
- /* istanbul ignore next */
- ctx.params.viewName = viewName
- }
-
- await fetchView(ctx)
-
- let schema = view && view.meta && view.meta.schema
- if (!schema) {
- const tableId = ctx.params.tableId || view.meta.tableId
- const table = await db.get(tableId)
- schema = table.schema
- }
-
- // Export part
- let headers = Object.keys(schema)
- const exporter = exporters[format]
- const filename = `${viewName}.${format}`
- // send down the file
- ctx.attachment(filename)
- ctx.body = apiFileReturn(exporter(headers, ctx.body))
- },
+exports.fetch = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ ctx.body = await getViews(db)
}
-module.exports = controller
+exports.save = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const { originalName, ...viewToSave } = ctx.request.body
+ const view = viewTemplate(viewToSave)
+
+ if (!viewToSave.name) {
+ ctx.throw(400, "Cannot create view without a name")
+ }
+
+ await saveView(db, originalName, viewToSave.name, view)
+
+ // add views to table document
+ const table = await db.get(ctx.request.body.tableId)
+ if (!table.views) table.views = {}
+ if (!view.meta.schema) {
+ view.meta.schema = table.schema
+ }
+ table.views[viewToSave.name] = view.meta
+ if (originalName) {
+ delete table.views[originalName]
+ }
+ await db.put(table)
+
+ ctx.body = {
+ ...table.views[viewToSave.name],
+ name: viewToSave.name,
+ }
+}
+
+exports.destroy = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const viewName = decodeURI(ctx.params.viewName)
+ const view = await deleteView(db, viewName)
+ const table = await db.get(view.meta.tableId)
+ delete table.views[viewName]
+ await db.put(table)
+
+ ctx.body = view
+}
+
+exports.exportView = async ctx => {
+ const db = new CouchDB(ctx.appId)
+ const viewName = decodeURI(ctx.query.view)
+ const view = await getView(db, viewName)
+
+ const format = ctx.query.format
+ if (!format) {
+ ctx.throw(400, "Format must be specified, either csv or json")
+ }
+
+ if (view) {
+ ctx.params.viewName = viewName
+ // Fetch view rows
+ ctx.query = {
+ group: view.meta.groupBy,
+ calculation: view.meta.calculation,
+ stats: !!view.meta.field,
+ field: view.meta.field,
+ }
+ } else {
+ // table all_ view
+ /* istanbul ignore next */
+ ctx.params.viewName = viewName
+ }
+
+ await fetchView(ctx)
+
+ let schema = view && view.meta && view.meta.schema
+ if (!schema) {
+ const tableId = ctx.params.tableId || view.meta.tableId
+ const table = await db.get(tableId)
+ schema = table.schema
+ }
+
+ // Export part
+ let headers = Object.keys(schema)
+ const exporter = exporters[format]
+ const filename = `${viewName}.${format}`
+ // send down the file
+ ctx.attachment(filename)
+ ctx.body = apiFileReturn(exporter(headers, ctx.body))
+}
diff --git a/packages/server/src/api/controllers/view/utils.js b/packages/server/src/api/controllers/view/utils.js
new file mode 100644
index 0000000000..1f3b980882
--- /dev/null
+++ b/packages/server/src/api/controllers/view/utils.js
@@ -0,0 +1,136 @@
+const {
+ ViewNames,
+ generateMemoryViewID,
+ getMemoryViewParams,
+} = require("../../../db/utils")
+const env = require("../../../environment")
+
+exports.getView = async (db, viewName) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ return designDoc.views[viewName]
+ } else {
+ const viewDoc = await db.get(generateMemoryViewID(viewName))
+ return viewDoc.view
+ }
+}
+
+exports.getViews = async db => {
+ const response = []
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ for (let name of Object.keys(designDoc.views)) {
+ // Only return custom views, not built ins
+ if (Object.values(ViewNames).indexOf(name) !== -1) {
+ continue
+ }
+ response.push({
+ name,
+ ...designDoc.views[name],
+ })
+ }
+ } else {
+ const views = (
+ await db.allDocs(
+ getMemoryViewParams({
+ include_docs: true,
+ })
+ )
+ ).rows.map(row => row.doc)
+ for (let viewDoc of views) {
+ response.push({
+ name: viewDoc.name,
+ ...viewDoc.view,
+ })
+ }
+ }
+ return response
+}
+
+exports.saveView = async (db, originalName, viewName, viewTemplate) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ designDoc.views = {
+ ...designDoc.views,
+ [viewName]: viewTemplate,
+ }
+ // view has been renamed
+ if (originalName) {
+ delete designDoc.views[originalName]
+ }
+ await db.put(designDoc)
+ } else {
+ const id = generateMemoryViewID(viewName)
+ const originalId = originalName ? generateMemoryViewID(originalName) : null
+ const viewDoc = {
+ _id: id,
+ view: viewTemplate,
+ name: viewName,
+ tableId: viewTemplate.meta.tableId,
+ }
+ try {
+ const old = await db.get(id)
+ if (originalId) {
+ const originalDoc = await db.get(originalId)
+ await db.remove(originalDoc._id, originalDoc._rev)
+ }
+ if (old && old._rev) {
+ viewDoc._rev = old._rev
+ }
+ } catch (err) {
+ // didn't exist, just skip
+ }
+ await db.put(viewDoc)
+ }
+}
+
+exports.deleteView = async (db, viewName) => {
+ if (env.SELF_HOSTED) {
+ const designDoc = await db.get("_design/database")
+ const view = designDoc.views[viewName]
+ delete designDoc.views[viewName]
+ await db.put(designDoc)
+ return view
+ } else {
+ const id = generateMemoryViewID(viewName)
+ const viewDoc = await db.get(id)
+ await db.remove(viewDoc._id, viewDoc._rev)
+ return viewDoc.view
+ }
+}
+
+exports.migrateToInMemoryView = async (db, viewName) => {
+ // delete the view initially
+ const designDoc = await db.get("_design/database")
+ const view = designDoc.views[viewName]
+ delete designDoc.views[viewName]
+ await db.put(designDoc)
+ await exports.saveView(db, null, viewName, view)
+}
+
+exports.migrateToDesignView = async (db, viewName) => {
+ let view = await db.get(generateMemoryViewID(viewName))
+ const designDoc = await db.get("_design/database")
+ designDoc.views[viewName] = view.view
+ await db.put(designDoc)
+ await db.remove(view._id, view._rev)
+}
+
+exports.getFromDesignDoc = async (db, viewName) => {
+ const designDoc = await db.get("_design/database")
+ let view = designDoc.views[viewName]
+ if (view == null) {
+ throw { status: 404, message: "Unable to get view" }
+ }
+ return view
+}
+
+exports.getFromMemoryDoc = async (db, viewName) => {
+ let view = await db.get(generateMemoryViewID(viewName))
+ if (view) {
+ view = view.view
+ } else {
+ throw { status: 404, message: "Unable to get view" }
+ }
+ return view
+}
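
A minimal sketch of the new utils in use, assuming `db` is the CouchDB instance for the current app (the view and table names below are made up):

// self hosted: the view is written into _design/database; in the cloud it
// becomes a view_<name> doc, executed later by the in-memory adapter
const { saveView, getView, getViews } = require("./utils")

const template = {
  map: "function (doc) { emit(doc._id) }",
  meta: { tableId: "ta_users" },
}
await saveView(db, null, "active_users", template)
const view = await getView(db, "active_users")
const allViews = await getViews(db)
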
diff --git a/packages/server/src/api/routes/application.js b/packages/server/src/api/routes/application.js
index c1d39acbd5..4d67a0f4f4 100644
--- a/packages/server/src/api/routes/application.js
+++ b/packages/server/src/api/routes/application.js
@@ -2,11 +2,12 @@ const Router = require("@koa/router")
const controller = require("../controllers/application")
const authorized = require("../../middleware/authorized")
const { BUILDER } = require("@budibase/auth/permissions")
+const usage = require("../../middleware/usageQuota")
const router = Router()
router
- .post("/api/applications", authorized(BUILDER), controller.create)
+ .post("/api/applications", authorized(BUILDER), usage, controller.create)
.get("/api/applications/:appId/definition", controller.fetchAppDefinition)
.get("/api/applications", controller.fetch)
.get("/api/applications/:appId/appPackage", controller.fetchAppPackage)
@@ -21,6 +22,11 @@ router
authorized(BUILDER),
controller.revertClient
)
- .delete("/api/applications/:appId", authorized(BUILDER), controller.delete)
+ .delete(
+ "/api/applications/:appId",
+ authorized(BUILDER),
+ usage,
+ controller.delete
+ )
module.exports = router
diff --git a/packages/server/src/api/routes/cloud.js b/packages/server/src/api/routes/cloud.js
new file mode 100644
index 0000000000..214473f43f
--- /dev/null
+++ b/packages/server/src/api/routes/cloud.js
@@ -0,0 +1,13 @@
+const Router = require("@koa/router")
+const controller = require("../controllers/cloud")
+const authorized = require("../../middleware/authorized")
+const { BUILDER } = require("@budibase/auth/permissions")
+
+const router = Router()
+
+router
+ .get("/api/cloud/export", authorized(BUILDER), controller.exportApps)
+ // has to be public, only run if apps don't exist
+ .post("/api/cloud/import", controller.importApps)
+
+module.exports = router
diff --git a/packages/server/src/api/routes/index.js b/packages/server/src/api/routes/index.js
index 2e1353df98..29d0cd42b4 100644
--- a/packages/server/src/api/routes/index.js
+++ b/packages/server/src/api/routes/index.js
@@ -24,6 +24,7 @@ const hostingRoutes = require("./hosting")
const backupRoutes = require("./backup")
const metadataRoutes = require("./metadata")
const devRoutes = require("./dev")
+const cloudRoutes = require("./cloud")
exports.mainRoutes = [
authRoutes,
@@ -49,6 +50,7 @@ exports.mainRoutes = [
backupRoutes,
metadataRoutes,
devRoutes,
+ cloudRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,
diff --git a/packages/server/src/api/routes/tests/datasource.spec.js b/packages/server/src/api/routes/tests/datasource.spec.js
index 98a99717fd..b6d94f714d 100644
--- a/packages/server/src/api/routes/tests/datasource.spec.js
+++ b/packages/server/src/api/routes/tests/datasource.spec.js
@@ -94,7 +94,8 @@ describe("/datasources", () => {
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
- expect(pg.queryMock).toHaveBeenCalledWith(`select "users"."name" as "users.name", "users"."age" as "users.age" from "users" where "users"."name" ilike $1 limit $2`, ["John%", 5000])
+ const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
+ expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
})
})
diff --git a/packages/server/src/api/routes/tests/query.spec.js b/packages/server/src/api/routes/tests/query.spec.js
index eadd475ed4..716817509b 100644
--- a/packages/server/src/api/routes/tests/query.spec.js
+++ b/packages/server/src/api/routes/tests/query.spec.js
@@ -1,6 +1,7 @@
// mock out postgres for this
jest.mock("pg")
+const { findLastKey } = require("lodash/fp")
const setup = require("./utilities")
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const { basicQuery, basicDatasource } = setup.structures
@@ -19,10 +20,10 @@ describe("/queries", () => {
})
async function createInvalidIntegration() {
- const datasource = await config.createDatasource({
- ...basicDatasource(),
+ const datasource = await config.createDatasource({datasource: {
+ ...basicDatasource().datasource,
source: "INVALID_INTEGRATION",
- })
+ }})
const query = await config.createQuery()
return { datasource, query }
}
@@ -183,11 +184,14 @@ describe("/queries", () => {
})
it("should fail with invalid integration type", async () => {
- const { query } = await createInvalidIntegration()
+ const { query, datasource } = await createInvalidIntegration()
await request
.post(`/api/queries/${query._id}`)
.send({
+ datasourceId: datasource._id,
parameters: {},
+ fields: {},
+ queryVerb: "read",
})
.set(config.defaultHeaders())
.expect(400)
diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js
index ad42ef180a..d74a84b2b2 100644
--- a/packages/server/src/api/routes/tests/role.spec.js
+++ b/packages/server/src/api/routes/tests/role.spec.js
@@ -72,7 +72,7 @@ describe("/roles", () => {
.expect(200)
expect(res.body.length).toBeGreaterThan(0)
const power = res.body.find(role => role._id === BUILTIN_ROLE_IDS.POWER)
- expect(power.permissions[table._id]).toEqual("read")
+ expect(power.permissions[table._id]).toEqual(["read"])
})
})
diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js
index d089d7775d..01284552c5 100644
--- a/packages/server/src/api/routes/tests/row.spec.js
+++ b/packages/server/src/api/routes/tests/row.spec.js
@@ -317,7 +317,7 @@ describe("/rows", () => {
await request
.get(`/api/views/derp`)
.set(config.defaultHeaders())
- .expect(400)
+ .expect(404)
})
it("should be able to run on a view", async () => {
@@ -394,4 +394,4 @@ describe("/rows", () => {
})
})
})
-})
\ No newline at end of file
+})
diff --git a/packages/server/src/api/routes/tests/view.spec.js b/packages/server/src/api/routes/tests/view.spec.js
index 458da6e023..b1c5f655c6 100644
--- a/packages/server/src/api/routes/tests/view.spec.js
+++ b/packages/server/src/api/routes/tests/view.spec.js
@@ -205,7 +205,7 @@ describe("/views", () => {
})
describe("exportView", () => {
- it("should be able to delete a view", async () => {
+ it("should be able to export a view", async () => {
await config.createTable(priceTable())
await config.createRow()
const view = await config.createView()
diff --git a/packages/server/src/api/routes/user.js b/packages/server/src/api/routes/user.js
index b3b486fe45..d171870215 100644
--- a/packages/server/src/api/routes/user.js
+++ b/packages/server/src/api/routes/user.js
@@ -5,7 +5,6 @@ const {
PermissionLevels,
PermissionTypes,
} = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
const router = Router()
@@ -28,13 +27,11 @@ router
.post(
"/api/users/metadata/self",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
- usage,
controller.updateSelfMetadata
)
.delete(
"/api/users/metadata/:id",
authorized(PermissionTypes.USER, PermissionLevels.WRITE),
- usage,
controller.destroyMetadata
)
diff --git a/packages/server/src/api/routes/view.js b/packages/server/src/api/routes/view.js
index 7d390805c6..b72fe1ac26 100644
--- a/packages/server/src/api/routes/view.js
+++ b/packages/server/src/api/routes/view.js
@@ -8,7 +8,6 @@ const {
PermissionTypes,
PermissionLevels,
} = require("@budibase/auth/permissions")
-const usage = require("../../middleware/usageQuota")
const router = Router()
@@ -25,9 +24,8 @@ router
"/api/views/:viewName",
paramResource("viewName"),
authorized(BUILDER),
- usage,
viewController.destroy
)
- .post("/api/views", authorized(BUILDER), usage, viewController.save)
+ .post("/api/views", authorized(BUILDER), viewController.save)
module.exports = router
diff --git a/packages/server/src/automations/steps/createRow.js b/packages/server/src/automations/steps/createRow.js
index 9706126438..47d0b4eb99 100644
--- a/packages/server/src/automations/steps/createRow.js
+++ b/packages/server/src/automations/steps/createRow.js
@@ -2,6 +2,7 @@ const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
exports.definition = {
name: "Create Row",
@@ -59,7 +60,7 @@ exports.definition = {
},
}
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.row == null || inputs.row.tableId == null) {
return {
success: false,
@@ -69,16 +70,12 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
}
}
// have to clean up the row, remove the table from it
- const ctx = {
+ const ctx = buildCtx(appId, emitter, {
+ body: inputs.row,
params: {
tableId: inputs.row.tableId,
},
- request: {
- body: inputs.row,
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
inputs.row = await automationUtils.cleanUpRow(
@@ -86,8 +83,8 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
inputs.row.tableId,
inputs.row
)
- if (env.isProd()) {
- await usage.update(apiKey, usage.Properties.ROW, 1)
+ if (env.USE_QUOTAS) {
+ await usage.update(usage.Properties.ROW, 1)
}
await rowController.save(ctx)
return {
diff --git a/packages/server/src/automations/steps/deleteRow.js b/packages/server/src/automations/steps/deleteRow.js
index 26623d628b..225f00c5df 100644
--- a/packages/server/src/automations/steps/deleteRow.js
+++ b/packages/server/src/automations/steps/deleteRow.js
@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const env = require("../../environment")
const usage = require("../../utilities/usageQuota")
+const { buildCtx } = require("./utils")
exports.definition = {
description: "Delete a row from your database",
@@ -51,7 +52,7 @@ exports.definition = {
},
}
-exports.run = async function ({ inputs, appId, apiKey, emitter }) {
+exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.id == null || inputs.revision == null) {
return {
success: false,
@@ -60,23 +61,20 @@ exports.run = async function ({ inputs, appId, apiKey, emitter }) {
},
}
}
- let ctx = {
+
+ let ctx = buildCtx(appId, emitter, {
+ body: {
+ _id: inputs.id,
+ _rev: inputs.revision,
+ },
params: {
tableId: inputs.tableId,
},
- request: {
- body: {
- _id: inputs.id,
- _rev: inputs.revision,
- },
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
if (env.isProd()) {
- await usage.update(apiKey, usage.Properties.ROW, -1)
+ await usage.update(usage.Properties.ROW, -1)
}
await rowController.destroy(ctx)
return {
diff --git a/packages/server/src/automations/steps/queryRows.js b/packages/server/src/automations/steps/queryRows.js
index 64b757418e..3c4bb422a0 100644
--- a/packages/server/src/automations/steps/queryRows.js
+++ b/packages/server/src/automations/steps/queryRows.js
@@ -1,6 +1,7 @@
const rowController = require("../../api/controllers/row")
const tableController = require("../../api/controllers/table")
const { FieldTypes } = require("../../constants")
+const { buildCtx } = require("./utils")
const SortOrders = {
ASCENDING: "ascending",
@@ -70,12 +71,11 @@ exports.definition = {
}
async function getTable(appId, tableId) {
- const ctx = {
+ const ctx = buildCtx(appId, null, {
params: {
id: tableId,
},
- appId,
- }
+ })
await tableController.find(ctx)
return ctx.body
}
@@ -89,21 +89,18 @@ exports.run = async function ({ inputs, appId }) {
sortType =
fieldType === FieldTypes.NUMBER ? FieldTypes.NUMBER : FieldTypes.STRING
}
- const ctx = {
+ const ctx = buildCtx(appId, null, {
params: {
tableId,
},
- request: {
- body: {
- sortOrder,
- sortType,
- sort: sortColumn,
- query: filters || {},
- limit,
- },
+ body: {
+ sortOrder,
+ sortType,
+ sort: sortColumn,
+ query: filters || {},
+ limit,
},
- appId,
- }
+ })
try {
await rowController.search(ctx)
return {
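
The automation steps above now build their koa-style context through a shared `buildCtx` helper in `steps/utils.js`, which isn't shown in this diff. Inferred purely from the call sites, it presumably looks something like:

// hypothetical reconstruction of steps/utils.js, based only on usage above
exports.buildCtx = (appId, emitter, { body, params } = {}) => {
  return {
    appId,
    params: params || {},
    request: { body },
    eventEmitter: emitter,
  }
}
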
diff --git a/packages/server/src/automations/steps/sendSmtpEmail.js b/packages/server/src/automations/steps/sendSmtpEmail.js
index 9e4b5a6a3c..07a3059215 100644
--- a/packages/server/src/automations/steps/sendSmtpEmail.js
+++ b/packages/server/src/automations/steps/sendSmtpEmail.js
@@ -53,7 +53,7 @@ exports.run = async function ({ inputs }) {
contents = "
No content
"
}
try {
- let response = await sendSmtpEmail(to, from, subject, contents)
+ let response = await sendSmtpEmail(to, from, subject, contents, true)
return {
success: true,
response,
diff --git a/packages/server/src/automations/steps/updateRow.js b/packages/server/src/automations/steps/updateRow.js
index ac5eb16fcd..94f77bc801 100644
--- a/packages/server/src/automations/steps/updateRow.js
+++ b/packages/server/src/automations/steps/updateRow.js
@@ -1,5 +1,6 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")
+const { buildCtx } = require("./utils")
exports.definition = {
name: "Update Row",
@@ -72,19 +73,15 @@ exports.run = async function ({ inputs, appId, emitter }) {
}
// have to clean up the row, remove the table from it
- const ctx = {
+ const ctx = buildCtx(appId, emitter, {
+ body: {
+ ...inputs.row,
+ _id: inputs.rowId,
+ },
params: {
rowId: inputs.rowId,
},
- request: {
- body: {
- ...inputs.row,
- _id: inputs.rowId,
- },
- },
- appId,
- eventEmitter: emitter,
- }
+ })
try {
inputs.row = await automationUtils.cleanUpRowById(
diff --git a/packages/server/src/automations/tests/automation.spec.js b/packages/server/src/automations/tests/automation.spec.js
index 83b7b81a75..9444995ca1 100644
--- a/packages/server/src/automations/tests/automation.spec.js
+++ b/packages/server/src/automations/tests/automation.spec.js
@@ -13,8 +13,6 @@ const { makePartial } = require("../../tests/utilities")
const { cleanInputValues } = require("../automationUtils")
const setup = require("./utilities")
-usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })
-
describe("Run through some parts of the automations system", () => {
let config = setup.getConfig()
diff --git a/packages/server/src/automations/tests/createRow.spec.js b/packages/server/src/automations/tests/createRow.spec.js
index 1004711d87..a04fc7aad4 100644
--- a/packages/server/src/automations/tests/createRow.spec.js
+++ b/packages/server/src/automations/tests/createRow.spec.js
@@ -46,7 +46,7 @@ describe("test the create row action", () => {
await setup.runStep(setup.actions.CREATE_ROW.stepId, {
row
})
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", 1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
})
})
diff --git a/packages/server/src/automations/tests/deleteRow.spec.js b/packages/server/src/automations/tests/deleteRow.spec.js
index a3d73d3bf6..21246f22d0 100644
--- a/packages/server/src/automations/tests/deleteRow.spec.js
+++ b/packages/server/src/automations/tests/deleteRow.spec.js
@@ -37,7 +37,7 @@ describe("test the delete row action", () => {
it("check usage quota attempts", async () => {
await setup.runInProd(async () => {
await setup.runStep(setup.actions.DELETE_ROW.stepId, inputs)
- expect(usageQuota.update).toHaveBeenCalledWith(setup.apiKey, "rows", -1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", -1)
})
})
diff --git a/packages/server/src/automations/thread.js b/packages/server/src/automations/thread.js
index a3e81a2274..ef12494165 100644
--- a/packages/server/src/automations/thread.js
+++ b/packages/server/src/automations/thread.js
@@ -4,8 +4,10 @@ const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants
const CouchDB = require("../db")
-const { DocumentTypes } = require("../db/utils")
+const { DocumentTypes, isDevAppID } = require("../db/utils")
const { doInTenant } = require("@budibase/auth/tenancy")
+const env = require("../environment")
+const usage = require("../utilities/usageQuota")
const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
@@ -80,7 +82,6 @@ class Orchestrator {
return stepFn({
inputs: step.inputs,
appId: this._appId,
- apiKey: automation.apiKey,
emitter: this._emitter,
context: this._context,
})
@@ -95,6 +96,11 @@ class Orchestrator {
return err
}
}
+
+ // Increment quota for automation runs
+ if (!env.SELF_HOSTED && !isDevAppID(this._appId)) {
+ usage.update(usage.Properties.AUTOMATION, 1)
+ }
return this.executionOutput
}
}
diff --git a/packages/server/src/db/inMemoryView.js b/packages/server/src/db/inMemoryView.js
new file mode 100644
index 0000000000..892617e068
--- /dev/null
+++ b/packages/server/src/db/inMemoryView.js
@@ -0,0 +1,48 @@
+const PouchDB = require("pouchdb")
+const memory = require("pouchdb-adapter-memory")
+const newid = require("./newid")
+
+PouchDB.plugin(memory)
+const Pouch = PouchDB.defaults({
+ prefix: undefined,
+ adapter: "memory",
+})
+
+exports.runView = async (view, calculation, group, data) => {
+ // use a different ID each time for the DB, make sure they
+ // are always unique for each query, don't want overlap
+ // which could cause 409s
+ const db = new Pouch(newid())
+ // write all the docs to the in memory Pouch (remove revs)
+ await db.bulkDocs(
+ data.map(row => ({
+ ...row,
+ _rev: undefined,
+ }))
+ )
+ let fn = (doc, emit) => emit(doc._id)
+ eval("fn = " + view.map.replace("function (doc)", "function (doc, emit)"))
+ const queryFns = {
+ meta: view.meta,
+ map: fn,
+ }
+ if (view.reduce) {
+ queryFns.reduce = view.reduce
+ }
+ const response = await db.query(queryFns, {
+ include_docs: !calculation,
+ group: !!group,
+ })
+ // need to fix the revs to be totally accurate
+ for (let row of response.rows) {
+ if (!row._rev || !row._id) {
+ continue
+ }
+ const found = data.find(possible => possible._id === row._id)
+ if (found) {
+ row._rev = found._rev
+ }
+ }
+ await db.destroy()
+ return response
+}
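
A usage sketch for runView: the caller fetches the candidate rows from the real app DB first, then runs the stored CouchDB-style map function over them in memory (names here are illustrative):

const { runView } = require("./inMemoryView")

const view = {
  map: "function (doc) { emit(doc.category) }",
  meta: { tableId: "ta_products" },
}
const data = [{ _id: "ro_1", _rev: "1-abc", category: "food" }]
const response = await runView(view, null, false, data)
// response.rows holds the emitted keys, with docs attached since no
// calculation was requested
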
diff --git a/packages/server/src/db/linkedRows/index.js b/packages/server/src/db/linkedRows/index.js
index 67412e7e89..303cd085c1 100644
--- a/packages/server/src/db/linkedRows/index.js
+++ b/packages/server/src/db/linkedRows/index.js
@@ -76,9 +76,12 @@ async function getFullLinkedDocs(ctx, appId, links) {
// create DBs
const db = new CouchDB(appId)
const linkedRowIds = links.map(link => link.id)
- let linked = (await db.allDocs(getMultiIDParams(linkedRowIds))).rows.map(
+ const uniqueRowIds = [...new Set(linkedRowIds)]
+ let dbRows = (await db.allDocs(getMultiIDParams(uniqueRowIds))).rows.map(
row => row.doc
)
+ // convert the unique db rows back to a full list of linked rows
+ const linked = linkedRowIds.map(id => dbRows.find(row => row._id === id))
// need to handle users as specific cases
let [users, other] = partition(linked, linkRow =>
linkRow._id.startsWith(USER_METDATA_PREFIX)
@@ -112,7 +115,7 @@ exports.updateLinks = async function (args) {
let linkController = new LinkController(args)
try {
if (
- !(await linkController.doesTableHaveLinkedFields()) &&
+ !(await linkController.doesTableHaveLinkedFields(table)) &&
(oldTable == null ||
!(await linkController.doesTableHaveLinkedFields(oldTable)))
) {
diff --git a/packages/server/src/db/utils.js b/packages/server/src/db/utils.js
index ec1c267fa2..17b19bba49 100644
--- a/packages/server/src/db/utils.js
+++ b/packages/server/src/db/utils.js
@@ -39,6 +39,7 @@ const DocumentTypes = {
QUERY: "query",
DEPLOYMENTS: "deployments",
METADATA: "metadata",
+ MEM_VIEW: "view",
}
const ViewNames = {
@@ -109,6 +110,8 @@ function getDocParams(docType, docId = null, otherProps = {}) {
}
}
+exports.getDocParams = getDocParams
+
/**
* Gets parameters for retrieving tables, this is a utility function for the getDocParams function.
*/
@@ -348,6 +351,14 @@ exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
return getDocParams(DocumentTypes.METADATA, docId, otherProps)
}
+exports.generateMemoryViewID = viewName => {
+ return `${DocumentTypes.MEM_VIEW}${SEPARATOR}${viewName}`
+}
+
+exports.getMemoryViewParams = (otherProps = {}) => {
+ return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
+}
+
/**
* This can be used with the db.allDocs to get a list of IDs
*/
diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts
index 48fd24e1cf..2daef8eda7 100644
--- a/packages/server/src/definitions/datasource.ts
+++ b/packages/server/src/definitions/datasource.ts
@@ -1,3 +1,5 @@
+import { Table } from "./common"
+
export enum Operation {
CREATE = "CREATE",
READ = "READ",
@@ -136,6 +138,9 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
+ meta?: {
+ table?: Table
+ }
extra?: {
idFilter?: SearchFilters
}
diff --git a/packages/server/src/environment.js b/packages/server/src/environment.js
index 9e029e440a..f528a78729 100644
--- a/packages/server/src/environment.js
+++ b/packages/server/src/environment.js
@@ -13,6 +13,10 @@ function isDev() {
)
}
+function isCypress() {
+ return process.env.NODE_ENV === "cypress"
+}
+
let LOADED = false
if (!LOADED && isDev() && !isTest()) {
require("dotenv").config()
@@ -40,6 +44,7 @@ module.exports = {
NODE_ENV: process.env.NODE_ENV,
JEST_WORKER_ID: process.env.JEST_WORKER_ID,
BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
+ DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
// minor
SALT_ROUNDS: process.env.SALT_ROUNDS,
LOGGER: process.env.LOGGER,
@@ -61,8 +66,16 @@ module.exports = {
module.exports[key] = value
},
isTest,
+ isCypress,
isDev,
isProd: () => {
return !isDev()
},
}
+
+// convert any strings to numbers if required, like "0" would be true otherwise
+for (let [key, value] of Object.entries(module.exports)) {
+ if (typeof value === "string" && !isNaN(parseInt(value))) {
+ module.exports[key] = parseInt(value)
+ }
+}
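
The coercion loop matters because everything read off process.env is a string, so a flag set to "0" would otherwise still pass truthiness checks like `if (env.DISABLE_ACCOUNT_PORTAL)`:

Boolean("0")           // true - the raw env var
Boolean(parseInt("0")) // false - what the loop stores instead
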
diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts
index b59bac5a5a..c5e9bdb0bb 100644
--- a/packages/server/src/integrations/base/sql.ts
+++ b/packages/server/src/integrations/base/sql.ts
@@ -1,7 +1,5 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
-// if requesting a single row then need to up the limit for the sake of joins
-const SINGLE_ROW_LIMIT = 100
import {
QueryJson,
SearchFilters,
@@ -146,46 +144,48 @@ function buildCreate(
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
let { endpoint, resource, filters, sort, paginate, relationships } = json
const tableName = endpoint.entityId
- let query: KnexQuery = knex(tableName)
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
+ let selectStatement: string | string[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
- query = query.select(resource.fields.map(field => `${field} as ${field}`))
- } else {
- query = query.select("*")
+ selectStatement = resource.fields.map(field => `${field} as ${field}`)
+ }
+ let foundLimit = limit || BASE_LIMIT
+ // handle pagination
+ let foundOffset: number | null = null
+ if (paginate && paginate.page && paginate.limit) {
+ // @ts-ignore
+ const page = paginate.page <= 1 ? 0 : paginate.page - 1
+ const offset = page * paginate.limit
+ foundLimit = paginate.limit
+ foundOffset = offset
+ } else if (paginate && paginate.limit) {
+ foundLimit = paginate.limit
+ }
+ // start building the query
+ let query: KnexQuery = knex(tableName).limit(foundLimit)
+ if (foundOffset) {
+ query = query.offset(foundOffset)
}
- // handle where
- query = addFilters(tableName, query, filters)
- // handle join
- query = addRelationships(query, tableName, relationships)
- // handle sorting
if (sort) {
for (let [key, value] of Object.entries(sort)) {
const direction = value === SortDirection.ASCENDING ? "asc" : "desc"
query = query.orderBy(key, direction)
}
}
- let foundLimit = limit || BASE_LIMIT
- // handle pagination
- if (paginate && paginate.page && paginate.limit) {
+ query = addFilters(tableName, query, filters)
+ // @ts-ignore
+ let preQuery: KnexQuery = knex({
// @ts-ignore
- const page = paginate.page <= 1 ? 0 : paginate.page - 1
- const offset = page * paginate.limit
- foundLimit = paginate.limit
- query = query.offset(offset)
- } else if (paginate && paginate.limit) {
- foundLimit = paginate.limit
- }
- if (foundLimit === 1) {
- foundLimit = SINGLE_ROW_LIMIT
- }
- query = query.limit(foundLimit)
- return query
+ [tableName]: query,
+ }).select(selectStatement)
+ // handle joins
+ return addRelationships(preQuery, tableName, relationships)
}
function buildUpdate(
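
The rewritten buildRead applies filters, sorting and the limit to the base table first, then wraps that result as an aliased subquery before the joins are added, so rows pulled in by a relationship can no longer eat into the row limit (which is also why SINGLE_ROW_LIMIT could be dropped). A standalone knex sketch of the same wrapping pattern:

// sketch only - mirrors the preQuery construction above
const knex = require("knex")({ client: "pg" })

const base = knex("users")
  .where("name", "ilike", "John%")
  .limit(5000)
// alias the limited query back to the original table name, then select/join
const query = knex({ users: base }).select("*")
// => select * from (select * from "users" where ... limit ?) as "users"
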
diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts
index 3ce21675d9..c17cca0745 100644
--- a/packages/server/src/integrations/mysql.ts
+++ b/packages/server/src/integrations/mysql.ts
@@ -12,7 +12,11 @@ import { getSqlQuery } from "./utils"
module MySQLModule {
const mysql = require("mysql")
const Sql = require("./base/sql")
- const { buildExternalTableId, convertType } = require("./utils")
+ const {
+ buildExternalTableId,
+ convertType,
+ copyExistingPropsOver,
+ } = require("./utils")
const { FieldTypes } = require("../constants")
interface MySQLConfig {
@@ -104,7 +108,7 @@ module MySQLModule {
client: any,
query: SqlQuery,
connect: boolean = true
- ): Promise {
+ ): Promise {
// Node MySQL is callback based, so we must wrap our call in a promise
return new Promise((resolve, reject) => {
if (connect) {
@@ -194,18 +198,7 @@ module MySQLModule {
}
}
- // add the existing relationships from the entities if they exist, to prevent them from being overridden
- if (entities && entities[tableName]) {
- const existingTableSchema = entities[tableName].schema
- for (let key in existingTableSchema) {
- if (!existingTableSchema.hasOwnProperty(key)) {
- continue
- }
- if (existingTableSchema[key].type === "link") {
- tables[tableName].schema[key] = existingTableSchema[key]
- }
- }
- }
+ copyExistingPropsOver(tableName, tables, entities)
}
this.client.end()
@@ -249,6 +242,23 @@ module MySQLModule {
return internalQuery(this.client, input, false)
}
+ // when creating if an ID has been inserted need to make sure
+ // the id filter is enriched with it before trying to retrieve the row
+ checkLookupKeys(results: any, json: QueryJson) {
+ if (!results?.insertId || !json.meta?.table || !json.meta.table.primary) {
+ return json
+ }
+ const primaryKey = json.meta.table.primary?.[0]
+ json.extra = {
+ idFilter: {
+ equal: {
+ [primaryKey]: results.insertId,
+ },
+ },
+ }
+ return json
+ }
+
async query(json: QueryJson) {
const operation = this._operation(json)
this.client.connect()
@@ -261,7 +271,7 @@ module MySQLModule {
const results = await internalQuery(this.client, input, false)
// same as delete, manage returning
if (operation === Operation.CREATE || operation === Operation.UPDATE) {
- row = this.getReturningRow(json)
+ row = this.getReturningRow(this.checkLookupKeys(results, json))
}
this.client.end()
if (operation !== Operation.READ) {
diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts
index dd46652871..332ba8544d 100644
--- a/packages/server/src/integrations/postgres.ts
+++ b/packages/server/src/integrations/postgres.ts
@@ -12,7 +12,14 @@ module PostgresModule {
const { Pool } = require("pg")
const Sql = require("./base/sql")
const { FieldTypes } = require("../constants")
- const { buildExternalTableId, convertType } = require("./utils")
+ const {
+ buildExternalTableId,
+ convertType,
+ copyExistingPropsOver,
+ } = require("./utils")
+ const { escapeDangerousCharacters } = require("../utilities")
+
+ const JSON_REGEX = /'{.*}'::json/s
interface PostgresConfig {
host: string
@@ -84,13 +91,27 @@ module PostgresModule {
bigint: FieldTypes.NUMBER,
decimal: FieldTypes.NUMBER,
smallint: FieldTypes.NUMBER,
+ real: FieldTypes.NUMBER,
+ "double precision": FieldTypes.NUMBER,
timestamp: FieldTypes.DATETIME,
time: FieldTypes.DATETIME,
boolean: FieldTypes.BOOLEAN,
json: FieldTypes.JSON,
+ date: FieldTypes.DATETIME,
}
async function internalQuery(client: any, query: SqlQuery) {
+ // need to handle a specific issue with json data types in postgres,
+ // new lines inside the JSON data will break it
+ if (query && query.sql) {
+ const matches = query.sql.match(JSON_REGEX)
+ if (matches && matches.length > 0) {
+ for (let match of matches) {
+ const escaped = escapeDangerousCharacters(match)
+ query.sql = query.sql.replace(match, escaped)
+ }
+ }
+ }
try {
return await client.query(query.sql, query.bindings || [])
} catch (err) {
@@ -105,7 +126,7 @@ module PostgresModule {
private readonly config: PostgresConfig
COLUMNS_SQL =
- "select * from information_schema.columns where table_schema = 'public'"
+ "select * from information_schema.columns where not table_schema = 'information_schema' and not table_schema = 'pg_catalog'"
PRIMARY_KEYS_SQL = `
select tc.table_schema, tc.table_name, kc.column_name as primary_key
@@ -173,31 +194,30 @@ module PostgresModule {
name: tableName,
schema: {},
}
-
- // add the existing relationships from the entities if they exist, to prevent them from being overridden
- if (entities && entities[tableName]) {
- const existingTableSchema = entities[tableName].schema
- for (let key in existingTableSchema) {
- if (!existingTableSchema.hasOwnProperty(key)) {
- continue
- }
- if (existingTableSchema[key].type === "link") {
- tables[tableName].schema[key] = existingTableSchema[key]
- }
- }
- }
}
const type: string = convertType(column.data_type, TYPE_MAP)
- const isAuto: boolean =
+ const identity = !!(
+ column.identity_generation ||
+ column.identity_start ||
+ column.identity_increment
+ )
+ const hasDefault =
typeof column.column_default === "string" &&
column.column_default.startsWith("nextval")
+ const isGenerated =
+ column.is_generated && column.is_generated !== "NEVER"
+ const isAuto: boolean = hasDefault || identity || isGenerated
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,
type,
}
}
+
+ for (let tableName of Object.keys(tables)) {
+ copyExistingPropsOver(tableName, tables, entities)
+ }
this.tables = tables
}
diff --git a/packages/server/src/integrations/tests/sql.spec.js b/packages/server/src/integrations/tests/sql.spec.js
index fa8bcd1d86..64cdda215f 100644
--- a/packages/server/src/integrations/tests/sql.spec.js
+++ b/packages/server/src/integrations/tests/sql.spec.js
@@ -57,7 +57,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateReadJson())
expect(query).toEqual({
bindings: [limit],
- sql: `select * from "${TABLE_NAME}" limit $1`
+ sql: `select * from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -68,7 +68,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [limit],
- sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from "${TABLE_NAME}" limit $1`
+ sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`
})
})
@@ -82,7 +82,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: ["John%", limit],
- sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2`
+ sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2) as "${TABLE_NAME}"`
})
})
@@ -99,7 +99,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [2, 10, limit],
- sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3`
+ sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3) as "${TABLE_NAME}"`
})
})
@@ -115,7 +115,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [10, "John", limit],
- sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3`
+ sql: `select * from (select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3) as "${TABLE_NAME}"`
})
})
@@ -160,7 +160,7 @@ describe("SQL query builder", () => {
const query = new Sql("mssql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
- sql: `select top (@p0) * from [${TABLE_NAME}]`
+ sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]`
})
})
@@ -168,7 +168,7 @@ describe("SQL query builder", () => {
const query = new Sql("mysql", 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
- sql: `select * from \`${TABLE_NAME}\` limit ?`
+ sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) as \`${TABLE_NAME}\``
})
})
})
diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts
index 5b247213c0..6e3dc6f684 100644
--- a/packages/server/src/integrations/utils.ts
+++ b/packages/server/src/integrations/utils.ts
@@ -82,3 +82,25 @@ export function isIsoDateString(str: string) {
let d = new Date(str)
return d.toISOString() === str
}
+
+// add the existing relationships from the entities if they exist, to prevent them from being overridden
+export function copyExistingPropsOver(
+ tableName: string,
+ tables: { [key: string]: any },
+ entities: { [key: string]: any }
+) {
+ if (entities && entities[tableName]) {
+ if (entities[tableName].primaryDisplay) {
+ tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
+ }
+ const existingTableSchema = entities[tableName].schema
+ for (let key in existingTableSchema) {
+ if (!existingTableSchema.hasOwnProperty(key)) {
+ continue
+ }
+ if (existingTableSchema[key].type === "link") {
+ tables[tableName].schema[key] = existingTableSchema[key]
+ }
+ }
+ }
+}
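
This helper centralises what mysql.ts and postgres.ts each previously did inline, and now also carries primaryDisplay across a schema refresh. A small illustration with made-up table shapes:

const fresh = {
  users: { name: "users", schema: { name: { type: "string" } } },
}
const existing = {
  users: {
    primaryDisplay: "name",
    schema: { orders: { type: "link", tableId: "ta_orders" } },
  },
}
copyExistingPropsOver("users", fresh, existing)
// fresh.users keeps the link column and the primaryDisplay setting
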
diff --git a/packages/server/src/middleware/tests/usageQuota.spec.js b/packages/server/src/middleware/tests/usageQuota.spec.js
index 97d9c7794a..7a64b1a039 100644
--- a/packages/server/src/middleware/tests/usageQuota.spec.js
+++ b/packages/server/src/middleware/tests/usageQuota.spec.js
@@ -6,6 +6,9 @@ jest.mock("../../environment", () => ({
isDev: () => true,
_set: () => {},
}))
+jest.mock("@budibase/auth/tenancy", () => ({
+ getTenantId: () => "testing123"
+}))
const usageQuotaMiddleware = require("../usageQuota")
const usageQuota = require("../../utilities/usageQuota")
@@ -39,7 +42,7 @@ class TestConfiguration {
if (bool) {
env.isDev = () => false
env.isProd = () => true
- this.ctx.auth = { apiKey: "test" }
+ this.ctx.user = { tenantId: "test" }
} else {
env.isDev = () => true
env.isProd = () => false
@@ -114,7 +117,7 @@ describe("usageQuota middleware", () => {
await config.executeMiddleware()
- expect(usageQuota.update).toHaveBeenCalledWith("test", "rows", 1)
+ expect(usageQuota.update).toHaveBeenCalledWith("rows", 1)
expect(config.next).toHaveBeenCalled()
})
@@ -131,7 +134,7 @@ describe("usageQuota middleware", () => {
])
await config.executeMiddleware()
- expect(usageQuota.update).toHaveBeenCalledWith("test", "storage", 10100)
+ expect(usageQuota.update).toHaveBeenCalledWith("storage", 10100)
expect(config.next).toHaveBeenCalled()
})
})
\ No newline at end of file
diff --git a/packages/server/src/middleware/usageQuota.js b/packages/server/src/middleware/usageQuota.js
index 4647878721..de54c93200 100644
--- a/packages/server/src/middleware/usageQuota.js
+++ b/packages/server/src/middleware/usageQuota.js
@@ -1,6 +1,10 @@
const CouchDB = require("../db")
const usageQuota = require("../utilities/usageQuota")
const env = require("../environment")
+const { getTenantId } = require("@budibase/auth/tenancy")
+
+// tenants without limits
+const EXCLUDED_TENANTS = ["bb", "default", "bbtest", "bbstaging"]
// currently only counting new writes and deletes
const METHOD_MAP = {
@@ -13,6 +17,7 @@ const DOMAIN_MAP = {
upload: usageQuota.Properties.UPLOAD,
views: usageQuota.Properties.VIEW,
users: usageQuota.Properties.USER,
+ applications: usageQuota.Properties.APPS,
// this will not be updated by endpoint calls
// instead it will be updated by triggerInfo
automationRuns: usageQuota.Properties.AUTOMATION,
@@ -27,8 +32,10 @@ function getProperty(url) {
}
module.exports = async (ctx, next) => {
+ const tenantId = getTenantId()
+
// if in development or a self hosted cloud usage quotas should not be executed
- if (env.isDev() || env.SELF_HOSTED) {
+ if (env.isDev() || env.SELF_HOSTED || EXCLUDED_TENANTS.includes(tenantId)) {
return next()
}
@@ -57,9 +64,9 @@ module.exports = async (ctx, next) => {
usage = files.map(file => file.size).reduce((total, size) => total + size)
}
try {
- await usageQuota.update(ctx.auth.apiKey, property, usage)
+ await usageQuota.update(property, usage)
return next()
} catch (err) {
- ctx.throw(403, err)
+ ctx.throw(400, err)
}
}
diff --git a/packages/server/src/tests/utilities/structures.js b/packages/server/src/tests/utilities/structures.js
index e4b2c7e1f0..9c900fec09 100644
--- a/packages/server/src/tests/utilities/structures.js
+++ b/packages/server/src/tests/utilities/structures.js
@@ -70,10 +70,12 @@ exports.basicRole = () => {
exports.basicDatasource = () => {
return {
- type: "datasource",
- name: "Test",
- source: "POSTGRES",
- config: {},
+ datasource: {
+ type: "datasource",
+ name: "Test",
+ source: "POSTGRES",
+ config: {},
+ },
}
}
diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js
index 5226fd66ca..6fee7b4283 100644
--- a/packages/server/src/utilities/fileSystem/index.js
+++ b/packages/server/src/utilities/fileSystem/index.js
@@ -19,6 +19,7 @@ const {
USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX,
} = require("../../db/utils")
+const MemoryStream = require("memorystream")
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@@ -111,24 +112,85 @@ exports.apiFileReturn = contents => {
* to the temporary backup file (to return via API if required).
*/
exports.performBackup = async (appId, backupName) => {
- const path = join(budibaseTempDir(), backupName)
- const writeStream = fs.createWriteStream(path)
- // perform couch dump
- const instanceDb = new CouchDB(appId)
- await instanceDb.dump(writeStream, {
- // filter out anything that has a user metadata structure in its ID
+ return exports.exportDB(appId, {
+ exportName: backupName,
filter: doc =>
!(
doc._id.includes(USER_METDATA_PREFIX) ||
doc._id.includes(LINK_USER_METADATA_PREFIX)
),
})
+}
+
+/**
+ * exports a DB to either file or a variable (memory).
+ * @param {string} dbName the DB which is to be exported.
+ * @param {string} exportName optional - the file name to export to, if not in memory.
+ * @param {function} filter optional - a filter function to clear out any un-wanted docs.
+ * @return Either the file stream or the variable (if no export name provided).
+ */
+exports.exportDB = async (
+ dbName,
+ { exportName, filter } = { exportName: undefined, filter: undefined }
+) => {
+ let stream,
+ appString = "",
+ path = null
+ if (exportName) {
+ path = join(budibaseTempDir(), exportName)
+ stream = fs.createWriteStream(path)
+ } else {
+ stream = new MemoryStream()
+ stream.on("data", chunk => {
+ appString += chunk.toString()
+ })
+ }
+ // perform couch dump
+ const instanceDb = new CouchDB(dbName)
+ await instanceDb.dump(stream, {
+ filter,
+ })
+ // just in memory, return the final string
+ if (!exportName) {
+ return appString
+ }
// write the file to the object store
- await streamUpload(
- ObjectStoreBuckets.BACKUPS,
- join(appId, backupName),
- fs.createReadStream(path)
- )
+ if (env.SELF_HOSTED) {
+ await streamUpload(
+ ObjectStoreBuckets.BACKUPS,
+ join(dbName, exportName),
+ fs.createReadStream(path)
+ )
+ }
+ return fs.createReadStream(path)
+}
+
+/**
+ * Writes the provided contents to a temporary file, which can be used briefly.
+ * @param {string} fileContents contents which will be written to a temp file.
+ * @return {string} the path to the temp file.
+ */
+exports.storeTempFile = fileContents => {
+ const path = join(budibaseTempDir(), uuid())
+ fs.writeFileSync(path, fileContents)
+ return path
+}
+
+/**
+ * Utility function for getting a file read stream - a simple in memory buffered read
+ * stream doesn't work for pouchdb.
+ */
+exports.stringToFileStream = contents => {
+ const path = exports.storeTempFile(contents)
+ return fs.createReadStream(path)
+}
+
+/**
+ * Creates a temp file and returns it from the API.
+ * @param {string} fileContents the contents to be returned in file.
+ */
+exports.sendTempFile = fileContents => {
+ const path = exports.storeTempFile(fileContents)
return fs.createReadStream(path)
}
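
A usage sketch of the two exportDB modes (the app ID is illustrative):

const { exportDB } = require("./index")

// no exportName: the dump is buffered through MemoryStream into a string
const appString = await exportDB("app_dev_123")

// with exportName: the dump goes to a temp file (and to the backups bucket
// when self hosted) and a read stream is returned
const readStream = await exportDB("app_dev_123", { exportName: "backup.txt" })
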
diff --git a/packages/server/src/utilities/index.js b/packages/server/src/utilities/index.js
index a81f9ddcf5..e568ba063c 100644
--- a/packages/server/src/utilities/index.js
+++ b/packages/server/src/utilities/index.js
@@ -3,6 +3,7 @@ const { OBJ_STORE_DIRECTORY } = require("../constants")
const { sanitizeKey } = require("@budibase/auth/src/objectStore")
const CouchDB = require("../db")
const { generateMetadataID } = require("../db/utils")
+const Readable = require("stream").Readable
const BB_CDN = "https://cdn.budi.live"
@@ -10,6 +11,14 @@ exports.wait = ms => new Promise(resolve => setTimeout(resolve, ms))
exports.isDev = env.isDev
+exports.removeFromArray = (array, element) => {
+ const index = array.indexOf(element)
+ if (index !== -1) {
+ array.splice(index, 1)
+ }
+ return array
+}
+
/**
* Makes sure that a URL has the correct number of slashes, while maintaining the
* http(s):// double slashes.
@@ -106,3 +115,22 @@ exports.deleteEntityMetadata = async (appId, type, entityId) => {
await db.remove(id, rev)
}
}
+
+exports.escapeDangerousCharacters = string => {
+ return string
+ .replace(/[\\]/g, "\\\\")
+ .replace(/[\b]/g, "\\b")
+ .replace(/[\f]/g, "\\f")
+ .replace(/[\n]/g, "\\n")
+ .replace(/[\r]/g, "\\r")
+ .replace(/[\t]/g, "\\t")
+}
+
+exports.stringToReadStream = string => {
+ return new Readable({
+ read() {
+ this.push(string)
+ this.push(null)
+ },
+ })
+}
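
escapeDangerousCharacters is the helper postgres.ts applies to any '{...}'::json match above, so raw control characters inside a JSON literal can't break the statement:

const { escapeDangerousCharacters } = require("./index")

const input = "'{\"note\": \"line one\nline two\"}'::json"
const safe = escapeDangerousCharacters(input)
// the raw newline is now the two-character sequence \n inside the literal
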
diff --git a/packages/server/src/utilities/rowProcessor/index.js b/packages/server/src/utilities/rowProcessor/index.js
index bb4ac98bb7..07549dd8a8 100644
--- a/packages/server/src/utilities/rowProcessor/index.js
+++ b/packages/server/src/utilities/rowProcessor/index.js
@@ -89,10 +89,16 @@ const TYPE_TRANSFORM_MAP = {
* @param {Object} user The user to be used for an appId as well as the createdBy and createdAt fields.
* @param {Object} table The table which is to be used for the schema, as well as handling auto IDs incrementing.
* @param {Object} row The row which is to be updated with information for the auto columns.
+ * @param {Object} opts specific options for function to carry out optional features.
* @returns {{row: Object, table: Object}} The updated row and table, the table may need to be updated
* for automatic ID purposes.
*/
-function processAutoColumn(user, table, row) {
+function processAutoColumn(
+ user,
+ table,
+ row,
+ opts = { reprocessing: false, noAutoRelationships: false }
+) {
let now = new Date().toISOString()
// if a row doesn't have a revision then it doesn't exist yet
const creating = !row._rev
@@ -102,7 +108,7 @@ function processAutoColumn(user, table, row) {
}
switch (schema.subtype) {
case AutoFieldSubTypes.CREATED_BY:
- if (creating) {
+ if (creating && !opts.reprocessing && !opts.noAutoRelationships) {
row[key] = [user.userId]
}
break
@@ -112,7 +118,9 @@ function processAutoColumn(user, table, row) {
}
break
case AutoFieldSubTypes.UPDATED_BY:
- row[key] = [user.userId]
+ if (!opts.reprocessing && !opts.noAutoRelationships) {
+ row[key] = [user.userId]
+ }
break
case AutoFieldSubTypes.UPDATED_AT:
row[key] = now
@@ -127,6 +135,7 @@ function processAutoColumn(user, table, row) {
}
return { table, row }
}
+exports.processAutoColumn = processAutoColumn
/**
* This will coerce a value to the correct types based on the type transform map
@@ -151,9 +160,15 @@ exports.coerce = (row, type) => {
* @param {object} user the user which is performing the input.
* @param {object} row the row which is being created/updated.
* @param {object} table the table which the row is being saved to.
+ * @param {object} opts some input processing options (like disabling auto-column relationships).
* @returns {object} the row which has been prepared to be written to the DB.
*/
-exports.inputProcessing = (user = {}, table, row) => {
+exports.inputProcessing = (
+ user = {},
+ table,
+ row,
+ opts = { noAutoRelationships: false }
+) => {
let clonedRow = cloneDeep(row)
// need to copy the table so it can be differenced on way out
const copiedTable = cloneDeep(table)
@@ -176,7 +191,7 @@ exports.inputProcessing = (user = {}, table, row) => {
}
}
// handle auto columns - this returns an object like {table, row}
- return processAutoColumn(user, copiedTable, clonedRow)
+ return processAutoColumn(user, copiedTable, clonedRow, opts)
}
/**
diff --git a/packages/server/src/utilities/usageQuota.js b/packages/server/src/utilities/usageQuota.js
index bfe71a4093..80fddb8303 100644
--- a/packages/server/src/utilities/usageQuota.js
+++ b/packages/server/src/utilities/usageQuota.js
@@ -1,41 +1,9 @@
const env = require("../environment")
-const { apiKeyTable } = require("../db/dynamoClient")
-
-const DEFAULT_USAGE = {
- rows: 0,
- storage: 0,
- views: 0,
- automationRuns: 0,
- users: 0,
-}
-
-const DEFAULT_PLAN = {
- rows: 1000,
- // 1 GB
- storage: 8589934592,
- views: 10,
- automationRuns: 100,
- users: 10000,
-}
-
-function buildUpdateParams(key, property, usage) {
- return {
- primary: key,
- condition:
- "attribute_exists(#quota) AND attribute_exists(#limits) AND #quota.#prop < #limits.#prop AND #quotaReset > :now",
- expression: "ADD #quota.#prop :usage",
- names: {
- "#quota": "usageQuota",
- "#prop": property,
- "#limits": "usageLimits",
- "#quotaReset": "quotaReset",
- },
- values: {
- ":usage": usage,
- ":now": Date.now(),
- },
- }
-}
+const { getGlobalDB } = require("@budibase/auth/tenancy")
+const {
+ StaticDatabases,
+ generateNewUsageQuotaDoc,
+} = require("@budibase/auth/db")
function getNewQuotaReset() {
return Date.now() + 2592000000
@@ -47,59 +15,59 @@ exports.Properties = {
VIEW: "views",
USER: "users",
AUTOMATION: "automationRuns",
+ APPS: "apps",
+ EMAILS: "emails",
}
-exports.getAPIKey = async appId => {
- if (!env.USE_QUOTAS) {
- return { apiKey: null }
+async function getUsageQuotaDoc(db) {
+ let quota
+ try {
+ quota = await db.get(StaticDatabases.PLATFORM_INFO.docs.usageQuota)
+ } catch (err) {
+ // doc doesn't exist. Create it
+ quota = await db.post(generateNewUsageQuotaDoc())
}
- return apiKeyTable.get({ primary: appId })
+
+ return quota
}
/**
- * Given a specified API key this will add to the usage object for the specified property.
- * @param {string} apiKey The API key which is to be updated.
+ * Given a specified tenantId this will add to the usage object for the specified property.
* @param {string} property The property which is to be added to (within the nested usageQuota object).
* @param {number} usage The amount (this can be negative) to adjust the number by.
* @returns {Promise} When this completes the API key will now be up to date - the quota period may have
* also been reset after this call.
*/
-exports.update = async (apiKey, property, usage) => {
+exports.update = async (property, usage) => {
if (!env.USE_QUOTAS) {
return
}
+
try {
- await apiKeyTable.update(buildUpdateParams(apiKey, property, usage))
- } catch (err) {
- // conditional check means the condition failed, need to check why
- if (err.code === "ConditionalCheckFailedException") {
- // get the API key so we can check it
- const keyObj = await apiKeyTable.get({ primary: apiKey })
- // the usage quota or usage limits didn't exist
- if (keyObj && (keyObj.usageQuota == null || keyObj.usageLimits == null)) {
- keyObj.usageQuota =
- keyObj.usageQuota == null ? DEFAULT_USAGE : keyObj.usageQuota
- keyObj.usageLimits =
- keyObj.usageLimits == null ? DEFAULT_PLAN : keyObj.usageLimits
- keyObj.quotaReset = getNewQuotaReset()
- await apiKeyTable.put({ item: keyObj })
- return
- }
- // we have in fact breached the reset period
- else if (keyObj && keyObj.quotaReset <= Date.now()) {
- // update the quota reset period and reset the values for all properties
- keyObj.quotaReset = getNewQuotaReset()
- for (let prop of Object.keys(keyObj.usageQuota)) {
- if (prop === property) {
- keyObj.usageQuota[prop] = usage > 0 ? usage : 0
- } else {
- keyObj.usageQuota[prop] = 0
- }
- }
- await apiKeyTable.put({ item: keyObj })
- return
+ const db = getGlobalDB()
+ const quota = await getUsageQuotaDoc(db)
+
+ // Check if the quota needs reset
+ if (Date.now() >= quota.quotaReset) {
+ quota.quotaReset = getNewQuotaReset()
+ for (let prop of Object.keys(quota.usageQuota)) {
+ quota.usageQuota[prop] = 0
}
}
+
+ // increment the quota
+ quota.usageQuota[property] += usage
+
+ if (quota.usageQuota[property] >= quota.usageLimits[property]) {
+ throw new Error(
+ `You have exceeded your usage quota of ${quota.usageLimits[property]} ${property}.`
+ )
+ }
+
+ // update the usage quotas
+ await db.put(quota)
+ } catch (err) {
+ console.error(`Error updating usage quotas for ${property}`, err)
throw err
}
}
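
With the DynamoDB apiKeyTable gone, callers no longer thread an API key through; the quota doc lives in the tenant's global DB and is created on first use. A caller sketch:

const usage = require("../utilities/usageQuota")

try {
  // property plus delta only - the tenant is resolved from the async context
  await usage.update(usage.Properties.ROW, 1)
} catch (err) {
  // thrown once usageQuota[property] passes usageLimits[property]
}
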
diff --git a/packages/server/src/utilities/workerRequests.js b/packages/server/src/utilities/workerRequests.js
index 377658084f..2ace265ca0 100644
--- a/packages/server/src/utilities/workerRequests.js
+++ b/packages/server/src/utilities/workerRequests.js
@@ -34,7 +34,7 @@ function request(ctx, request) {
exports.request = request
// have to pass in the tenant ID as this could be coming from an automation
-exports.sendSmtpEmail = async (to, from, subject, contents) => {
+exports.sendSmtpEmail = async (to, from, subject, contents, automation) => {
// tenant ID will be set in header
const response = await fetch(
checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
@@ -46,6 +46,7 @@ exports.sendSmtpEmail = async (to, from, subject, contents) => {
contents,
subject,
purpose: "custom",
+ automation,
},
})
)
diff --git a/packages/server/yarn.lock b/packages/server/yarn.lock
index 6e7e7a868d..938077b90d 100644
--- a/packages/server/yarn.lock
+++ b/packages/server/yarn.lock
@@ -943,10 +943,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
-"@budibase/auth@^0.9.125-alpha.17":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.133.tgz#280d581820c9069b6bc021f88178c215ee48ad08"
- integrity sha512-DL7zIYRXE6xSKE/qbHMf/SX3+bceGxM4xzUmLTk4OHtEOP/vaUJr35tkhznAZF7VpUR9Yh20D6/Zw8z/3sxj/A==
+"@budibase/auth@^0.9.146-alpha.3":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/auth/-/auth-0.9.146.tgz#920fe02a78ca17903b72ccde307ca3e82b4176ad"
+ integrity sha512-T7DhI3WIolD0CjO2pRCEZfJBpJce4cmZWTFRIZ8lBnKe/6dxkK9fNrkZDYRhRkMwQbDQXoARADZM1hAfgUsSMg==
dependencies:
"@techpass/passport-openidconnect" "^0.3.0"
aws-sdk "^2.901.0"
@@ -966,10 +966,10 @@
uuid "^8.3.2"
zlib "^1.0.5"
-"@budibase/bbui@^0.9.133":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.133.tgz#91a2fb24abaaf91d2cb1e00eb51c493c1290f9ad"
- integrity sha512-xbMmc/hee1QRNW7TrbGUBmLr1hMHXqUDA6rdl9N2PGfHFuFWbqlD8PWYanHmLevVet+CjkuKGPSbBghFK2pQyQ==
+"@budibase/bbui@^0.9.139":
+ version "0.9.139"
+ resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.139.tgz#e6cfc90e8f6c2aa3526fc6a7bef251bccdaf51bb"
+ integrity sha512-HllzXwfCnxqlV/ifdOR4Got6yrvK2rUFwKUWQIcYU0wk8h6hwYmLehP7HqgBa6l8+bvO1Ep9g+rjP2xJPJG21w==
dependencies:
"@adobe/spectrum-css-workflow-icons" "^1.2.1"
"@spectrum-css/actionbutton" "^1.0.1"
@@ -1015,14 +1015,63 @@
svelte-flatpickr "^3.1.0"
svelte-portal "^1.0.0"
-"@budibase/client@^0.9.125-alpha.17":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.133.tgz#43748e189e9b92d99d1281ab62bd2c5ebed5dbab"
- integrity sha512-JrduL9iVMGalZyIUQ+1UN/dhrOZNRJwXU8B4r/eWhVoJf3f3bCuNfpMoT2LN3HY4ooyu37VehD+J5bdDsvlNPw==
+"@budibase/bbui@^0.9.146":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.146.tgz#7689b2c0f148321e62969181e3f6549f03dd3e78"
+ integrity sha512-Mq0oMyaN18Dg5e0IPtPXSGmu/TS4B74gW+l2ypJDNTzSRm934DOAPghDgkb53rFNZhsovCYjixJZmesUcv2o3g==
dependencies:
- "@budibase/bbui" "^0.9.133"
- "@budibase/standard-components" "^0.9.133"
- "@budibase/string-templates" "^0.9.133"
+ "@adobe/spectrum-css-workflow-icons" "^1.2.1"
+ "@spectrum-css/actionbutton" "^1.0.1"
+ "@spectrum-css/actiongroup" "^1.0.1"
+ "@spectrum-css/avatar" "^3.0.2"
+ "@spectrum-css/button" "^3.0.1"
+ "@spectrum-css/buttongroup" "^3.0.2"
+ "@spectrum-css/checkbox" "^3.0.2"
+ "@spectrum-css/dialog" "^3.0.1"
+ "@spectrum-css/divider" "^1.0.3"
+ "@spectrum-css/dropzone" "^3.0.2"
+ "@spectrum-css/fieldgroup" "^3.0.2"
+ "@spectrum-css/fieldlabel" "^3.0.1"
+ "@spectrum-css/icon" "^3.0.1"
+ "@spectrum-css/illustratedmessage" "^3.0.2"
+ "@spectrum-css/inputgroup" "^3.0.2"
+ "@spectrum-css/label" "^2.0.10"
+ "@spectrum-css/link" "^3.1.1"
+ "@spectrum-css/menu" "^3.0.1"
+ "@spectrum-css/modal" "^3.0.1"
+ "@spectrum-css/pagination" "^3.0.3"
+ "@spectrum-css/picker" "^1.0.1"
+ "@spectrum-css/popover" "^3.0.1"
+ "@spectrum-css/progressbar" "^1.0.2"
+ "@spectrum-css/progresscircle" "^1.0.2"
+ "@spectrum-css/radio" "^3.0.2"
+ "@spectrum-css/search" "^3.0.2"
+ "@spectrum-css/sidenav" "^3.0.2"
+ "@spectrum-css/statuslight" "^3.0.2"
+ "@spectrum-css/stepper" "^3.0.3"
+ "@spectrum-css/switch" "^1.0.2"
+ "@spectrum-css/table" "^3.0.1"
+ "@spectrum-css/tabs" "^3.0.1"
+ "@spectrum-css/tags" "^3.0.2"
+ "@spectrum-css/textfield" "^3.0.1"
+ "@spectrum-css/toast" "^3.0.1"
+ "@spectrum-css/tooltip" "^3.0.3"
+ "@spectrum-css/treeview" "^3.0.2"
+ "@spectrum-css/typography" "^3.0.1"
+ "@spectrum-css/underlay" "^2.0.9"
+ "@spectrum-css/vars" "^3.0.1"
+ dayjs "^1.10.4"
+ svelte-flatpickr "^3.1.0"
+ svelte-portal "^1.0.0"
+
+"@budibase/client@^0.9.146-alpha.3":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/client/-/client-0.9.146.tgz#d3b1bbd67245ab5a3870ccb580b9fc76f0344fd6"
+ integrity sha512-vd/bMmiQVghFH3Pa9jrGXjYAAKo+lGrwWyfUSdXAb4XP6gCSnMK5BXf8NliNrQzQVmruYT+2rGMsnc+9q4lW1g==
+ dependencies:
+ "@budibase/bbui" "^0.9.146"
+ "@budibase/standard-components" "^0.9.139"
+ "@budibase/string-templates" "^0.9.146"
regexparam "^1.3.0"
shortid "^2.2.15"
svelte-spa-router "^3.0.5"
@@ -1055,12 +1104,12 @@
to-gfm-code-block "^0.1.1"
year "^0.2.1"
-"@budibase/standard-components@^0.9.133":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.133.tgz#789c02b45dc3853b003822c09e18ce7ece4dfa29"
- integrity sha512-xcuwTxsqk1J/YmM4YjThO/Fm0eJ+aZWm0kbFgfN+dNN9fuPlsPOLmlVEWeOUPmBa5XfRyDbx6lDYj0PPEK8CvA==
+"@budibase/standard-components@^0.9.139":
+ version "0.9.139"
+ resolved "https://registry.yarnpkg.com/@budibase/standard-components/-/standard-components-0.9.139.tgz#cf8e2b759ae863e469e50272b3ca87f2827e66e3"
+ integrity sha512-Av0u9Eq2jerjhG6Atta+c0mOQGgE5K0QI3cm+8s/3Vki6/PXkO1YL5Alo3BOn9ayQAVZ/xp4rtZPuN/rzRibHw==
dependencies:
- "@budibase/bbui" "^0.9.133"
+ "@budibase/bbui" "^0.9.139"
"@spectrum-css/button" "^3.0.3"
"@spectrum-css/card" "^3.0.3"
"@spectrum-css/divider" "^1.0.3"
@@ -1073,10 +1122,10 @@
svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0"
-"@budibase/string-templates@^0.9.125-alpha.17", "@budibase/string-templates@^0.9.133":
- version "0.9.133"
- resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.133.tgz#221d81e080dc4485dcffa989d16e2bbed39f9055"
- integrity sha512-SMHcSPwHYdAqol9YCcMoYawp5/ETr9TqGZCUsL+hUUq+LritPwu/miQ++SVvRTQbOR7Mker0S9LO3H8mwYkW8w==
+"@budibase/string-templates@^0.9.146", "@budibase/string-templates@^0.9.146-alpha.3":
+ version "0.9.146"
+ resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-0.9.146.tgz#85249c7a8777a5f0c280af6f6d0e3d3ff0bf20b5"
+ integrity sha512-4f91SVUaTKseB+j7ycWbP54XiqiFZ6bZvcKgzsg1mLF+VVJ1/ALUsLvCRaj6SlcSHrhhALiGVR1z18KOyBWoKw==
dependencies:
"@budibase/handlebars-helpers" "^0.11.4"
dayjs "^1.10.4"
@@ -2921,9 +2970,9 @@ aws-sdk@^2.767.0:
xml2js "0.4.19"
aws-sdk@^2.901.0:
- version "2.989.0"
- resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.989.0.tgz#ed3cce6b94856b469784bc3312a0b64438b9fe67"
- integrity sha512-sMjvqeF9mEOxXkhOAUjCrBt2iYafclkmaIbgSdjJ+te7zKXeReqrc6P3VgIGUxU8kwmdSro0n1NjrXbzKQJhcw==
+ version "2.997.0"
+ resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.997.0.tgz#8598a5dd7bc6b6833a2fc3d737fba89020a79418"
+ integrity sha512-PiuDmC5hN+FsyLvl7GsZAnS6hQpo1pP+Ax2u8gyL19QlbBLwlhsFQF29vPcYatyv6WUxr51o6uymJdPxQg6uEA==
dependencies:
buffer "4.9.2"
events "1.1.1"
@@ -8131,6 +8180,11 @@ memory-pager@^1.0.2:
resolved "https://registry.yarnpkg.com/memory-pager/-/memory-pager-1.5.0.tgz#d8751655d22d384682741c972f2c3d6dfa3e66b5"
integrity sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==
+memorystream@^0.3.1:
+ version "0.3.1"
+ resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2"
+ integrity sha1-htcJCzDORV1j+64S3aUaR93K+bI=
+
merge-descriptors@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
@@ -8334,7 +8388,7 @@ mute-stream@0.0.8:
resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d"
integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
-mysql@^2.18.1:
+mysql@2.18.1:
version "2.18.1"
resolved "https://registry.yarnpkg.com/mysql/-/mysql-2.18.1.tgz#2254143855c5a8c73825e4522baf2ea021766717"
integrity sha512-Bca+gk2YWmqp2Uf6k5NFEurwY/0td0cpebAucFpY/3jhrwrVGuxU2uQFCHjU19SJfje0yQvi+rVWdq78hR5lig==
@@ -8908,9 +8962,9 @@ passport-oauth1@1.x.x:
utils-merge "1.x.x"
passport-oauth2@1.x.x:
- version "1.6.0"
- resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.0.tgz#5f599735e0ea40ea3027643785f81a3a9b4feb50"
- integrity sha512-emXPLqLcVEcLFR/QvQXZcwLmfK8e9CqvMgmOFJxcNT3okSFMtUbRRKpY20x5euD+01uHsjjCa07DYboEeLXYiw==
+ version "1.6.1"
+ resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b"
+ integrity sha512-ZbV43Hq9d/SBSYQ22GOiglFsjsD1YY/qdiptA+8ej+9C1dL1TVB+mBE5kDH/D4AJo50+2i8f4bx0vg4/yDDZCQ==
dependencies:
base64url "3.x.x"
oauth "0.9.x"
@@ -11110,9 +11164,9 @@ tmp@^0.0.33:
os-tmpdir "~1.0.2"
tmpl@1.0.x:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
- integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+ integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-buffer@^1.1.1:
version "1.1.1"
diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json
index 605348b061..bff798cf57 100644
--- a/packages/string-templates/package.json
+++ b/packages/string-templates/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
- "version": "0.9.125-alpha.17",
+ "version": "0.9.146-alpha.5",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",
diff --git a/packages/string-templates/yarn.lock b/packages/string-templates/yarn.lock
index 0188a9ec1d..82f99d7b31 100644
--- a/packages/string-templates/yarn.lock
+++ b/packages/string-templates/yarn.lock
@@ -4633,9 +4633,9 @@ time-stamp@^1.0.1:
integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM=
tmpl@1.0.x:
- version "1.0.4"
- resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1"
- integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=
+ version "1.0.5"
+ resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
+ integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==
to-fast-properties@^2.0.0:
version "2.0.0"
diff --git a/packages/worker/package.json b/packages/worker/package.json
index 2b3b969b06..7255ddb1f6 100644
--- a/packages/worker/package.json
+++ b/packages/worker/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
- "version": "0.9.125-alpha.17",
+ "version": "0.9.146-alpha.5",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@@ -25,8 +25,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.125-alpha.17",
- "@budibase/string-templates": "^0.9.125-alpha.17",
+ "@budibase/auth": "^0.9.146-alpha.5",
+ "@budibase/string-templates": "^0.9.146-alpha.5",
"@koa/router": "^8.0.0",
"@techpass/passport-openidconnect": "^0.3.0",
"aws-sdk": "^2.811.0",
diff --git a/packages/worker/scripts/dev/manage.js b/packages/worker/scripts/dev/manage.js
index f9a931110e..4eb29847bb 100644
--- a/packages/worker/scripts/dev/manage.js
+++ b/packages/worker/scripts/dev/manage.js
@@ -21,7 +21,8 @@ async function init() {
COUCH_DB_PASSWORD: "budibase",
// empty string is false
MULTI_TENANCY: "",
- ACCOUNT_PORTAL_URL: "http://localhost:3001",
+ DISABLE_ACCOUNT_PORTAL: "",
+ ACCOUNT_PORTAL_URL: "http://localhost:10001",
}
let envFile = ""
Object.keys(envFileJson).forEach(key => {
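
Since the loop above writes each key verbatim as `KEY=value`, and (per the in-code comment) an empty string reads as false, the generated dev `.env` would now contain roughly:

```
MULTI_TENANCY=
DISABLE_ACCOUNT_PORTAL=
ACCOUNT_PORTAL_URL=http://localhost:10001
```

i.e. the account portal stays enabled in dev, now pointing at port 10001.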
diff --git a/packages/worker/scripts/jestSetup.js b/packages/worker/scripts/jestSetup.js
index 374edfb946..89a517279a 100644
--- a/packages/worker/scripts/jestSetup.js
+++ b/packages/worker/scripts/jestSetup.js
@@ -1,5 +1,6 @@
const env = require("../src/environment")
+env._set("SELF_HOSTED", "1")
env._set("NODE_ENV", "jest")
env._set("JWT_SECRET", "test-jwtsecret")
env._set("LOG_LEVEL", "silent")
diff --git a/packages/worker/src/api/controllers/global/configs.js b/packages/worker/src/api/controllers/global/configs.js
index aa83fd695f..c0c300e4db 100644
--- a/packages/worker/src/api/controllers/global/configs.js
+++ b/packages/worker/src/api/controllers/global/configs.js
@@ -10,6 +10,7 @@ const email = require("../../../utilities/email")
const { upload, ObjectStoreBuckets } = require("@budibase/auth").objectStore
const CouchDB = require("../../../db")
const { getGlobalDB } = require("@budibase/auth/tenancy")
+const env = require("../../../environment")
exports.save = async function (ctx) {
const db = getGlobalDB()
@@ -174,7 +175,13 @@ exports.upload = async function (ctx) {
const file = ctx.request.files.file
const { type, name } = ctx.params
- const bucket = ObjectStoreBuckets.GLOBAL
+ let bucket
+ if (env.SELF_HOSTED) {
+ bucket = ObjectStoreBuckets.GLOBAL
+ } else {
+ bucket = ObjectStoreBuckets.GLOBAL_CLOUD
+ }
+
const key = `${type}/${name}`
await upload({
bucket,
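
Functionally this is a single two-way choice, so an equivalent, more compact form would be a ternary (behaviour unchanged):

```js
const bucket = env.SELF_HOSTED
  ? ObjectStoreBuckets.GLOBAL
  : ObjectStoreBuckets.GLOBAL_CLOUD
```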
diff --git a/packages/worker/src/api/controllers/global/email.js b/packages/worker/src/api/controllers/global/email.js
index 57b78a6d7a..e194a30862 100644
--- a/packages/worker/src/api/controllers/global/email.js
+++ b/packages/worker/src/api/controllers/global/email.js
@@ -2,8 +2,16 @@ const { sendEmail } = require("../../../utilities/email")
const { getGlobalDB } = require("@budibase/auth/tenancy")
exports.sendEmail = async ctx => {
- let { workspaceId, email, userId, purpose, contents, from, subject } =
- ctx.request.body
+ let {
+ workspaceId,
+ email,
+ userId,
+ purpose,
+ contents,
+ from,
+ subject,
+ automation,
+ } = ctx.request.body
let user
if (userId) {
const db = getGlobalDB()
@@ -15,6 +23,7 @@ exports.sendEmail = async ctx => {
contents,
from,
subject,
+ automation,
})
ctx.body = {
...response,
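
For reference, a payload matching the widened destructuring above might look like this (field values illustrative; `workspaceId` and `userId` remain optional):

```js
const body = {
  email: "user@example.com",
  from: "noreply@example.com",
  subject: "Hello",
  contents: "<p>Hi there</p>",
  purpose: "custom",
  automation: true, // new - passed through to the SMTP config lookup
}
```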
diff --git a/packages/worker/src/api/controllers/global/users.js b/packages/worker/src/api/controllers/global/users.js
index 8f754e2922..e43513de5e 100644
--- a/packages/worker/src/api/controllers/global/users.js
+++ b/packages/worker/src/api/controllers/global/users.js
@@ -1,8 +1,8 @@
const {
generateGlobalUserID,
getGlobalUserParams,
-
StaticDatabases,
+ generateNewUsageQuotaDoc,
} = require("@budibase/auth/db")
const { hash, getGlobalUserByEmail } = require("@budibase/auth").utils
const { UserStatus, EmailTemplatePurpose } = require("../../../constants")
@@ -11,6 +11,7 @@ const { sendEmail } = require("../../../utilities/email")
const { user: userCache } = require("@budibase/auth/cache")
const { invalidateSessions } = require("@budibase/auth/sessions")
const CouchDB = require("../../../db")
+const accounts = require("@budibase/auth/accounts")
const {
getGlobalDB,
getTenantId,
@@ -18,6 +19,8 @@ const {
tryAddTenant,
updateTenantId,
} = require("@budibase/auth/tenancy")
+const { removeUserFromInfoDB } = require("@budibase/auth/deprovision")
+const env = require("../../../environment")
const PLATFORM_INFO_DB = StaticDatabases.PLATFORM_INFO.name
@@ -31,7 +34,12 @@ async function allUsers() {
return response.rows.map(row => row.doc)
}
-async function saveUser(user, tenantId, hashPassword = true) {
+async function saveUser(
+ user,
+ tenantId,
+ hashPassword = true,
+ requirePassword = true
+) {
if (!tenantId) {
throw "No tenancy specified."
}
@@ -43,9 +51,26 @@ async function saveUser(user, tenantId, hashPassword = true) {
// make sure another user isn't using the same email
let dbUser
if (email) {
+ // check budibase users inside the tenant
dbUser = await getGlobalUserByEmail(email)
if (dbUser != null && (dbUser._id !== _id || Array.isArray(dbUser))) {
- throw "Email address already in use."
+ throw `Email address ${email} already in use.`
+ }
+
+ // check budibase users in other tenants
+ if (env.MULTI_TENANCY) {
+ dbUser = await getTenantUser(email)
+ if (dbUser != null && dbUser.tenantId !== tenantId) {
+ throw `Email address ${email} already in use.`
+ }
+ }
+
+ // check root account users in account portal
+ if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
+ const account = await accounts.getAccount(email)
+ if (account && account.verified && account.tenantId !== tenantId) {
+ throw `Email address ${email} already in use.`
+ }
}
} else {
dbUser = await db.get(_id)
@@ -57,12 +82,13 @@ async function saveUser(user, tenantId, hashPassword = true) {
hashedPassword = hashPassword ? await hash(password) : password
} else if (dbUser) {
hashedPassword = dbUser.password
- } else {
+ } else if (requirePassword) {
throw "Password must be specified."
}
_id = _id || generateGlobalUserID()
user = {
+ createdAt: Date.now(),
...dbUser,
...user,
_id,
@@ -106,16 +132,21 @@ exports.save = async ctx => {
}
}
+const parseBooleanParam = param => {
+ if (param && param === "false") {
+ return false
+ } else {
+ return true
+ }
+}
+
exports.adminUser = async ctx => {
const { email, password, tenantId } = ctx.request.body
// account portal sends a pre-hashed password - honour param to prevent double hashing
- let hashPassword = ctx.request.query.hashPassword
- if (hashPassword && hashPassword == "false") {
- hashPassword = false
- } else {
- hashPassword = true
- }
+ const hashPassword = parseBooleanParam(ctx.request.query.hashPassword)
+ // account portal sends no password for SSO users
+ const requirePassword = parseBooleanParam(ctx.request.query.requirePassword)
if (await doesTenantExist(tenantId)) {
ctx.throw(403, "Organisation already exists.")
@@ -128,6 +159,22 @@ exports.adminUser = async ctx => {
})
)
+ // write usage quotas for cloud
+ if (!env.SELF_HOSTED) {
+ // there is a scenario where the doc already exists - make sure it's clean
+ try {
+ const usageQuota = await db.get(
+ StaticDatabases.PLATFORM_INFO.docs.usageQuota
+ )
+ if (usageQuota) {
+ await db.remove(usageQuota._id, usageQuota._rev)
+ }
+ } catch (err) {
+ // ignore errors - the doc may simply not exist yet
+ }
+ await db.post(generateNewUsageQuotaDoc())
+ }
+
if (response.rows.some(row => row.doc.admin)) {
ctx.throw(
403,
@@ -138,6 +185,7 @@ exports.adminUser = async ctx => {
const user = {
email: email,
password: password,
+ createdAt: Date.now(),
roles: {},
builder: {
global: true,
@@ -148,7 +196,7 @@ exports.adminUser = async ctx => {
tenantId,
}
try {
- ctx.body = await saveUser(user, tenantId, hashPassword)
+ ctx.body = await saveUser(user, tenantId, hashPassword, requirePassword)
} catch (err) {
ctx.throw(err.status || 400, err)
}
@@ -157,6 +205,7 @@ exports.adminUser = async ctx => {
exports.destroy = async ctx => {
const db = getGlobalDB()
const dbUser = await db.get(ctx.params.id)
+ await removeUserFromInfoDB(dbUser)
await db.remove(dbUser._id, dbUser._rev)
await userCache.invalidateUser(dbUser._id)
await invalidateSessions(dbUser._id)
@@ -249,13 +298,22 @@ exports.find = async ctx => {
ctx.body = user
}
-exports.tenantUserLookup = async ctx => {
- const id = ctx.params.id
- // lookup, could be email or userId, either will return a doc
+// look up by email or userId - either identifier will return a doc
+const getTenantUser = async identifier => {
const db = new CouchDB(PLATFORM_INFO_DB)
try {
- ctx.body = await db.get(id)
+ return await db.get(identifier)
} catch (err) {
+ return null
+ }
+}
+
+exports.tenantUserLookup = async ctx => {
+ const id = ctx.params.id
+ const user = await getTenantUser(id)
+ if (user) {
+ ctx.body = user
+ } else {
ctx.throw(400, "No tenant user found.")
}
}
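
Note that `parseBooleanParam` treats only the literal string `"false"` as false - anything else, including a missing parameter, keeps the default of `true`:

```js
parseBooleanParam("false")   // false
parseBooleanParam("true")    // true
parseBooleanParam(undefined) // true - absent query params keep the default
parseBooleanParam("0")       // true - only the exact string "false" disables
```

This matters for `adminUser`: callers that omit `requirePassword` entirely still get password enforcement, while the account portal can pass `requirePassword=false` for SSO users.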
diff --git a/packages/worker/src/api/controllers/global/workspaces.js b/packages/worker/src/api/controllers/global/workspaces.js
index 95a1ec296d..48a710c92d 100644
--- a/packages/worker/src/api/controllers/global/workspaces.js
+++ b/packages/worker/src/api/controllers/global/workspaces.js
@@ -11,7 +11,7 @@ exports.save = async function (ctx) {
}
try {
- const response = await db.post(workspaceDoc)
+ const response = await db.put(workspaceDoc)
ctx.body = {
_id: response.id,
_rev: response.rev,
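
`db.put` is presumably used here because workspace docs arrive with a caller-assigned `_id` (and `_rev` on updates); `db.post` asks CouchDB to mint a fresh `_id`, which would create a brand new document on every save. The distinction, in CouchDB/PouchDB terms:

```js
// put: write/update a doc whose _id is already known - idempotent per _id
await db.put({ _id: "workspace_123", name: "Sales" })

// post: the server generates the _id - a new doc each call
await db.post({ name: "Sales" })
```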
diff --git a/packages/worker/src/api/controllers/system/environment.js b/packages/worker/src/api/controllers/system/environment.js
index 305ccd7937..a4022561d4 100644
--- a/packages/worker/src/api/controllers/system/environment.js
+++ b/packages/worker/src/api/controllers/system/environment.js
@@ -3,7 +3,8 @@ const env = require("../../../environment")
exports.fetch = async ctx => {
ctx.body = {
multiTenancy: !!env.MULTI_TENANCY,
- cloud: !(env.SELF_HOSTED === "1"),
+ cloud: !env.SELF_HOSTED,
accountPortalUrl: env.ACCOUNT_PORTAL_URL,
+ disableAccountPortal: env.DISABLE_ACCOUNT_PORTAL,
}
}
diff --git a/packages/worker/src/api/controllers/system/tenants.js b/packages/worker/src/api/controllers/system/tenants.js
index e053216dd9..a96c5e5f9f 100644
--- a/packages/worker/src/api/controllers/system/tenants.js
+++ b/packages/worker/src/api/controllers/system/tenants.js
@@ -1,5 +1,7 @@
const CouchDB = require("../../../db")
const { StaticDatabases } = require("@budibase/auth/db")
+const { getTenantId } = require("@budibase/auth/tenancy")
+const { deleteTenant } = require("@budibase/auth/deprovision")
exports.exists = async ctx => {
const tenantId = ctx.request.params
@@ -31,3 +33,19 @@ exports.fetch = async ctx => {
}
ctx.body = tenants
}
+
+exports.delete = async ctx => {
+ const tenantId = getTenantId()
+
+ if (ctx.params.tenantId !== tenantId) {
+ ctx.throw(403, "Unauthorized")
+ }
+
+ try {
+ await deleteTenant(tenantId)
+ ctx.status = 204
+ } catch (err) {
+ ctx.log.error(err)
+ throw err
+ }
+}
diff --git a/packages/worker/src/api/routes/global/users.js b/packages/worker/src/api/routes/global/users.js
index 9af249260d..1a04944a30 100644
--- a/packages/worker/src/api/routes/global/users.js
+++ b/packages/worker/src/api/routes/global/users.js
@@ -10,7 +10,7 @@ function buildAdminInitValidation() {
return joiValidator.body(
Joi.object({
email: Joi.string().required(),
- password: Joi.string().required(),
+ password: Joi.string(),
tenantId: Joi.string().required(),
})
.required()
diff --git a/packages/worker/src/api/routes/system/tenants.js b/packages/worker/src/api/routes/system/tenants.js
index 223ba9f26e..49c7509a67 100644
--- a/packages/worker/src/api/routes/system/tenants.js
+++ b/packages/worker/src/api/routes/system/tenants.js
@@ -7,5 +7,6 @@ const router = Router()
router
.get("/api/system/tenants/:tenantId/exists", controller.exists)
.get("/api/system/tenants", adminOnly, controller.fetch)
+ .delete("/api/system/tenants/:tenantId", adminOnly, controller.delete)
module.exports = router
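
With the route registered, tenant self-deletion could be exercised roughly like this (illustrative client call; authentication - an admin session, per `adminOnly` - is omitted, and `WORKER_URL`/`tenantId` are assumed to be in scope):

```js
const response = await fetch(
  `${WORKER_URL}/api/system/tenants/${tenantId}`,
  { method: "DELETE" }
)
// 204 on success; 403 if the path tenant doesn't match the caller's tenant
```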
diff --git a/packages/worker/src/constants/templates/base.hbs b/packages/worker/src/constants/templates/base.hbs
index 960d6faff1..438197b5d2 100644
--- a/packages/worker/src/constants/templates/base.hbs
+++ b/packages/worker/src/constants/templates/base.hbs
@@ -19,7 +19,7 @@
}
a {
- color: #3869D4;
+ color: #3869D4 !important;
}
a img {
@@ -115,8 +115,8 @@
border-bottom: 10px solid #3869D4;
border-left: 18px solid #3869D4;
display: inline-block;
- color: #FFF;
- text-decoration: none;
+ color: #FFF !important;
+ text-decoration: none !important;
border-radius: 3px;
box-shadow: 0 2px 3px rgba(0, 0, 0, 0.16);
-webkit-text-size-adjust: none;
diff --git a/packages/worker/src/environment.js b/packages/worker/src/environment.js
index 12113c087c..63115ea836 100644
--- a/packages/worker/src/environment.js
+++ b/packages/worker/src/environment.js
@@ -18,7 +18,7 @@ if (!LOADED && isDev() && !isTest()) {
module.exports = {
NODE_ENV: process.env.NODE_ENV,
- SELF_HOSTED: process.env.SELF_HOSTED,
+ SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
PORT: process.env.PORT,
CLUSTER_PORT: process.env.CLUSTER_PORT,
MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
@@ -32,7 +32,14 @@ module.exports = {
REDIS_PASSWORD: process.env.REDIS_PASSWORD,
INTERNAL_API_KEY: process.env.INTERNAL_API_KEY,
MULTI_TENANCY: process.env.MULTI_TENANCY,
+ DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
ACCOUNT_PORTAL_URL: process.env.ACCOUNT_PORTAL_URL,
+ SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
+ SMTP_USER: process.env.SMTP_USER,
+ SMTP_PASSWORD: process.env.SMTP_PASSWORD,
+ SMTP_HOST: process.env.SMTP_HOST,
+ SMTP_PORT: process.env.SMTP_PORT,
+ SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
_set(key, value) {
process.env[key] = value
module.exports[key] = value
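
`!!parseInt(...)` coerces the variable to a real boolean at load time, which is what lets the environment controller above use `!env.SELF_HOSTED` instead of the old string comparison. The edge cases, for reference:

```js
!!parseInt("1")       // true
!!parseInt("0")       // false
!!parseInt("")        // false - parseInt("") is NaN
!!parseInt(undefined) // false - unset vars read as cloud
```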
diff --git a/packages/worker/src/utilities/email.js b/packages/worker/src/utilities/email.js
index d22933ef36..14c836952e 100644
--- a/packages/worker/src/utilities/email.js
+++ b/packages/worker/src/utilities/email.js
@@ -1,4 +1,5 @@
const nodemailer = require("nodemailer")
+const env = require("../environment")
const { getScopedConfig } = require("@budibase/auth/db")
const { EmailTemplatePurpose, TemplateTypes, Configs } = require("../constants")
const { getTemplateByPurpose } = require("../constants/templates")
@@ -101,16 +102,35 @@ async function buildEmail(purpose, email, context, { user, contents } = {}) {
* Utility function for finding most valid SMTP configuration.
* @param {object} db The CouchDB database which is to be looked up within.
* @param {string|null} workspaceId If using finer grain control of configs a workspace can be used.
+ * @param {boolean|null} automation Whether or not the configuration is being fetched for an email automation.
* @return {Promise