Merge branch 'develop' of github.com:Budibase/budibase into cheeks-lab-day

This commit is contained in:
Andrew Kingston 2021-11-18 15:53:57 +00:00
commit 4ebb4d6b25
83 changed files with 10012 additions and 7881 deletions

View File

@ -18,7 +18,7 @@ jobs:
strategy:
matrix:
node-version: [12.x]
node-version: [14.x]
steps:
- uses: actions/checkout@v2

View File

@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
node-version: 12.x
node-version: 14.x
- run: yarn
- run: yarn bootstrap
- run: yarn lint

View File

@ -16,7 +16,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
node-version: 12.x
node-version: 14.x
- run: yarn
- run: yarn bootstrap

View File

@ -19,7 +19,7 @@ jobs:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
with:
node-version: 12.x
node-version: 14.x
- run: yarn
- run: yarn bootstrap
- run: yarn lint
@ -55,4 +55,4 @@ jobs:
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}
BUDIBASE_RELEASE_VERSION: ${{ steps.previoustag.outputs.tag }}

View File

@ -54,17 +54,51 @@
<br /><br />
## ✨ Features
- **Build and ship real software.** Unlike other platforms, with Budibase you build and ship single page applications. Budibase applications have performance baked in and can be designed responsively, providing your users with a great experience.
### Build and ship real software
Unlike other platforms, with Budibase you build and ship single page applications. Budibase applications have performance baked in and can be designed responsively, providing your users with a great experience.
<br /><br />
- **Open source and extensible.** Budibase is open-source - licensed as GPL v3. This should fill you with confidence that Budibase will always be around. You can also code against Budibase or fork it and make changes as you please, providing a developer-friendly experience.
### Open source and extensible
Budibase is open-source - licensed as GPL v3. This should fill you with confidence that Budibase will always be around. You can also code against Budibase or fork it and make changes as you please, providing a developer-friendly experience.
<br /><br />
- **Load data or start from scratch.** Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
### Load data or start from scratch
Budibase pulls in data from multiple sources, including MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, or a REST API. And unlike other platforms, with Budibase you can start from scratch and create business apps with no data sources. [Request new data sources](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
- **Design and build apps with powerful pre-made components.** Budibase comes out of the box with beautifully designed, powerful components which you can use like building blocks to build your UI. We also expose a lot of your favourite CSS styling options so you can go that extra creative mile. [Request new component](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/data_n1tlhf.png">
</p>
<br /><br />
- **Automate processes, integrate with other tools, and connect to webhooks.** Save time by automating manual processes and workflows. From connecting to webhooks, to automating emails, simply tell Budibase what to do and let it work for you. You can easily [create new automations for Budibase here](https://github.com/Budibase/automations) or [Request new automation](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
### Design and build apps with powerful pre-made components
- **Admin paradise.** Budibase is made to scale. With Budibase, you can self-host on your own infrastructure and globally manage users, onboarding, SMTP, apps, groups, theming and more. You can also provide users/groups with an app portal and disseminate user-management to the group manager.
Budibase comes out of the box with beautifully designed, powerful components which you can use like building blocks to build your UI. We also expose a lot of your favourite CSS styling options so you can go that extra creative mile. [Request new component](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase design" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970243/Out%20of%20beta%20launch/design-like-a-pro_qhlfeu.gif">
</p>
<br /><br />
### Automate processes, integrate with other tools, and connect to webhooks
Save time by automating manual processes and workflows. From connecting to webhooks, to automating emails, simply tell Budibase what to do and let it work for you. You can easily [create new automations for Budibase here](https://github.com/Budibase/automations) or [Request new automation](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
<p align="center">
<img alt="Budibase automations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970486/Out%20of%20beta%20launch/automation_riro7u.png">
</p>
<br /><br />
### Integrate with your favorite tools
Budibase integrates with a number of popular tools allowing you to build apps that perfectly fit your stack.
<p align="center">
<img alt="Budibase integrations" src="https://res.cloudinary.com/daog6scxm/image/upload/v1636970242/Out%20of%20beta%20launch/integrations_kc7dqt.png">
</p>
<br /><br />
### Admin paradise
Budibase is made to scale. With Budibase, you can self-host on your own infrastructure and globally manage users, onboarding, SMTP, apps, groups, theming and more. You can also provide users/groups with an app portal and disseminate user-management to the group manager.
- Check out the promo video: https://youtu.be/xoljVpty_Kw
<br /><br /><br />

View File

@ -1,5 +1,5 @@
{
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -59,6 +59,7 @@
"mode:self": "yarn env:selfhost:enable && yarn env:multi:disable && yarn env:account:disable",
"mode:cloud": "yarn env:selfhost:disable && yarn env:multi:enable && yarn env:account:disable",
"mode:account": "yarn mode:cloud && yarn env:account:enable",
"security:audit": "node scripts/audit.js",
"postinstall": "husky install"
}
}

View File

@ -1,3 +1,4 @@
module.exports = {
user: require("./src/cache/user"),
app: require("./src/cache/appMetadata"),
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",

packages/auth/src/cache/appMetadata.js (new file, 85 lines)
View File

@ -0,0 +1,85 @@
const redis = require("../redis/authRedis")
const { getCouch } = require("../db")
const { DocumentTypes } = require("../db/constants")
const AppState = {
INVALID: "invalid",
}
const EXPIRY_SECONDS = 3600
/**
* The default populate app metadata function
*/
const populateFromDB = async (appId, CouchDB = null) => {
if (!CouchDB) {
CouchDB = getCouch()
}
const db = new CouchDB(appId, { skip_setup: true })
return db.get(DocumentTypes.APP_METADATA)
}
const isInvalid = metadata => {
return !metadata || metadata.state === AppState.INVALID
}
/**
* Get the requested app metadata by id.
* Use redis cache to first read the app metadata.
* If not present, fall back to loading the app metadata directly and re-caching.
* @param {string} appId the id of the app to get metadata from.
* @param {object} CouchDB the database being passed
* @returns {object} the app metadata.
*/
exports.getAppMetadata = async (appId, CouchDB = null) => {
const client = await redis.getAppClient()
// try cache
let metadata = await client.get(appId)
if (!metadata) {
let expiry = EXPIRY_SECONDS
try {
metadata = await populateFromDB(appId, CouchDB)
} catch (err) {
// app DB left around, but no metadata, it is invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
// don't expire the reference to an invalid app, it'll only be
// updated if a metadata doc actually gets stored (app is remade/reverted)
expiry = undefined
} else {
throw err
}
}
// needed for Cypress/some scenarios where the caching happens
// so quickly that requests can get slightly out of sync - one request
// might store the invalid state just before another stores the valid one
if (isInvalid(metadata)) {
const temp = await client.get(appId)
if (temp) {
metadata = temp
}
}
await client.store(appId, metadata, expiry)
}
// we've stored an object in the cache to tell us that it is currently invalid
if (isInvalid(metadata)) {
throw { status: 404, message: "No app metadata found" }
}
return metadata
}
/**
* Invalidate/reset the cached metadata when a change occurs in the db.
* @param appId {string} the cache key to bust/update.
* @param newMetadata {object|undefined} optional - can simply provide the new metadata to update with.
* @return {Promise<void>} will respond with success when cache is updated.
*/
exports.invalidateAppMetadata = async (appId, newMetadata = null) => {
if (!appId) {
throw "Cannot invalidate if no app ID provided."
}
const client = await redis.getAppClient()
await client.delete(appId)
if (newMetadata) {
await client.store(appId, newMetadata, EXPIRY_SECONDS)
}
}
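For review context, a minimal usage sketch of this new cache module (the appId value is illustrative; the require path matches the @budibase/auth/cache index updated above and is how the server controllers later in this diff consume it):

const { app: appCache } = require("@budibase/auth/cache")

async function exampleUsage(CouchDB) {
  // read through the Redis cache, falling back to CouchDB and re-caching on a miss
  const metadata = await appCache.getAppMetadata("app_dev_example", CouchDB) // illustrative appId
  // after writing a new metadata doc, bust or refresh the cached copy
  await appCache.invalidateAppMetadata("app_dev_example", metadata)
}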

View File

@ -1,11 +1,14 @@
const { newid } = require("../hashing")
const Replication = require("./Replication")
const { DEFAULT_TENANT_ID } = require("../constants")
const { DEFAULT_TENANT_ID, Configs } = require("../constants")
const env = require("../environment")
const { StaticDatabases, SEPARATOR, DocumentTypes } = require("./constants")
const { getTenantId, getTenantIDFromAppID } = require("../tenancy")
const fetch = require("node-fetch")
const { getCouch } = require("./index")
const { getAppMetadata } = require("../cache/appMetadata")
const NO_APP_ERROR = "No app provided"
const UNICODE_MAX = "\ufff0"
@ -45,14 +48,23 @@ function getDocParams(docType, docId = null, otherProps = {}) {
}
exports.isDevAppID = appId => {
if (!appId) {
throw NO_APP_ERROR
}
return appId.startsWith(exports.APP_DEV_PREFIX)
}
exports.isProdAppID = appId => {
if (!appId) {
throw NO_APP_ERROR
}
return appId.startsWith(exports.APP_PREFIX) && !exports.isDevAppID(appId)
}
function isDevApp(app) {
if (!app) {
throw NO_APP_ERROR
}
return exports.isDevAppID(app.appId)
}
@ -232,16 +244,16 @@ exports.getAllApps = async (CouchDB, { dev, all, idsOnly } = {}) => {
if (idsOnly) {
return appDbNames
}
const appPromises = appDbNames.map(db =>
const appPromises = appDbNames.map(app =>
// skip setup otherwise databases could be re-created
new CouchDB(db, { skip_setup: true }).get(DocumentTypes.APP_METADATA)
getAppMetadata(app, CouchDB)
)
if (appPromises.length === 0) {
return []
} else {
const response = await Promise.allSettled(appPromises)
const apps = response
.filter(result => result.status === "fulfilled")
.filter(result => result.status === "fulfilled" && result.value != null)
.map(({ value }) => value)
if (!all) {
return apps.filter(app => {
@ -351,13 +363,50 @@ const getScopedFullConfig = async function (db, { type, user, workspace }) {
}
// Find the config with the most granular scope based on context
const scopedConfig = response.rows.sort(
let scopedConfig = response.rows.sort(
(a, b) => determineScore(a) - determineScore(b)
)[0]
// custom logic for settings doc
// always provide the platform URL
if (type === Configs.SETTINGS) {
if (scopedConfig && scopedConfig.doc) {
scopedConfig.doc.config.platformUrl = await getPlatformUrl(
scopedConfig.doc.config
)
} else {
scopedConfig = {
doc: {
config: {
platformUrl: await getPlatformUrl(),
},
},
}
}
}
return scopedConfig && scopedConfig.doc
}
const getPlatformUrl = async settings => {
let platformUrl = env.PLATFORM_URL
if (!env.SELF_HOSTED && env.MULTI_TENANCY) {
// cloud and multi tenant - add the tenant to the default platform url
const tenantId = getTenantId()
if (!platformUrl.includes("localhost:")) {
platformUrl = platformUrl.replace("://", `://${tenantId}.`)
}
} else {
// self hosted - check for platform url override
if (settings && settings.platformUrl) {
platformUrl = settings.platformUrl
}
}
return platformUrl ? platformUrl : "http://localhost:10000"
}
async function getScopedConfig(db, params) {
const configDoc = await getScopedFullConfig(db, params)
return configDoc && configDoc.config ? configDoc.config : configDoc
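To illustrate the new getPlatformUrl helper above (environment values are illustrative, not taken from this diff):

// cloud + multi tenancy, PLATFORM_URL=https://budibase.app, tenant "acme"
//   -> "https://acme.budibase.app"
// self hosted with a settings.platformUrl override
//   -> the override is returned unchanged
// neither configured
//   -> falls back to "http://localhost:10000"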

View File

@ -25,6 +25,7 @@ module.exports = {
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
PLATFORM_URL: process.env.PLATFORM_URL,
isTest,
_set(key, value) {
process.env[key] = value

View File

@ -92,6 +92,10 @@ module.exports = (
finalise(ctx, { authenticated, user, internal, version, publicEndpoint })
return next()
} catch (err) {
// invalid token, clear the cookie
if (err && err.name === "JsonWebTokenError") {
clearCookie(ctx, Cookies.Auth)
}
// allow configuring for public access
if ((opts && opts.publicAllowed) || publicEndpoint) {
finalise(ctx, { authenticated: false, version, publicEndpoint })

View File

@ -6,6 +6,7 @@ exports.ObjectStoreBuckets = {
APPS: "prod-budi-app-assets",
TEMPLATES: "templates",
GLOBAL: "global",
GLOBAL_CLOUD: "prod-budi-tenant-uploads",
}
exports.budibaseTempDir = function () {

View File

@ -1,16 +1,18 @@
const Client = require("./index")
const utils = require("./utils")
let userClient, sessionClient
let userClient, sessionClient, appClient
async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
}
process.on("exit", async () => {
if (userClient) await userClient.finish()
if (sessionClient) await sessionClient.finish()
if (appClient) await appClient.finish()
})
module.exports = {
@ -26,4 +28,10 @@ module.exports = {
}
return sessionClient
},
getAppClient: async () => {
if (!appClient) {
await init()
}
return appClient
},
}

View File

@ -15,6 +15,7 @@ exports.Databases = {
SESSIONS: "session",
USER_CACHE: "users",
FLAGS: "flags",
APP_METADATA: "appMetadata",
}
exports.SEPARATOR = SEPARATOR

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",

View File

@ -1,16 +1,67 @@
<script>
import "@spectrum-css/fieldlabel/dist/index-vars.css"
import Tooltip from "../Tooltip/Tooltip.svelte"
import Icon from "../Icon/Icon.svelte"
export let size = "M"
export let tooltip = ""
export let showTooltip = false
</script>
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
<slot />
</label>
{#if tooltip}
<div class="container">
<label
for=""
class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}
>
<slot />
</label>
<div class="icon-container">
<div
class="icon"
on:mouseover={() => (showTooltip = true)}
on:mouseleave={() => (showTooltip = false)}
>
<Icon name="InfoOutline" size="S" disabled={true} />
</div>
{#if showTooltip}
<div class="tooltip">
<Tooltip textWrapping={true} direction={"bottom"} text={tooltip} />
</div>
{/if}
</div>
</div>
{:else}
<label for="" class={`spectrum-FieldLabel spectrum-FieldLabel--size${size}`}>
<slot />
</label>
{/if}
<style>
label {
padding: 0;
white-space: nowrap;
}
.container {
display: flex;
}
.icon-container {
position: relative;
display: flex;
justify-content: center;
margin-top: 1px;
margin-left: 5px;
margin-right: 5px;
}
.tooltip {
position: absolute;
display: flex;
justify-content: center;
top: 15px;
z-index: 1;
width: 160px;
}
.icon {
transform: scale(0.75);
}
</style>

View File

@ -3,12 +3,22 @@
export let direction = "top"
export let text = ""
export let textWrapping = false
</script>
<span class="u-tooltip-showOnHover tooltip">
<slot />
<div class={`spectrum-Tooltip spectrum-Tooltip--${direction}`}>
<!-- Showing / Hiding a text wrapped tooltip should be handled outside the component -->
{#if textWrapping}
<span class="spectrum-Tooltip spectrum-Tooltip--{direction} is-open">
<span class="spectrum-Tooltip-label">{text}</span>
<span class="spectrum-Tooltip-tip" />
</div>
</span>
</span>
{:else}
<!-- The default show on hover tooltip does not support text wrapping -->
<span class="u-tooltip-showOnHover tooltip">
<slot />
<div class={`spectrum-Tooltip spectrum-Tooltip--${direction}`}>
<span class="spectrum-Tooltip-label">{text}</span>
<span class="spectrum-Tooltip-tip" />
</div>
</span>
{/if}

View File

@ -17,6 +17,7 @@ process.env.JWT_SECRET = cypressConfig.env.JWT_SECRET
process.env.COUCH_URL = `leveldb://${tmpdir}/.data/`
process.env.SELF_HOSTED = 1
process.env.WORKER_URL = "http://localhost:10002/"
process.env.APPS_URL = `http://localhost:${MAIN_PORT}/`
process.env.MINIO_URL = `http://localhost:${MAIN_PORT}/`
process.env.MINIO_ACCESS_KEY = "budibase"
process.env.MINIO_SECRET_KEY = "budibase"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@ -9,7 +9,7 @@
"test": "jest",
"test:watch": "jest --watchAll",
"dev:builder": "routify -c dev:vite",
"dev:vite": "vite",
"dev:vite": "vite --host 0.0.0.0",
"rollup": "rollup -c -w",
"cy:setup": "node ./cypress/setup.js",
"cy:run": "cypress run",
@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^0.9.185-alpha.2",
"@budibase/client": "^0.9.185-alpha.2",
"@budibase/bbui": "^0.9.185-alpha.10",
"@budibase/client": "^0.9.185-alpha.10",
"@budibase/colorpicker": "1.1.2",
"@budibase/string-templates": "^0.9.185-alpha.2",
"@budibase/string-templates": "^0.9.185-alpha.10",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
@ -91,7 +91,7 @@
"@babel/runtime": "^7.13.10",
"@rollup/plugin-replace": "^2.4.2",
"@roxi/routify": "2.18.0",
"@sveltejs/vite-plugin-svelte": "^1.0.0-next.5",
"@sveltejs/vite-plugin-svelte": "1.0.0-next.19",
"@testing-library/jest-dom": "^5.11.10",
"@testing-library/svelte": "^3.0.0",
"babel-jest": "^26.6.3",

View File

@ -69,6 +69,7 @@
({ _id }) => _id === $views.selected?.tableId
)
$: fields = viewTable && Object.keys(viewTable.schema)
$: schema = viewTable && viewTable.schema ? viewTable.schema : {}
function saveView() {
views.save(view)
@ -90,29 +91,29 @@
function isMultipleChoice(field) {
return (
viewTable.schema[field]?.constraints?.inclusion?.length ||
viewTable.schema[field]?.type === "boolean"
schema[field]?.constraints?.inclusion?.length ||
schema[field]?.type === "boolean"
)
}
function fieldOptions(field) {
return viewTable.schema[field]?.type === "options"
? viewTable.schema[field]?.constraints.inclusion
return schema[field]?.type === "options"
? schema[field]?.constraints.inclusion
: [true, false]
}
function isDate(field) {
return viewTable.schema[field]?.type === "datetime"
return schema[field]?.type === "datetime"
}
function isNumber(field) {
return viewTable.schema[field]?.type === "number"
return schema[field]?.type === "number"
}
const fieldChanged = filter => ev => {
// Reset if type changed
const oldType = viewTable.schema[filter.key]?.type
const newType = viewTable.schema[ev.detail]?.type
const oldType = schema[filter.key]?.type
const newType = schema[ev.detail]?.type
if (filter.key && ev.detail && oldType !== newType) {
filter.value = ""
}

View File

@ -23,8 +23,6 @@
// Show updated permissions in UI: REMOVE
permissions = await permissionsStore.forResource(resourceId)
notifications.success("Updated permissions.")
// TODO: update permissions
// permissions[]
}
</script>

View File

@ -19,15 +19,24 @@
import IntegrationQueryEditor from "components/integration/index.svelte"
import ExternalDataSourceTable from "components/backend/DataTable/ExternalDataSourceTable.svelte"
import ParameterBuilder from "components/integration/QueryParameterBuilder.svelte"
import { datasources, integrations, queries } from "stores/backend"
import {
datasources,
integrations,
queries,
roles,
permissions,
} from "stores/backend"
import { capitalise } from "../../helpers"
import CodeMirrorEditor from "components/common/CodeMirrorEditor.svelte"
import { Roles } from "constants/backend"
import { onMount } from "svelte"
export let query
export let fields = []
let parameters
let data = []
let roleId
const transformerDocs =
"https://docs.budibase.com/building-apps/data/transformers"
const typeOptions = [
@ -70,7 +79,22 @@
}
function resetDependentFields() {
if (query.fields.extra) query.fields.extra = {}
if (query.fields.extra) {
query.fields.extra = {}
}
}
async function updateRole(role, id = null) {
roleId = role
if (query?._id || id) {
for (let level of ["read", "write"]) {
await permissions.save({
level,
role,
resource: query?._id || id,
})
}
}
}
function populateExtraQuery(extraQueryFields) {
@ -122,6 +146,7 @@
async function saveQuery() {
try {
const { _id } = await queries.save(query.datasourceId, query)
await updateRole(roleId, _id)
notifications.success(`Query saved successfully.`)
$goto(`../${_id}`)
} catch (err) {
@ -129,6 +154,18 @@
notifications.error(`Error creating query. ${err.message}`)
}
}
onMount(async () => {
if (!query || !query._id) {
roleId = Roles.BASIC
return
}
try {
roleId = (await permissions.forResource(query._id))["read"]
} catch (err) {
roleId = Roles.BASIC
}
})
</script>
<Layout gap="S" noPadding>
@ -151,6 +188,16 @@
queryConfig[verb]?.displayName || capitalise(verb)}
/>
</div>
<div class="config-field">
<Label>Access level</Label>
<Select
value={roleId}
on:change={e => updateRole(e.detail)}
options={$roles}
getOptionLabel={x => x.name}
getOptionValue={x => x._id}
/>
</div>
{#if integrationInfo?.extra && query.queryVerb}
<ExtraQueryConfig
{query}
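With the permissions store shown further down in this diff, selecting an access level and saving a query boils down to two requests (role and query ids are illustrative):

// POST /api/permission/BASIC/query_abc123/read
// POST /api/permission/BASIC/query_abc123/write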

View File

@ -21,26 +21,25 @@
} from "@budibase/bbui"
import { onMount } from "svelte"
import api from "builderStore/api"
import { organisation, auth, admin } from "stores/portal"
import { organisation, admin } from "stores/portal"
import { uuid } from "builderStore/uuid"
import analytics, { Events } from "analytics"
$: tenantId = $auth.tenantId
$: multiTenancyEnabled = $admin.multiTenancy
const ConfigTypes = {
Google: "google",
OIDC: "oidc",
}
function callbackUrl(tenantId, end) {
let url = `/api/global/auth`
if (multiTenancyEnabled && tenantId) {
url += `/${tenantId}`
}
url += end
return url
}
// Some older Google configs contain a manually specified value - retain the functionality to edit the field
// When there is no value, or we are in the cloud, prohibit editing the field - the platform URL must be used to change it
$: googleCallbackUrl = undefined
$: googleCallbackReadonly = $admin.cloud || !googleCallbackUrl
// Indicate to the user that the callback is based on the platform URL
// If there is an existing value, indicate that it may be removed to return to default behaviour
$: googleCallbackTooltip = googleCallbackReadonly
? "Vist the organisation page to update the platform URL"
: "Leave blank to use the default callback URL"
$: GoogleConfigFields = {
Google: [
@ -49,8 +48,9 @@
{
name: "callbackURL",
label: "Callback URL",
readonly: true,
placeholder: callbackUrl(tenantId, "/google/callback"),
readonly: googleCallbackReadonly,
tooltip: googleCallbackTooltip,
placeholder: $organisation.googleCallbackUrl,
},
],
}
@ -62,9 +62,10 @@
{ name: "clientSecret", label: "Client Secret" },
{
name: "callbackURL",
label: "Callback URL",
readonly: true,
placeholder: callbackUrl(tenantId, "/oidc/callback"),
tooltip: "Vist the organisation page to update the platform URL",
label: "Callback URL",
placeholder: $organisation.oidcCallbackUrl,
},
],
}
@ -241,6 +242,8 @@
providers.google = googleDoc
}
googleCallbackUrl = providers?.google?.config?.callbackURL
// Get the list of user-uploaded logos and push them to the dropdown options.
// This needs to be done before the config call so they're available when the dropdown renders
const res = await api.get(`/api/global/configs/logos_oidc`)
@ -308,7 +311,7 @@
<Layout gap="XS" noPadding>
{#each GoogleConfigFields.Google as field}
<div class="form-row">
<Label size="L">{field.label}</Label>
<Label size="L" tooltip={field.tooltip}>{field.label}</Label>
<Input
bind:value={providers.google.config[field.name]}
readonly={field.readonly}
@ -346,7 +349,7 @@
<Layout gap="XS" noPadding>
{#each OIDCConfigFields.Oidc as field}
<div class="form-row">
<Label size="L">{field.label}</Label>
<Label size="L" tooltip={field.tooltip}>{field.label}</Label>
<Input
bind:value={providers.oidc.config.configs[0][field.name]}
readonly={field.readonly}

View File

@ -116,7 +116,11 @@
</Layout>
<div class="fields">
<div class="field">
<Label size="L">Platform URL</Label>
<Label
size="L"
tooltip={"Update the Platform URL to match your Budibase web URL. This keeps email templates and authentication configs up to date."}
>Platform URL</Label
>
<Input thin bind:value={$values.platformUrl} />
</div>
</div>
@ -135,6 +139,7 @@
.field {
display: grid;
grid-template-columns: 100px 1fr;
grid-gap: var(--spacing-l);
align-items: center;
}
.file {

View File

@ -95,6 +95,7 @@ export function createDatasourcesStore() {
return { list: sources, selected: null }
})
await queries.fetch()
return response
},
removeSchemaError: () => {

View File

@ -10,13 +10,11 @@ export function createPermissionStore() {
const response = await api.post(
`/api/permission/${role}/${resource}/${level}`
)
const json = await response.json()
return json
return await response.json()
},
forResource: async resourceId => {
const response = await api.get(`/api/permission/${resourceId}`)
const json = await response.json()
return json
return await response.json()
},
}
}

View File

@ -3,12 +3,14 @@ import api from "builderStore/api"
import { auth } from "stores/portal"
const DEFAULT_CONFIG = {
platformUrl: "http://localhost:10000",
platformUrl: "",
logoUrl: undefined,
docsUrl: undefined,
company: "Budibase",
oidc: undefined,
google: undefined,
oidcCallbackUrl: "",
googleCallbackUrl: "",
}
export function createOrganisationStore() {
@ -28,6 +30,13 @@ export function createOrganisationStore() {
}
async function save(config) {
// delete non-persisted fields
const storeConfig = get(store)
delete storeConfig.oidc
delete storeConfig.google
delete storeConfig.oidcCallbackUrl
delete storeConfig.googleCallbackUrl
const res = await api.post("/api/global/configs", {
type: "settings",
config: { ...get(store), ...config },

View File

@ -1,4 +1,4 @@
import svelte from "@sveltejs/vite-plugin-svelte"
import { svelte } from "@sveltejs/vite-plugin-svelte"
import replace from "@rollup/plugin-replace"
import path from "path"
@ -6,6 +6,11 @@ import path from "path"
export default ({ mode }) => {
const isProduction = mode === "production"
return {
server: {
fs: {
strict: false,
},
},
base: "/builder/",
build: {
minify: isProduction,

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^0.9.185-alpha.2",
"@budibase/bbui": "^0.9.185-alpha.10",
"@budibase/standard-components": "^0.9.139",
"@budibase/string-templates": "^0.9.185-alpha.2",
"@budibase/string-templates": "^0.9.185-alpha.10",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"

File diff suppressed because it is too large

View File

@ -1,4 +1,4 @@
FROM node:12-alpine
FROM node:14-alpine
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@ -68,9 +68,11 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.185-alpha.2",
"@budibase/client": "^0.9.185-alpha.2",
"@budibase/string-templates": "^0.9.185-alpha.2",
"@budibase/auth": "^0.9.185-alpha.10",
"@budibase/client": "^0.9.185-alpha.10",
"@budibase/string-templates": "^0.9.185-alpha.10",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
@ -80,7 +82,6 @@
"aws-sdk": "^2.767.0",
"bcryptjs": "2.4.3",
"bull": "^3.22.4",
"bull-board": "^2.0.1",
"chmodr": "1.2.0",
"csvtojson": "2.0.10",
"dotenv": "8.2.0",
@ -120,6 +121,7 @@
"uuid": "3.3.2",
"validate.js": "0.13.1",
"vm2": "^3.9.3",
"worker-farm": "^1.7.0",
"yargs": "13.2.4",
"zlib": "1.0.5"
},
@ -138,7 +140,6 @@
"copyfiles": "^2.4.1",
"docker-compose": "^0.23.6",
"eslint": "^6.8.0",
"express": "^4.17.1",
"jest": "^27.0.5",
"nodemon": "^2.0.4",
"prettier": "^2.3.1",

View File

@ -0,0 +1,33 @@
/******************************************************
* This script just makes it easy to re-create *
* a Cypress-like environment for testing the backend *
******************************************************/
const path = require("path")
const tmpdir = path.join(require("os").tmpdir(), ".budibase")
const MAIN_PORT = "10001"
const WORKER_PORT = "10002"
// @ts-ignore
process.env.PORT = MAIN_PORT
process.env.BUDIBASE_API_KEY = "6BE826CB-6B30-4AEC-8777-2E90464633DE"
process.env.NODE_ENV = "cypress"
process.env.ENABLE_ANALYTICS = "false"
process.env.JWT_SECRET = "budibase"
process.env.COUCH_URL = `leveldb://${tmpdir}/.data/`
process.env.SELF_HOSTED = "1"
process.env.WORKER_URL = `http://localhost:${WORKER_PORT}/`
process.env.MINIO_URL = `http://localhost:${MAIN_PORT}/`
process.env.MINIO_ACCESS_KEY = "budibase"
process.env.MINIO_SECRET_KEY = "budibase"
process.env.COUCH_DB_USER = "budibase"
process.env.COUCH_DB_PASSWORD = "budibase"
process.env.INTERNAL_API_KEY = "budibase"
process.env.ALLOW_DEV_AUTOMATIONS = "1"
// don't make this a variable or top level require
// it will cause environment module to be loaded prematurely
const server = require("../src/app")
process.env.PORT = WORKER_PORT
const worker = require("../../worker/src/index")
process.env.PORT = MAIN_PORT

View File

@ -45,6 +45,8 @@ const {
} = require("../../utilities/fileSystem/clientLibrary")
const { getTenantId, isMultiTenant } = require("@budibase/auth/tenancy")
const { syncGlobalUsers } = require("./user")
const { app: appCache } = require("@budibase/auth/cache")
const { cleanupAutomations } = require("../../automations/utils")
const URL_REGEX_SLASH = /\/|\\/g
@ -254,6 +256,7 @@ exports.create = async ctx => {
await createApp(appId)
}
await appCache.invalidateAppMetadata(appId, newApplication)
ctx.status = 200
ctx.body = newApplication
}
@ -317,8 +320,12 @@ exports.delete = async ctx => {
if (!env.isTest() && !ctx.query.unpublish) {
await deleteApp(ctx.params.appId)
}
if (ctx.query && ctx.query.unpublish) {
await cleanupAutomations(ctx.params.appId)
}
// make sure the app/role doesn't stick around after the app has been deleted
await removeAppFromUserRoles(ctx, ctx.params.appId)
await appCache.invalidateAppMetadata(ctx.params.appId)
ctx.status = 200
ctx.body = result
@ -387,7 +394,10 @@ const updateAppPackage = async (ctx, appPackage, appId) => {
// Redis, shouldn't ever store it
delete newAppPackage.lockedBy
return await db.put(newAppPackage)
const response = await db.put(newAppPackage)
// remove any cached metadata, so that it will be updated
await appCache.invalidateAppMetadata(appId)
return response
}
const createEmptyAppPackage = async (ctx, app) => {

View File

@ -119,8 +119,16 @@ exports.destroy = async function (ctx) {
const db = new CouchDB(ctx.appId)
// Delete all queries for the datasource
const rows = await db.allDocs(getQueryParams(ctx.params.datasourceId, null))
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
const queries = await db.allDocs(
getQueryParams(ctx.params.datasourceId, null)
)
await db.bulkDocs(
queries.rows.map(row => ({
_id: row.id,
_rev: row.value.rev,
_deleted: true,
}))
)
// delete the datasource
await db.remove(ctx.params.datasourceId, ctx.params.revId)

View File

@ -6,6 +6,7 @@ const {
disableAllCrons,
enableCronTrigger,
} = require("../../../automations/utils")
const { app: appCache } = require("@budibase/auth/cache")
// the max time we can wait for an invalidation to complete before considering it failed
const MAX_PENDING_TIME_MS = 30 * 60000
@ -103,6 +104,7 @@ async function deployApp(deployment) {
appDoc.appId = productionAppId
appDoc.instance._id = productionAppId
await db.put(appDoc)
await appCache.invalidateAppMetadata(productionAppId)
console.log("New app doc written successfully.")
await initDeployedApp(productionAppId)
console.log("Deployed app initialised, setting deployment to successful")

View File

@ -6,6 +6,7 @@ const { request } = require("../../utilities/workerRequests")
const { clearLock } = require("../../utilities/redis")
const { Replication } = require("@budibase/auth").db
const { DocumentTypes } = require("../../db/utils")
const { app: appCache } = require("@budibase/auth/cache")
async function redirect(ctx, method, path = "global") {
const { devPath } = ctx.params
@ -24,7 +25,8 @@ async function redirect(ctx, method, path = "global") {
)
)
if (response.status !== 200) {
ctx.throw(response.status, response.statusText)
const err = await response.text()
ctx.throw(400, err)
}
const cookie = response.headers.get("set-cookie")
if (cookie) {
@ -106,6 +108,7 @@ exports.revert = async ctx => {
appDoc.appId = appId
appDoc.instance._id = appId
await db.put(appDoc)
await appCache.invalidateAppMetadata(appId)
ctx.body = {
message: "Reverted changes successfully.",
}

View File

@ -1,10 +1,11 @@
const { processString } = require("@budibase/string-templates")
const CouchDB = require("../../db")
const { generateQueryID, getQueryParams } = require("../../db/utils")
const { integrations } = require("../../integrations")
const { BaseQueryVerbs } = require("../../constants")
const env = require("../../environment")
const ScriptRunner = require("../../utilities/scriptRunner")
const { Thread, ThreadType } = require("../../threads")
const Runner = new Thread(ThreadType.QUERY, { timeoutMs: 10000 })
// simple function to append "readable" to all read queries
function enrichQueries(input) {
@ -18,47 +19,6 @@ function enrichQueries(input) {
return wasArray ? queries : queries[0]
}
function formatResponse(resp) {
if (typeof resp === "string") {
try {
resp = JSON.parse(resp)
} catch (err) {
resp = { response: resp }
}
}
return resp
}
async function runAndTransform(
integration,
queryVerb,
enrichedQuery,
transformer
) {
let rows = formatResponse(await integration[queryVerb](enrichedQuery))
// transform as required
if (transformer) {
const runner = new ScriptRunner(transformer, { data: rows })
rows = runner.execute()
}
// needs to be an array for the next step
if (!Array.isArray(rows)) {
rows = [rows]
}
// map into JSON if just raw primitive here
if (rows.find(row => typeof row !== "object")) {
rows = rows.map(value => ({ value }))
}
// get all the potential fields in the schema
let keys = rows.flatMap(Object.keys)
return { rows, keys }
}
exports.fetch = async function (ctx) {
const db = new CouchDB(ctx.appId)
@ -143,30 +103,23 @@ exports.preview = async function (ctx) {
const datasource = await db.get(ctx.request.body.datasourceId)
const Integration = integrations[datasource.source]
if (!Integration) {
ctx.throw(400, "Integration type does not exist.")
}
const { fields, parameters, queryVerb, transformer } = ctx.request.body
const enrichedQuery = await enrichQueryFields(fields, parameters)
const integration = new Integration(datasource.config)
const { rows, keys } = await runAndTransform(
integration,
queryVerb,
enrichedQuery,
transformer
)
try {
const { rows, keys } = await Runner.run({
datasource,
queryVerb,
query: enrichedQuery,
transformer,
})
ctx.body = {
rows,
schemaFields: [...new Set(keys)],
}
// cleanup
if (integration.end) {
integration.end()
ctx.body = {
rows,
schemaFields: [...new Set(keys)],
}
} catch (err) {
ctx.throw(400, err)
}
}
@ -176,29 +129,22 @@ exports.execute = async function (ctx) {
const query = await db.get(ctx.params.queryId)
const datasource = await db.get(query.datasourceId)
const Integration = integrations[datasource.source]
if (!Integration) {
ctx.throw(400, "Integration type does not exist.")
}
const enrichedQuery = await enrichQueryFields(
query.fields,
ctx.request.body.parameters
)
const integration = new Integration(datasource.config)
// call the relevant CRUD method on the integration class
const { rows } = await runAndTransform(
integration,
query.queryVerb,
enrichedQuery,
query.transformer
)
ctx.body = rows
// cleanup
if (integration.end) {
integration.end()
try {
const { rows } = await Runner.run({
datasource,
queryVerb: query.queryVerb,
query: enrichedQuery,
transformer: query.transformer,
})
ctx.body = rows
} catch (err) {
ctx.throw(400, err)
}
}

View File

@ -66,6 +66,7 @@ router
)
.get(
"/api/queries/:queryId",
paramResource("queryId"),
authorized(PermissionTypes.QUERY, PermissionLevels.READ),
queryController.find
)

View File

@ -41,16 +41,8 @@ app.use(
)
if (!env.isTest()) {
const bullApp = bullboard.init()
app.use(async (ctx: ExtendableContext, next: () => any) => {
if (ctx.path.startsWith(bullboard.pathPrefix)) {
ctx.status = 200
ctx.respond = false
bullApp(ctx.req, ctx.res)
} else {
await next()
}
})
const plugin = bullboard.init()
app.use(plugin)
}
app.context.eventEmitter = eventEmitter

View File

@ -1,6 +1,6 @@
const { createBullBoard } = require("bull-board")
const { BullAdapter } = require("bull-board/bullAdapter")
const express = require("express")
const { createBullBoard } = require("@bull-board/api")
const { BullAdapter } = require("@bull-board/api/bullAdapter")
const { KoaAdapter } = require("@bull-board/koa")
const env = require("../environment")
const Queue = env.isTest()
? require("../utilities/queue/inMemoryQueue")
@ -9,23 +9,40 @@ const { JobQueues } = require("../constants")
const { utils } = require("@budibase/auth/redis")
const { opts, redisProtocolUrl } = utils.getRedisOptions()
const redisConfig = redisProtocolUrl || { redis: opts }
let automationQueue = new Queue(JobQueues.AUTOMATIONS, redisConfig)
const CLEANUP_PERIOD_MS = 60 * 1000
const queueConfig = redisProtocolUrl || { redis: opts }
let cleanupInternal = null
exports.pathPrefix = "/bulladmin"
let automationQueue = new Queue(JobQueues.AUTOMATIONS, queueConfig)
async function cleanup() {
await automationQueue.clean(CLEANUP_PERIOD_MS, "completed")
}
const PATH_PREFIX = "/bulladmin"
exports.init = () => {
const expressApp = express()
// clean up completed jobs every minute (CLEANUP_PERIOD_MS)
if (!cleanupInternal) {
cleanupInternal = setInterval(cleanup, CLEANUP_PERIOD_MS)
// fire off an initial cleanup
cleanup().catch(err => {
console.error(`Unable to cleanup automation queue initially - ${err}`)
})
}
// Set up queues for bull board admin
const queues = [automationQueue]
const adapters = []
const serverAdapter = new KoaAdapter()
for (let queue of queues) {
adapters.push(new BullAdapter(queue))
}
const { router } = createBullBoard(adapters)
expressApp.use(exports.pathPrefix, router)
return expressApp
createBullBoard({
queues: adapters,
serverAdapter,
})
serverAdapter.setBasePath(PATH_PREFIX)
return serverAdapter.registerPlugin()
}
exports.queue = automationQueue

View File

@ -1,5 +1,5 @@
jest.mock("../../utilities/usageQuota")
jest.mock("../thread")
jest.mock("../../threads/automation")
jest.mock("../../utilities/redis", () => ({
init: jest.fn(),
checkTestFlag: () => {
@ -11,8 +11,7 @@ jest.spyOn(global.console, "error")
require("../../environment")
const automation = require("../index")
const usageQuota = require("../../utilities/usageQuota")
const thread = require("../thread")
const thread = require("../../threads/automation")
const triggers = require("../triggers")
const { basicAutomation } = require("../../tests/utilities/structures")
const { wait } = require("../../utilities")
@ -62,7 +61,7 @@ describe("Run through some parts of the automations system", () => {
}
}
}
}))
}), expect.any(Function))
})
it("should be able to clean inputs with the utilities", () => {

View File

@ -11,6 +11,10 @@ const utils = require("./utils")
const env = require("../environment")
const TRIGGER_DEFINITIONS = definitions
const JOB_OPTS = {
removeOnComplete: true,
removeOnFail: true,
}
async function queueRelevantRowAutomations(event, eventType) {
if (event.appId == null) {
@ -47,7 +51,7 @@ async function queueRelevantRowAutomations(event, eventType) {
automationTrigger.inputs &&
automationTrigger.inputs.tableId === event.row.tableId
) {
await queue.add({ automation, event })
await queue.add({ automation, event }, JOB_OPTS)
}
}
}
@ -86,7 +90,7 @@ exports.externalTrigger = async function (
automation.definition.trigger != null &&
automation.definition.trigger.stepId === definitions.APP.stepId &&
automation.definition.trigger.stepId === "APP" &&
!checkTestFlag(automation._id)
!(await checkTestFlag(automation._id))
) {
// values are likely to be submitted as strings, so we shall convert to correct type
const coercedFields = {}
@ -100,7 +104,7 @@ exports.externalTrigger = async function (
if (getResponses) {
return utils.processEvent({ data })
} else {
return queue.add(data)
return queue.add(data, JOB_OPTS)
}
}

View File

@ -1,4 +1,4 @@
const runner = require("./thread")
const { Thread, ThreadType } = require("../threads")
const { definitions } = require("./triggerInfo")
const webhooks = require("../api/controllers/webhook")
const CouchDB = require("../db")
@ -10,11 +10,12 @@ const { getDeployedAppID } = require("@budibase/auth/db")
const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId
const Runner = new Thread(ThreadType.AUTOMATION)
exports.processEvent = async job => {
try {
// need to actually await these so that an error can be captured properly
return await runner(job)
return await Runner.run(job)
} catch (err) {
console.error(
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
@ -162,3 +163,12 @@ exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => {
}
return newAuto
}
/**
* When removing/unpublishing an app we need to make sure automations are cleaned up (cron).
* @param appId {string} the app that is being removed.
* @return {Promise<void>} clean is complete if this succeeds.
*/
exports.cleanupAutomations = async appId => {
await exports.disableAllCrons(appId)
}

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module AirtableModule {
const Airtable = require("airtable")
@ -73,7 +74,7 @@ module AirtableModule {
},
}
class AirtableIntegration {
class AirtableIntegration implements IntegrationBase {
private config: AirtableConfig
private client: any

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module ArangoModule {
const { Database, aql } = require("arangojs")
@ -55,7 +56,7 @@ module ArangoModule {
},
}
class ArangoDBIntegration {
class ArangoDBIntegration implements IntegrationBase {
private config: ArangodbConfig
private client: any

View File

@ -0,0 +1,6 @@
export interface IntegrationBase {
create?(query: any): Promise<[any]>
read?(query: any): Promise<[any]>
update?(query: any): Promise<[any]>
delete?(query: any): Promise<[any]>
}

View File

@ -1,6 +1,7 @@
import { Table } from "../../definitions/common"
import { IntegrationBase } from "./IntegrationBase"
export interface DatasourcePlus {
export interface DatasourcePlus extends IntegrationBase {
tables: Record<string, Table>
schemaErrors: Record<string, string>

View File

@ -13,22 +13,50 @@ import SqlTableQueryBuilder from "./sqlTable"
const BASE_LIMIT = 5000
type KnexQuery = Knex.QueryBuilder | Knex
// these are invalid dates sent by the client, need to convert them to a real max date
const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
function parse(input: any) {
if (Array.isArray(input)) {
return JSON.stringify(input)
}
if (typeof input !== "string") {
return input
}
if (input === MAX_ISO_DATE) {
return new Date(8640000000000000)
}
if (input === MIN_ISO_DATE) {
return new Date(-8640000000000000)
}
if (isIsoDateString(input)) {
return new Date(input)
}
return input
}
function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
if (Array.isArray(value)) {
body[key] = JSON.stringify(value)
}
if (typeof value !== "string") {
continue
}
if (isIsoDateString(value)) {
body[key] = new Date(value)
}
body[key] = parse(value)
}
return body
}
function parseFilters(filters: SearchFilters): SearchFilters {
for (let [key, value] of Object.entries(filters)) {
let parsed
if (typeof value === "object") {
parsed = parseFilters(value)
} else {
parsed = parse(value)
}
// @ts-ignore
filters[key] = parsed
}
return filters
}
class InternalBuilder {
private readonly client: string
@ -53,6 +81,7 @@ class InternalBuilder {
if (!filters) {
return query
}
filters = parseFilters(filters)
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
if (filters.oneOf) {
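A quick illustration of what the new parse helper returns (isIsoDateString is an existing helper in this file that is not shown in the hunk; values are illustrative):

parse(["a", "b"])                    // -> '["a","b"]' (arrays are stringified)
parse("0000-00-00T00:00:00.000Z")    // -> new Date(-8640000000000000), the minimum JS date
parse("9999-00-00T00:00:00.000Z")    // -> new Date(8640000000000000), the maximum JS date
parse("2021-11-18T00:00:00.000Z")    // -> a Date instance (ISO strings become dates)
parse(42)                            // -> 42 (anything else passes through untouched)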

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module CouchDBModule {
const PouchDB = require("pouchdb")
@ -50,7 +51,7 @@ module CouchDBModule {
},
}
class CouchDBIntegration {
class CouchDBIntegration implements IntegrationBase {
private config: CouchDBConfig
private client: any

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module DynamoModule {
const AWS = require("aws-sdk")
@ -113,7 +114,7 @@ module DynamoModule {
},
}
class DynamoDBIntegration {
class DynamoDBIntegration implements IntegrationBase {
private config: DynamoDBConfig
private client: any

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module ElasticsearchModule {
const { Client } = require("@elastic/elasticsearch")
@ -74,7 +75,7 @@ module ElasticsearchModule {
},
}
class ElasticSearchIntegration {
class ElasticSearchIntegration implements IntegrationBase {
private config: ElasticsearchConfig
private client: any

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module MongoDBModule {
const { MongoClient } = require("mongodb")
@ -62,7 +63,7 @@ module MongoDBModule {
},
}
class MongoIntegration {
class MongoIntegration implements IntegrationBase {
private config: MongoDBConfig
private client: any

View File

@ -184,7 +184,7 @@ module MySQLModule {
return results.length ? results : [{ created: true }]
}
read(query: SqlQuery | string) {
async read(query: SqlQuery | string) {
return internalQuery(this.client, getSqlQuery(query))
}

View File

@ -3,6 +3,7 @@ import {
DatasourceFieldTypes,
QueryTypes,
} from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module RestModule {
const fetch = require("node-fetch")
@ -131,7 +132,7 @@ module RestModule {
},
}
class RestIntegration {
class RestIntegration implements IntegrationBase {
private config: RestConfig
private headers: {
[key: string]: string

View File

@ -1,4 +1,5 @@
import { Integration, QueryTypes } from "../definitions/datasource"
import { IntegrationBase } from "./base/IntegrationBase"
module S3Module {
const AWS = require("aws-sdk")
@ -42,7 +43,7 @@ module S3Module {
},
}
class S3Integration {
class S3Integration implements IntegrationBase {
private readonly config: S3Config
private client: any
private connectionPromise: Promise<any>

View File

@ -8,6 +8,7 @@ const {
const CouchDB = require("../db")
const { DocumentTypes } = require("../db/utils")
const { PermissionTypes } = require("@budibase/auth/permissions")
const { app: appCache } = require("@budibase/auth/cache")
const DEBOUNCE_TIME_SEC = 30
@ -50,7 +51,9 @@ async function updateAppUpdatedAt(ctx) {
const db = new CouchDB(appId)
const metadata = await db.get(DocumentTypes.APP_METADATA)
metadata.updatedAt = new Date().toISOString()
await db.put(metadata)
const response = await db.put(metadata)
metadata._rev = response.rev
await appCache.invalidateAppMetadata(appId, metadata)
// set a new debounce record with a short TTL
await setDebounce(appId, DEBOUNCE_TIME_SEC)
}

View File

@ -1,5 +1,5 @@
const actions = require("./actions")
const automationUtils = require("./automationUtils")
const actions = require("../automations/actions")
const automationUtils = require("../automations/automationUtils")
const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
const { DEFAULT_TENANT_ID } = require("@budibase/auth").constants
@ -8,8 +8,10 @@ const { DocumentTypes, isDevAppID } = require("../db/utils")
const { doInTenant } = require("@budibase/auth/tenancy")
const env = require("../environment")
const usage = require("../utilities/usageQuota")
const { definitions: triggerDefs } = require("../automations/triggerInfo")
const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId
const CRON_STEP_ID = triggerDefs.CRON.stepId
const STOPPED_STATUS = { success: false, status: "STOPPED" }
/**
@ -23,6 +25,8 @@ class Orchestrator {
this._chainCount = this._metadata ? this._metadata.automationChainCount : 0
this._appId = triggerOutput.appId
this._app = null
const triggerStepId = automation.definition.trigger.stepId
triggerOutput = this.cleanupTriggerOutputs(triggerStepId, triggerOutput)
// remove from context
delete triggerOutput.appId
delete triggerOutput.metadata
@ -34,11 +38,17 @@ class Orchestrator {
this._emitter = new AutomationEmitter(this._chainCount + 1)
this.executionOutput = { trigger: {}, steps: [] }
// setup the execution output
const triggerStepId = automation.definition.trigger.stepId
const triggerId = automation.definition.trigger.id
this.updateExecutionOutput(triggerId, triggerStepId, null, triggerOutput)
}
cleanupTriggerOutputs(stepId, triggerOutput) {
if (stepId === CRON_STEP_ID) {
triggerOutput.timestamp = Date.now()
}
return triggerOutput
}
async getStepFunctionality(stepId) {
let step = await actions.getAction(stepId)
if (step == null) {
@ -119,10 +129,17 @@ class Orchestrator {
}
}
module.exports = async job => {
module.exports = (input, callback) => {
const automationOrchestrator = new Orchestrator(
job.data.automation,
job.data.event
input.data.automation,
input.data.event
)
return automationOrchestrator.execute()
automationOrchestrator
.execute()
.then(response => {
callback(null, response)
})
.catch(err => {
callback(err)
})
}

View File

@ -0,0 +1,60 @@
const workerFarm = require("worker-farm")
const env = require("../environment")
const ThreadType = {
QUERY: "query",
AUTOMATION: "automation",
}
function typeToFile(type) {
let filename = null
switch (type) {
case ThreadType.QUERY:
filename = "./query"
break
case ThreadType.AUTOMATION:
filename = "./automation"
break
default:
throw "Unknown thread type"
}
return require.resolve(filename)
}
class Thread {
constructor(type, opts = { timeoutMs: null, count: 1 }) {
this.type = type
if (!env.isTest()) {
const workerOpts = {
autoStart: true,
maxConcurrentWorkers: opts.count ? opts.count : 1,
}
if (opts.timeoutMs) {
workerOpts.maxCallTime = opts.timeoutMs
}
this.workers = workerFarm(workerOpts, typeToFile(type))
}
}
run(data) {
return new Promise((resolve, reject) => {
let fncToCall
// if in test then don't use threading
if (env.isTest()) {
fncToCall = require(typeToFile(this.type))
} else {
fncToCall = this.workers
}
fncToCall(data, (err, response) => {
if (err) {
reject(err)
} else {
resolve(response)
}
})
})
}
}
module.exports.Thread = Thread
module.exports.ThreadType = ThreadType
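A sketch of how the rest of this PR consumes the wrapper, mirroring the query and automation controllers elsewhere in this diff:

const { Thread, ThreadType } = require("../threads")
// queries get a 10 second timeout, automations use the defaults
const QueryRunner = new Thread(ThreadType.QUERY, { timeoutMs: 10000 })
const AutomationRunner = new Thread(ThreadType.AUTOMATION)
// run() wraps the worker-farm callback in a promise, e.g. (inside an async function):
// const { rows, keys } = await QueryRunner.run({ datasource, queryVerb, query, transformer })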

View File

@ -0,0 +1,63 @@
const ScriptRunner = require("../utilities/scriptRunner")
const { integrations } = require("../integrations")
function formatResponse(resp) {
if (typeof resp === "string") {
try {
resp = JSON.parse(resp)
} catch (err) {
resp = { response: resp }
}
}
return resp
}
async function runAndTransform(datasource, queryVerb, query, transformer) {
const Integration = integrations[datasource.source]
if (!Integration) {
throw "Integration type does not exist."
}
const integration = new Integration(datasource.config)
let rows = formatResponse(await integration[queryVerb](query))
// transform as required
if (transformer) {
const runner = new ScriptRunner(transformer, { data: rows })
rows = runner.execute()
}
// needs to be an array for the next step
if (!Array.isArray(rows)) {
rows = [rows]
}
// map into JSON if just raw primitive here
if (rows.find(row => typeof row !== "object")) {
rows = rows.map(value => ({ value }))
}
// get all the potential fields in the schema
let keys = rows.flatMap(Object.keys)
if (integration.end) {
integration.end()
}
return { rows, keys }
}
module.exports = (input, callback) => {
runAndTransform(
input.datasource,
input.queryVerb,
input.query,
input.transformer
)
.then(response => {
callback(null, response)
})
.catch(err => {
callback(err)
})
}

View File

@ -48,6 +48,7 @@ exports.objectStoreUrl = () => {
* via a specific endpoint (under /api/assets/client).
* @param {string} appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param {string} version The version to retrieve.
* @return {string} The URL to be inserted into appPackage response or server rendered
* app index file.
*/

View File

@ -89,6 +89,13 @@ class InMemoryQueue {
getRepeatableJobs() {
return []
}
/**
* Implemented for tests
*/
async clean() {
return []
}
}
module.exports = InMemoryQueue

View File

@ -1,10 +1,13 @@
const fetch = require("node-fetch")
const { VM, VMScript } = require("vm2")
const JS_TIMEOUT_MS = 1000
class ScriptRunner {
constructor(script, context) {
const code = `let fn = () => {\n${script}\n}; results.out = fn();`
this.vm = new VM()
this.vm = new VM({
timeout: JS_TIMEOUT_MS,
})
this.results = { out: "" }
this.vm.setGlobals(context)
this.vm.setGlobal("fetch", fetch)

View File

@ -2,7 +2,7 @@
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"lib": ["es6"],
"lib": ["es2019"],
"allowJs": true,
"outDir": "dist",
"strict": true,

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

File diff suppressed because it is too large

View File

@ -1,4 +1,4 @@
FROM node:12-alpine
FROM node:14-alpine
LABEL com.centurylinklabs.watchtower.lifecycle.pre-check="scripts/watchtower-hooks/pre-check.sh"
LABEL com.centurylinklabs.watchtower.lifecycle.pre-update="scripts/watchtower-hooks/pre-update.sh"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "0.9.185-alpha.2",
"version": "0.9.185-alpha.10",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@ -29,8 +29,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.185-alpha.2",
"@budibase/string-templates": "^0.9.185-alpha.2",
"@budibase/auth": "^0.9.185-alpha.10",
"@budibase/string-templates": "^0.9.185-alpha.10",
"@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0",

View File

@ -1,4 +1,5 @@
const authPkg = require("@budibase/auth")
const { getScopedConfig } = require("@budibase/auth/db")
const { google } = require("@budibase/auth/src/middleware")
const { oidc } = require("@budibase/auth/src/middleware")
const { Configs, EmailTemplatePurpose } = require("../../../constants")
@ -21,17 +22,32 @@ const {
} = require("@budibase/auth/tenancy")
const env = require("../../../environment")
function googleCallbackUrl(config) {
const ssoCallbackUrl = async (config, type) => {
// in case there is a callback URL from before
if (config && config.callbackURL) {
return config.callbackURL
}
const db = getGlobalDB()
const publicConfig = await getScopedConfig(db, {
type: Configs.SETTINGS,
})
let callbackUrl = `/api/global/auth`
if (isMultiTenant()) {
callbackUrl += `/${getTenantId()}`
}
callbackUrl += `/google/callback`
return callbackUrl
callbackUrl += `/${type}/callback`
return `${publicConfig.platformUrl}${callbackUrl}`
}
exports.googleCallbackUrl = async config => {
return ssoCallbackUrl(config, "google")
}
exports.oidcCallbackUrl = async config => {
return ssoCallbackUrl(config, "oidc")
}
async function authInternal(ctx, user, err = null, info = null) {
@ -152,7 +168,7 @@ exports.googlePreAuth = async (ctx, next) => {
type: Configs.GOOGLE,
workspace: ctx.query.workspace,
})
let callbackUrl = googleCallbackUrl(config)
let callbackUrl = await exports.googleCallbackUrl(config)
const strategy = await google.strategyFactory(config, callbackUrl)
return passport.authenticate(strategy, {
@ -167,7 +183,7 @@ exports.googleAuth = async (ctx, next) => {
type: Configs.GOOGLE,
workspace: ctx.query.workspace,
})
const callbackUrl = googleCallbackUrl(config)
const callbackUrl = await exports.googleCallbackUrl(config)
const strategy = await google.strategyFactory(config, callbackUrl)
return passport.authenticate(
@ -189,13 +205,7 @@ async function oidcStrategyFactory(ctx, configId) {
})
const chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
const protocol = env.NODE_ENV === "production" ? "https" : "http"
let callbackUrl = `${protocol}://${ctx.host}/api/global/auth`
if (isMultiTenant()) {
callbackUrl += `/${getTenantId()}`
}
callbackUrl += `/oidc/callback`
let callbackUrl = await exports.oidcCallbackUrl(chosenConfig)
return oidc.strategyFactory(chosenConfig, callbackUrl)
}
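As exercised by the updated test further down, the generated callback URL now embeds the platform URL from settings (tenant and host are illustrative):

// multi tenant, platformUrl "http://localhost:10000", tenant "default", type "oidc"
//   -> "http://localhost:10000/api/global/auth/default/oidc/callback"
// single tenant, type "google"
//   -> "<platformUrl>/api/global/auth/google/callback"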

View File

@ -9,8 +9,11 @@ const { Configs } = require("../../../constants")
const email = require("../../../utilities/email")
const { upload, ObjectStoreBuckets } = require("@budibase/auth").objectStore
const CouchDB = require("../../../db")
const { getGlobalDB } = require("@budibase/auth/tenancy")
const { getGlobalDB, getTenantId } = require("@budibase/auth/tenancy")
const env = require("../../../environment")
const { googleCallbackUrl, oidcCallbackUrl } = require("./auth")
const BB_TENANT_CDN = "https://tenants.cdn.budi.live"
exports.save = async function (ctx) {
const db = getGlobalDB()
@ -155,6 +158,10 @@ exports.publicSettings = async function (ctx) {
config.config.google = false
}
// callback urls
config.config.oidcCallbackUrl = await oidcCallbackUrl()
config.config.googleCallbackUrl = await googleCallbackUrl()
// oidc button flag
if (oidcConfig && oidcConfig.config) {
config.config.oidc = oidcConfig.config.configs[0].activated
@ -182,7 +189,13 @@ exports.upload = async function (ctx) {
bucket = ObjectStoreBuckets.GLOBAL_CLOUD
}
const key = `${type}/${name}`
let key
if (env.MULTI_TENANCY) {
key = `${getTenantId()}/${type}/${name}`
} else {
key = `${type}/${name}`
}
await upload({
bucket,
filename: key,
@ -200,7 +213,13 @@ exports.upload = async function (ctx) {
config: {},
}
}
const url = `/${bucket}/${key}`
let url
if (env.SELF_HOSTED) {
url = `/${bucket}/${key}`
} else {
url = `${BB_TENANT_CDN}/${key}`
}
cfgStructure.config[`${name}`] = url
// write back to db with url updated
await db.put(cfgStructure)

View File

@ -43,11 +43,7 @@ exports.save = async ctx => {
}
const parseBooleanParam = param => {
if (param && param === "false") {
return false
} else {
return true
}
return !(param && param === "false")
}
exports.adminUser = async ctx => {

View File

@ -76,7 +76,7 @@ describe("/api/global/auth", () => {
afterEach(() => {
expect(strategyFactory).toBeCalledWith(
chosenConfig,
`http://127.0.0.1:4003/api/global/auth/${TENANT_ID}/oidc/callback` // calculated url
`http://localhost:10000/api/global/auth/${TENANT_ID}/oidc/callback`
)
})

View File

@ -6,7 +6,6 @@ const {
EmailTemplatePurpose,
} = require("../constants")
const { checkSlashesInUrl } = require("./index")
const env = require("../environment")
const { getGlobalDB, addTenantToUrl } = require("@budibase/auth/tenancy")
const BASE_COMPANY = "Budibase"
@ -14,9 +13,6 @@ exports.getSettingsTemplateContext = async (purpose, code = null) => {
const db = getGlobalDB()
// TODO: use more granular settings in the future if required
let settings = (await getScopedConfig(db, { type: Configs.SETTINGS })) || {}
if (!settings || !settings.platformUrl) {
settings.platformUrl = env.PLATFORM_URL
}
const URL = settings.platformUrl
const context = {
[InternalTemplateBindings.LOGO_URL]:

File diff suppressed because it is too large

scripts/audit.js (new file, 54 lines)
View File

@ -0,0 +1,54 @@
const fs = require("fs")
const { join } = require("path")
const { spawnSync } = require("child_process")
const DONT_RUN_PKG = ["bbui"]
const PACKAGES_PATH = join(__dirname, "..", "packages")
function getPackages() {
return fs.readdirSync(PACKAGES_PATH)
}
function deleteFile(path) {
try {
fs.unlinkSync(path)
} catch (err) {
// don't error, it just doesn't exist
}
}
function removeModules(path) {
if (fs.existsSync(path)) {
fs.rmdirSync(path, { recursive: true })
}
}
function executeInPackage(packageName) {
if (DONT_RUN_PKG.includes(packageName)) {
return
}
const dir = join(PACKAGES_PATH, packageName)
if (!fs.existsSync(join(dir, "package.json"))) {
console.error(`SKIPPING ${packageName} directory, no package.json`)
return
}
const packageLockLoc = join(dir, "package-lock.json")
const modulesLoc = join(dir, "node_modules")
deleteFile(join(dir, "yarn.lock"))
deleteFile(packageLockLoc)
removeModules(modulesLoc)
const opts = { cwd: dir, stdio: "inherit", shell: true }
spawnSync("npm", ["i", "--package-lock-only"], opts)
spawnSync("npm", ["audit", "fix"], opts)
spawnSync("yarn", ["import"], opts)
deleteFile(packageLockLoc)
removeModules(modulesLoc)
}
const packages = getPackages()
for (let pkg of packages) {
executeInPackage(pkg)
}
spawnSync("yarn", ["bootstrap"], { cwd: join(__dirname, ".."), stdio: "inherit", shell: true })

yarn.lock (2105 changed lines)

File diff suppressed because it is too large