Merge branch 'develop' into csrf

Rory Powell 2022-01-30 21:20:48 +00:00
commit 0089fb06d9
70 changed files with 3040 additions and 839 deletions

View File

@ -1,5 +1,5 @@
{
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"description": "Budibase backend core libraries used in server and worker",
"main": "src/index.js",
"author": "Budibase",

View File

@ -7,8 +7,8 @@ exports.Cookies = {
CurrentApp: "budibase:currentapp",
Auth: "budibase:auth",
Init: "budibase:init",
DatasourceAuth: "budibase:datasourceauth",
OIDC_CONFIG: "budibase:oidc:config",
RETURN_URL: "budibase:returnurl",
}
exports.Headers = {

View File

@ -7,6 +7,7 @@ const authenticated = require("./authenticated")
const auditLog = require("./auditLog")
const tenancy = require("./tenancy")
const appTenancy = require("./appTenancy")
const datasourceGoogle = require("./passport/datasource/google")
const csrf = require("./csrf")
module.exports = {
@ -19,5 +20,8 @@ module.exports = {
tenancy,
appTenancy,
authError,
datasource: {
google: datasourceGoogle,
},
csrf,
}
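
For context, the worker's global auth routes are expected to mount these new handlers. A rough sketch of the wiring, where the import paths and route shapes are assumptions inferred from the URLs used elsewhere in this commit rather than the exact worker code:

const Router = require("koa-router")
// assumed re-export paths from backend-core
const { passport } = require("@budibase/backend-core/auth")
const { datasource } = require("@budibase/backend-core/middleware")

const router = new Router()

// kicks off the Google OAuth flow for a datasource
router.get("/api/global/auth/:tenantId/datasource/google", (ctx, next) =>
  datasource.google.preAuth(passport, ctx, next)
)

// handles the OAuth callback and writes the tokens onto the datasource
router.get("/api/global/auth/datasource/google/callback", (ctx, next) =>
  datasource.google.postAuth(passport, ctx, next)
)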

View File

@ -0,0 +1,76 @@
const { getScopedConfig } = require("../../../db/utils")
const { getGlobalDB } = require("../../../tenancy")
const google = require("../google")
const { Configs, Cookies } = require("../../../constants")
const { clearCookie, getCookie } = require("../../../utils")
const { getDB } = require("../../../db")
async function preAuth(passport, ctx, next) {
const db = getGlobalDB()
// get the relevant config
const config = await getScopedConfig(db, {
type: Configs.GOOGLE,
workspace: ctx.query.workspace,
})
const publicConfig = await getScopedConfig(db, {
type: Configs.SETTINGS,
})
let callbackUrl = `${publicConfig.platformUrl}/api/global/auth/datasource/google/callback`
const strategy = await google.strategyFactory(config, callbackUrl)
if (!ctx.query.appId || !ctx.query.datasourceId) {
ctx.throw(400, "appId and datasourceId query params not present.")
}
return passport.authenticate(strategy, {
scope: ["profile", "email", "https://www.googleapis.com/auth/spreadsheets"],
accessType: "offline",
prompt: "consent",
})(ctx, next)
}
async function postAuth(passport, ctx, next) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
type: Configs.GOOGLE,
workspace: ctx.query.workspace,
})
const publicConfig = await getScopedConfig(db, {
type: Configs.SETTINGS,
})
let callbackUrl = `${publicConfig.platformUrl}/api/global/auth/datasource/google/callback`
const strategy = await google.strategyFactory(
config,
callbackUrl,
(accessToken, refreshToken, profile, done) => {
clearCookie(ctx, Cookies.DatasourceAuth)
done(null, { accessToken, refreshToken })
}
)
const authStateCookie = getCookie(ctx, Cookies.DatasourceAuth)
return passport.authenticate(
strategy,
{ successRedirect: "/", failureRedirect: "/error" },
async (err, tokens) => {
// update the DB for the datasource with all the user info
const db = getDB(authStateCookie.appId)
const datasource = await db.get(authStateCookie.datasourceId)
if (!datasource.config) {
datasource.config = {}
}
datasource.config.auth = { type: "google", ...tokens }
await db.put(datasource)
ctx.redirect(
`/builder/app/${authStateCookie.appId}/data/datasource/${authStateCookie.datasourceId}`
)
}
)(ctx, next)
}
exports.preAuth = preAuth
exports.postAuth = postAuth
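
After postAuth completes, the datasource document carries the OAuth tokens under config.auth. An illustrative shape of the resulting document (field values are placeholders; the source and spreadsheetId fields come from the Google Sheets integration added later in this commit):

{
  _id: "datasource_...",
  type: "datasource",
  source: "GOOGLE_SHEETS",
  config: {
    spreadsheetId: "<sheet URL or ID>",
    auth: {
      type: "google",
      accessToken: "<oauth access token>",
      refreshToken: "<oauth refresh token>",
    },
  },
}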

View File

@ -96,12 +96,7 @@ exports.getCookie = (ctx, name) => {
* @param {string|object} value The value of cookie which will be set.
* @param {object} opts options like whether to sign.
*/
exports.setCookie = (
ctx,
value,
name = "builder",
opts = { sign: true, requestDomain: false }
) => {
exports.setCookie = (ctx, value, name = "builder", opts = { sign: true }) => {
if (value && opts && opts.sign) {
value = jwt.sign(value, options.secretOrKey)
}
@ -113,7 +108,7 @@ exports.setCookie = (
overwrite: true,
}
if (environment.COOKIE_DOMAIN && !opts.requestDomain) {
if (environment.COOKIE_DOMAIN) {
config.domain = environment.COOKIE_DOMAIN
}
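
With requestDomain gone, call sites only choose a cookie name and whether to sign; the domain now comes from environment.COOKIE_DOMAIN whenever it is configured. A minimal usage sketch (the constants import path is an assumption, and sessionPayload/appId/datasourceId are placeholders):

const { Cookies } = require("@budibase/backend-core/constants")

// signed builder cookie (the defaults)
setCookie(ctx, sessionPayload, Cookies.Auth)
// unsigned cookie, readable by the browser
setCookie(ctx, { appId, datasourceId }, Cookies.DatasourceAuth, { sign: false })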

View File

@ -3410,9 +3410,9 @@ node-fetch@2.6.0:
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-fetch@^2.6.1:
version "2.6.6"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.6.tgz#1751a7c01834e8e1697758732e9efb6eeadfaf89"
integrity sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies:
whatwg-url "^5.0.0"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",

View File

@ -147,7 +147,9 @@
<img alt="preview" src={selectedUrl} />
{:else}
<div class="placeholder">
<div class="extension">{selectedImage.extension}</div>
<div class="extension">
{selectedImage.name || "Unknown file"}
</div>
<div>Preview not supported</div>
</div>
{/if}
@ -359,18 +361,21 @@
white-space: nowrap;
width: 0;
margin-right: 10px;
user-select: all;
}
.placeholder {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
text-align: center;
}
.extension {
color: var(--spectrum-global-color-gray-600);
text-transform: uppercase;
font-weight: 600;
margin-bottom: 5px;
user-select: all;
}
.nav {

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^1.0.46-alpha.3",
"@budibase/client": "^1.0.46-alpha.3",
"@budibase/bbui": "^1.0.46-alpha.7",
"@budibase/client": "^1.0.46-alpha.7",
"@budibase/colorpicker": "1.1.2",
"@budibase/string-templates": "^1.0.46-alpha.3",
"@budibase/string-templates": "^1.0.46-alpha.7",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@ -188,7 +188,7 @@
{:else}
<Body size="S"><i>No tables found.</i></Body>
{/if}
{#if plusTables?.length !== 0}
{#if plusTables?.length !== 0 && integration.relationships}
<Divider size="S" />
<div class="query-header">
<Heading size="S">Relationships</Heading>

View File

@ -0,0 +1,47 @@
<script>
import { ActionButton } from "@budibase/bbui"
import GoogleLogo from "assets/google-logo.png"
import { store } from "builderStore"
import { auth } from "stores/portal"
export let preAuthStep
export let datasource
$: tenantId = $auth.tenantId
</script>
<ActionButton
on:click={async () => {
let ds = datasource
if (!ds) {
ds = await preAuthStep()
}
window.open(
`/api/global/auth/${tenantId}/datasource/google?datasourceId=${ds._id}&appId=${$store.appId}`,
"_blank"
)
}}
>
<div class="inner">
<img src={GoogleLogo} alt="google icon" />
<p>Sign in with Google</p>
</div>
</ActionButton>
<style>
.inner {
display: flex;
flex-direction: row;
align-items: center;
justify-content: center;
padding-top: var(--spacing-xs);
padding-bottom: var(--spacing-xs);
}
.inner img {
width: 18px;
margin: 3px 10px 3px 3px;
}
.inner p {
margin: 0;
}
</style>

View File

@ -0,0 +1,184 @@
<script>
export let width = "100"
export let height = "100"
</script>
<svg
{width}
{height}
version="1.0"
xmlns="http://www.w3.org/2000/svg"
viewBox="0 0 50 80"
preserveAspectRatio="xMidYMid meet"
>
<defs>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-1"
/>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-3"
/>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-5"
/>
<linearGradient
x1="50.0053945%"
y1="8.58610612%"
x2="50.0053945%"
y2="100.013939%"
id="linearGradient-7"
>
<stop stop-color="#263238" stop-opacity="0.2" offset="0%" />
<stop stop-color="#263238" stop-opacity="0.02" offset="100%" />
</linearGradient>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-8"
/>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-10"
/>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-12"
/>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="path-14"
/>
<radialGradient
cx="3.16804688%"
cy="2.71744318%"
fx="3.16804688%"
fy="2.71744318%"
r="161.248516%"
gradientTransform="translate(0.031680,0.027174),scale(1.000000,0.727273),translate(-0.031680,-0.027174)"
id="radialGradient-16"
>
<stop stop-color="#FFFFFF" stop-opacity="0.1" offset="0%" />
<stop stop-color="#FFFFFF" stop-opacity="0" offset="100%" />
</radialGradient>
</defs>
<g id="Page-1" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g
id="Consumer-Apps-Sheets-Large-VD-R8-"
transform="translate(-451.000000, -451.000000)"
>
<g id="Hero" transform="translate(0.000000, 63.000000)">
<g id="Personal" transform="translate(277.000000, 299.000000)">
<g id="Sheets-icon" transform="translate(174.833333, 89.958333)">
<g id="Group">
<g id="Clipped">
<mask id="mask-2" fill="white">
<use xlink:href="#path-1" />
</mask>
<g id="SVGID_1_" />
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L36.9791667,10.3541667 L29.5833333,0 Z"
id="Path"
fill="#0F9D58"
fill-rule="nonzero"
mask="url(#mask-2)"
/>
</g>
<g id="Clipped">
<mask id="mask-4" fill="white">
<use xlink:href="#path-3" />
</mask>
<g id="SVGID_1_" />
<path
d="M11.8333333,31.8020833 L11.8333333,53.25 L35.5,53.25 L35.5,31.8020833 L11.8333333,31.8020833 Z M22.1875,50.2916667 L14.7916667,50.2916667 L14.7916667,46.59375 L22.1875,46.59375 L22.1875,50.2916667 Z M22.1875,44.375 L14.7916667,44.375 L14.7916667,40.6770833 L22.1875,40.6770833 L22.1875,44.375 Z M22.1875,38.4583333 L14.7916667,38.4583333 L14.7916667,34.7604167 L22.1875,34.7604167 L22.1875,38.4583333 Z M32.5416667,50.2916667 L25.1458333,50.2916667 L25.1458333,46.59375 L32.5416667,46.59375 L32.5416667,50.2916667 Z M32.5416667,44.375 L25.1458333,44.375 L25.1458333,40.6770833 L32.5416667,40.6770833 L32.5416667,44.375 Z M32.5416667,38.4583333 L25.1458333,38.4583333 L25.1458333,34.7604167 L32.5416667,34.7604167 L32.5416667,38.4583333 Z"
id="Shape"
fill="#F1F1F1"
fill-rule="nonzero"
mask="url(#mask-4)"
/>
</g>
<g id="Clipped">
<mask id="mask-6" fill="white">
<use xlink:href="#path-5" />
</mask>
<g id="SVGID_1_" />
<polygon
id="Path"
fill="url(#linearGradient-7)"
fill-rule="nonzero"
mask="url(#mask-6)"
points="30.8813021 16.4520313 47.3333333 32.9003646 47.3333333 17.75"
/>
</g>
<g id="Clipped">
<mask id="mask-9" fill="white">
<use xlink:href="#path-8" />
</mask>
<g id="SVGID_1_" />
<g id="Group" mask="url(#mask-9)">
<g transform="translate(26.625000, -2.958333)">
<path
d="M2.95833333,2.95833333 L2.95833333,16.2708333 C2.95833333,18.7225521 4.94411458,20.7083333 7.39583333,20.7083333 L20.7083333,20.7083333 L2.95833333,2.95833333 Z"
id="Path"
fill="#87CEAC"
fill-rule="nonzero"
/>
</g>
</g>
</g>
<g id="Clipped">
<mask id="mask-11" fill="white">
<use xlink:href="#path-10" />
</mask>
<g id="SVGID_1_" />
<path
d="M4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,4.80729167 C0,2.36666667 1.996875,0.369791667 4.4375,0.369791667 L29.5833333,0.369791667 L29.5833333,0 L4.4375,0 Z"
id="Path"
fill-opacity="0.2"
fill="#FFFFFF"
fill-rule="nonzero"
mask="url(#mask-11)"
/>
</g>
<g id="Clipped">
<mask id="mask-13" fill="white">
<use xlink:href="#path-12" />
</mask>
<g id="SVGID_1_" />
<path
d="M42.8958333,64.7135417 L4.4375,64.7135417 C1.996875,64.7135417 0,62.7166667 0,60.2760417 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,60.2760417 C47.3333333,62.7166667 45.3364583,64.7135417 42.8958333,64.7135417 Z"
id="Path"
fill-opacity="0.2"
fill="#263238"
fill-rule="nonzero"
mask="url(#mask-13)"
/>
</g>
<g id="Clipped">
<mask id="mask-15" fill="white">
<use xlink:href="#path-14" />
</mask>
<g id="SVGID_1_" />
<path
d="M34.0208333,17.75 C31.5691146,17.75 29.5833333,15.7642188 29.5833333,13.3125 L29.5833333,13.6822917 C29.5833333,16.1340104 31.5691146,18.1197917 34.0208333,18.1197917 L47.3333333,18.1197917 L47.3333333,17.75 L34.0208333,17.75 Z"
id="Path"
fill-opacity="0.1"
fill="#263238"
fill-rule="nonzero"
mask="url(#mask-15)"
/>
</g>
</g>
<path
d="M29.5833333,0 L4.4375,0 C1.996875,0 0,1.996875 0,4.4375 L0,60.6458333 C0,63.0864583 1.996875,65.0833333 4.4375,65.0833333 L42.8958333,65.0833333 C45.3364583,65.0833333 47.3333333,63.0864583 47.3333333,60.6458333 L47.3333333,17.75 L29.5833333,0 Z"
id="Path"
fill="url(#radialGradient-16)"
fill-rule="nonzero"
/>
</g>
</g>
</g>
</g>
</g>
</svg>

View File

@ -11,6 +11,7 @@ import ArangoDB from "./ArangoDB.svelte"
import Rest from "./Rest.svelte"
import Budibase from "./Budibase.svelte"
import Oracle from "./Oracle.svelte"
import GoogleSheets from "./GoogleSheets.svelte"
export default {
BUDIBASE: Budibase,
@ -26,4 +27,5 @@ export default {
ARANGODB: ArangoDB,
REST: Rest,
ORACLE: Oracle,
GOOGLE_SHEETS: GoogleSheets,
}

View File

@ -6,6 +6,7 @@
import { IntegrationNames, IntegrationTypes } from "constants/backend"
import CreateTableModal from "components/backend/TableNavigator/modals/CreateTableModal.svelte"
import DatasourceConfigModal from "components/backend/DatasourceNavigator/modals/DatasourceConfigModal.svelte"
import GoogleDatasourceConfigModal from "components/backend/DatasourceNavigator/modals/GoogleDatasourceConfigModal.svelte"
import { createRestDatasource } from "builderStore/datasource"
import { goto } from "@roxi/routify"
import ImportRestQueriesModal from "./ImportRestQueriesModal.svelte"
@ -38,6 +39,7 @@
plus: selected.plus,
config,
schema: selected.datasource,
auth: selected.auth,
}
checkShowImport()
}
@ -79,7 +81,11 @@
</Modal>
<Modal bind:this={externalDatasourceModal}>
{#if integration?.auth?.type === "google"}
<GoogleDatasourceConfigModal {integration} {modal} />
{:else}
<DatasourceConfigModal {integration} {modal} />
{/if}
</Modal>
<Modal bind:this={importModal}>

View File

@ -51,13 +51,9 @@
>Connect your database to Budibase using the config below.
</Body>
</Layout>
<IntegrationConfigForm
schema={datasource.schema}
bind:datasource
creating={true}
/>
</ModalContent>
<style>
</style>

View File

@ -0,0 +1,29 @@
<script>
import { ModalContent, Body, Layout } from "@budibase/bbui"
import { IntegrationNames } from "constants/backend"
import cloneDeep from "lodash/cloneDeepWith"
import GoogleButton from "../_components/GoogleButton.svelte"
import { saveDatasource as save } from "builderStore/datasource"
export let integration
export let modal
// kill the reference so the input isn't saved
let datasource = cloneDeep(integration)
</script>
<ModalContent
title={`Connect to ${IntegrationNames[datasource.type]}`}
onCancel={() => modal.show()}
cancelText="Back"
size="L"
>
<Layout noPadding>
<Body size="XS"
>Authenticate with your Google account to use the {IntegrationNames[
datasource.type
]} integration.</Body
>
</Layout>
<GoogleButton preAuthStep={() => save(datasource, true)} />
</ModalContent>

View File

@ -1,6 +1,7 @@
<script>
import { Select } from "@budibase/bbui"
import { store } from "builderStore"
import { get } from "svelte/store"
const themeOptions = [
{
@ -20,6 +21,17 @@
value: "spectrum--darkest",
},
]
const onChangeTheme = async theme => {
await store.actions.theme.save(theme)
await store.actions.customTheme.save({
...get(store).customTheme,
navBackground:
theme === "spectrum--light"
? "var(--spectrum-global-color-gray-50)"
: "var(--spectrum-global-color-gray-100)",
})
}
</script>
<div>
@ -27,7 +39,7 @@
value={$store.theme}
options={themeOptions}
placeholder={null}
on:change={e => store.actions.theme.save(e.detail)}
on:change={e => onChangeTheme(e.detail)}
/>
</div>

View File

@ -19,7 +19,7 @@
primaryColor: "var(--spectrum-global-color-blue-600)",
primaryColorHover: "var(--spectrum-global-color-blue-500)",
buttonBorderRadius: "16px",
navBackground: "var(--spectrum-global-color-gray-100)",
navBackground: "var(--spectrum-global-color-gray-50)",
navTextColor: "var(--spectrum-global-color-gray-800)",
}
@ -52,7 +52,14 @@
}
const resetTheme = () => {
store.actions.customTheme.save(null)
const theme = get(store).theme
store.actions.customTheme.save({
...defaultTheme,
navBackground:
theme === "spectrum--light"
? "var(--spectrum-global-color-gray-50)"
: "var(--spectrum-global-color-gray-100)",
})
}
</script>

View File

@ -44,7 +44,8 @@
"relationshipfield",
"daterangepicker",
"multifieldselect",
"jsonfield"
"jsonfield",
"s3upload"
]
},
{

View File

@ -0,0 +1,33 @@
<script>
import { Select, Label } from "@budibase/bbui"
import { currentAsset } from "builderStore"
import { findAllMatchingComponents } from "builderStore/componentUtils"
export let parameters
$: components = findAllMatchingComponents($currentAsset.props, component =>
component._component.endsWith("s3upload")
)
</script>
<div class="root">
<Label small>S3 Upload Component</Label>
<Select
bind:value={parameters.componentId}
options={components}
getOptionLabel={x => x._instanceName}
getOptionValue={x => x._id}
/>
</div>
<style>
.root {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
grid-template-columns: 120px 1fr;
align-items: center;
max-width: 400px;
margin: 0 auto;
}
</style>

View File

@ -11,3 +11,4 @@ export { default as ChangeFormStep } from "./ChangeFormStep.svelte"
export { default as UpdateState } from "./UpdateState.svelte"
export { default as RefreshDataProvider } from "./RefreshDataProvider.svelte"
export { default as DuplicateRow } from "./DuplicateRow.svelte"
export { default as S3Upload } from "./S3Upload.svelte"

View File

@ -70,6 +70,16 @@
"name": "Update State",
"component": "UpdateState",
"dependsOnFeature": "state"
},
{
"name": "Upload File to S3",
"component": "S3Upload",
"context": [
{
"label": "File URL",
"value": "publicUrl"
}
]
}
]
}

View File

@ -0,0 +1,15 @@
<script>
import { Select } from "@budibase/bbui"
import { datasources } from "stores/backend"
export let value = null
$: dataSources = $datasources.list
.filter(ds => ds.source === "S3" && !ds.config?.endpoint)
.map(ds => ({
label: ds.name,
value: ds._id,
}))
</script>
<Select options={dataSources} {value} on:change />

View File

@ -1,5 +1,6 @@
import { Checkbox, Select, Stepper } from "@budibase/bbui"
import DataSourceSelect from "./DataSourceSelect.svelte"
import S3DataSourceSelect from "./S3DataSourceSelect.svelte"
import DataProviderSelect from "./DataProviderSelect.svelte"
import ButtonActionEditor from "./ButtonActionEditor/ButtonActionEditor.svelte"
import TableSelect from "./TableSelect.svelte"
@ -22,6 +23,7 @@ const componentMap = {
text: DrawerBindableCombobox,
select: Select,
dataSource: DataSourceSelect,
"dataSource/s3": S3DataSourceSelect,
dataProvider: DataProviderSelect,
boolean: Checkbox,
number: Stepper,

View File

@ -15,8 +15,6 @@
queryBindings = [...queryBindings, {}]
}
$: console.log(bindings)
function deleteQueryBinding(idx) {
queryBindings.splice(idx, 1)
queryBindings = queryBindings

View File

@ -177,6 +177,7 @@ export const IntegrationTypes = {
ARANGODB: "ARANGODB",
ORACLE: "ORACLE",
INTERNAL: "INTERNAL",
GOOGLE_SHEETS: "GOOGLE_SHEETS",
}
export const IntegrationNames = {
@ -193,6 +194,7 @@ export const IntegrationNames = {
[IntegrationTypes.ARANGODB]: "ArangoDB",
[IntegrationTypes.ORACLE]: "Oracle",
[IntegrationTypes.INTERNAL]: "Internal",
[IntegrationTypes.GOOGLE_SHEETS]: "Google Sheets",
}
export const SchemaTypeOptions = [

View File

@ -15,6 +15,22 @@ export const AppStatus = {
DEPLOYED: "published",
}
export const IntegrationNames = {
POSTGRES: "PostgreSQL",
MONGODB: "MongoDB",
COUCHDB: "CouchDB",
S3: "S3",
MYSQL: "MySQL",
REST: "REST",
DYNAMODB: "DynamoDB",
ELASTICSEARCH: "ElasticSearch",
SQL_SERVER: "SQL Server",
AIRTABLE: "Airtable",
ARANGODB: "ArangoDB",
ORACLE: "Oracle",
GOOGLE_SHEETS: "Google Sheets",
}
// fields on the user table that cannot be edited
export const UNEDITABLE_USER_FIELDS = [
"email",

View File

@ -7,7 +7,10 @@ export const name = (validation, { apps, currentApp } = { apps: [] }) => {
string()
.trim()
.required("Your application must have a name")
.matches(APP_NAME_REGEX, "App name must be letters and numbers only")
.matches(
APP_NAME_REGEX,
"App name must be letters, numbers and spaces only"
)
.test(
"non-existing-app-name",
"Another app with the same name already exists",
@ -48,7 +51,7 @@ export const url = (validation, { apps, currentApp } = { apps: [] }) => {
}
return !apps
.map(app => app.url)
.some(appUrl => appUrl.toLowerCase() === value.toLowerCase())
.some(appUrl => appUrl?.toLowerCase() === value.toLowerCase())
}
)
.test("valid-url", "Not a valid URL", value => {

View File

@ -1,6 +1,7 @@
import { capitalise } from "helpers"
import { object } from "yup"
import { writable, get } from "svelte/store"
import { notifications } from "@budibase/bbui"
export const createValidationStore = () => {
const DEFAULT = {
@ -24,9 +25,15 @@ export const createValidationStore = () => {
// clear the previous errors
const properties = Object.keys(validator)
properties.forEach(property => (get(validation).errors[property] = null))
let validationError = false
try {
await obj.validate(values, { abortEarly: false })
} catch (error) {
if (!error.inner) {
notifications.error("Unexpected validation error", error)
validationError = true
} else {
error.inner.forEach(err => {
validation.update(store => {
store.errors[err.path] = capitalise(err.message)
@ -34,9 +41,10 @@ export const createValidationStore = () => {
})
})
}
}
let valid
if (properties.length) {
if (properties.length && !validationError) {
valid = await obj.isValid(values)
} else {
// don't say valid until validators have been loaded

View File

@ -19,8 +19,8 @@
import { IntegrationTypes } from "constants/backend"
import { isEqual } from "lodash"
import { cloneDeep } from "lodash/fp"
import ImportRestQueriesModal from "components/backend/DatasourceNavigator/modals/ImportRestQueriesModal.svelte"
let importQueriesModal
let changed

View File

@ -41,7 +41,11 @@
)
function getUrl(app) {
if (app.url) {
return `/app${app.url}`
} else {
return `/${app.prodId}`
}
}
</script>

View File

@ -161,7 +161,11 @@
}
const viewApp = app => {
if (app.url) {
window.open(`/app${app.url}`)
} else {
window.open(`/${app.prodId}`)
}
}
const editApp = app => {

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View File

@ -3340,5 +3340,50 @@
"suffix": "repeater"
}
]
},
"s3upload": {
"name": "S3 File Upload",
"info": "This component can't be used with S3 datasources that use custom endpoints.",
"icon": "UploadToCloud",
"styles": ["size"],
"editable": true,
"settings": [
{
"type": "field/attachment",
"label": "Field",
"key": "field"
},
{
"type": "text",
"label": "Label",
"key": "label"
},
{
"type": "dataSource/s3",
"label": "S3 Datasource",
"key": "datasourceId"
},
{
"type": "text",
"label": "Bucket",
"key": "bucket"
},
{
"type": "text",
"label": "File Name",
"key": "key"
},
{
"type": "boolean",
"label": "Disabled",
"key": "disabled",
"defaultValue": false
},
{
"type": "validation/attachment",
"label": "Validation",
"key": "validation"
}
]
}
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,10 +19,11 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^1.0.46-alpha.3",
"@budibase/bbui": "^1.0.46-alpha.7",
"@budibase/standard-components": "^0.9.139",
"@budibase/string-templates": "^1.0.46-alpha.3",
"@budibase/string-templates": "^1.0.46-alpha.7",
"regexparam": "^1.3.0",
"rollup-plugin-polyfill-node": "^0.8.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"
},
@ -45,8 +46,6 @@
"postcss": "^8.2.10",
"rollup": "^2.44.0",
"rollup-plugin-json": "^4.0.0",
"rollup-plugin-node-builtins": "^2.1.2",
"rollup-plugin-node-globals": "^1.4.0",
"rollup-plugin-postcss": "^4.0.0",
"rollup-plugin-svelte": "^7.1.0",
"rollup-plugin-svg": "^2.0.0",

View File

@ -6,8 +6,7 @@ import { terser } from "rollup-plugin-terser"
import postcss from "rollup-plugin-postcss"
import svg from "rollup-plugin-svg"
import json from "rollup-plugin-json"
import builtins from "rollup-plugin-node-builtins"
import globals from "rollup-plugin-node-globals"
import nodePolyfills from "rollup-plugin-polyfill-node"
import path from "path"
const production = !process.env.ROLLUP_WATCH
@ -75,8 +74,7 @@ export default {
}),
postcss(),
commonjs(),
globals(),
builtins(),
nodePolyfills(),
resolve({
preferBuiltins: true,
browser: true,

View File

@ -44,7 +44,11 @@ const makeApiCall = async ({ method, url, body, json = true }) => {
})
switch (response.status) {
case 200:
return response.json()
try {
return await response.json()
} catch (error) {
return null
}
case 401:
notificationStore.actions.error("Invalid credentials")
return handleError(`Invalid credentials`)
@ -90,14 +94,15 @@ const makeCachedApiCall = async params => {
* Constructs an API call function for a particular HTTP method.
*/
const requestApiCall = method => async params => {
const { url, cache = false } = params
const fixedUrl = `/${url}`.replace("//", "/")
const { external = false, url, cache = false } = params
const fixedUrl = external ? url : `/${url}`.replace("//", "/")
const enrichedParams = { ...params, method, url: fixedUrl }
return await (cache ? makeCachedApiCall : makeApiCall)(enrichedParams)
}
export default {
post: requestApiCall("POST"),
put: requestApiCall("PUT"),
get: requestApiCall("GET"),
patch: requestApiCall("PATCH"),
del: requestApiCall("DELETE"),
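
The new external flag skips the leading-slash normalisation so that absolute URLs (such as S3 signed URLs) can be called as-is. A sketch of the two call styles (identifiers are placeholders; the second call is what the externalUpload helper below does):

// internal, same-origin API call
const res = await API.post({ url: `/api/attachments/${datasourceId}/url`, body: { bucket, key } })
// external, fully qualified URL, raw (non-JSON) body
await API.put({ url: signedUrl, body: file, json: false, external: true })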

View File

@ -10,3 +10,41 @@ export const uploadAttachment = async (data, tableId = "") => {
json: false,
})
}
/**
* Generates a signed URL to upload a file to an external datasource.
*/
export const getSignedDatasourceURL = async (datasourceId, bucket, key) => {
if (!datasourceId) {
return null
}
const res = await API.post({
url: `/api/attachments/${datasourceId}/url`,
body: { bucket, key },
})
if (res.error) {
throw "Could not generate signed upload URL"
}
return res
}
/**
* Uploads a file to an external datasource.
*/
export const externalUpload = async (datasourceId, bucket, key, data) => {
const { signedUrl, publicUrl } = await getSignedDatasourceURL(
datasourceId,
bucket,
key
)
const res = await API.put({
url: signedUrl,
body: data,
json: false,
external: true,
})
if (res?.error) {
throw "Could not upload file to signed URL"
}
return { publicUrl }
}
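
Inside a client component this helper is reached through the SDK context, as the S3Upload component later in this commit does. A minimal sketch with a placeholder datasource ID, bucket and File object:

import { getContext } from "svelte"

const { API, notificationStore } = getContext("sdk")

const file = event.target.files[0] // placeholder: however the component obtains the File
try {
  const { publicUrl } = await API.externalUpload("datasource_abc", "my-bucket", file.name, file)
  notificationStore.actions.success(`Uploaded to ${publicUrl}`)
} catch (err) {
  notificationStore.actions.error(`Upload failed: ${err}`)
}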

View File

@ -276,27 +276,29 @@
// reactive statements as much as possible.
const cacheSettings = (enriched, nested, conditional) => {
const allSettings = { ...enriched, ...nested, ...conditional }
if (!cachedSettings) {
const mounted = ref?.$$set != null
if (!cachedSettings || !mounted) {
cachedSettings = { ...allSettings }
initialSettings = cachedSettings
} else {
Object.keys(allSettings).forEach(key => {
const same = propsAreSame(allSettings[key], cachedSettings[key])
if (!same) {
// Updated cachedSettings (which is assigned by reference to
// initialSettings) so that if we remount the component then the
// initial props are up to date. By setting it this way rather than
// setting it on initialSettings directly, we avoid a double render.
cachedSettings[key] = allSettings[key]
assignSetting(key, allSettings[key])
// Programmatically set the prop to avoid svelte reactive statements
// firing inside components. This circumvents the problems caused by
// spreading a props object.
ref.$$set({ [key]: allSettings[key] })
}
})
}
}
// Assigns a certain setting to this component.
// We manually use the svelte $set function to avoid triggering additional
// reactive statements.
const assignSetting = (key, value) => {
ref?.$$set?.({ [key]: value })
}
// Generates a key used to determine when components need to fully remount.
// Currently only toggling editing requires remounting.
const getRenderKey = (id, editing) => {
@ -305,7 +307,7 @@
</script>
{#key renderKey}
{#if constructor && cachedSettings && (visible || inSelectedPath)}
{#if constructor && initialSettings && (visible || inSelectedPath)}
<!-- The ID is used as a class because getElementsByClassName is O(1) -->
<!-- and the performance matters for the selection indicators -->
<div

View File

@ -0,0 +1,143 @@
<script>
import Field from "./Field.svelte"
import { CoreDropzone, ProgressCircle } from "@budibase/bbui"
import { getContext, onMount, onDestroy } from "svelte"
export let datasourceId
export let bucket
export let key
export let field
export let label
export let disabled = false
export let validation
let fieldState
let fieldApi
const { API, notificationStore, uploadStore } = getContext("sdk")
const component = getContext("component")
// 5GB cap per item sent via S3 REST API
const MaxFileSize = 1000000000 * 5
// Actual file data to upload
let data
let loading = false
const handleFileTooLarge = () => {
notificationStore.actions.warning(
"Files cannot exceed 5GB. Please try again with a smaller file."
)
}
// Process the file input and return a serializable structure expected by
// the dropzone component to display the file
const processFiles = async fileList => {
return await new Promise(resolve => {
if (!fileList?.length) {
return []
}
// Don't read in non-image files
data = fileList[0]
if (!data.type?.startsWith("image")) {
resolve([
{
name: data.name,
type: data.type,
},
])
}
// Read image files and display as preview
const reader = new FileReader()
reader.addEventListener(
"load",
() => {
resolve([
{
url: reader.result,
name: data.name,
type: data.type,
},
])
},
false
)
reader.readAsDataURL(fileList[0])
})
}
const upload = async () => {
loading = true
try {
const res = await API.externalUpload(datasourceId, bucket, key, data)
notificationStore.actions.success("File uploaded successfully")
loading = false
return res
} catch (error) {
notificationStore.actions.error(`Error uploading file: ${error}`)
}
}
onMount(() => {
uploadStore.actions.registerFileUpload($component.id, upload)
})
onDestroy(() => {
uploadStore.actions.unregisterFileUpload($component.id)
})
</script>
<Field
{label}
{field}
{disabled}
{validation}
type="s3upload"
bind:fieldState
bind:fieldApi
defaultValue={[]}
>
<div class="content">
{#if fieldState}
<CoreDropzone
value={fieldState.value}
disabled={loading || fieldState.disabled}
error={fieldState.error}
on:change={e => {
fieldApi.setValue(e.detail)
}}
{processFiles}
{handleFileTooLarge}
maximum={1}
fileSizeLimit={MaxFileSize}
/>
{/if}
{#if loading}
<div class="overlay" />
<div class="loading">
<ProgressCircle />
</div>
{/if}
</div>
</Field>
<style>
.content {
position: relative;
}
.overlay,
.loading {
position: absolute;
top: 0;
height: 100%;
width: 100%;
display: grid;
place-items: center;
}
.overlay {
background-color: var(--spectrum-global-color-gray-50);
opacity: 0.5;
}
</style>

View File

@ -12,3 +12,4 @@ export { default as relationshipfield } from "./RelationshipField.svelte"
export { default as passwordfield } from "./PasswordField.svelte"
export { default as formstep } from "./FormStep.svelte"
export { default as jsonfield } from "./JSONField.svelte"
export { default as s3upload } from "./S3Upload.svelte"

View File

@ -5,6 +5,7 @@ import {
routeStore,
screenStore,
builderStore,
uploadStore,
} from "stores"
import { styleable } from "utils/styleable"
import { linkable } from "utils/linkable"
@ -20,6 +21,7 @@ export default {
routeStore,
screenStore,
builderStore,
uploadStore,
styleable,
linkable,
getAction,

View File

@ -9,6 +9,7 @@ export { confirmationStore } from "./confirmation"
export { peekStore } from "./peek"
export { stateStore } from "./state"
export { themeStore } from "./theme"
export { uploadStore } from "./uploads.js"
// Context stores are layered and duplicated, so it is not a singleton
export { createContextStore } from "./context"

View File

@ -0,0 +1,42 @@
import { writable, get } from "svelte/store"
export const createUploadStore = () => {
const store = writable([])
// Registers a new file upload component
const registerFileUpload = (componentId, callback) => {
if (!componentId || !callback) {
return
}
store.update(state => {
state.push({
componentId,
callback,
})
return state
})
}
// Unregisters a file upload component
const unregisterFileUpload = componentId => {
store.update(state => state.filter(c => c.componentId !== componentId))
}
// Processes a file upload for a given component ID
const processFileUpload = async componentId => {
if (!componentId) {
return
}
const component = get(store).find(c => c.componentId === componentId)
return await component?.callback()
}
return {
subscribe: store.subscribe,
actions: { registerFileUpload, unregisterFileUpload, processFileUpload },
}
}
export const uploadStore = createUploadStore()
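
The store is just a registry keyed by component ID. The lifecycle it supports looks roughly like this (IDs and the return value are placeholders; in practice registration happens in S3Upload.svelte's onMount/onDestroy and processFileUpload is called by the "Upload File to S3" button action):

// an upload component registers its upload callback when it mounts
uploadStore.actions.registerFileUpload("component_123", async () => {
  // do the actual upload, return whatever later actions should see
  return { publicUrl: "https://my-bucket.s3.eu-west-1.amazonaws.com/photo.png" }
})

// the button action triggers it by component ID and surfaces the result
const result = await uploadStore.actions.processFileUpload("component_123")
// result?.publicUrl becomes the "File URL" binding for subsequent actions

// the component unregisters itself when it is destroyed
uploadStore.actions.unregisterFileUpload("component_123")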

View File

@ -5,6 +5,7 @@ import {
confirmationStore,
authStore,
stateStore,
uploadStore,
} from "stores"
import { saveRow, deleteRow, executeQuery, triggerAutomation } from "api"
import { ActionTypes } from "constants"
@ -169,6 +170,17 @@ const updateStateHandler = action => {
}
}
const s3UploadHandler = async action => {
const { componentId } = action.parameters
if (!componentId) {
return
}
const res = await uploadStore.actions.processFileUpload(componentId)
return {
publicUrl: res?.publicUrl,
}
}
const handlerMap = {
["Save Row"]: saveRowHandler,
["Duplicate Row"]: duplicateRowHandler,
@ -183,6 +195,7 @@ const handlerMap = {
["Close Screen Modal"]: closeScreenModalHandler,
["Change Form Step"]: changeFormStepHandler,
["Update State"]: updateStateHandler,
["Upload File to S3"]: s3UploadHandler,
}
const confirmTextMap = {

File diff suppressed because it is too large

View File

@ -1,5 +1,5 @@
{
"watch": ["src", "../auth"],
"watch": ["src", "../backend-core"],
"ext": "js,ts,json",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js"],
"exec": "ts-node src/index.ts"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -70,9 +70,9 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "^10.0.3",
"@budibase/backend-core": "^1.0.46-alpha.3",
"@budibase/client": "^1.0.46-alpha.3",
"@budibase/string-templates": "^1.0.46-alpha.3",
"@budibase/backend-core": "^1.0.46-alpha.7",
"@budibase/client": "^1.0.46-alpha.7",
"@budibase/string-templates": "^1.0.46-alpha.7",
"@bull-board/api": "^3.7.0",
"@bull-board/koa": "^3.7.0",
"@elastic/elasticsearch": "7.10.0",
@ -92,6 +92,8 @@
"fix-path": "3.0.0",
"form-data": "^4.0.0",
"fs-extra": "8.1.0",
"google-auth-library": "^7.11.0",
"google-spreadsheet": "^3.2.0",
"jimp": "0.16.1",
"joi": "17.2.1",
"js-yaml": "^4.1.0",
@ -110,7 +112,7 @@
"mongodb": "3.6.3",
"mssql": "6.2.3",
"mysql2": "^2.3.1",
"node-fetch": "2.6.0",
"node-fetch": "2.6.7",
"open": "^8.4.0",
"pg": "8.5.1",
"pino-pretty": "4.0.0",
@ -139,6 +141,7 @@
"@jest/test-sequencer": "^24.8.0",
"@types/apidoc": "^0.50.0",
"@types/bull": "^3.15.1",
"@types/google-spreadsheet": "^3.1.5",
"@types/jest": "^26.0.23",
"@types/koa": "^2.13.3",
"@types/koa-router": "^7.4.2",

View File

@ -81,12 +81,13 @@ async function getAppUrl(ctx) {
if (ctx.request.body.url) {
// if the url is provided, use that
url = encodeURI(ctx.request.body.url)
} else {
} else if (ctx.request.body.name) {
// otherwise use the name
url = encodeURI(`${ctx.request.body.name}`)
}
if (url) {
url = `/${url.replace(URL_REGEX_SLASH, "")}`.toLowerCase()
}
return url
}
@ -276,16 +277,22 @@ exports.create = async ctx => {
ctx.body = newApplication
}
// This endpoint currently operates as a PATCH rather than a PUT
// Thus name and url fields are handled only if present
exports.update = async ctx => {
const apps = await getAllApps(CouchDB, { dev: true })
// validation
const name = ctx.request.body.name
if (name) {
checkAppName(ctx, apps, name, ctx.params.appId)
}
const url = await getAppUrl(ctx)
if (url) {
checkAppUrl(ctx, apps, url, ctx.params.appId)
ctx.request.body.url = url
}
const appPackageUpdates = { name, url }
const data = await updateAppPackage(appPackageUpdates, ctx.params.appId)
const data = await updateAppPackage(ctx.request.body, ctx.params.appId)
ctx.status = 200
ctx.body = data
}
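
Since update now behaves like a PATCH, a caller can send only the fields it wants to change and anything omitted (including name and url) is left untouched. A sketch using plain fetch, where the application route and values are assumptions based on the standard app API (auth headers omitted):

await fetch(`/api/applications/${appId}`, {
  method: "PUT",
  headers: { "Content-Type": "application/json" },
  // only the fields being changed; url stays as it was
  body: JSON.stringify({ name: "My renamed app" }),
})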

View File

@ -38,6 +38,13 @@ exports.fetch = async function (ctx) {
)
).rows.map(row => row.doc)
for (let datasource of datasources) {
if (datasource.config && datasource.config.auth) {
// strip secrets from response so they don't show in the network request
delete datasource.config.auth
}
}
ctx.body = [bbInternalDb, ...datasources]
}
@ -94,8 +101,13 @@ exports.update = async function (ctx) {
const db = new CouchDB(ctx.appId)
const datasourceId = ctx.params.datasourceId
let datasource = await db.get(datasourceId)
const auth = datasource.config.auth
await invalidateVariables(datasource, ctx.request.body)
datasource = { ...datasource, ...ctx.request.body }
if (auth && !ctx.request.body.auth) {
// don't strip auth config from DB
datasource.config.auth = auth
}
const response = await db.put(datasource)
datasource._rev = response.rev

View File

@ -141,6 +141,16 @@ async function execute(ctx, opts = { rowsOnly: false }) {
const query = await db.get(ctx.params.queryId)
const datasource = await db.get(query.datasourceId)
const enrichedParameters = ctx.request.body.parameters || {}
// make sure parameters are fully enriched with defaults
if (query && query.parameters) {
for (let parameter of query.parameters) {
if (!enrichedParameters[parameter.name]) {
enrichedParameters[parameter.name] = parameter.default
}
}
}
// call the relevant CRUD method on the integration class
try {
const { rows, pagination, extra } = await Runner.run({
@ -149,7 +159,7 @@ async function execute(ctx, opts = { rowsOnly: false }) {
queryVerb: query.queryVerb,
fields: query.fields,
pagination: ctx.request.body.pagination,
parameters: ctx.request.body.parameters,
parameters: enrichedParameters,
transformer: query.transformer,
queryId: ctx.params.queryId,
})
@ -178,8 +188,9 @@ const removeDynamicVariables = async (db, queryId) => {
if (dynamicVariables) {
// delete dynamic variables from the datasource
const newVariables = dynamicVariables.filter(dv => dv.queryId !== queryId)
datasource.config.dynamicVariables = newVariables
datasource.config.dynamicVariables = dynamicVariables.filter(
dv => dv.queryId !== queryId
)
await db.put(datasource)
// invalidate the deleted variables
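
Stepping back to the parameter enrichment added to execute() above: any parameter the caller omits falls back to the default saved on the query definition. A small illustration with placeholder names and values:

const query = {
  parameters: [
    { name: "limit", default: "10" },
    { name: "status", default: "open" },
  ],
}
const enrichedParameters = { status: "closed" } // ctx.request.body.parameters

for (let parameter of query.parameters) {
  if (!enrichedParameters[parameter.name]) {
    enrichedParameters[parameter.name] = parameter.default
  }
}
// enrichedParameters => { status: "closed", limit: "10" }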

View File

@ -52,10 +52,7 @@ exports.validate = async ({ appId, tableId, row, table }) => {
const constraints = cloneDeep(table.schema[fieldName].constraints)
const type = table.schema[fieldName].type
// special case for options, need to always allow unselected (null)
if (
(type === FieldTypes.OPTIONS || type === FieldTypes.ARRAY) &&
constraints.inclusion
) {
if (type === FieldTypes.OPTIONS && constraints.inclusion) {
constraints.inclusion.push(null)
}
let res

View File

@ -17,6 +17,8 @@ const { clientLibraryPath } = require("../../../utilities")
const { upload } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentTypes } = require("../../../db/utils")
const AWS = require("aws-sdk")
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
async function prepareUpload({ s3Key, bucket, metadata, file }) {
const response = await upload({
@ -43,7 +45,9 @@ async function getAppIdFromUrl(ctx) {
// search prod apps for a url that matches, exclude dev where id is always used
const apps = await getAllApps(CouchDB, { dev: false })
const app = apps.filter(a => a.url.toLowerCase() === possibleAppUrl)[0]
const app = apps.filter(
a => a.url && a.url.toLowerCase() === possibleAppUrl
)[0]
if (app && app.appId) {
return app.appId
@ -105,3 +109,51 @@ exports.serveClientLibrary = async function (ctx) {
root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"),
})
}
exports.getSignedUploadURL = async function (ctx) {
const database = new CouchDB(ctx.appId)
// Ensure datasource is valid
let datasource
try {
const { datasourceId } = ctx.params
datasource = await database.get(datasourceId)
if (!datasource) {
ctx.throw(400, "The specified datasource could not be found")
}
} catch (error) {
ctx.throw(400, "The specified datasource could not be found")
}
// Ensure we aren't using a custom endpoint
if (datasource?.config?.endpoint) {
ctx.throw(400, "S3 datasources with custom endpoints are not supported")
}
// Determine type of datasource and generate signed URL
let signedUrl
let publicUrl
if (datasource.source === "S3") {
const { bucket, key } = ctx.request.body || {}
if (!bucket || !key) {
ctx.throw(400, "bucket and key values are required")
return
}
try {
const s3 = new AWS.S3({
region: AWS_REGION,
accessKeyId: datasource?.config?.accessKeyId,
secretAccessKey: datasource?.config?.secretAccessKey,
apiVersion: "2006-03-01",
signatureVersion: "v4",
})
const params = { Bucket: bucket, Key: key }
signedUrl = s3.getSignedUrl("putObject", params)
publicUrl = `https://${bucket}.s3.${AWS_REGION}.amazonaws.com/${key}`
} catch (error) {
ctx.throw(400, error)
}
}
ctx.body = { signedUrl, publicUrl }
}
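
The intended consumption of this endpoint mirrors the externalUpload helper added to the client package earlier in this commit: fetch a signed URL, then PUT the raw file body straight to S3. A sketch with placeholder values (auth headers/cookies omitted for brevity):

const res = await fetch(`/api/attachments/${datasourceId}/url`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ bucket: "my-bucket", key: "photo.png" }),
})
const { signedUrl, publicUrl } = await res.json()

// upload the file directly to S3 using the signed URL
await fetch(signedUrl, { method: "PUT", body: file })
// publicUrl (e.g. https://my-bucket.s3.eu-west-1.amazonaws.com/photo.png) can now be stored or rendered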

View File

@ -46,5 +46,10 @@ router
)
// TODO: this likely needs to be secured in some way
.get("/:appId/:path*", controller.serveApp)
.post(
"/api/attachments/:datasourceId/url",
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
controller.getSignedUploadURL
)
module.exports = router

View File

@ -0,0 +1,98 @@
jest.mock("node-fetch")
jest.mock("aws-sdk", () => ({
config: {
update: jest.fn(),
},
DynamoDB: {
DocumentClient: jest.fn(),
},
S3: jest.fn(() => ({
getSignedUrl: jest.fn(() => {
return "my-url"
}),
})),
}))
const setup = require("./utilities")
describe("/attachments", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let app
afterAll(setup.afterAll)
beforeEach(async () => {
app = await config.init()
})
describe("generateSignedUrls", () => {
let datasource
beforeEach(async () => {
datasource = await config.createDatasource({
datasource: {
type: "datasource",
name: "Test",
source: "S3",
config: {},
},
})
})
it("should be able to generate a signed upload URL", async () => {
const bucket = "foo"
const key = "bar"
const res = await request
.post(`/api/attachments/${datasource._id}/url`)
.send({ bucket, key })
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("my-url")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)
})
it("should handle an invalid datasource ID", async () => {
const res = await request
.post(`/api/attachments/foo/url`)
.send({
bucket: "foo",
key: "bar",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual(
"The specified datasource could not be found"
)
})
it("should require a bucket parameter", async () => {
const res = await request
.post(`/api/attachments/${datasource._id}/url`)
.send({
bucket: undefined,
key: "bar",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual("bucket and key values are required")
})
it("should require a key parameter", async () => {
const res = await request
.post(`/api/attachments/${datasource._id}/url`)
.send({
bucket: "foo",
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(400)
expect(res.body.message).toEqual("bucket and key values are required")
})
})
})

View File

@ -75,6 +75,10 @@ exports.DataSourceOperation = {
DELETE_TABLE: "DELETE_TABLE",
}
exports.DatasourceAuthTypes = {
GOOGLE: "google",
}
exports.SortDirection = {
ASCENDING: "ASCENDING",
DESCENDING: "DESCENDING",

View File

@ -47,6 +47,7 @@ export enum SourceNames {
ARANGODB = "ARANGODB",
REST = "REST",
ORACLE = "ORACLE",
GOOGLE_SHEETS = "GOOGLE_SHEETS",
}
export enum IncludeRelationships {
@ -86,6 +87,8 @@ export interface ExtraQueryConfig {
export interface Integration {
docs: string
plus?: boolean
auth?: { type: string }
relationships?: boolean
description: string
friendlyName: string
datasource: {}

View File

@ -0,0 +1,353 @@
import {
DatasourceFieldTypes,
Integration,
QueryJson,
QueryTypes,
} from "../definitions/datasource"
import { OAuth2Client } from "google-auth-library"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Row, Table, TableSchema } from "../definitions/common"
import { buildExternalTableId } from "./utils"
import { DataSourceOperation, FieldTypes } from "../constants"
import { GoogleSpreadsheet } from "google-spreadsheet"
module GoogleSheetsModule {
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { getScopedConfig } = require("@budibase/backend-core/db")
const { Configs } = require("@budibase/backend-core/constants")
interface GoogleSheetsConfig {
spreadsheetId: string
auth: OAuthClientConfig
}
interface OAuthClientConfig {
appId: string
accessToken: string
refreshToken: string
}
const SCHEMA: Integration = {
plus: true,
auth: {
type: "google",
},
relationships: false,
docs: "https://developers.google.com/sheets/api/quickstart/nodejs",
description:
"Create and collaborate on online spreadsheets in real-time and from any device. ",
friendlyName: "Google Sheets",
datasource: {
spreadsheetId: {
type: DatasourceFieldTypes.STRING,
required: true,
},
},
query: {
create: {
type: QueryTypes.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
required: true,
},
row: {
type: QueryTypes.JSON,
required: true,
},
},
},
read: {
type: QueryTypes.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
required: true,
},
},
},
update: {
type: QueryTypes.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
required: true,
},
rowIndex: {
type: DatasourceFieldTypes.STRING,
required: true,
},
row: {
type: QueryTypes.JSON,
required: true,
},
},
},
delete: {
type: QueryTypes.FIELDS,
fields: {
sheet: {
type: DatasourceFieldTypes.STRING,
required: true,
},
rowIndex: {
type: DatasourceFieldTypes.NUMBER,
required: true,
},
},
},
},
}
class GoogleSheetsIntegration implements DatasourcePlus {
private readonly config: GoogleSheetsConfig
private client: any
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: GoogleSheetsConfig) {
this.config = config
const spreadsheetId = this.cleanSpreadsheetUrl(this.config.spreadsheetId)
this.client = new GoogleSpreadsheet(spreadsheetId)
}
/**
* Pull the spreadsheet ID out from a valid google sheets URL
* @param spreadsheetId - the URL or standard spreadsheetId of the google sheet
* @returns spreadsheet Id of the google sheet
*/
cleanSpreadsheetUrl(spreadsheetId: string) {
if (!spreadsheetId) {
throw new Error(
"You must set a spreadsheet ID in your configuration to fetch tables."
)
}
const parts = spreadsheetId.split("/")
return parts.length > 5 ? parts[5] : spreadsheetId
}
async connect() {
try {
// Initialise oAuth client
const db = getGlobalDB()
const googleConfig = await getScopedConfig(db, {
type: Configs.GOOGLE,
})
const oauthClient = new OAuth2Client({
clientId: googleConfig.clientID,
clientSecret: googleConfig.clientSecret,
})
oauthClient.credentials.access_token = this.config.auth.accessToken
oauthClient.credentials.refresh_token = this.config.auth.refreshToken
this.client.useOAuth2Client(oauthClient)
await this.client.loadInfo()
} catch (err) {
console.error("Error connecting to google sheets", err)
throw err
}
}
async buildSchema(datasourceId: string) {
await this.connect()
const sheets = await this.client.sheetsByIndex
const tables: Record<string, Table> = {}
for (let sheet of sheets) {
// must fetch rows to determine schema
await sheet.getRows()
// build schema
const schema: TableSchema = {}
// build schema from headers
for (let header of sheet.headerValues) {
schema[header] = {
name: header,
type: FieldTypes.STRING,
}
}
// create tables
tables[sheet.title] = {
_id: buildExternalTableId(datasourceId, sheet.title),
name: sheet.title,
primary: ["rowNumber"],
schema,
}
}
this.tables = tables
}
async query(json: QueryJson) {
const sheet = json.endpoint.entityId
const handlers = {
[DataSourceOperation.CREATE]: () =>
this.create({ sheet, row: json.body }),
[DataSourceOperation.READ]: () => this.read({ sheet }),
[DataSourceOperation.UPDATE]: () =>
this.update({
// exclude the header row and zero index
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
sheet,
row: json.body,
}),
[DataSourceOperation.DELETE]: () =>
this.delete({
// exclude the header row and zero index
rowIndex: json.extra?.idFilter?.equal?.rowNumber - 2,
sheet,
}),
[DataSourceOperation.CREATE_TABLE]: () =>
this.createTable(json?.table?.name),
[DataSourceOperation.UPDATE_TABLE]: () => this.updateTable(json.table),
[DataSourceOperation.DELETE_TABLE]: () =>
this.deleteTable(json?.table?.name),
}
const internalQueryMethod = handlers[json.endpoint.operation]
return await internalQueryMethod()
}
buildRowObject(headers: string[], values: string[], rowNumber: number) {
const rowObject: { rowNumber: number; [key: string]: any } = { rowNumber }
for (let i = 0; i < headers.length; i++) {
rowObject._id = rowNumber
rowObject[headers[i]] = values[i]
}
return rowObject
}
async createTable(name?: string) {
try {
await this.connect()
const sheet = await this.client.addSheet({ title: name })
return sheet
} catch (err) {
console.error("Error creating new table in google sheets", err)
throw err
}
}
async updateTable(table?: any) {
try {
await this.connect()
const sheet = await this.client.sheetsByTitle[table.name]
await sheet.loadHeaderRow()
if (table._rename) {
const headers = []
for (let header of sheet.headerValues) {
if (header === table._rename.old) {
headers.push(table._rename.updated)
} else {
headers.push(header)
}
}
await sheet.setHeaderRow(headers)
} else {
let newField = Object.keys(table.schema).find(
key => !sheet.headerValues.includes(key)
)
await sheet.setHeaderRow([...sheet.headerValues, newField])
}
} catch (err) {
console.error("Error updating table in google sheets", err)
throw err
}
}
async deleteTable(sheet: any) {
try {
await this.connect()
const sheetToDelete = await this.client.sheetsByTitle[sheet]
return await sheetToDelete.delete()
} catch (err) {
console.error("Error deleting table in google sheets", err)
throw err
}
}
async create(query: { sheet: string; row: any }) {
try {
await this.connect()
const sheet = await this.client.sheetsByTitle[query.sheet]
const rowToInsert =
typeof query.row === "string" ? JSON.parse(query.row) : query.row
const row = await sheet.addRow(rowToInsert)
return [
this.buildRowObject(sheet.headerValues, row._rawData, row._rowNumber),
]
} catch (err) {
console.error("Error writing to google sheets", err)
throw err
}
}
async read(query: { sheet: string }) {
try {
await this.connect()
const sheet = await this.client.sheetsByTitle[query.sheet]
const rows = await sheet.getRows()
const headerValues = sheet.headerValues
const response = []
for (let row of rows) {
response.push(
this.buildRowObject(headerValues, row._rawData, row._rowNumber)
)
}
return response
} catch (err) {
console.error("Error reading from google sheets", err)
throw err
}
}
async update(query: { sheet: string; rowIndex: number; row: any }) {
try {
await this.connect()
const sheet = await this.client.sheetsByTitle[query.sheet]
const rows = await sheet.getRows()
const row = rows[query.rowIndex]
if (row) {
const updateValues = query.row
for (let key in updateValues) {
row[key] = updateValues[key]
}
await row.save()
return [
this.buildRowObject(
sheet.headerValues,
row._rawData,
row._rowNumber
),
]
} else {
throw new Error("Row does not exist.")
}
} catch (err) {
console.error("Error reading from google sheets", err)
throw err
}
}
async delete(query: { sheet: string; rowIndex: number }) {
await this.connect()
const sheet = await this.client.sheetsByTitle[query.sheet]
const rows = await sheet.getRows()
const row = rows[query.rowIndex]
if (row) {
await row.delete()
return [{ deleted: query.rowIndex }]
} else {
throw new Error("Row does not exist.")
}
}
}
module.exports = {
schema: SCHEMA,
integration: GoogleSheetsIntegration,
}
}
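
For orientation, a rough sketch of exercising the integration directly, outside the query Runner. This assumes a tenancy context is set up so getGlobalDB() can resolve the Google OAuth config; the spreadsheet URL, tokens and IDs below are placeholders:

const { integration: GoogleSheets } = require("./googlesheets")

const sheets = new GoogleSheets({
  spreadsheetId: "https://docs.google.com/spreadsheets/d/<SHEET_ID>/edit",
  auth: { appId: "app_123", accessToken: "<token>", refreshToken: "<token>" },
})

await sheets.buildSchema("datasource_123")           // builds tables from each sheet's header row
const rows = await sheets.read({ sheet: "Sheet1" })  // [{ rowNumber: 2, _id: 2, <header>: <value>, ... }]
await sheets.create({ sheet: "Sheet1", row: { Name: "New row" } })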

View File

@ -9,6 +9,7 @@ const airtable = require("./airtable")
const mysql = require("./mysql")
const arangodb = require("./arangodb")
const rest = require("./rest")
const googlesheets = require("./googlesheets")
const { SourceNames } = require("../definitions/datasource")
const DEFINITIONS = {
@ -23,6 +24,7 @@ const DEFINITIONS = {
[SourceNames.MYSQL]: mysql.schema,
[SourceNames.ARANGODB]: arangodb.schema,
[SourceNames.REST]: rest.schema,
[SourceNames.GOOGLE_SHEETS]: googlesheets.schema,
}
const INTEGRATIONS = {
@ -37,6 +39,7 @@ const INTEGRATIONS = {
[SourceNames.MYSQL]: mysql.integration,
[SourceNames.ARANGODB]: arangodb.integration,
[SourceNames.REST]: rest.integration,
[SourceNames.GOOGLE_SHEETS]: googlesheets.integration,
}
// optionally add oracle integration if the oracle binary can be installed

View File

@ -47,15 +47,6 @@ module.exports = async (ctx, next) => {
(!ctx.user || !ctx.user.builder || !ctx.user.builder.global)
) {
clearCookie(ctx, Cookies.CurrentApp)
// have to set the return url on the server side as client side is not available
setCookie(ctx, ctx.url, Cookies.RETURN_URL, {
// don't sign so the browser can easily read
sign: false,
// use the request domain to match how ui handles the return url cookie.
// it's important we don't use the shared domain here as the builder
// can't delete from it without awareness of the domain.
requestDomain: true,
})
return ctx.redirect("/")
}

View File

@ -983,10 +983,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@^1.0.27-alpha.13":
version "1.0.27-alpha.13"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.27-alpha.13.tgz#89f46e081eb7b342f483fd0eccd72c42b2b2fa6c"
integrity sha512-NiasBvZ5wTpvANG9AjuO34DHMTqWQWSpabLcgwBY0tNG4ekh+wvSCPjCcUvN/bBpOzrVMQ8C4hmS4pvv342BhQ==
"@budibase/backend-core@^1.0.46-alpha.5":
version "1.0.47"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.0.47.tgz#af1e501e20f8a648a40fe7d336b89e65f058c803"
integrity sha512-nj+MC2j6WEH+6LEJhs+zMbnm4BRGCaX7kXvlyq7EXA9h6QOxrNkB/PNFqEumkMJGjorkZAQ/qe8MUEjcE26QBw==
dependencies:
"@techpass/passport-openidconnect" "^0.3.0"
aws-sdk "^2.901.0"
@ -1056,10 +1056,10 @@
svelte-flatpickr "^3.2.3"
svelte-portal "^1.0.0"
"@budibase/bbui@^1.0.35":
version "1.0.35"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.35.tgz#a51886886772257d31e2c6346dbec46fe0c9fd85"
integrity sha512-8qeAzTujtO7uvhj+dMiyW4BTkQ7dC4xF1CNIwyuTnDwIeFDlXYgNb09VVRs3+nWcX2e2eC53EUs1RnLUoSlTsw==
"@budibase/bbui@^1.0.47":
version "1.0.47"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-1.0.47.tgz#df2848b89f881fe603e7156855d6a6c31d4f58bf"
integrity sha512-RRm/BgK5aSx2/vGjMGljw240/48Ksc3/h4yB1nhQj8Xx3fKhlGnWDvWNy+sakvA6+fJvEXuti8RoxHtQ6lXmqA==
dependencies:
"@adobe/spectrum-css-workflow-icons" "^1.2.1"
"@spectrum-css/actionbutton" "^1.0.1"
@ -1106,14 +1106,14 @@
svelte-flatpickr "^3.2.3"
svelte-portal "^1.0.0"
"@budibase/client@^1.0.27-alpha.13":
version "1.0.35"
resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.35.tgz#b832e7e7e35032fb35fe5492fbb721db1da15394"
integrity sha512-maL3V29PQb9VjgnPZq44GSDZCuamAGp01bheUeJxEeskjQqZUdf8QC7Frf1mT+ZjgKJf3gU6qtFOxmWRbVzVbw==
"@budibase/client@^1.0.46-alpha.5":
version "1.0.47"
resolved "https://registry.yarnpkg.com/@budibase/client/-/client-1.0.47.tgz#ce9e2fbd300e5dc389ea29a3a3347897f096c824"
integrity sha512-jB/al8v+nY/VLc6sH5Jt9JzWONVo+24/cI95iXlZSV5xwiKIVGj4+2F5QjKZ0c9Gm7SrrfP2T571N+4XaXNCGg==
dependencies:
"@budibase/bbui" "^1.0.35"
"@budibase/bbui" "^1.0.47"
"@budibase/standard-components" "^0.9.139"
"@budibase/string-templates" "^1.0.35"
"@budibase/string-templates" "^1.0.47"
regexparam "^1.3.0"
shortid "^2.2.15"
svelte-spa-router "^3.0.5"
@ -1163,10 +1163,10 @@
svelte-apexcharts "^1.0.2"
svelte-flatpickr "^3.1.0"
"@budibase/string-templates@^1.0.27-alpha.13", "@budibase/string-templates@^1.0.35":
version "1.0.35"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.35.tgz#a888f1e9327bb36416336a91a95a43cb34e6a42d"
integrity sha512-8HxSv0ru+cgSmphqtOm1pmBM8rc0TRC/6RQGzQefmFFQFfm/SBLAVLLWRmZxAOYTxt4mittGWeL4y05FqEuocg==
"@budibase/string-templates@^1.0.46-alpha.5", "@budibase/string-templates@^1.0.47":
version "1.0.47"
resolved "https://registry.yarnpkg.com/@budibase/string-templates/-/string-templates-1.0.47.tgz#626b9fc4542c7b36a0ae24e820d25a704c527bec"
integrity sha512-87BUfOPr8FGKH8Pt88jhKNGT9PcOmkLRCeen4xi1dI113pAQznBO9vgV+cXOChUBBEQka9Rrt85LMJXidiwVgg==
dependencies:
"@budibase/handlebars-helpers" "^0.11.7"
dayjs "^1.10.4"
@ -2417,6 +2417,11 @@
dependencies:
"@types/node" "*"
"@types/google-spreadsheet@^3.1.5":
version "3.1.5"
resolved "https://registry.yarnpkg.com/@types/google-spreadsheet/-/google-spreadsheet-3.1.5.tgz#2bdc6f9f5372551e0506cb6ef3f562adcf44fc2e"
integrity sha512-7N+mDtZ1pmya2RRFPPl4KYc2TRgiqCNBLUZfyrKfER+u751JgCO+C24/LzF70UmUm/zhHUbzRZ5mtfaxekQ1ZQ==
"@types/graceful-fs@^4.1.2":
version "4.1.5"
resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15"
@ -3193,6 +3198,11 @@ array-unique@^0.3.2:
resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=
arrify@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa"
integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==
asap@^2.0.3:
version "2.0.6"
resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
@ -3495,7 +3505,7 @@ base62@^1.1.0:
resolved "https://registry.yarnpkg.com/base62/-/base62-1.2.8.tgz#1264cb0fb848d875792877479dbe8bae6bae3428"
integrity sha512-V6YHUbjLxN1ymqNLb1DPHoU1CpfdL7d2YTIp5W3U4hhoG4hhxNmsFDs66M9EXxBiSEke5Bt5dwdfMwwZF70iLA==
base64-js@^1.0.2, base64-js@^1.3.1:
base64-js@^1.0.2, base64-js@^1.3.0, base64-js@^1.3.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
@ -3535,6 +3545,11 @@ big.js@^5.2.2:
resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328"
integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==
bignumber.js@^9.0.0:
version "9.0.1"
resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.1.tgz#8d7ba124c882bfd8e43260c67475518d0689e4e5"
integrity sha512-IdZR9mh6ahOBv/hYGiXyVuyCetmGJhtYkqLBpTStdhEGjegpPlUawydyaF3pbIOFynJTpllEs+NP+CS9jKFLjA==
binary-extensions@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
@ -4826,7 +4841,7 @@ ecc-jsbn@~0.1.1:
jsbn "~0.1.0"
safer-buffer "^2.1.0"
ecdsa-sig-formatter@1.0.11:
ecdsa-sig-formatter@1.0.11, ecdsa-sig-formatter@^1.0.11:
version "1.0.11"
resolved "https://registry.yarnpkg.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz#ae0f0fa2d85045ef14a817daa3ce9acd0489e5bf"
integrity sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==
@ -5490,7 +5505,7 @@ extend-shallow@^3.0.0, extend-shallow@^3.0.2:
assign-symbols "^1.0.0"
is-extendable "^1.0.1"
extend@^3.0.0, extend@~3.0.2:
extend@^3.0.0, extend@^3.0.2, extend@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa"
integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==
@ -5569,6 +5584,11 @@ fast-safe-stringify@^2.0.7, fast-safe-stringify@^2.0.8:
resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884"
integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==
fast-text-encoding@^1.0.0:
version "1.0.3"
resolved "https://registry.yarnpkg.com/fast-text-encoding/-/fast-text-encoding-1.0.3.tgz#ec02ac8e01ab8a319af182dae2681213cfe9ce53"
integrity sha512-dtm4QZH9nZtcDt8qJiOH9fcQd1NAgi+K1O2DbE6GG1PPCK/BWfOH3idCTRQ4ImXRUOyopDEgDEnVEE7Y/2Wrig==
fast-url-parser@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d"
@ -5919,6 +5939,25 @@ functional-red-black-tree@^1.0.1:
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
gaxios@^4.0.0:
version "4.3.2"
resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-4.3.2.tgz#845827c2dc25a0213c8ab4155c7a28910f5be83f"
integrity sha512-T+ap6GM6UZ0c4E6yb1y/hy2UB6hTrqhglp3XfmU9qbLCGRYhLVV5aRPpC4EmoG8N8zOnkYCgoBz+ScvGAARY6Q==
dependencies:
abort-controller "^3.0.0"
extend "^3.0.2"
https-proxy-agent "^5.0.0"
is-stream "^2.0.0"
node-fetch "^2.6.1"
gcp-metadata@^4.2.0:
version "4.3.1"
resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-4.3.1.tgz#fb205fe6a90fef2fd9c85e6ba06e5559ee1eefa9"
integrity sha512-x850LS5N7V1F3UcV7PoupzGsyD6iVwTVvsh3tbXfkctZnBnjW5yu5z1/3k3SehF7TyoTIe78rJs02GMMy+LF+A==
dependencies:
gaxios "^4.0.0"
json-bigint "^1.0.0"
generate-function@^2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/generate-function/-/generate-function-2.3.1.tgz#f069617690c10c868e73b8465746764f97c3479f"
@ -6116,6 +6155,36 @@ globby@^11.0.3:
merge2 "^1.3.0"
slash "^3.0.0"
google-auth-library@^6.1.3:
version "6.1.6"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-6.1.6.tgz#deacdcdb883d9ed6bac78bb5d79a078877fdf572"
integrity sha512-Q+ZjUEvLQj/lrVHF/IQwRo6p3s8Nc44Zk/DALsN+ac3T4HY/g/3rrufkgtl+nZ1TW7DNAw5cTChdVp4apUXVgQ==
dependencies:
arrify "^2.0.0"
base64-js "^1.3.0"
ecdsa-sig-formatter "^1.0.11"
fast-text-encoding "^1.0.0"
gaxios "^4.0.0"
gcp-metadata "^4.2.0"
gtoken "^5.0.4"
jws "^4.0.0"
lru-cache "^6.0.0"
google-auth-library@^7.11.0:
version "7.11.0"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-7.11.0.tgz#b63699c65037310a424128a854ba7e736704cbdb"
integrity sha512-3S5jn2quRumvh9F/Ubf7GFrIq71HZ5a6vqosgdIu105kkk0WtSqc2jGCRqtWWOLRS8SX3AHACMOEDxhyWAQIcg==
dependencies:
arrify "^2.0.0"
base64-js "^1.3.0"
ecdsa-sig-formatter "^1.0.11"
fast-text-encoding "^1.0.0"
gaxios "^4.0.0"
gcp-metadata "^4.2.0"
gtoken "^5.0.4"
jws "^4.0.0"
lru-cache "^6.0.0"
google-auth-library@~0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-0.10.0.tgz#6e15babee85fd1dd14d8d128a295b6838d52136e"
@ -6133,6 +6202,22 @@ google-p12-pem@^0.1.0:
dependencies:
node-forge "^0.7.1"
google-p12-pem@^3.0.3:
version "3.1.2"
resolved "https://registry.yarnpkg.com/google-p12-pem/-/google-p12-pem-3.1.2.tgz#c3d61c2da8e10843ff830fdb0d2059046238c1d4"
integrity sha512-tjf3IQIt7tWCDsa0ofDQ1qqSCNzahXDxdAGJDbruWqu3eCg5CKLYKN+hi0s6lfvzYZ1GDVr+oDF9OOWlDSdf0A==
dependencies:
node-forge "^0.10.0"
google-spreadsheet@^3.2.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/google-spreadsheet/-/google-spreadsheet-3.2.0.tgz#ce8aa75c15705aa950ad52b091a6fc4d33dcb329"
integrity sha512-z7XMaqb+26rdo8p51r5O03u8aPLAPzn5YhOXYJPcf2hdMVr0dUbIARgdkRdmGiBeoV/QoU/7VNhq1MMCLZv3kQ==
dependencies:
axios "^0.21.4"
google-auth-library "^6.1.3"
lodash "^4.17.21"
googleapis@^16.0.0:
version "16.1.0"
resolved "https://registry.yarnpkg.com/googleapis/-/googleapis-16.1.0.tgz#0f19f2d70572d918881a0f626e3b1a2fa8629576"
@ -6197,6 +6282,15 @@ gtoken@^1.2.1:
mime "^1.4.1"
request "^2.72.0"
gtoken@^5.0.4:
version "5.3.1"
resolved "https://registry.yarnpkg.com/gtoken/-/gtoken-5.3.1.tgz#c1c2598a826f2b5df7c6bb53d7be6cf6d50c3c78"
integrity sha512-yqOREjzLHcbzz1UrQoxhBtpk8KjrVhuqPE7od1K2uhyxG2BHjKZetlbLw/SPZak/QqTIQW+addS+EcjqQsZbwQ==
dependencies:
gaxios "^4.0.0"
google-p12-pem "^3.0.3"
jws "^4.0.0"
gulp-header@^1.7.1:
version "1.8.12"
resolved "https://registry.yarnpkg.com/gulp-header/-/gulp-header-1.8.12.tgz#ad306be0066599127281c4f8786660e705080a84"
@ -8084,6 +8178,13 @@ jsesc@~0.5.0:
resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=
json-bigint@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/json-bigint/-/json-bigint-1.0.0.tgz#ae547823ac0cad8398667f8cd9ef4730f5b01ff1"
integrity sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==
dependencies:
bignumber.js "^9.0.0"
json-buffer@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
@ -8197,6 +8298,15 @@ jwa@^1.4.1:
ecdsa-sig-formatter "1.0.11"
safe-buffer "^5.0.1"
jwa@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/jwa/-/jwa-2.0.0.tgz#a7e9c3f29dae94027ebcaf49975c9345593410fc"
integrity sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==
dependencies:
buffer-equal-constant-time "1.0.1"
ecdsa-sig-formatter "1.0.11"
safe-buffer "^5.0.1"
jws@3.x.x, jws@^3.0.0, jws@^3.1.4, jws@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304"
@ -8205,6 +8315,14 @@ jws@3.x.x, jws@^3.0.0, jws@^3.1.4, jws@^3.2.2:
jwa "^1.4.1"
safe-buffer "^5.0.1"
jws@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/jws/-/jws-4.0.0.tgz#2d4e8cf6a318ffaa12615e9dec7e86e6c97310f4"
integrity sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==
dependencies:
jwa "^2.0.0"
safe-buffer "^5.0.1"
keygrip@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.0.3.tgz#399d709f0aed2bab0a059e0cdd3a5023a053e1dc"
@ -9313,18 +9431,23 @@ node-fetch@2.4.1:
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.4.1.tgz#b2e38f1117b8acbedbe0524f041fb3177188255d"
integrity sha512-P9UbpFK87NyqBZzUuDBDz4f6Yiys8xm8j7ACDbi6usvFm6KItklQUKjeoqTrYS/S1k6I8oaOC2YLLDr/gg26Mw==
node-fetch@2.6.0, node-fetch@^2.6.0:
node-fetch@2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-fetch@^2.6.1:
version "2.6.6"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.6.tgz#1751a7c01834e8e1697758732e9efb6eeadfaf89"
integrity sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==
node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies:
whatwg-url "^5.0.0"
node-forge@^0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
node-forge@^0.7.1:
version "0.7.6"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View File

@ -112,9 +112,10 @@ module.exports.processStringSync = (string, context, opts) => {
const template = instance.compile(string, {
strict: false,
})
const now = Math.floor(Date.now() / 1000) * 1000
return processors.postprocess(
template({
now: new Date().toISOString(),
now: new Date(now).toISOString(),
...context,
})
)
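The added line truncates the current time to whole seconds before it is exposed as now, so every render within the same second sees an identical timestamp (one plausible motivation; the diff itself does not state it). A quick worked example of the arithmetic:

const ms = 1643576400123                  // example Date.now() value
const now = Math.floor(ms / 1000) * 1000  // 1643576400000 – millisecond component dropped
new Date(now).toISOString()               // "2022-01-30T21:00:00.000Z"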

View File

@ -1,3 +1,3 @@
{
"watch": ["src", "../auth"]
"watch": ["src", "../backend-core"]
}

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "1.0.46-alpha.3",
"version": "1.0.46-alpha.7",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@ -29,8 +29,8 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "^1.0.46-alpha.3",
"@budibase/string-templates": "^1.0.46-alpha.3",
"@budibase/backend-core": "^1.0.46-alpha.7",
"@budibase/string-templates": "^1.0.46-alpha.7",
"@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0",

View File

@ -147,6 +147,32 @@ exports.logout = async ctx => {
ctx.body = { message: "User logged out." }
}
exports.datasourcePreAuth = async (ctx, next) => {
const provider = ctx.params.provider
const middleware = require(`@budibase/backend-core/middleware`)
const handler = middleware.datasource[provider]
setCookie(
ctx,
{
provider,
appId: ctx.query.appId,
datasourceId: ctx.query.datasourceId,
},
Cookies.DatasourceAuth
)
return handler.preAuth(passport, ctx, next)
}
exports.datasourceAuth = async (ctx, next) => {
const authStateCookie = getCookie(ctx, Cookies.DatasourceAuth)
const provider = authStateCookie.provider
const middleware = require(`@budibase/backend-core/middleware`)
const handler = middleware.datasource[provider]
return handler.postAuth(passport, ctx, next)
}
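Combined with the routes added further down in this diff, a sketch of how a client might kick the flow off; the tenant ("default"), app ID and datasource ID are placeholders:

// builder-side kick-off (hypothetical values)
const params = new URLSearchParams({
  appId: "app_123",
  datasourceId: "datasource_456",
})
window.open(`/api/global/auth/default/datasource/google?${params}`)
// datasourcePreAuth above stashes { provider, appId, datasourceId } in Cookies.DatasourceAuth
// and redirects to Google's consent screen; Google then calls back on
//   /api/global/auth/datasource/google/callback
// where datasourceAuth reads the cookie and delegates to the provider's postAuth middleware,
// which persists the tokens on the datasource.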
/**
* The initial call that google authentication makes to take you to the google login screen.
* On a successful login, you will be redirected to the googleAuth callback route.

View File

@ -63,8 +63,17 @@ router
updateTenant,
authController.googlePreAuth
)
.get(
"/api/global/auth/:tenantId/datasource/:provider",
updateTenant,
authController.datasourcePreAuth
)
// single tenancy endpoint
.get("/api/global/auth/google/callback", authController.googleAuth)
.get(
"/api/global/auth/datasource/:provider/callback",
authController.datasourceAuth
)
// multi-tenancy endpoint
.get(
"/api/global/auth/:tenantId/google/callback",

File diff suppressed because it is too large