Merge branch 'develop' into postgres-schema

This commit is contained in:
Rory Powell 2021-11-05 14:07:39 +00:00
commit 2b6c6d7aaa
67 changed files with 2239 additions and 1280 deletions

.gitignore vendored
View File

@ -3,6 +3,7 @@ builder/*
.temp/
packages/server/runtime_apps/
.idea/
bb-airgapped.tar.gz
# Logs
logs

View File

@ -0,0 +1,51 @@
const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")
const IMAGES = {
worker: "budibase/worker",
apps: "budibase/apps",
proxy: "envoyproxy/envoy:v1.16-latest",
minio: "minio/minio",
couch: "ibmcom/couchdb3",
curl: "curlimages/curl",
redis: "redis",
watchtower: "containrrr/watchtower"
}
const FILES = {
COMPOSE: "docker-compose.yaml",
ENVOY: "envoy.yaml",
PROPERTIES: "hosting.properties"
}
const OUTPUT_DIR = path.join(__dirname, "../", "bb-airgapped")
function copyFile(file) {
fs.copyFileSync(
path.join(__dirname, "../", "../", file),
path.join(OUTPUT_DIR, file)
)
}
// create output dir
console.log(`Creating ${OUTPUT_DIR} for build..`)
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
fs.mkdirSync(OUTPUT_DIR)
// package images into tar files
for (let image in IMAGES) {
console.log(`Creating tar for ${image}..`)
execSync(`docker save ${IMAGES[image]} -o ${OUTPUT_DIR}/${image}.tar`)
}
// copy config files
copyFile(FILES.COMPOSE)
copyFile(FILES.ENVOY)
copyFile(FILES.PROPERTIES)
// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
// clean up
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
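
For completeness, here is what the matching restore step on the airgapped host could look like. This companion script is hypothetical (the commit only adds the build side) and assumes bb-airgapped.tar.gz has already been extracted on the target machine:

const fs = require("fs")
const path = require("path")
const { execSync } = require("child_process")
// assumes `tar -xzf bb-airgapped.tar.gz` has produced this directory
const EXTRACTED_DIR = path.join(__dirname, "bb-airgapped")
for (const file of fs.readdirSync(EXTRACTED_DIR)) {
  // load each saved image back into the local Docker daemon; skip config files
  if (!file.endsWith(".tar")) continue
  console.log(`Loading ${file}..`)
  execSync(`docker load -i ${path.join(EXTRACTED_DIR, file)}`)
}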

View File

@ -1,5 +1,5 @@
{
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -46,6 +46,7 @@
"build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"release:helm": "./scripts/release_helm_chart.sh",
"env:multi:enable": "lerna run env:multi:enable",
"env:multi:disable": "lerna run env:multi:disable",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",

View File

@ -9,6 +9,8 @@ const { createASession } = require("../../security/sessions")
const { getTenantId } = require("../../tenancy")
const INVALID_ERR = "Invalid Credentials"
const SSO_NO_PASSWORD = "SSO user does not have a password set"
const EXPIRED = "This account has expired. Please reset your password"
exports.options = {
passReqToCallback: true,
@ -36,6 +38,19 @@ exports.authenticate = async function (ctx, email, password, done) {
return authError(done, INVALID_ERR)
}
// check that the user has a stored password before proceeding
if (!dbUser.password) {
if (
(dbUser.account && dbUser.account.authType === "sso") || // root account sso
dbUser.thirdPartyProfile // internal sso
) {
return authError(done, SSO_NO_PASSWORD)
}
console.error("Non-SSO user has no password set", dbUser)
return authError(done, EXPIRED)
}
// authenticate
if (await compare(password, dbUser.password)) {
const sessionId = newid()

View File

@ -181,8 +181,8 @@ exports.saveUser = async (
// check budibase users in other tenants
if (env.MULTI_TENANCY) {
dbUser = await getTenantUser(email)
if (dbUser != null && dbUser.tenantId !== tenantId) {
const tenantUser = await getTenantUser(email)
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
throw `Email address ${email} already in use.`
}
}

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -51,6 +51,7 @@
"@spectrum-css/fieldlabel": "^3.0.1",
"@spectrum-css/icon": "^3.0.1",
"@spectrum-css/illustratedmessage": "^3.0.2",
"@spectrum-css/inlinealert": "^2.0.1",
"@spectrum-css/inputgroup": "^3.0.2",
"@spectrum-css/label": "^2.0.10",
"@spectrum-css/link": "^3.1.1",

View File

@ -0,0 +1,51 @@
<script>
import "@spectrum-css/inlinealert/dist/index-vars.css"
import Button from "../Button/Button.svelte"
export let type = "info"
export let header = ""
export let message = ""
export let onConfirm = undefined
$: icon = selectIcon(type)
function selectIcon(alertType) {
switch (alertType) {
case "error":
case "negative":
return "Alert"
case "success":
return "CheckmarkCircle"
case "help":
return "Help"
default:
return "Info"
}
}
</script>
<div class="spectrum-InLineAlert spectrum-InLineAlert--{type}">
<svg
class="spectrum-Icon spectrum-Icon--sizeM spectrum-InLineAlert-icon"
focusable="false"
aria-hidden="true"
>
<use xlink:href="#spectrum-icon-18-{icon}" />
</svg>
<div class="spectrum-InLineAlert-header">{header}</div>
<div class="spectrum-InLineAlert-content">{message}</div>
{#if onConfirm}
<div class="spectrum-InLineAlert-footer">
<Button secondary on:click={onConfirm}>OK</Button>
</div>
{/if}
</div>
<style>
.spectrum-InLineAlert {
--spectrum-semantic-negative-border-color: #e34850;
--spectrum-semantic-positive-border-color: #2d9d78;
--spectrum-semantic-positive-icon-color: #2d9d78;
--spectrum-semantic-negative-icon-color: #e34850;
}
</style>

View File

@ -58,6 +58,7 @@ export { default as Pagination } from "./Pagination/Pagination.svelte"
export { default as Badge } from "./Badge/Badge.svelte"
export { default as StatusLight } from "./StatusLight/StatusLight.svelte"
export { default as ColorPicker } from "./ColorPicker/ColorPicker.svelte"
export { default as InlineAlert } from "./InlineAlert/InlineAlert.svelte"
// Typography
export { default as Body } from "./Typography/Body.svelte"

View File

@ -136,6 +136,11 @@
resolved "https://registry.yarnpkg.com/@spectrum-css/illustratedmessage/-/illustratedmessage-3.0.2.tgz#6a480be98b027e050b086e7899e40d87adb0a8c0"
integrity sha512-dqnE8X27bGcO0HN8+dYx8O4o0dNNIAqeivOzDHhe2El+V4dTzMrNIerF6G0NLm3GjVf6XliwmitsZK+K6FmbtA==
"@spectrum-css/inlinealert@^2.0.1":
version "2.0.1"
resolved "https://registry.yarnpkg.com/@spectrum-css/inlinealert/-/inlinealert-2.0.1.tgz#7521f88f6c845806403cc7d925773c7414e204a2"
integrity sha512-Xy5RCOwgurqUXuGQCsEDUduDd5408bmEpmFg+feynG7VFUgLFZWBeylSENB/OqjlFtO76PHXNVdHkhDscPIHTA==
"@spectrum-css/inputgroup@^3.0.2":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@spectrum-css/inputgroup/-/inputgroup-3.0.2.tgz#f1b13603832cbd22394f3d898af13203961f8691"

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^0.9.173-alpha.3",
"@budibase/client": "^0.9.173-alpha.3",
"@budibase/bbui": "^0.9.176-alpha.3",
"@budibase/client": "^0.9.176-alpha.3",
"@budibase/colorpicker": "1.1.2",
"@budibase/string-templates": "^0.9.173-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@ -4,6 +4,7 @@
import CreateRowButton from "./buttons/CreateRowButton.svelte"
import CreateColumnButton from "./buttons/CreateColumnButton.svelte"
import CreateViewButton from "./buttons/CreateViewButton.svelte"
import ExistingRelationshipButton from "./buttons/ExistingRelationshipButton.svelte"
import ExportButton from "./buttons/ExportButton.svelte"
import EditRolesButton from "./buttons/EditRolesButton.svelte"
import ManageAccessButton from "./buttons/ManageAccessButton.svelte"
@ -98,9 +99,7 @@
on:updatecolumns={onUpdateColumns}
on:updaterows={onUpdateRows}
>
{#if isInternal}
<CreateColumnButton on:updatecolumns={onUpdateColumns} />
{/if}
<CreateColumnButton on:updatecolumns={onUpdateColumns} />
{#if schema && Object.keys(schema).length > 0}
{#if !isUsersTable}
<CreateRowButton
@ -116,6 +115,12 @@
{#if isUsersTable}
<EditRolesButton />
{/if}
{#if !isInternal}
<ExistingRelationshipButton
table={$tables.selected}
on:updatecolumns={onUpdateColumns}
/>
{/if}
<HideAutocolumnButton bind:hideAutocolumns />
<!-- always have the export last -->
<ExportButton view={$tables.selected?._id} />

View File

@ -16,8 +16,8 @@
export let value = defaultValue || (meta.type === "boolean" ? false : "")
export let readonly
$: type = meta.type
$: label = capitalise(meta.name)
$: type = meta?.type
$: label = meta.name ? capitalise(meta.name) : ""
</script>
{#if type === "options"}

View File

@ -129,7 +129,7 @@
bind:selectedRows
allowSelectRows={allowEditing && !isUsersTable}
allowEditRows={allowEditing}
allowEditColumns={allowEditing && isInternal}
allowEditColumns={allowEditing}
showAutoColumns={!hideAutocolumns}
on:editcolumn={e => editColumn(e.detail)}
on:editrow={e => editRow(e.detail)}

View File

@ -0,0 +1,54 @@
<script>
import { ActionButton, Modal, notifications } from "@budibase/bbui"
import CreateEditRelationship from "../../Datasources/CreateEditRelationship.svelte"
import { datasources, tables } from "../../../../stores/backend"
import { createEventDispatcher } from "svelte"
export let table
const dispatch = createEventDispatcher()
$: plusTables = datasource?.plus
? Object.values(datasource?.entities || {})
: []
$: datasource = $datasources.list.find(
source => source._id === table?.sourceId
)
let modal
async function saveRelationship() {
try {
// Create datasource
await datasources.save(datasource)
notifications.success(`Relationship information saved.`)
const tableList = await tables.fetch()
await tables.select(tableList.find(tbl => tbl._id === table._id))
dispatch("updatecolumns")
} catch (err) {
notifications.error(`Error saving relationship info: ${err}`)
}
}
</script>
{#if table.sourceId}
<div>
<ActionButton
icon="DataCorrelated"
primary
size="S"
quiet
on:click={modal.show}
>
Define existing relationship
</ActionButton>
</div>
<Modal bind:this={modal}>
<CreateEditRelationship
{datasource}
save={saveRelationship}
close={modal.hide}
{plusTables}
selectedFromTable={table}
/>
</Modal>
{/if}

View File

@ -31,6 +31,9 @@
const AUTO_TYPE = "auto"
const FORMULA_TYPE = FIELDS.FORMULA.type
const LINK_TYPE = FIELDS.LINK.type
const STRING_TYPE = FIELDS.STRING.type
const NUMBER_TYPE = FIELDS.NUMBER.type
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
const { hide } = getContext(Context.Modal)
@ -55,8 +58,9 @@
let confirmDeleteDialog
let deletion
$: checkConstraints(field)
$: tableOptions = $tables.list.filter(
table => table._id !== $tables.draft._id && table.type !== "external"
opt => opt._id !== $tables.draft._id && opt.type === table.type
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =
@ -83,6 +87,7 @@
$: canBeRequired =
field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
$: relationshipOptions = getRelationshipOptions(field)
$: external = table.type === "external"
async function saveColumn() {
if (field.type === AUTO_TYPE) {
@ -193,6 +198,45 @@
},
]
}
function getAllowedTypes() {
if (!external) {
return [
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]
} else {
return [
FIELDS.STRING,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.DATETIME,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
FIELDS.ARRAY,
FIELDS.FORMULA,
FIELDS.LINK,
]
}
}
function checkConstraints(fieldToCheck) {
// most types need this, just make sure it's always present
if (fieldToCheck && !fieldToCheck.constraints) {
fieldToCheck.constraints = {}
}
// string types built server-side may be missing a length constraint
if (fieldToCheck.type === STRING_TYPE && !fieldToCheck.constraints.length) {
fieldToCheck.constraints.length = {}
}
// number types created server-side may be missing a numericality constraint
if (
fieldToCheck.type === NUMBER_TYPE &&
!fieldToCheck.constraints.numericality
) {
fieldToCheck.constraints.numericality = {}
}
}
</script>
<ModalContent
@ -215,10 +259,7 @@
label="Type"
bind:value={field.type}
on:change={handleTypeChange}
options={[
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]}
options={getAllowedTypes()}
getOptionLabel={field => field.name}
getOptionValue={field => field.type}
/>
@ -245,7 +286,7 @@
</div>
{/if}
{#if canBeSearched}
{#if canBeSearched && !external}
<div>
<Label grey small>Search Indexes</Label>
<Toggle

View File

@ -18,10 +18,19 @@
export let fromRelationship = {}
export let toRelationship = {}
export let close
export let selectedFromTable
let originalFromName = fromRelationship.name,
originalToName = toRelationship.name
if (fromRelationship && !fromRelationship.relationshipType) {
fromRelationship.relationshipType = RelationshipTypes.MANY_TO_ONE
}
if (toRelationship && selectedFromTable) {
toRelationship.tableId = selectedFromTable._id
}
function inSchema(table, prop, ogName) {
if (!table || !prop || prop === ogName) {
return false
@ -114,6 +123,7 @@
},
]
$: updateRelationshipType(fromRelationship?.relationshipType)
$: tableChanged(fromTable, toTable)
function updateRelationshipType(fromType) {
if (fromType === RelationshipTypes.MANY_TO_MANY) {
@ -205,7 +215,6 @@
originalToName = toRelationship.name
originalFromName = fromRelationship.name
await save()
await tables.fetch()
}
async function deleteRelationship() {
@ -215,10 +224,26 @@
await tables.fetch()
close()
}
function tableChanged(fromTbl, toTbl) {
fromRelationship.name = toTbl?.name || ""
errors.fromCol = ""
toRelationship.name = fromTbl?.name || ""
errors.toCol = ""
if (toTbl || fromTbl) {
checkForErrors(
fromTable,
toTable,
through,
fromRelationship,
toRelationship
)
}
}
</script>
<ModalContent
title="Create Relationship"
title="Define Relationship"
confirmText="Save"
onConfirm={saveRelationship}
disabled={!valid}
@ -234,6 +259,7 @@
<Select
label="Select from table"
options={tableOptions}
disabled={!!selectedFromTable}
on:change={() => ($touched.from = true)}
bind:error={errors.from}
bind:value={toRelationship.tableId}

View File

@ -1,7 +1,7 @@
<script>
import { goto } from "@roxi/routify"
import { allScreens, store } from "builderStore"
import { tables } from "stores/backend"
import { tables, datasources } from "stores/backend"
import {
ActionMenu,
Icon,
@ -40,7 +40,10 @@
store.actions.screens.delete(templateScreens)
await tables.fetch()
notifications.success("Table deleted")
if (wasSelectedTable._id === table._id) {
if (table.type === "external") {
await datasources.fetch()
}
if (wasSelectedTable && wasSelectedTable._id === table._id) {
$goto("./table")
}
}
@ -64,9 +67,7 @@
<Icon s hoverable name="MoreSmallList" />
</div>
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{#if !external}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
{/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
</ActionMenu>
<Modal bind:this={editorModal}>

View File

@ -32,6 +32,16 @@
.component("@budibase/standard-components/screenslot")
.instanceName("Content Placeholder")
.json()
// Messages that can be sent from the iframe preview to the builder
// This covers both Budibase events and initialisation events
const MessageTypes = {
IFRAME_LOADED: "iframe-loaded",
READY: "ready",
ERROR: "error",
BUDIBASE: "type",
KEYDOWN: "keydown"
}
// Construct iframe template
$: template = iframeTemplate.replace(
@ -80,46 +90,44 @@
// Refresh the preview when required
$: refreshContent(strippedJson)
onMount(() => {
// Initialise the app when mounted
iframe.contentWindow.addEventListener(
"ready",
() => {
function receiveMessage(message) {
const handlers = {
[MessageTypes.READY]: () => {
// Initialise the app when mounted
// Display preview immediately if the intelligent loading feature
// is not supported
if (!loading) return
if (!$store.clientFeatures.intelligentLoading) {
loading = false
}
refreshContent(strippedJson)
},
{ once: true }
)
// Catch any app errors
iframe.contentWindow.addEventListener(
"error",
event => {
[MessageTypes.ERROR]: event => {
// Catch any app errors
loading = false
error = event.detail || "An unknown error occurred"
error = event.data.error || "An unknown error occurred"
},
{ once: true }
)
[MessageTypes.KEYDOWN]: handleKeydownEvent
}
// Add listener for events sent by client library in preview
iframe.contentWindow.addEventListener("bb-event", handleBudibaseEvent)
iframe.contentWindow.addEventListener("keydown", handleKeydownEvent)
const messageHandler = handlers[message.data.type] || handleBudibaseEvent
messageHandler(message)
}
onMount(() => {
window.addEventListener("message", receiveMessage)
})
// Remove the message listener on component destroy
onDestroy(() => {
if (iframe.contentWindow) {
iframe.contentWindow.removeEventListener("bb-event", handleBudibaseEvent)
iframe.contentWindow.removeEventListener("keydown", handleKeydownEvent)
window.removeEventListener("message", receiveMessage)
}
})
const handleBudibaseEvent = event => {
const { type, data } = event.detail
const { type, data } = event.data
if (type === "select-component" && data.id) {
store.actions.components.select({ _id: data.id })
} else if (type === "update-prop") {
@ -151,13 +159,14 @@
store.actions.components.paste(destination, data.mode)
}
} else {
console.warning(`Client sent unknown event type: ${type}`)
console.warn(`Client sent unknown event type: ${type}`)
}
}
const handleKeydownEvent = event => {
const { key } = event.data
if (
(event.key === "Delete" || event.key === "Backspace") &&
(key === "Delete" || key === "Backspace") &&
selectedComponentId &&
["input", "textarea"].indexOf(
iframe.contentWindow.document.activeElement?.tagName.toLowerCase()

View File

@ -54,7 +54,7 @@ export default `
if (!parsed) {
return
}
// Extract data from message
const {
selectedComponentId,
@ -84,17 +84,20 @@ export default `
if (window.loadBudibase) {
window.loadBudibase()
document.documentElement.classList.add("loaded")
window.dispatchEvent(new Event("iframe-loaded"))
window.parent.postMessage({ type: "iframe-loaded" })
} else {
throw "The client library couldn't be loaded"
}
} catch (error) {
window.dispatchEvent(new CustomEvent("error", { detail: error }))
window.parent.postMessage({ type: "error", error })
}
}
window.addEventListener("message", receiveMessage)
window.dispatchEvent(new Event("ready"))
window.addEventListener("keydown", evt => {
window.parent.postMessage({ type: "keydown", key: evt.key })
})
window.parent.postMessage({ type: "ready" })
</script>
</head>
<body/>
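
Taken together, these two files replace the old CustomEvent bridge with the standard window.postMessage channel. A minimal sketch of the protocol implied by the diff (illustrative only; the handler bodies are placeholders):

// in the preview iframe: announce readiness, forward key presses to the parent
window.parent.postMessage({ type: "ready" })
window.addEventListener("keydown", evt => {
  window.parent.postMessage({ type: "keydown", key: evt.key })
})

// in the builder: route incoming messages by their type field
window.addEventListener("message", message => {
  const handlers = {
    ready: () => console.log("preview ready"),
    error: msg => console.error(msg.data.error),
    keydown: msg => console.log("key pressed:", msg.data.key),
  }
  const handler = handlers[message.data.type]
  if (handler) {
    handler(message)
  }
})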

View File

@ -1,16 +1,25 @@
<script>
import { goto } from "@roxi/routify"
import { Button, Heading, Body, Divider, Layout, Modal } from "@budibase/bbui"
import {
Button,
Heading,
Body,
Divider,
Layout,
Modal,
InlineAlert,
ActionButton,
} from "@budibase/bbui"
import { datasources, integrations, queries, tables } from "stores/backend"
import { notifications } from "@budibase/bbui"
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
import CreateEditRelationship from "./CreateEditRelationship/CreateEditRelationship.svelte"
import DisplayColumnModal from "./modals/EditDisplayColumnsModal.svelte"
import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
import CreateExternalTableModal from "./modals/CreateExternalTableModal.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons"
import { capitalise } from "helpers"
let relationshipModal
let displayColumnModal
let createExternalTableModal
let selectedFromRelationship, selectedToRelationship
$: datasource = $datasources.list.find(ds => ds._id === $datasources.selected)
@ -19,6 +28,7 @@
? Object.values(datasource.entities || {})
: []
$: relationships = getRelationships(plusTables)
$: schemaError = $datasources.schemaError
function getRelationships(tables) {
if (!tables || !Array.isArray(tables)) {
@ -101,8 +111,8 @@
relationshipModal.show()
}
function openDisplayColumnModal() {
displayColumnModal.show()
function createNewTable() {
createExternalTableModal.show()
}
</script>
@ -117,8 +127,8 @@
/>
</Modal>
<Modal bind:this={displayColumnModal}>
<DisplayColumnModal {datasource} {plusTables} save={saveDatasource} />
<Modal bind:this={createExternalTableModal}>
<CreateExternalTableModal {datasource} />
</Modal>
{#if datasource && integration}
@ -154,15 +164,15 @@
<div class="query-header">
<Heading size="S">Tables</Heading>
<div class="table-buttons">
{#if plusTables && plusTables.length !== 0}
<Button primary on:click={openDisplayColumnModal}>
Update display columns
</Button>
{/if}
<div>
<Button primary on:click={updateDatasourceSchema}>
<ActionButton
size="S"
quiet
icon="DataRefresh"
on:click={updateDatasourceSchema}
>
Fetch tables from database
</Button>
</ActionButton>
</div>
</div>
</div>
@ -171,6 +181,14 @@
your tables directly from the database and you can use them without
having to write any queries at all.
</Body>
{#if schemaError}
<InlineAlert
type="error"
header="Error fetching tables"
message={schemaError}
onConfirm={datasources.removeSchemaError}
/>
{/if}
<div class="query-list">
{#each plusTables as table}
<div class="query-list-item" on:click={() => onClickTable(table)}>
@ -179,14 +197,23 @@
<p></p>
</div>
{/each}
<div class="add-table">
<Button cta on:click={createNewTable}>Create new table</Button>
</div>
</div>
{#if plusTables?.length !== 0}
<Divider />
<div class="query-header">
<Heading size="S">Relationships</Heading>
<Button primary on:click={() => openRelationshipModal()}
>Create relationship</Button
<ActionButton
icon="DataCorrelated"
primary
size="S"
quiet
on:click={openRelationshipModal}
>
Define existing relationship
</ActionButton>
</div>
<Body>
Tell budibase how your tables are related to get even more smart
@ -301,11 +328,14 @@
.table-buttons {
display: grid;
grid-gap: var(--spacing-l);
grid-template-columns: 1fr 1fr;
}
.table-buttons div {
grid-column-end: -1;
}
.add-table {
margin-top: var(--spacing-m);
}
</style>

View File

@ -0,0 +1,45 @@
<script>
import { ModalContent, Body, Input } from "@budibase/bbui"
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"
export let datasource
let name = ""
$: valid = name && name.length > 0 && !datasource?.entities?.[name]
$: error =
name && datasource?.entities?.[name] ? "Table name already in use." : null
function buildDefaultTable(tableName, datasourceId) {
return {
name: tableName,
type: "external",
primary: ["id"],
sourceId: datasourceId,
schema: {
id: {
autocolumn: true,
type: "number",
},
},
}
}
async function saveTable() {
const table = await tables.save(buildDefaultTable(name, datasource._id))
await datasources.fetch()
$goto(`../../table/${table._id}`)
}
</script>
<ModalContent
title="Create new table"
confirmText="Create"
onConfirm={saveTable}
disabled={!valid}
>
<Body
>Provide a name for your new table; you can add columns once it is created.</Body
>
<Input label="Table Name" bind:error bind:value={name} />
</ModalContent>
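
For reference, calling buildDefaultTable("orders", "datasource_abc123") would return the object below (the datasource id is made up):

{
  name: "orders",
  type: "external",
  primary: ["id"],
  sourceId: "datasource_abc123", // hypothetical datasource _id
  schema: {
    id: {
      autocolumn: true,
      type: "number",
    },
  },
}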

View File

@ -1,43 +0,0 @@
<script>
import { ModalContent, Select, Body } from "@budibase/bbui"
import { tables } from "stores/backend"
export let datasource
export let plusTables
export let save
async function saveDisplayColumns() {
// be explicit about copying over
for (let table of plusTables) {
datasource.entities[table.name].primaryDisplay = table.primaryDisplay
}
save()
await tables.fetch()
}
function getColumnOptions(table) {
if (!table || !table.schema) {
return []
}
return Object.entries(table.schema)
.filter(field => field[1].type !== "link")
.map(([fieldName]) => fieldName)
}
</script>
<ModalContent
title="Edit display columns"
confirmText="Save"
onConfirm={saveDisplayColumns}
>
<Body
>Select the columns that will be shown when displaying relationships.</Body
>
{#each plusTables as table}
<Select
label={table.name}
options={getColumnOptions(table)}
bind:value={table.primaryDisplay}
/>
{/each}
</ModalContent>

View File

@ -44,7 +44,7 @@
}
} catch (err) {
console.error(err)
notifications.error("Invalid credentials")
notifications.error(err.message ? err.message : "Invalid Credentials")
}
}

View File

@ -5,12 +5,35 @@ import api from "../../builderStore/api"
export const INITIAL_DATASOURCE_VALUES = {
list: [],
selected: null,
schemaError: null,
}
export function createDatasourcesStore() {
const store = writable(INITIAL_DATASOURCE_VALUES)
const { subscribe, update, set } = store
async function updateDatasource(response) {
if (response.status !== 200) {
throw new Error(await response.text())
}
const { datasource, error } = await response.json()
update(state => {
const currentIdx = state.list.findIndex(ds => ds._id === datasource._id)
const sources = state.list
if (currentIdx >= 0) {
sources.splice(currentIdx, 1, datasource)
} else {
sources.push(datasource)
}
return { list: sources, selected: datasource._id, schemaError: error }
})
return datasource
}
return {
subscribe,
update,
@ -46,61 +69,20 @@ export function createDatasourcesStore() {
let url = `/api/datasources/${datasource._id}/schema`
const response = await api.post(url)
const json = await response.json()
if (response.status !== 200) {
throw new Error(json.message)
}
update(state => {
const currentIdx = state.list.findIndex(ds => ds._id === json._id)
const sources = state.list
if (currentIdx >= 0) {
sources.splice(currentIdx, 1, json)
} else {
sources.push(json)
}
return { list: sources, selected: json._id }
})
return json
return updateDatasource(response)
},
save: async (datasource, fetchSchema = false) => {
save: async (body, fetchSchema = false) => {
let response
if (datasource._id) {
response = await api.put(
`/api/datasources/${datasource._id}`,
datasource
)
if (body._id) {
response = await api.put(`/api/datasources/${body._id}`, body)
} else {
response = await api.post("/api/datasources", {
datasource: datasource,
datasource: body,
fetchSchema,
})
}
const json = await response.json()
if (response.status !== 200) {
throw new Error(json.message)
}
update(state => {
const currentIdx = state.list.findIndex(ds => ds._id === json._id)
const sources = state.list
if (currentIdx >= 0) {
sources.splice(currentIdx, 1, json)
} else {
sources.push(json)
}
return { list: sources, selected: json._id }
})
return json
return updateDatasource(response)
},
delete: async datasource => {
const response = await api.delete(
@ -115,6 +97,11 @@ export function createDatasourcesStore() {
return response
},
removeSchemaError: () => {
update(state => {
return { ...state, schemaError: null }
})
},
}
}
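
A sketch of how a consumer might react to the new schemaError state (illustrative; the datasource page later in this diff does the equivalent through a reactive statement):

import { get } from "svelte/store"

// save with schema fetch, then surface any schema error to the user
const saved = await datasources.save(body, true)
const { schemaError } = get(datasources)
if (schemaError) {
  console.warn(`Schema was built with errors: ${schemaError}`)
  datasources.removeSchemaError()
}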

View File

@ -11,6 +11,7 @@ export function createTablesStore() {
const tablesResponse = await api.get(`/api/tables`)
const tables = await tablesResponse.json()
update(state => ({ ...state, list: tables }))
return tables
}
async function select(table) {
@ -62,6 +63,9 @@ export function createTablesStore() {
const response = await api.post(`/api/tables`, updatedTable)
const savedTable = await response.json()
await fetch()
if (table.type === "external") {
await datasources.fetch()
}
await select(savedTable)
return savedTable
}

View File

@ -53,7 +53,7 @@ describe("Datasources Store", () => {
})
expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE]))
expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE.datasource]))
})
it("deletes a datasource, updates the store and returns status message", async () => {
api.get.mockReturnValue({ json: () => SOME_DATASOURCE})

View File

@ -13,13 +13,15 @@ export const SOME_DATASOURCE = [
]
export const SAVE_DATASOURCE = {
type: "datasource",
name: "CoolDB",
source: "REST",
config: {
url: "localhost",
defaultHeaders: {},
datasource: {
type: "datasource",
name: "CoolDB",
source: "REST",
config: {
url: "localhost",
defaultHeaders: {},
},
_id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
_rev: "1-4e72002f1011e9392e655948469b7908",
},
_id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
_rev: "1-4e72002f1011e9392e655948469b7908",
}

View File

@ -112,7 +112,7 @@ export function createAuthStore() {
if (response.status === 200) {
setUser(json.user)
} else {
throw "Invalid credentials"
throw new Error(json.message ? json.message : "Invalid credentials")
}
return json
},

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View File

@ -19,7 +19,7 @@ The object key is the name of the component, as exported by `index.js`.
- **bindable** - whether the components provides a bindable value or not
- **settings** - array of settings displayed in the builder
###Settings Definitions
### Settings Definitions
The `type` field in each setting is used by the builder to know which component to use to display
the setting, so it's important that this field is correct. The valid options are:

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^0.9.173-alpha.3",
"@budibase/bbui": "^0.9.176-alpha.3",
"@budibase/standard-components": "^0.9.139",
"@budibase/string-templates": "^0.9.173-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"

View File

@ -8,6 +8,12 @@
import { Modal, ModalContent, ActionButton } from "@budibase/bbui"
import { onDestroy } from "svelte"
const MessageTypes = {
NOTIFICATION: "notification",
CLOSE_SCREEN_MODAL: "close-screen-modal",
INVALIDATE_DATASOURCE: "invalidate-datasource",
}
let iframe
let listenersAttached = false
@ -21,32 +27,33 @@
notificationStore.actions.send(message, type, icon)
}
function receiveMessage(message) {
const handlers = {
[MessageTypes.NOTIFICATION]: () => {
proxyNotification(message.data)
},
[MessageTypes.CLOSE_SCREEN_MODAL]: peekStore.actions.hidePeek,
[MessageTypes.INVALIDATE_DATASOURCE]: () => {
invalidateDataSource(message.data)
},
}
const messageHandler = handlers[message.data.type]
if (messageHandler) {
messageHandler(message)
} else {
console.warn("Unknown event type", message?.data?.type)
}
}
const attachListeners = () => {
// Mirror datasource invalidation to keep the parent window up to date
iframe.contentWindow.addEventListener(
"invalidate-datasource",
invalidateDataSource
)
// Listen for a close event to close the screen peek
iframe.contentWindow.addEventListener(
"close-screen-modal",
peekStore.actions.hidePeek
)
// Proxy notifications back to the parent window instead of iframe
iframe.contentWindow.addEventListener("notification", proxyNotification)
window.addEventListener("message", receiveMessage)
}
const handleCancel = () => {
peekStore.actions.hidePeek()
iframe.contentWindow.removeEventListener(
"invalidate-datasource",
invalidateDataSource
)
iframe.contentWindow.removeEventListener(
"close-screen-modal",
peekStore.actions.hidePeek
)
iframe.contentWindow.removeEventListener("notification", proxyNotification)
window.removeEventListener("message", receiveMessage)
}
const handleFullscreen = () => {

View File

@ -4,11 +4,7 @@ import { findComponentById, findComponentPathById } from "../utils/components"
import { pingEndUser } from "../api"
const dispatchEvent = (type, data = {}) => {
window.dispatchEvent(
new CustomEvent("bb-event", {
detail: { type, data },
})
)
window.parent.postMessage({ type, data })
}
const createBuilderStore = () => {
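
The receiving side is the builder preview shown earlier, which routes anything outside its MessageTypes map to handleBudibaseEvent. An illustrative call (the component id is made up):

// selecting a component in the preview now travels as a plain message
dispatchEvent("select-component", { id: "component_123" })
// equivalent to:
// window.parent.postMessage({
//   type: "select-component",
//   data: { id: "component_123" },
// })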

View File

@ -26,11 +26,19 @@ const createNotificationStore = () => {
// If peeking, pass notifications back to parent window
if (get(routeStore).queryParams?.peek) {
window.dispatchEvent(
new CustomEvent("notification", {
detail: { message, type, icon },
})
)
window.parent.postMessage({
type: "notification",
detail: {
message,
type,
icon,
},
})
return
}

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@ -68,9 +68,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.173-alpha.3",
"@budibase/client": "^0.9.173-alpha.3",
"@budibase/string-templates": "^0.9.173-alpha.3",
"@budibase/auth": "^0.9.176-alpha.3",
"@budibase/client": "^0.9.176-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",

View File

@ -7,8 +7,9 @@ const {
BudibaseInternalDB,
getTableParams,
} = require("../../db/utils")
const { BuildSchemaErrors } = require("../../constants")
const { integrations } = require("../../integrations")
const { makeExternalQuery } = require("./row/utils")
const { getDatasourceAndQuery } = require("./row/utils")
exports.fetch = async function (ctx) {
const database = new CouchDB(ctx.appId)
@ -43,13 +44,17 @@ exports.buildSchemaFromDb = async function (ctx) {
const db = new CouchDB(ctx.appId)
const datasource = await db.get(ctx.params.datasourceId)
const tables = await buildSchemaHelper(datasource)
const { tables, error } = await buildSchemaHelper(datasource)
datasource.entities = tables
const response = await db.put(datasource)
datasource._rev = response.rev
const dbResp = await db.put(datasource)
datasource._rev = dbResp.rev
ctx.body = datasource
const response = { datasource }
if (error) {
response.error = error
}
ctx.body = response
}
exports.update = async function (ctx) {
@ -71,7 +76,7 @@ exports.update = async function (ctx) {
ctx.status = 200
ctx.message = "Datasource saved successfully."
ctx.body = datasource
ctx.body = { datasource }
}
exports.save = async function (ctx) {
@ -85,13 +90,15 @@ exports.save = async function (ctx) {
...ctx.request.body.datasource,
}
let schemaError = null
if (fetchSchema) {
let tables = await buildSchemaHelper(datasource)
const { tables, error } = await buildSchemaHelper(datasource)
schemaError = error
datasource.entities = tables
}
const response = await db.put(datasource)
datasource._rev = response.rev
const dbResp = await db.put(datasource)
datasource._rev = dbResp.rev
// Drain connection pools when configuration is changed
if (datasource.source) {
@ -101,9 +108,11 @@ exports.save = async function (ctx) {
}
}
ctx.status = 200
ctx.message = "Datasource saved successfully."
ctx.body = datasource
const response = { datasource }
if (schemaError) {
response.error = schemaError
}
ctx.body = response
}
exports.destroy = async function (ctx) {
@ -129,7 +138,7 @@ exports.find = async function (ctx) {
exports.query = async function (ctx) {
const queryJson = ctx.request.body
try {
ctx.body = await makeExternalQuery(ctx.appId, queryJson)
ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson)
} catch (err) {
ctx.throw(400, err)
}
@ -143,5 +152,28 @@ const buildSchemaHelper = async datasource => {
await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables
return connector.tables
// make sure they all have a display name selected
for (let entity of Object.values(datasource.entities)) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
const errors = connector.schemaErrors
let error = null
if (errors && Object.keys(errors).length > 0) {
const noKeyTables = Object.entries(errors)
.filter(entry => entry[1] === BuildSchemaErrors.NO_KEY)
.map(([name]) => name)
error = `No primary key constraint found for the following: ${noKeyTables.join(
", "
)}`
}
return { tables: connector.tables, error }
}
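
buildSchemaHelper now resolves to an object rather than the raw tables. An illustrative return value when some tables lack a primary key (the table names are made up):

{
  tables: {
    users: { name: "users", primary: ["id"], schema: { /* ... */ } },
  },
  error: "No primary key constraint found for the following: logs, audit",
}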

View File

@ -101,7 +101,9 @@ async function enrichQueryFields(fields, parameters = {}) {
enrichedQuery[key] = await enrichQueryFields(fields[key], parameters)
} else if (typeof fields[key] === "string") {
// enrich string value as normal
enrichedQuery[key] = await processString(fields[key], parameters)
enrichedQuery[key] = await processString(fields[key], parameters, {
noHelpers: true,
})
} else {
enrichedQuery[key] = fields[key]
}

View File

@ -36,7 +36,7 @@ interface RunConfig {
}
module External {
const { makeExternalQuery } = require("./utils")
const { getDatasourceAndQuery } = require("./utils")
const {
DataSourceOperation,
FieldTypes,
@ -46,6 +46,7 @@ module External {
const { processObjectSync } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../../../db")
const { processFormulas } = require("../../../utilities/rowProcessor/utils")
function buildFilters(
id: string | undefined,
@ -225,7 +226,7 @@ module External {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if ((!row[key] && row[key] !== "") || newRow[key] || field.autocolumn) {
if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
continue
}
// if its an empty string then it means return the column to null (if possible)
@ -361,7 +362,7 @@ module External {
relationships
)
}
return Object.values(finalRows)
return processFormulas(table, Object.values(finalRows))
}
/**
@ -428,7 +429,7 @@ module External {
const tableId = isMany ? field.through : field.tableId
const manyKey = field.throughFrom || primaryKey
const fieldName = isMany ? manyKey : field.fieldName
const response = await makeExternalQuery(this.appId, {
const response = await getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, DataSourceOperation.READ),
filters: {
equal: {
@ -479,7 +480,7 @@ module External {
: DataSourceOperation.CREATE
if (!found) {
promises.push(
makeExternalQuery(appId, {
getDatasourceAndQuery(appId, {
endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing, only one response
body,
@ -509,7 +510,7 @@ module External {
: DataSourceOperation.UPDATE
const body = isMany ? null : { [colName]: null }
promises.push(
makeExternalQuery(this.appId, {
getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, op),
body,
filters,
@ -532,16 +533,17 @@ module External {
table: Table,
includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
) {
function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
column[1].type !== FieldTypes.FORMULA &&
!existing.find((field: string) => field === column[0])
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractNonLinkFieldNames(table)
let fields = extractRealFields(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK || !includeRelations) {
continue
@ -549,7 +551,7 @@ module External {
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = this.tables[linkTableName]
if (linkTable) {
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
}
}
@ -609,7 +611,7 @@ module External {
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
const response = await getDatasourceAndQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (
operation !== DataSourceOperation.READ &&

View File

@ -4,8 +4,8 @@ const CouchDB = require("../../../db")
const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
const { processStringSync } = require("@budibase/string-templates")
const { makeExternalQuery } = require("../../../integrations/base/utils")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
@ -17,18 +17,11 @@ validateJs.extend(validateJs.validators.datetime, {
},
})
exports.makeExternalQuery = async (appId, json) => {
exports.getDatasourceAndQuery = async (appId, json) => {
const datasourceId = json.endpoint.datasourceId
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
return makeExternalQuery(datasource, json)
}
exports.findRow = async (ctx, db, tableId, rowId) => {

View File

@ -0,0 +1,276 @@
const CouchDB = require("../../../db")
const {
buildExternalTableId,
breakExternalTableId,
} = require("../../../integrations/utils")
const {
getTable,
generateForeignKey,
generateJunctionTableName,
foreignKeyStructure,
} = require("./utils")
const {
DataSourceOperation,
FieldTypes,
RelationshipTypes,
} = require("../../../constants")
const { makeExternalQuery } = require("../../../integrations/base/utils")
const { cloneDeep } = require("lodash/fp")
async function makeTableRequest(
datasource,
operation,
table,
tables,
oldTable = null
) {
const json = {
endpoint: {
datasourceId: datasource._id,
entityId: table._id,
operation,
},
meta: {
tables,
},
table,
}
if (oldTable) {
json.meta.table = oldTable
}
return makeExternalQuery(datasource, json)
}
function cleanupRelationships(table, tables, oldTable = null) {
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
if (
schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null)
) {
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const foreignKey = schema.foreignKey
if (!relatedTable || !foreignKey) {
continue
}
for (let [relatedKey, relatedSchema] of Object.entries(
relatedTable.schema
)) {
if (
relatedSchema.type === FieldTypes.LINK &&
relatedSchema.fieldName === foreignKey
) {
delete relatedTable.schema[relatedKey]
}
}
}
}
}
function getDatasourceId(table) {
if (!table) {
throw "No table supplied"
}
if (table.sourceId) {
return table.sourceId
}
return breakExternalTableId(table._id).datasourceId
}
function otherRelationshipType(type) {
if (type === RelationshipTypes.MANY_TO_MANY) {
return RelationshipTypes.MANY_TO_MANY
}
return type === RelationshipTypes.ONE_TO_MANY
? RelationshipTypes.MANY_TO_ONE
: RelationshipTypes.ONE_TO_MANY
}
function generateManyLinkSchema(datasource, column, table, relatedTable) {
const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable)
// first create the new table
const junctionTable = {
_id: buildExternalTableId(datasource._id, jcTblName),
name: jcTblName,
primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary],
schema: {
[primary]: foreignKeyStructure(primary, {
toTable: table.name,
toKey: table.primary[0],
}),
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
toTable: relatedTable.name,
toKey: relatedTable.primary[0],
}),
},
}
column.through = junctionTable._id
column.throughFrom = primary
column.throughTo = relatedPrimary
column.fieldName = relatedPrimary
return junctionTable
}
function generateLinkSchema(column, table, relatedTable, type) {
const isOneSide = type === RelationshipTypes.ONE_TO_MANY
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
// generate a foreign key
const foreignKey = generateForeignKey(column, relatedTable)
column.relationshipType = type
column.foreignKey = isOneSide ? foreignKey : primary
column.fieldName = isOneSide ? primary : foreignKey
return foreignKey
}
function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
// generate column for other table
const relatedSchema = cloneDeep(linkColumn)
// swap them from the main link
if (linkColumn.foreignKey) {
relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName
}
// is many to many
else {
// don't need to copy through, already got it
relatedSchema.fieldName = linkColumn.throughFrom
relatedSchema.throughTo = linkColumn.throughFrom
relatedSchema.throughFrom = linkColumn.throughTo
}
relatedSchema.relationshipType = otherRelationshipType(
linkColumn.relationshipType
)
relatedSchema.tableId = relatedTable._id
relatedSchema.name = columnName
table.schema[columnName] = relatedSchema
}
function isRelationshipSetup(column) {
return column.foreignKey || column.through
}
exports.save = async function (ctx) {
const appId = ctx.appId
const table = ctx.request.body
// data import isn't supported for external tables
delete table.dataImport
const datasourceId = getDatasourceId(ctx.request.body)
let tableToSave = {
type: "table",
_id: buildExternalTableId(datasourceId, table.name),
...table,
}
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await getTable(appId, ctx.request.body._id)
}
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const oldTables = cloneDeep(datasource.entities)
const tables = datasource.entities
const extraTablesToUpdate = []
// check if relations need setup
for (let schema of Object.values(tableToSave.schema)) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue
}
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const relatedColumnName = schema.fieldName
const relationType = schema.relationshipType
if (relationType === RelationshipTypes.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
schema,
table,
relatedTable
)
if (tables[junctionTable.name]) {
throw "Junction table already exists, cannot create another relationship."
}
tables[junctionTable.name] = junctionTable
extraTablesToUpdate.push(junctionTable)
} else {
const fkTable =
relationType === RelationshipTypes.ONE_TO_MANY ? table : relatedTable
const foreignKey = generateLinkSchema(
schema,
table,
relatedTable,
relationType
)
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
if (fkTable.constrained == null) {
fkTable.constrained = []
}
if (fkTable.constrained.indexOf(foreignKey) === -1) {
fkTable.constrained.push(foreignKey)
}
// foreign key is in other table, need to save it to external
if (fkTable._id !== table._id) {
extraTablesToUpdate.push(fkTable)
}
}
generateRelatedSchema(schema, relatedTable, table, relatedColumnName)
schema.main = true
}
cleanupRelationships(tableToSave, tables, oldTable)
const operation = oldTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
await makeTableRequest(datasource, operation, tableToSave, tables, oldTable)
// update any extra tables (like foreign keys in other tables)
for (let extraTable of extraTablesToUpdate) {
const oldExtraTable = oldTables[extraTable.name]
let op = oldExtraTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
}
// make sure the columns in the constrained list all still exist
if (Array.isArray(tableToSave.constrained)) {
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
Object.keys(tableToSave.schema).includes(constraint)
)
}
// store it into couch now for budibase reference
datasource.entities[tableToSave.name] = tableToSave
await db.put(datasource)
return tableToSave
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const tableToDelete = await getTable(appId, ctx.params.tableId)
const datasourceId = getDatasourceId(tableToDelete)
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const tables = datasource.entities
const operation = DataSourceOperation.DELETE_TABLE
await makeTableRequest(datasource, operation, tableToDelete, tables)
cleanupRelationships(tableToDelete, tables)
delete datasource.entities[tableToDelete.name]
await db.put(datasource)
return tableToDelete
}
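
To make the junction metadata concrete: given books and authors tables that both use id as their primary key, generateManyLinkSchema mutates the link column roughly as follows (values derived from the templates above; the junction table id is abbreviated):

{
  type: FieldTypes.LINK,
  relationshipType: RelationshipTypes.MANY_TO_MANY,
  through: junctionTable._id, // buildExternalTableId(datasource._id, "jt_...")
  throughFrom: "booksid", // table.name + table.primary[0]
  throughTo: "authorsid", // relatedTable.name + relatedTable.primary[0]
  fieldName: "authorsid",
}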

View File

@ -1,16 +1,28 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const internal = require("./internal")
const external = require("./external")
const csvParser = require("../../../utilities/csvParser")
const { isExternalTable } = require("../../../integrations/utils")
const {
getRowParams,
getTableParams,
generateTableID,
getDatasourceParams,
BudibaseInternalDB,
} = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions, getTable } = require("./utils")
const { getTable } = require("./utils")
function pickApi({ tableId, table }) {
if (table && !tableId) {
tableId = table._id
}
if (table && table.type === "external") {
return external
} else if (tableId && isExternalTable(tableId)) {
return external
}
return internal
}
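
pickApi decides between the internal and external controllers. Illustrative calls (the table ids are made up; the real check is isExternalTable):

pickApi({ table: { _id: "ta_abc123", type: "external" } }) // -> external
pickApi({ tableId: "ta_internal456" }) // -> internal, unless isExternalTable matches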
// covers both internal and external
exports.fetch = async function (ctx) {
const db = new CouchDB(ctx.appId)
@ -50,143 +62,23 @@ exports.find = async function (ctx) {
exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}
// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}
// saving a table is a complex operation involving many different steps; this
// has been broken out into a utility to make it more obvious and easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)
// make sure the type of a column doesn't change; the column has to be
// removed and re-added to change its type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}
// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}
// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}
tableToSave = await tableSaveFunctions.mid(tableToSave)
// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue
if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}
// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}
// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)
const table = ctx.request.body
const savedTable = await pickApi({ table }).save(ctx)
ctx.status = 200
ctx.message = `Table ${ctx.request.body.name} saved successfully.`
ctx.body = tableToSave
ctx.message = `Table ${table.name} saved successfully.`
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
ctx.body = savedTable
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)
// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})
// don't remove the table itself until very end
await db.remove(tableToDelete)
// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}
const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
ctx.status = 200
ctx.body = { message: `Table ${ctx.params.tableId} deleted.` }
ctx.body = { message: `Table ${tableId} deleted.` }
}
exports.validateCSVSchema = async function (ctx) {

View File

@ -0,0 +1,138 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const { getRowParams, generateTableID } = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions } = require("./utils")
exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}
// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}
// saving a table is a complex operation involving many different steps; this
// has been broken out into a utility to make it more obvious and easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)
// make sure the type of a column doesn't change; the column has to be
// removed and re-added to change its type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}
// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}
// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}
tableToSave = await tableSaveFunctions.mid(tableToSave)
// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue
if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}
// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}
// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
return tableToSave
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)
// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})
// don't remove the table itself until the very end
await db.remove(tableToDelete)
// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}
return tableToDelete
}

View File

@ -315,4 +315,24 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => {
}
}
exports.generateForeignKey = (column, relatedTable) => {
return `fk_${relatedTable.name}_${column.fieldName}`
}
exports.generateJunctionTableName = (column, table, relatedTable) => {
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}
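// A hedged illustration, substituting hypothetical tables/columns straight
// into the templates above:
//   generateForeignKey({ fieldName: "personid" }, { name: "persons" })
//     -> "fk_persons_personid"
//   generateJunctionTableName(
//     { name: "tasks", fieldName: "taskid" },
//     { name: "persons" },
//     { name: "tasks" }
//   ) -> "jt_persons_tasks_tasks_taskid"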
exports.foreignKeyStructure = (keyName, meta = null) => {
const structure = {
type: FieldTypes.NUMBER,
constraints: {},
name: keyName,
}
if (meta) {
structure.meta = meta
}
return structure
}
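// e.g. (hedged, pairing with the naming helpers above):
//   foreignKeyStructure("fk_persons_personid", {
//     toTable: "persons",
//     toKey: "personid",
//   })
//   -> {
//        type: "number",
//        constraints: {},
//        name: "fk_persons_personid",
//        meta: { toTable: "persons", toKey: "personid" },
//      }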
exports.TableSaveFunctions = TableSaveFunctions

View File

@ -26,8 +26,8 @@ describe("/datasources", () => {
.expect('Content-Type', /json/)
.expect(200)
expect(res.res.statusMessage).toEqual("Datasource saved successfully.")
expect(res.body.name).toEqual("Test")
expect(res.body.datasource.name).toEqual("Test")
expect(res.body.errors).toBeUndefined()
})
})

View File

@ -1,7 +1,6 @@
// mock out postgres for this
jest.mock("pg")
const { findLastKey } = require("lodash/fp")
const setup = require("./utilities")
const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
const { basicQuery, basicDatasource } = setup.structures

View File

@ -62,6 +62,9 @@ exports.DataSourceOperation = {
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
CREATE_TABLE: "CREATE_TABLE",
UPDATE_TABLE: "UPDATE_TABLE",
DELETE_TABLE: "DELETE_TABLE",
}
exports.SortDirection = {
@ -152,5 +155,9 @@ exports.MetadataTypes = {
AUTOMATION_TEST_HISTORY: "automationTestHistory",
}
exports.BuildSchemaErrors = {
NO_KEY: "no_key",
}
// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = ObjectStoreBuckets

View File

@ -17,6 +17,11 @@ export interface FieldSchema {
autocolumn?: boolean
throughFrom?: string
throughTo?: string
main?: boolean
meta?: {
toTable: string
toKey: string
}
constraints?: {
type?: string
email?: boolean
@ -36,11 +41,12 @@ export interface TableSchema {
export interface Table extends Base {
type?: string
views?: {}
name?: string
name: string
primary?: string[]
schema: TableSchema
primaryDisplay?: string
sourceId?: string
constrained?: string[]
}
export interface Row extends Base {

View File

@ -5,6 +5,9 @@ export enum Operation {
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}
export enum SortDirection {
@ -142,8 +145,10 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
table?: Table
meta?: {
table?: Table
tables?: Record<string, Table>
}
extra?: {
idFilter?: SearchFilters

View File

@ -0,0 +1,8 @@
import { Table } from "../../definitions/common"
export interface DatasourcePlus {
tables: Record<string, Table>
schemaErrors: Record<string, string>
buildSchema(datasourceId: string, entities: Record<string, Table>): any
}
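// A minimal sketch of an implementer, assuming a hypothetical in-memory
// source - the real MySQL/Postgres integrations populate tables/schemaErrors
// when buildSchema introspects the datasource:
class InMemoryDatasource implements DatasourcePlus {
tables: Record<string, Table> = {}
schemaErrors: Record<string, string> = {}
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
// no external source to introspect - keep whatever tables were passed in
this.tables = { ...entities }
}
}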

View File

@ -1,19 +1,24 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
import {
QueryJson,
SearchFilters,
QueryOptions,
SortDirection,
Operation,
QueryJson,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
const BASE_LIMIT = 5000
type KnexQuery = Knex.QueryBuilder | Knex
function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
if (Array.isArray(value)) {
body[key] = JSON.stringify(value)
}
if (typeof value !== "string") {
continue
}
@ -243,23 +248,14 @@ function buildDelete(
}
}
class SqlQueryBuilder {
private readonly sqlClient: string
class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number
// pass through client to get flavour of SQL
constructor(client: string, limit: number = BASE_LIMIT) {
this.sqlClient = client
super(client)
this.limit = limit
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}
/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@ -267,7 +263,8 @@ class SqlQueryBuilder {
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}) {
const client = knex({ client: this.sqlClient })
const sqlClient = this.getSqlClient()
const client = knex({ client: sqlClient })
let query
switch (this._operation(json)) {
case Operation.CREATE:
@ -282,6 +279,8 @@ class SqlQueryBuilder {
case Operation.DELETE:
query = buildDelete(client, json, opts)
break
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw `Operation type is not supported by SQL query builder`
}
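// hedged note: the table cases return early because _tableQuery yields knex
// schema-builder output - toSQL() on a schema builder returns an array of
// statements rather than a single { sql, bindings } object, which is why the
// MySQL/Postgres integrations below check Array.isArray on the query input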

View File

@ -0,0 +1,167 @@
import { Knex, knex } from "knex"
import { Table } from "../../definitions/common"
import { Operation, QueryJson } from "../../definitions/datasource"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
const { FieldTypes, RelationshipTypes } = require("../../constants")
function generateSchema(
schema: CreateTableBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: null | Table = null
) {
let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema)
// all columns in a junction table will be meta
let metaCols = columns.filter(col => col.meta)
let isJunction = metaCols.length === columns.length
// can't change the primary key once it's set, for now
if (primaryKey && !oldTable && !isJunction) {
schema.increments(primaryKey).primary()
} else if (!oldTable && isJunction) {
schema.primary(metaCols.map(col => col.name))
}
// check if any columns need to be added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
if (
(oldColumn && oldColumn.type === column.type) ||
(primaryKey === key && !isJunction)
) {
continue
}
switch (column.type) {
case FieldTypes.STRING:
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
schema.string(key)
break
case FieldTypes.NUMBER:
// if meta is specified then this is a junction table entry
if (column.meta && column.meta.toKey && column.meta.toTable) {
const { toKey, toTable } = column.meta
schema.integer(key).unsigned()
schema.foreign(key).references(`${toTable}.${toKey}`)
} else if (foreignKeys.indexOf(key) === -1) {
schema.float(key)
}
break
case FieldTypes.BOOLEAN:
schema.boolean(key)
break
case FieldTypes.DATETIME:
schema.datetime(key)
break
case FieldTypes.ARRAY:
schema.json(key)
break
case FieldTypes.LINK:
// this side of the relationship doesn't need any SQL work
if (
column.relationshipType !== RelationshipTypes.MANY_TO_ONE &&
column.relationshipType !== RelationshipTypes.MANY_TO_MANY
) {
if (!column.foreignKey || !column.tableId) {
throw "Invalid relationship schema"
}
const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore
const relatedTable = tables[tableName]
if (!relatedTable) {
throw "Referenced table doesn't exist"
}
schema.integer(column.foreignKey).unsigned()
schema
.foreign(column.foreignKey)
.references(`${tableName}.${relatedTable.primary[0]}`)
}
break
}
}
// need to check if any columns have been deleted
if (oldTable) {
const deletedColumns = Object.entries(oldTable.schema)
.filter(
([key, schema]) =>
schema.type !== FieldTypes.LINK && table.schema[key] == null
)
.map(([key]) => key)
deletedColumns.forEach(key => {
if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
schema.dropForeign(key)
}
schema.dropColumn(key)
})
}
return schema
}
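// A hedged example of the DDL this produces for a simple new table (names are
// hypothetical) - knex's postgres client would emit roughly:
//   generateSchema(schema, {
//     name: "persons",
//     primary: ["personid"],
//     schema: { name: { type: "string" }, age: { type: "number" } },
//   }, {})
//   -> create table "persons" ("personid" serial primary key,
//      "name" varchar(255), "age" real)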
function buildCreateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
generateSchema(schema, table, tables)
})
}
function buildUpdateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}
function buildDeleteTable(
knex: Knex,
table: Table,
): SchemaBuilder {
return knex.schema.dropTable(table.name)
}
class SqlTableQueryBuilder {
private readonly sqlClient: string
// pass through client to get flavour of SQL
constructor(client: string) {
this.sqlClient = client
}
getSqlClient(): string {
return this.sqlClient
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}
_tableQuery(json: QueryJson): any {
const client = knex({ client: this.sqlClient })
let query
if (!json.table || !json.meta || !json.meta.tables) {
throw "Cannot execute without table being specified"
}
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
throw "Must specify old table for update"
}
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
break
default:
throw "Table operation is of unknown type"
}
return query.toSQL()
}
}
export default SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder
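// A hedged usage sketch (the endpoint/table shapes here are illustrative):
//   const builder = new SqlTableQueryBuilder("pg")
//   const statements = builder._tableQuery({
//     endpoint: { datasourceId: "ds_id", entityId: "persons", operation: Operation.CREATE_TABLE },
//     table: personsTable,
//     meta: { tables: { persons: personsTable } },
//   })
//   // statements is the array form that knex's schema builder toSQL() returns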

View File

@ -0,0 +1,19 @@
import { QueryJson } from "../../definitions/datasource"
import { Datasource } from "../../definitions/common"
module DatasourceUtils {
const { integrations } = require("../index")
export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
}
module.exports.makeExternalQuery = makeExternalQuery
}
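// e.g. (hedged - the datasource shape is illustrative): the integration class
// is looked up via integrations[datasource.source] and its query() does the work:
//   const rows = await makeExternalQuery(
//     { source: SourceNames.POSTGRES, config: { host: "localhost" } },
//     queryJson
//   )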

View File

@ -8,6 +8,7 @@ import {
} from "../definitions/datasource"
import { Table, TableSchema } from "../definitions/common"
import { getSqlQuery } from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
module MySQLModule {
const mysql = require("mysql2")
@ -15,7 +16,7 @@ module MySQLModule {
const {
buildExternalTableId,
convertType,
copyExistingPropsOver,
finaliseExternalTables,
} = require("./utils")
const { FieldTypes } = require("../constants")
@ -131,9 +132,11 @@ module MySQLModule {
})
}
class MySQLIntegration extends Sql {
class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig
private readonly client: any
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) {
super("mysql")
@ -185,10 +188,6 @@ module MySQLModule {
constraints,
}
}
// for now just default to first column
if (primaryKeys.length === 0) {
primaryKeys.push(descResp[0].Field)
}
if (!tables[tableName]) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
@ -197,12 +196,12 @@ module MySQLModule {
schema,
}
}
copyExistingPropsOver(tableName, tables, entities)
}
this.client.end()
this.tables = tables
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
}
async create(query: SqlQuery | string) {
@ -263,6 +262,13 @@ module MySQLModule {
const operation = this._operation(json)
this.client.connect()
const input = this._query(json, { disableReturning: true })
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query, false))
}
return responses
}
let row
// need to manage RETURNING manually, a feature MySQL doesn't support
if (operation === Operation.DELETE) {

View File

@ -7,6 +7,7 @@ import {
} from "../definitions/datasource"
import { Table } from "../definitions/common"
import { getSqlQuery } from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
module PostgresModule {
const { Pool } = require("pg")
@ -15,7 +16,7 @@ module PostgresModule {
const {
buildExternalTableId,
convertType,
copyExistingPropsOver,
finaliseExternalTables,
} = require("./utils")
const { escapeDangerousCharacters } = require("../utilities")
@ -138,10 +139,12 @@ module PostgresModule {
}
}
class PostgresIntegration extends Sql {
class PostgresIntegration extends Sql implements DatasourcePlus {
static pool: any
private readonly client: any
private readonly config: PostgresConfig
public tables: Record<string, Table> = {}
public schemaErrors: Record<string, string> = {}
COLUMNS_SQL!: string
@ -223,7 +226,7 @@ module PostgresModule {
if (!tables[tableName] || !tables[tableName].schema) {
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: tableKeys[tableName] || ["id"],
primary: tableKeys[tableName] || [],
name: tableName,
schema: {},
}
@ -248,10 +251,9 @@ module PostgresModule {
}
}
for (let tableName of Object.keys(tables)) {
copyExistingPropsOver(tableName, tables, entities)
}
this.tables = tables
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
}
async create(query: SqlQuery | string) {
@ -277,8 +279,16 @@ module PostgresModule {
async query(json: QueryJson) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query))
}
return responses
} else {
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
}

View File

@ -1,8 +1,8 @@
import { SqlQuery } from "../definitions/datasource"
import { Datasource } from "../definitions/common"
import { Datasource, Table } from "../definitions/common"
import { SourceNames } from "../definitions/datasource"
const { DocumentTypes, SEPARATOR } = require("../db/utils")
const { FieldTypes } = require("../constants")
const { FieldTypes, BuildSchemaErrors } = require("../constants")
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
@ -102,14 +102,14 @@ export function isIsoDateString(str: string) {
}
// add the existing relationships from the entities if they exist, to prevent them from being overridden
export function copyExistingPropsOver(
function copyExistingPropsOver(
tableName: string,
tables: { [key: string]: any },
table: Table,
entities: { [key: string]: any }
) {
if (entities && entities[tableName]) {
if (entities[tableName].primaryDisplay) {
tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
table.primaryDisplay = entities[tableName].primaryDisplay
}
const existingTableSchema = entities[tableName].schema
for (let key in existingTableSchema) {
@ -117,8 +117,27 @@ export function copyExistingPropsOver(
continue
}
if (existingTableSchema[key].type === "link") {
tables[tableName].schema[key] = existingTableSchema[key]
table.schema[key] = existingTableSchema[key]
}
}
}
return table
}
export function finaliseExternalTables(
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
const finalTables: { [key: string]: any } = {}
const errors: { [key: string]: string } = {}
for (let [name, table] of Object.entries(tables)) {
// make sure every table has a key
if (table.primary == null || table.primary.length === 0) {
errors[name] = BuildSchemaErrors.NO_KEY
continue
}
// make sure all previous props have been added back
finalTables[name] = copyExistingPropsOver(name, table, entities)
}
return { tables: finalTables, errors }
}
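// A hedged example: an introspected table with no primary key is dropped from
// the final set and surfaced as a build error (BuildSchemaErrors.NO_KEY is
// "no_key" per the constants):
//   finaliseExternalTables(
//     { persons: { name: "persons", primary: [], schema: {} } },
//     {}
//   )
//   -> { tables: {}, errors: { persons: "no_key" } }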

View File

@ -311,7 +311,8 @@ class TestConfiguration {
async createDatasource(config = null) {
config = config || basicDatasource()
this.datasource = await this._req(config, null, controllers.datasource.save)
const response = await this._req(config, null, controllers.datasource.save)
this.datasource = response.datasource
return this.datasource
}

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View File

@ -31,7 +31,7 @@ const HELPERS = [
}
// null/undefined values produce bad results
if (value == null || typeof value !== "string") {
return value || ""
return value == null ? "" : value
}
if (value && value.string) {
value = value.string
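// hedged illustration of the change above: `value || ""` coerced legitimate
// falsy values away, while the explicit null check preserves them:
//   old: (0 || "") -> "" - the zero is lost
//   new: 0 == null is false, so 0 is returned and renders as "0"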

View File

@ -6,6 +6,7 @@ const manifest = require("../manifest.json")
const hbsInstance = handlebars.create()
registerAll(hbsInstance)
const hbsInstanceNoHelpers = handlebars.create()
/**
* utility function to check if the object is valid
@ -24,17 +25,30 @@ function testObject(object) {
* @param {object|array} object The input structure which is to be recursed, it is important to note that
* if the structure contains any cycles then this will fail.
* @param {object} context The context that handlebars should fill data from.
* @param {object} opts optional - specify some options for processing.
* @returns {Promise<object|array>} The structure input, as fully updated as possible.
*/
module.exports.processObject = async (object, context) => {
module.exports.processObject = async (
object,
context,
opts = { noHelpers: false }
) => {
testObject(object)
for (let key of Object.keys(object || {})) {
if (object[key] != null) {
let val = object[key]
if (typeof val === "string") {
object[key] = await module.exports.processString(object[key], context)
object[key] = await module.exports.processString(
object[key],
context,
opts
)
} else if (typeof val === "object") {
object[key] = await module.exports.processObject(object[key], context)
object[key] = await module.exports.processObject(
object[key],
context,
opts
)
}
}
}
@ -46,11 +60,16 @@ module.exports.processObject = async (object, context) => {
* then nothing will occur.
* @param {string} string The template string which is filled from the context object.
* @param {object} context An object of information which will be used to enrich the string.
* @param {object} opts optional - specify some options for processing.
* @returns {Promise<string>} The enriched string, all templates should have been replaced if they can be.
*/
module.exports.processString = async (string, context) => {
module.exports.processString = async (
string,
context,
opts = { noHelpers: false }
) => {
// TODO: carry out any async calls before running the synchronous call
return module.exports.processStringSync(string, context)
return module.exports.processStringSync(string, context, opts)
}
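// e.g. (hedged): with helpers disabled the string is compiled against the
// helper-free handlebars instance:
//   await processString("hello {{ name }}", { name: "world" })
//     -> "hello world"
//   await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
//     -> "Invalid Binding" (the helper isn't registered on that instance)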
/**
@ -59,16 +78,21 @@ module.exports.processString = async (string, context) => {
* @param {object|array} object The input structure which is to be recursed, it is important to note that
* if the structure contains any cycles then this will fail.
* @param {object} context The context that handlebars should fill data from.
* @param {object} opts optional - specify some options for processing.
* @returns {object|array} The structure input, as fully updated as possible.
*/
module.exports.processObjectSync = (object, context) => {
module.exports.processObjectSync = (
object,
context,
opts = { noHelpers: false }
) => {
testObject(object)
for (let key of Object.keys(object || {})) {
let val = object[key]
if (typeof val === "string") {
object[key] = module.exports.processStringSync(object[key], context)
object[key] = module.exports.processStringSync(object[key], context, opts)
} else if (typeof val === "object") {
object[key] = module.exports.processObjectSync(object[key], context)
object[key] = module.exports.processObjectSync(object[key], context, opts)
}
}
return object
@ -79,9 +103,14 @@ module.exports.processObjectSync = (object, context) => {
* then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call.
* @param {string} string The template string which is filled from the context object.
* @param {object} context An object of information which will be used to enrich the string.
* @param {object} opts optional - specify some options for processing.
* @returns {string} The enriched string, all templates should have been replaced if they can be.
*/
module.exports.processStringSync = (string, context) => {
module.exports.processStringSync = (
string,
context,
opts = { noHelpers: false }
) => {
if (!exports.isValid(string)) {
return string
}
@ -91,9 +120,13 @@ module.exports.processStringSync = (string, context) => {
throw "Cannot process non-string types."
}
try {
string = processors.preprocess(string)
const noHelpers = opts && opts.noHelpers
// finalising adds a helper, which can't be done when helpers are disabled
const shouldFinalise = !noHelpers
string = processors.preprocess(string, shouldFinalise)
// this does not throw an error when the template can't be fulfilled - it has to be checked and corrected beforehand
const template = hbsInstance.compile(string, {
const instance = noHelpers ? hbsInstanceNoHelpers : hbsInstance
const template = instance.compile(string, {
strict: false,
})
return processors.postprocess(
@ -119,9 +152,10 @@ module.exports.makePropSafe = property => {
/**
* Checks whether or not a template string contains totally valid syntax (simply tries running it)
* @param string The string to test for valid syntax - this may contain no templates and will be considered valid.
* @param opts optional - specify some options for processing.
* @returns {boolean} Whether or not the input string is valid.
*/
module.exports.isValid = string => {
module.exports.isValid = (string, opts = { noHelpers: false }) => {
const validCases = [
"string",
"number",
@ -135,7 +169,8 @@ module.exports.isValid = string => {
// don't really need a real context to check if it's valid
const context = {}
try {
hbsInstance.compile(processors.preprocess(string, false))(context)
const instance = opts && opts.noHelpers ? hbsInstanceNoHelpers : hbsInstance
instance.compile(processors.preprocess(string, false))(context)
return true
} catch (err) {
const msg = err && err.message ? err.message : err

View File

@ -125,6 +125,18 @@ describe("check the utility functions", () => {
})
})
describe("check falsy values", () => {
it("should get a zero out when context contains it", async () => {
const output = await processString("{{ number }}", { number: 0 })
expect(output).toEqual("0")
})
it("should get false out when context contains it", async () => {
const output = await processString("{{ bool }}", { bool: false })
expect(output).toEqual("false")
})
})
describe("check manifest", () => {
it("should be able to retrieve the manifest", () => {
const manifest = getManifest()

View File

@ -11,6 +11,15 @@ describe("test the custom helpers we have applied", () => {
})
})
describe("test that it can run without helpers", () => {
it("should be able to run without helpers", async () => {
const output = await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
const valid = await processString("{{ avg 1 1 1 }}", {})
expect(valid).toBe("1")
expect(output).toBe("Invalid Binding")
})
})
describe("test the math helpers", () => {
it("should be able to produce an absolute", async () => {
const output = await processString("{{abs a}}", {
@ -267,6 +276,7 @@ describe("test the comparison helpers", () => {
)
expect(output).toBe("Success")
}
it("should allow use of the lt helper", async () => {
await compare("lt", 10, 15)
})

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "0.9.173-alpha.3",
"version": "0.9.176-alpha.3",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@ -29,8 +29,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.173-alpha.3",
"@budibase/string-templates": "^0.9.173-alpha.3",
"@budibase/auth": "^0.9.176-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0",