Merge branch 'develop' of github.com:Budibase/budibase into feature/mssql-plus

This commit is contained in:
mike12345567 2021-11-05 10:58:04 +00:00
commit 515ed75680
53 changed files with 1928 additions and 358 deletions

.gitignore vendored
View File

@ -3,6 +3,7 @@ builder/*
.temp/
packages/server/runtime_apps/
.idea/
bb-airgapped.tar.gz
# Logs
logs

View File

@ -0,0 +1,51 @@
const fs = require("fs")
const { execSync } = require("child_process")
const path = require("path")
const IMAGES = {
worker: "budibase/worker",
apps: "budibase/apps",
proxy: "envoyproxy/envoy:v1.16-latest",
minio: "minio/minio",
couch: "ibmcom/couchdb3",
curl: "curlimages/curl",
redis: "redis",
watchtower: "containrrr/watchtower"
}
const FILES = {
COMPOSE: "docker-compose.yaml",
ENVOY: "envoy.yaml",
PROPERTIES: "hosting.properties"
}
const OUTPUT_DIR = path.join(__dirname, "../", "bb-airgapped")
function copyFile(file) {
fs.copyFileSync(
path.join(__dirname, "../", "../", file),
path.join(OUTPUT_DIR, file)
)
}
// create output dir
console.log(`Creating ${OUTPUT_DIR} for build..`)
fs.rmdirSync(OUTPUT_DIR, { recursive: true })
fs.mkdirSync(OUTPUT_DIR)
// package images into tar files
for (let image in IMAGES) {
console.log(`Creating tar for ${image}..`)
execSync(`docker save ${IMAGES[image]} -o ${OUTPUT_DIR}/${image}.tar`)
}
// copy config files
copyFile(FILES.COMPOSE)
copyFile(FILES.ENVOY)
copyFile(FILES.PROPERTIES)
// compress
execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
// clean up
fs.rmdirSync(OUTPUT_DIR, { recursive: true })

View File

@ -1,5 +1,5 @@
{
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -46,6 +46,7 @@
"build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"release:helm": "./scripts/release_helm_chart.sh",
"env:multi:enable": "lerna run env:multi:enable",
"env:multi:disable": "lerna run env:multi:disable",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",

View File

@ -9,6 +9,8 @@ const { createASession } = require("../../security/sessions")
const { getTenantId } = require("../../tenancy")
const INVALID_ERR = "Invalid Credentials"
const SSO_NO_PASSWORD = "SSO user does not have a password set"
const EXPIRED = "This account has expired. Please reset your password"
exports.options = {
passReqToCallback: true,
@ -36,6 +38,19 @@ exports.authenticate = async function (ctx, email, password, done) {
return authError(done, INVALID_ERR)
}
// check that the user has a stored password before proceeding
if (!dbUser.password) {
if (
(dbUser.account && dbUser.account.authType === "sso") || // root account sso
dbUser.thirdPartyProfile // internal sso
) {
return authError(done, SSO_NO_PASSWORD)
}
console.error("Non SSO usser has no password set", dbUser)
return authError(done, EXPIRED)
}
// authenticate
if (await compare(password, dbUser.password)) {
const sessionId = newid()

View File

@ -181,8 +181,8 @@ exports.saveUser = async (
// check budibase users in other tenants
if (env.MULTI_TENANCY) {
dbUser = await getTenantUser(email)
if (dbUser != null && dbUser.tenantId !== tenantId) {
const tenantUser = await getTenantUser(email)
if (tenantUser != null && tenantUser.tenantId !== tenantId) {
throw `Email address ${email} already in use.`
}
}

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@ -65,10 +65,10 @@
}
},
"dependencies": {
"@budibase/bbui": "^0.9.173-alpha.4",
"@budibase/client": "^0.9.173-alpha.4",
"@budibase/bbui": "^0.9.176-alpha.3",
"@budibase/client": "^0.9.176-alpha.3",
"@budibase/colorpicker": "1.1.2",
"@budibase/string-templates": "^0.9.173-alpha.4",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",

View File

@ -4,6 +4,7 @@
import CreateRowButton from "./buttons/CreateRowButton.svelte"
import CreateColumnButton from "./buttons/CreateColumnButton.svelte"
import CreateViewButton from "./buttons/CreateViewButton.svelte"
import ExistingRelationshipButton from "./buttons/ExistingRelationshipButton.svelte"
import ExportButton from "./buttons/ExportButton.svelte"
import EditRolesButton from "./buttons/EditRolesButton.svelte"
import ManageAccessButton from "./buttons/ManageAccessButton.svelte"
@ -98,9 +99,7 @@
on:updatecolumns={onUpdateColumns}
on:updaterows={onUpdateRows}
>
{#if isInternal}
<CreateColumnButton on:updatecolumns={onUpdateColumns} />
{/if}
<CreateColumnButton on:updatecolumns={onUpdateColumns} />
{#if schema && Object.keys(schema).length > 0}
{#if !isUsersTable}
<CreateRowButton
@ -116,6 +115,12 @@
{#if isUsersTable}
<EditRolesButton />
{/if}
{#if !isInternal}
<ExistingRelationshipButton
table={$tables.selected}
on:updatecolumns={onUpdateColumns}
/>
{/if}
<HideAutocolumnButton bind:hideAutocolumns />
<!-- always have the export last -->
<ExportButton view={$tables.selected?._id} />

View File

@ -16,8 +16,8 @@
export let value = defaultValue || (meta.type === "boolean" ? false : "")
export let readonly
$: type = meta.type
$: label = capitalise(meta.name)
$: type = meta?.type
$: label = meta.name ? capitalise(meta.name) : ""
</script>
{#if type === "options"}

View File

@ -129,7 +129,7 @@
bind:selectedRows
allowSelectRows={allowEditing && !isUsersTable}
allowEditRows={allowEditing}
allowEditColumns={allowEditing && isInternal}
allowEditColumns={allowEditing}
showAutoColumns={!hideAutocolumns}
on:editcolumn={e => editColumn(e.detail)}
on:editrow={e => editRow(e.detail)}

View File

@ -0,0 +1,54 @@
<script>
import { ActionButton, Modal, notifications } from "@budibase/bbui"
import CreateEditRelationship from "../../Datasources/CreateEditRelationship.svelte"
import { datasources, tables } from "../../../../stores/backend"
import { createEventDispatcher } from "svelte"
export let table
const dispatch = createEventDispatcher()
$: plusTables = datasource?.plus
? Object.values(datasource?.entities || {})
: []
$: datasource = $datasources.list.find(
source => source._id === table?.sourceId
)
let modal
async function saveRelationship() {
try {
// Create datasource
await datasources.save(datasource)
notifications.success(`Relationship information saved.`)
const tableList = await tables.fetch()
await tables.select(tableList.find(tbl => tbl._id === table._id))
dispatch("updatecolumns")
} catch (err) {
notifications.error(`Error saving relationship info: ${err}`)
}
}
</script>
{#if table.sourceId}
<div>
<ActionButton
icon="DataCorrelated"
primary
size="S"
quiet
on:click={modal.show}
>
Define existing relationship
</ActionButton>
</div>
<Modal bind:this={modal}>
<CreateEditRelationship
{datasource}
save={saveRelationship}
close={modal.hide}
{plusTables}
selectedFromTable={table}
/>
</Modal>
{/if}

View File

@ -31,6 +31,9 @@
const AUTO_TYPE = "auto"
const FORMULA_TYPE = FIELDS.FORMULA.type
const LINK_TYPE = FIELDS.LINK.type
const STRING_TYPE = FIELDS.STRING.type
const NUMBER_TYPE = FIELDS.NUMBER.type
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
const { hide } = getContext(Context.Modal)
@ -55,8 +58,9 @@
let confirmDeleteDialog
let deletion
$: checkConstraints(field)
$: tableOptions = $tables.list.filter(
table => table._id !== $tables.draft._id && table.type !== "external"
opt => opt._id !== $tables.draft._id && opt.type === table.type
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =
@ -83,6 +87,7 @@
$: canBeRequired =
field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
$: relationshipOptions = getRelationshipOptions(field)
$: external = table.type === "external"
async function saveColumn() {
if (field.type === AUTO_TYPE) {
@ -193,6 +198,45 @@
},
]
}
function getAllowedTypes() {
if (!external) {
return [
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]
} else {
return [
FIELDS.STRING,
FIELDS.LONGFORM,
FIELDS.OPTIONS,
FIELDS.DATETIME,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
FIELDS.ARRAY,
FIELDS.FORMULA,
FIELDS.LINK,
]
}
}
function checkConstraints(fieldToCheck) {
// most types need this, just make sure it's always present
if (fieldToCheck && !fieldToCheck.constraints) {
fieldToCheck.constraints = {}
}
// some string types may have been built by server, may not always have constraints
if (fieldToCheck.type === STRING_TYPE && !fieldToCheck.constraints.length) {
fieldToCheck.constraints.length = {}
}
// some number types made server-side will be missing constraints
if (
fieldToCheck.type === NUMBER_TYPE &&
!fieldToCheck.constraints.numericality
) {
fieldToCheck.constraints.numericality = {}
}
}
</script>
<ModalContent
@ -215,10 +259,7 @@
label="Type"
bind:value={field.type}
on:change={handleTypeChange}
options={[
...Object.values(fieldDefinitions),
{ name: "Auto Column", type: AUTO_TYPE },
]}
options={getAllowedTypes()}
getOptionLabel={field => field.name}
getOptionValue={field => field.type}
/>
@ -245,7 +286,7 @@
</div>
{/if}
{#if canBeSearched}
{#if canBeSearched && !external}
<div>
<Label grey small>Search Indexes</Label>
<Toggle

View File

@ -18,10 +18,19 @@
export let fromRelationship = {}
export let toRelationship = {}
export let close
export let selectedFromTable
let originalFromName = fromRelationship.name,
originalToName = toRelationship.name
if (fromRelationship && !fromRelationship.relationshipType) {
fromRelationship.relationshipType = RelationshipTypes.MANY_TO_ONE
}
if (toRelationship && selectedFromTable) {
toRelationship.tableId = selectedFromTable._id
}
function inSchema(table, prop, ogName) {
if (!table || !prop || prop === ogName) {
return false
@ -114,6 +123,7 @@
},
]
$: updateRelationshipType(fromRelationship?.relationshipType)
$: tableChanged(fromTable, toTable)
function updateRelationshipType(fromType) {
if (fromType === RelationshipTypes.MANY_TO_MANY) {
@ -205,7 +215,6 @@
originalToName = toRelationship.name
originalFromName = fromRelationship.name
await save()
await tables.fetch()
}
async function deleteRelationship() {
@ -215,10 +224,26 @@
await tables.fetch()
close()
}
function tableChanged(fromTbl, toTbl) {
fromRelationship.name = toTbl?.name || ""
errors.fromCol = ""
toRelationship.name = fromTbl?.name || ""
errors.toCol = ""
if (toTbl || fromTbl) {
checkForErrors(
fromTable,
toTable,
through,
fromRelationship,
toRelationship
)
}
}
</script>
<ModalContent
title="Create Relationship"
title="Define Relationship"
confirmText="Save"
onConfirm={saveRelationship}
disabled={!valid}
@ -234,6 +259,7 @@
<Select
label="Select from table"
options={tableOptions}
disabled={!!selectedFromTable}
on:change={() => ($touched.from = true)}
bind:error={errors.from}
bind:value={toRelationship.tableId}

View File

@ -1,7 +1,7 @@
<script>
import { goto } from "@roxi/routify"
import { allScreens, store } from "builderStore"
import { tables } from "stores/backend"
import { tables, datasources } from "stores/backend"
import {
ActionMenu,
Icon,
@ -40,7 +40,10 @@
store.actions.screens.delete(templateScreens)
await tables.fetch()
notifications.success("Table deleted")
if (wasSelectedTable._id === table._id) {
if (table.type === "external") {
await datasources.fetch()
}
if (wasSelectedTable && wasSelectedTable._id === table._id) {
$goto("./table")
}
}
@ -64,9 +67,7 @@
<Icon s hoverable name="MoreSmallList" />
</div>
<MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
{#if !external}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
{/if}
<MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
</ActionMenu>
<Modal bind:this={editorModal}>

View File

@ -32,6 +32,16 @@
.component("@budibase/standard-components/screenslot")
.instanceName("Content Placeholder")
.json()
// Messages that can be sent from the iframe preview to the builder
// Budibase events and initialisation events
const MessageTypes = {
IFRAME_LOADED: "iframe-loaded",
READY: "ready",
ERROR: "error",
BUDIBASE: "type",
KEYDOWN: "keydown"
}
// Construct iframe template
$: template = iframeTemplate.replace(
@ -80,46 +90,44 @@
// Refresh the preview when required
$: refreshContent(strippedJson)
onMount(() => {
// Initialise the app when mounted
iframe.contentWindow.addEventListener(
"ready",
() => {
function receiveMessage(message) {
const handlers = {
[MessageTypes.READY]: () => {
// Initialise the app when mounted
// Display preview immediately if the intelligent loading feature
// is not supported
if (!loading) return
if (!$store.clientFeatures.intelligentLoading) {
loading = false
}
refreshContent(strippedJson)
},
{ once: true }
)
// Catch any app errors
iframe.contentWindow.addEventListener(
"error",
event => {
[MessageTypes.ERROR]: event => {
// Catch any app errors
loading = false
error = event.detail || "An unknown error occurred"
error = event.error || "An unknown error occurred"
},
{ once: true }
)
[MessageTypes.KEYDOWN]: handleKeydownEvent
}
// Add listener for events sent by client library in preview
iframe.contentWindow.addEventListener("bb-event", handleBudibaseEvent)
iframe.contentWindow.addEventListener("keydown", handleKeydownEvent)
const messageHandler = handlers[message.data.type] || handleBudibaseEvent
messageHandler(message)
}
onMount(() => {
window.addEventListener("message", receiveMessage)
})
// Remove all iframe event listeners on component destroy
onDestroy(() => {
if (iframe.contentWindow) {
iframe.contentWindow.removeEventListener("bb-event", handleBudibaseEvent)
iframe.contentWindow.removeEventListener("keydown", handleKeydownEvent)
window.removeEventListener("message", receiveMessage) //
}
})
const handleBudibaseEvent = event => {
const { type, data } = event.detail
const { type, data } = event.data
if (type === "select-component" && data.id) {
store.actions.components.select({ _id: data.id })
} else if (type === "update-prop") {
@ -151,13 +159,14 @@
store.actions.components.paste(destination, data.mode)
}
} else {
console.warning(`Client sent unknown event type: ${type}`)
console.warn(`Client sent unknown event type: ${type}`)
}
}
const handleKeydownEvent = event => {
const { key } = event.data
if (
(event.key === "Delete" || event.key === "Backspace") &&
(key === "Delete" || key === "Backspace") &&
selectedComponentId &&
["input", "textarea"].indexOf(
iframe.contentWindow.document.activeElement?.tagName.toLowerCase()

View File

@ -54,7 +54,7 @@ export default `
if (!parsed) {
return
}
// Extract data from message
const {
selectedComponentId,
@ -84,17 +84,20 @@ export default `
if (window.loadBudibase) {
window.loadBudibase()
document.documentElement.classList.add("loaded")
window.dispatchEvent(new Event("iframe-loaded"))
window.parent.postMessage({ type: "iframe-loaded" })
} else {
throw "The client library couldn't be loaded"
}
} catch (error) {
window.dispatchEvent(new CustomEvent("error", { detail: error }))
window.parent.postMessage({ type: "error", error })
}
}
window.addEventListener("message", receiveMessage)
window.dispatchEvent(new Event("ready"))
window.addEventListener("keydown", evt => {
window.parent.postMessage({ type: "keydown", key: event.key })
})
window.parent.postMessage({ type: "ready" })
</script>
</head>
<body/>

View File

@ -8,17 +8,18 @@
Layout,
Modal,
InlineAlert,
ActionButton,
} from "@budibase/bbui"
import { datasources, integrations, queries, tables } from "stores/backend"
import { notifications } from "@budibase/bbui"
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
import CreateEditRelationship from "./CreateEditRelationship/CreateEditRelationship.svelte"
import DisplayColumnModal from "./modals/EditDisplayColumnsModal.svelte"
import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
import CreateExternalTableModal from "./modals/CreateExternalTableModal.svelte"
import ICONS from "components/backend/DatasourceNavigator/icons"
import { capitalise } from "helpers"
let relationshipModal
let displayColumnModal
let createExternalTableModal
let selectedFromRelationship, selectedToRelationship
$: datasource = $datasources.list.find(ds => ds._id === $datasources.selected)
@ -110,8 +111,8 @@
relationshipModal.show()
}
function openDisplayColumnModal() {
displayColumnModal.show()
function createNewTable() {
createExternalTableModal.show()
}
</script>
@ -126,8 +127,8 @@
/>
</Modal>
<Modal bind:this={displayColumnModal}>
<DisplayColumnModal {datasource} {plusTables} save={saveDatasource} />
<Modal bind:this={createExternalTableModal}>
<CreateExternalTableModal {datasource} />
</Modal>
{#if datasource && integration}
@ -163,15 +164,15 @@
<div class="query-header">
<Heading size="S">Tables</Heading>
<div class="table-buttons">
{#if plusTables && plusTables.length !== 0}
<Button primary on:click={openDisplayColumnModal}>
Update display columns
</Button>
{/if}
<div>
<Button primary on:click={updateDatasourceSchema}>
<ActionButton
size="S"
quiet
icon="DataRefresh"
on:click={updateDatasourceSchema}
>
Fetch tables from database
</Button>
</ActionButton>
</div>
</div>
</div>
@ -196,14 +197,23 @@
<p></p>
</div>
{/each}
<div class="add-table">
<Button cta on:click={createNewTable}>Create new table</Button>
</div>
</div>
{#if plusTables?.length !== 0}
<Divider />
<div class="query-header">
<Heading size="S">Relationships</Heading>
<Button primary on:click={() => openRelationshipModal()}
>Create relationship</Button
<ActionButton
icon="DataCorrelated"
primary
size="S"
quiet
on:click={openRelationshipModal}
>
Define existing relationship
</ActionButton>
</div>
<Body>
Tell budibase how your tables are related to get even more smart
@ -318,11 +328,14 @@
.table-buttons {
display: grid;
grid-gap: var(--spacing-l);
grid-template-columns: 1fr 1fr;
}
.table-buttons div {
grid-column-end: -1;
}
.add-table {
margin-top: var(--spacing-m);
}
</style>

View File

@ -0,0 +1,45 @@
<script>
import { ModalContent, Body, Input } from "@budibase/bbui"
import { tables, datasources } from "stores/backend"
import { goto } from "@roxi/routify"
export let datasource
let name = ""
$: valid = name && name.length > 0 && !datasource?.entities[name]
$: error =
name && datasource?.entities[name] ? "Table name already in use." : null
function buildDefaultTable(tableName, datasourceId) {
return {
name: tableName,
type: "external",
primary: ["id"],
sourceId: datasourceId,
schema: {
id: {
autocolumn: true,
type: "number",
},
},
}
}
async function saveTable() {
const table = await tables.save(buildDefaultTable(name, datasource._id))
await datasources.fetch()
$goto(`../../table/${table._id}`)
}
</script>
<ModalContent
title="Create new table"
confirmText="Create"
onConfirm={saveTable}
disabled={!valid}
>
<Body
>Provide a name for your new table; you can add columns once it is created.</Body
>
<Input label="Table Name" bind:error bind:value={name} />
</ModalContent>

View File

@ -1,43 +0,0 @@
<script>
import { ModalContent, Select, Body } from "@budibase/bbui"
import { tables } from "stores/backend"
export let datasource
export let plusTables
export let save
async function saveDisplayColumns() {
// be explicit about copying over
for (let table of plusTables) {
datasource.entities[table.name].primaryDisplay = table.primaryDisplay
}
save()
await tables.fetch()
}
function getColumnOptions(table) {
if (!table || !table.schema) {
return []
}
return Object.entries(table.schema)
.filter(field => field[1].type !== "link")
.map(([fieldName]) => fieldName)
}
</script>
<ModalContent
title="Edit display columns"
confirmText="Save"
onConfirm={saveDisplayColumns}
>
<Body
>Select the columns that will be shown when displaying relationships.</Body
>
{#each plusTables as table}
<Select
label={table.name}
options={getColumnOptions(table)}
bind:value={table.primaryDisplay}
/>
{/each}
</ModalContent>

View File

@ -44,7 +44,7 @@
}
} catch (err) {
console.error(err)
notifications.error("Invalid credentials")
notifications.error(err.message ? err.message : "Invalid Credentials")
}
}

View File

@ -11,6 +11,7 @@ export function createTablesStore() {
const tablesResponse = await api.get(`/api/tables`)
const tables = await tablesResponse.json()
update(state => ({ ...state, list: tables }))
return tables
}
async function select(table) {
@ -62,6 +63,9 @@ export function createTablesStore() {
const response = await api.post(`/api/tables`, updatedTable)
const savedTable = await response.json()
await fetch()
if (table.type === "external") {
await datasources.fetch()
}
await select(savedTable)
return savedTable
}

View File

@ -112,7 +112,7 @@ export function createAuthStore() {
if (response.status === 200) {
setUser(json.user)
} else {
throw "Invalid credentials"
throw new Error(json.message ? json.message : "Invalid credentials")
}
return json
},

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {

View File

@ -19,7 +19,7 @@ The object key is the name of the component, as exported by `index.js`.
- **bindable** - whether the component provides a bindable value or not
- **settings** - array of settings displayed in the builder
###Settings Definitions
### Settings Definitions
The `type` field in each setting is used by the builder to know which component to use to display
the setting, so it's important that this field is correct. The valid options are:
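Purely as an illustration of the shape described above (the setting type, key and label here are hypothetical, not taken from the component manifest), a single entry in the settings array might look like:
{
  "type": "text",
  "key": "placeholder",
  "label": "Placeholder"
}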

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,9 +19,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "^0.9.173-alpha.4",
"@budibase/bbui": "^0.9.176-alpha.3",
"@budibase/standard-components": "^0.9.139",
"@budibase/string-templates": "^0.9.173-alpha.4",
"@budibase/string-templates": "^0.9.176-alpha.3",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"

View File

@ -8,6 +8,12 @@
import { Modal, ModalContent, ActionButton } from "@budibase/bbui"
import { onDestroy } from "svelte"
const MessageTypes = {
NOTIFICATION: "notification",
CLOSE_SCREEN_MODAL: "close-screen-modal",
INVALIDATE_DATASOURCE: "invalidate-datasource",
}
let iframe
let listenersAttached = false
@ -21,32 +27,33 @@
notificationStore.actions.send(message, type, icon)
}
function receiveMessage(message) {
const handlers = {
[MessageTypes.NOTIFICATION]: () => {
proxyNotification(message.data)
},
[MessageTypes.CLOSE_SCREEN_MODAL]: peekStore.actions.hidePeek,
[MessageTypes.INVALIDATE_DATASOURCE]: () => {
invalidateDataSource(message.data)
},
}
const messageHandler = handlers[message.data.type]
if (messageHandler) {
messageHandler(message)
} else {
console.warning("Unknown event type", message?.data?.type)
}
}
const attachListeners = () => {
// Mirror datasource invalidation to keep the parent window up to date
iframe.contentWindow.addEventListener(
"invalidate-datasource",
invalidateDataSource
)
// Listen for a close event to close the screen peek
iframe.contentWindow.addEventListener(
"close-screen-modal",
peekStore.actions.hidePeek
)
// Proxy notifications back to the parent window instead of iframe
iframe.contentWindow.addEventListener("notification", proxyNotification)
window.addEventListener("message", receiveMessage)
}
const handleCancel = () => {
peekStore.actions.hidePeek()
iframe.contentWindow.removeEventListener(
"invalidate-datasource",
invalidateDataSource
)
iframe.contentWindow.removeEventListener(
"close-screen-modal",
peekStore.actions.hidePeek
)
iframe.contentWindow.removeEventListener("notification", proxyNotification)
window.removeEventListener("message", receiveMessage)
}
const handleFullscreen = () => {

View File

@ -4,11 +4,7 @@ import { findComponentById, findComponentPathById } from "../utils/components"
import { pingEndUser } from "../api"
const dispatchEvent = (type, data = {}) => {
window.dispatchEvent(
new CustomEvent("bb-event", {
detail: { type, data },
})
)
window.parent.postMessage({ type, data })
}
const createBuilderStore = () => {

View File

@ -26,11 +26,19 @@ const createNotificationStore = () => {
// If peeking, pass notifications back to parent window
if (get(routeStore).queryParams?.peek) {
window.dispatchEvent(
new CustomEvent("notification", {
detail: { message, type, icon },
})
)
window.parent.postMessage({
type: "notification",
detail: {
message,
type,
icon,
},
})
return
}

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@ -68,9 +68,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.173-alpha.4",
"@budibase/client": "^0.9.173-alpha.4",
"@budibase/string-templates": "^0.9.173-alpha.4",
"@budibase/auth": "^0.9.176-alpha.3",
"@budibase/client": "^0.9.176-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",

View File

@ -9,7 +9,7 @@ const {
} = require("../../db/utils")
const { BuildSchemaErrors } = require("../../constants")
const { integrations } = require("../../integrations")
const { makeExternalQuery } = require("./row/utils")
const { getDatasourceAndQuery } = require("./row/utils")
exports.fetch = async function (ctx) {
const database = new CouchDB(ctx.appId)
@ -138,7 +138,7 @@ exports.find = async function (ctx) {
exports.query = async function (ctx) {
const queryJson = ctx.request.body
try {
ctx.body = await makeExternalQuery(ctx.appId, queryJson)
ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson)
} catch (err) {
ctx.throw(400, err)
}
@ -152,6 +152,19 @@ const buildSchemaHelper = async datasource => {
await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables
// make sure they all have a display name selected
for (let entity of Object.values(datasource.entities)) {
if (entity.primaryDisplay) {
continue
}
const notAutoColumn = Object.values(entity.schema).find(
schema => !schema.autocolumn
)
if (notAutoColumn) {
entity.primaryDisplay = notAutoColumn.name
}
}
const errors = connector.schemaErrors
let error = null
if (errors && Object.keys(errors).length > 0) {

View File

@ -36,7 +36,7 @@ interface RunConfig {
}
module External {
const { makeExternalQuery } = require("./utils")
const { getDatasourceAndQuery } = require("./utils")
const {
DataSourceOperation,
FieldTypes,
@ -46,6 +46,7 @@ module External {
const { processObjectSync } = require("@budibase/string-templates")
const { cloneDeep } = require("lodash/fp")
const CouchDB = require("../../../db")
const { processFormulas } = require("../../../utilities/rowProcessor/utils")
function buildFilters(
id: string | undefined,
@ -225,7 +226,7 @@ module External {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if ((!row[key] && row[key] !== "") || newRow[key] || field.autocolumn) {
if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
continue
}
// if its an empty string then it means return the column to null (if possible)
@ -361,7 +362,7 @@ module External {
relationships
)
}
return Object.values(finalRows)
return processFormulas(table, Object.values(finalRows))
}
/**
@ -428,7 +429,7 @@ module External {
const tableId = isMany ? field.through : field.tableId
const manyKey = field.throughFrom || primaryKey
const fieldName = isMany ? manyKey : field.fieldName
const response = await makeExternalQuery(this.appId, {
const response = await getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, DataSourceOperation.READ),
filters: {
equal: {
@ -479,7 +480,7 @@ module External {
: DataSourceOperation.CREATE
if (!found) {
promises.push(
makeExternalQuery(appId, {
getDatasourceAndQuery(appId, {
endpoint: getEndpoint(tableId, operation),
// if we're doing many relationships then we're writing, only one response
body,
@ -509,7 +510,7 @@ module External {
: DataSourceOperation.UPDATE
const body = isMany ? null : { [colName]: null }
promises.push(
makeExternalQuery(this.appId, {
getDatasourceAndQuery(this.appId, {
endpoint: getEndpoint(tableId, op),
body,
filters,
@ -532,16 +533,17 @@ module External {
table: Table,
includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
) {
function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
column =>
column[1].type !== FieldTypes.LINK &&
column[1].type !== FieldTypes.FORMULA &&
!existing.find((field: string) => field === column[0])
)
.map(column => `${table.name}.${column[0]}`)
}
let fields = extractNonLinkFieldNames(table)
let fields = extractRealFields(table)
for (let field of Object.values(table.schema)) {
if (field.type !== FieldTypes.LINK || !includeRelations) {
continue
@ -549,7 +551,7 @@ module External {
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = this.tables[linkTableName]
if (linkTable) {
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
}
}
@ -609,7 +611,7 @@ module External {
},
}
// can't really use response right now
const response = await makeExternalQuery(appId, json)
const response = await getDatasourceAndQuery(appId, json)
// handle many to many relationships now if we know the ID (could be auto increment)
if (
operation !== DataSourceOperation.READ &&

View File

@ -4,8 +4,8 @@ const CouchDB = require("../../../db")
const { InternalTables } = require("../../../db/utils")
const userController = require("../user")
const { FieldTypes } = require("../../../constants")
const { integrations } = require("../../../integrations")
const { processStringSync } = require("@budibase/string-templates")
const { makeExternalQuery } = require("../../../integrations/base/utils")
validateJs.extend(validateJs.validators.datetime, {
parse: function (value) {
@ -17,18 +17,11 @@ validateJs.extend(validateJs.validators.datetime, {
},
})
exports.makeExternalQuery = async (appId, json) => {
exports.getDatasourceAndQuery = async (appId, json) => {
const datasourceId = json.endpoint.datasourceId
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
return makeExternalQuery(datasource, json)
}
exports.findRow = async (ctx, db, tableId, rowId) => {

View File

@ -0,0 +1,276 @@
const CouchDB = require("../../../db")
const {
buildExternalTableId,
breakExternalTableId,
} = require("../../../integrations/utils")
const {
getTable,
generateForeignKey,
generateJunctionTableName,
foreignKeyStructure,
} = require("./utils")
const {
DataSourceOperation,
FieldTypes,
RelationshipTypes,
} = require("../../../constants")
const { makeExternalQuery } = require("../../../integrations/base/utils")
const { cloneDeep } = require("lodash/fp")
async function makeTableRequest(
datasource,
operation,
table,
tables,
oldTable = null
) {
const json = {
endpoint: {
datasourceId: datasource._id,
entityId: table._id,
operation,
},
meta: {
tables,
},
table,
}
if (oldTable) {
json.meta.table = oldTable
}
return makeExternalQuery(datasource, json)
}
function cleanupRelationships(table, tables, oldTable = null) {
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
if (
schema.type === FieldTypes.LINK &&
(!oldTable || table.schema[key] == null)
) {
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const foreignKey = schema.foreignKey
if (!relatedTable || !foreignKey) {
continue
}
for (let [relatedKey, relatedSchema] of Object.entries(
relatedTable.schema
)) {
if (
relatedSchema.type === FieldTypes.LINK &&
relatedSchema.fieldName === foreignKey
) {
delete relatedTable.schema[relatedKey]
}
}
}
}
}
function getDatasourceId(table) {
if (!table) {
throw "No table supplied"
}
if (table.sourceId) {
return table.sourceId
}
return breakExternalTableId(table._id).datasourceId
}
function otherRelationshipType(type) {
if (type === RelationshipTypes.MANY_TO_MANY) {
return RelationshipTypes.MANY_TO_MANY
}
return type === RelationshipTypes.ONE_TO_MANY
? RelationshipTypes.MANY_TO_ONE
: RelationshipTypes.ONE_TO_MANY
}
function generateManyLinkSchema(datasource, column, table, relatedTable) {
const primary = table.name + table.primary[0]
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
const jcTblName = generateJunctionTableName(column, table, relatedTable)
// first create the new table
const junctionTable = {
_id: buildExternalTableId(datasource._id, jcTblName),
name: jcTblName,
primary: [primary, relatedPrimary],
constrained: [primary, relatedPrimary],
schema: {
[primary]: foreignKeyStructure(primary, {
toTable: table.name,
toKey: table.primary[0],
}),
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
toTable: relatedTable.name,
toKey: relatedTable.primary[0],
}),
},
}
column.through = junctionTable._id
column.throughFrom = primary
column.throughTo = relatedPrimary
column.fieldName = relatedPrimary
return junctionTable
}
function generateLinkSchema(column, table, relatedTable, type) {
const isOneSide = type === RelationshipTypes.ONE_TO_MANY
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
// generate a foreign key
const foreignKey = generateForeignKey(column, relatedTable)
column.relationshipType = type
column.foreignKey = isOneSide ? foreignKey : primary
column.fieldName = isOneSide ? primary : foreignKey
return foreignKey
}
function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
// generate column for other table
const relatedSchema = cloneDeep(linkColumn)
// swap them from the main link
if (linkColumn.foreignKey) {
relatedSchema.fieldName = linkColumn.foreignKey
relatedSchema.foreignKey = linkColumn.fieldName
}
// is many to many
else {
// don't need to copy through, already got it
relatedSchema.fieldName = linkColumn.throughFrom
relatedSchema.throughTo = linkColumn.throughFrom
relatedSchema.throughFrom = linkColumn.throughTo
}
relatedSchema.relationshipType = otherRelationshipType(
linkColumn.relationshipType
)
relatedSchema.tableId = relatedTable._id
relatedSchema.name = columnName
table.schema[columnName] = relatedSchema
}
function isRelationshipSetup(column) {
return column.foreignKey || column.through
}
exports.save = async function (ctx) {
const appId = ctx.appId
const table = ctx.request.body
// can't do this
delete table.dataImport
const datasourceId = getDatasourceId(ctx.request.body)
let tableToSave = {
type: "table",
_id: buildExternalTableId(datasourceId, table.name),
...table,
}
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await getTable(appId, ctx.request.body._id)
}
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const oldTables = cloneDeep(datasource.entities)
const tables = datasource.entities
const extraTablesToUpdate = []
// check if relations need setup
for (let schema of Object.values(tableToSave.schema)) {
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
continue
}
const relatedTable = Object.values(tables).find(
table => table._id === schema.tableId
)
const relatedColumnName = schema.fieldName
const relationType = schema.relationshipType
if (relationType === RelationshipTypes.MANY_TO_MANY) {
const junctionTable = generateManyLinkSchema(
datasource,
schema,
table,
relatedTable
)
if (tables[junctionTable.name]) {
throw "Junction table already exists, cannot create another relationship."
}
tables[junctionTable.name] = junctionTable
extraTablesToUpdate.push(junctionTable)
} else {
const fkTable =
relationType === RelationshipTypes.ONE_TO_MANY ? table : relatedTable
const foreignKey = generateLinkSchema(
schema,
table,
relatedTable,
relationType
)
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
if (fkTable.constrained == null) {
fkTable.constrained = []
}
if (fkTable.constrained.indexOf(foreignKey) === -1) {
fkTable.constrained.push(foreignKey)
}
// foreign key is in other table, need to save it to external
if (fkTable._id !== table._id) {
extraTablesToUpdate.push(fkTable)
}
}
generateRelatedSchema(schema, relatedTable, table, relatedColumnName)
schema.main = true
}
cleanupRelationships(tableToSave, tables, oldTable)
const operation = oldTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
await makeTableRequest(datasource, operation, tableToSave, tables, oldTable)
// update any extra tables (like foreign keys in other tables)
for (let extraTable of extraTablesToUpdate) {
const oldExtraTable = oldTables[extraTable.name]
let op = oldExtraTable
? DataSourceOperation.UPDATE_TABLE
: DataSourceOperation.CREATE_TABLE
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
}
// make sure the columns in the constrained list all still exist
if (Array.isArray(tableToSave.constrained)) {
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
Object.keys(tableToSave.schema).includes(constraint)
)
}
// store it into couch now for budibase reference
datasource.entities[tableToSave.name] = tableToSave
await db.put(datasource)
return tableToSave
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const tableToDelete = await getTable(appId, ctx.params.tableId)
const datasourceId = getDatasourceId(tableToDelete)
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
const tables = datasource.entities
const operation = DataSourceOperation.DELETE_TABLE
await makeTableRequest(datasource, operation, tableToDelete, tables)
cleanupRelationships(tableToDelete, tables)
delete datasource.entities[tableToDelete.name]
await db.put(datasource)
return tableToDelete
}

View File

@ -1,16 +1,28 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const internal = require("./internal")
const external = require("./external")
const csvParser = require("../../../utilities/csvParser")
const { isExternalTable } = require("../../../integrations/utils")
const {
getRowParams,
getTableParams,
generateTableID,
getDatasourceParams,
BudibaseInternalDB,
} = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions, getTable } = require("./utils")
const { getTable } = require("./utils")
function pickApi({ tableId, table }) {
if (table && !tableId) {
tableId = table._id
}
if (table && table.type === "external") {
return external
} else if (tableId && isExternalTable(tableId)) {
return external
}
return internal
}
// covers both internal and external
exports.fetch = async function (ctx) {
const db = new CouchDB(ctx.appId)
@ -50,143 +62,23 @@ exports.find = async function (ctx) {
exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}
// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}
// saving a table is a complex operation involving many different steps; this
// has been broken out into a utility to make it more obvious and easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)
// make sure that types don't change of a column, have to remove
// the column if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}
// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}
// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}
tableToSave = await tableSaveFunctions.mid(tableToSave)
// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue
if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}
// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}
// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)
const table = ctx.request.body
const savedTable = await pickApi({ table }).save(ctx)
ctx.status = 200
ctx.message = `Table ${ctx.request.body.name} saved successfully.`
ctx.body = tableToSave
ctx.message = `Table ${table.name} saved successfully.`
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
ctx.body = savedTable
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)
// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})
// don't remove the table itself until very end
await db.remove(tableToDelete)
// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}
const tableId = ctx.params.tableId
const deletedTable = await pickApi({ tableId }).destroy(ctx)
ctx.eventEmitter &&
ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
ctx.status = 200
ctx.body = { message: `Table ${ctx.params.tableId} deleted.` }
ctx.body = { message: `Table ${tableId} deleted.` }
}
exports.validateCSVSchema = async function (ctx) {

View File

@ -0,0 +1,138 @@
const CouchDB = require("../../../db")
const linkRows = require("../../../db/linkedRows")
const { getRowParams, generateTableID } = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions } = require("./utils")
exports.save = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const { dataImport, ...rest } = ctx.request.body
let tableToSave = {
type: "table",
_id: generateTableID(),
views: {},
...rest,
}
// if the table obj had an _id then it will have been retrieved
let oldTable
if (ctx.request.body && ctx.request.body._id) {
oldTable = await db.get(ctx.request.body._id)
}
// saving a table is a complex operation involving many different steps; this
// has been broken out into a utility to make it more obvious and easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({
db,
ctx,
oldTable,
dataImport,
})
tableToSave = await tableSaveFunctions.before(tableToSave)
// make sure that types don't change of a column, have to remove
// the column if you want to change the type
if (oldTable && oldTable.schema) {
for (let propKey of Object.keys(tableToSave.schema)) {
let column = tableToSave.schema[propKey]
let oldColumn = oldTable.schema[propKey]
if (oldColumn && oldColumn.type === "internal") {
oldColumn.type = "auto"
}
if (oldColumn && oldColumn.type !== column.type) {
ctx.throw(400, "Cannot change the type of a column")
}
}
}
// Don't rename if the name is the same
let { _rename } = tableToSave
/* istanbul ignore next */
if (_rename && _rename.old === _rename.updated) {
_rename = null
delete tableToSave._rename
}
// rename row fields when table column is renamed
/* istanbul ignore next */
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
ctx.throw(400, "Cannot rename a linked column.")
}
tableToSave = await tableSaveFunctions.mid(tableToSave)
// update schema of non-statistics views when new columns are added
for (let view in tableToSave.views) {
const tableView = tableToSave.views[view]
if (!tableView) continue
if (tableView.schema.group || tableView.schema.field) continue
tableView.schema = tableToSave.schema
}
// update linked rows
try {
const linkResp = await linkRows.updateLinks({
appId,
eventType: oldTable
? linkRows.EventType.TABLE_UPDATED
: linkRows.EventType.TABLE_SAVE,
table: tableToSave,
oldTable: oldTable,
})
if (linkResp != null && linkResp._rev) {
tableToSave._rev = linkResp._rev
}
} catch (err) {
ctx.throw(400, err)
}
// don't perform any updates until relationships have been
// checked by the updateLinks function
const updatedRows = tableSaveFunctions.getUpdatedRows()
if (updatedRows && updatedRows.length !== 0) {
await db.bulkDocs(updatedRows)
}
const result = await db.put(tableToSave)
tableToSave._rev = result.rev
tableToSave = await tableSaveFunctions.after(tableToSave)
return tableToSave
}
exports.destroy = async function (ctx) {
const appId = ctx.appId
const db = new CouchDB(appId)
const tableToDelete = await db.get(ctx.params.tableId)
// Delete all rows for that table
const rows = await db.allDocs(
getRowParams(ctx.params.tableId, null, {
include_docs: true,
})
)
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
// update linked rows
await linkRows.updateLinks({
appId,
eventType: linkRows.EventType.TABLE_DELETE,
table: tableToDelete,
})
// don't remove the table itself until very end
await db.remove(tableToDelete)
// remove table search index
const currentIndexes = await db.getIndexes()
const existingIndex = currentIndexes.indexes.find(
existing => existing.name === `search:${ctx.params.tableId}`
)
if (existingIndex) {
await db.deleteIndex(existingIndex)
}
return tableToDelete
}

View File

@ -315,4 +315,24 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => {
}
}
exports.generateForeignKey = (column, relatedTable) => {
return `fk_${relatedTable.name}_${column.fieldName}`
}
exports.generateJunctionTableName = (column, table, relatedTable) => {
return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
}
exports.foreignKeyStructure = (keyName, meta = null) => {
const structure = {
type: FieldTypes.NUMBER,
constraints: {},
name: keyName,
}
if (meta) {
structure.meta = meta
}
return structure
}
exports.TableSaveFunctions = TableSaveFunctions
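To make the naming and structure produced by these helpers concrete, a small illustration (the table and column names are hypothetical, and it assumes FieldTypes.NUMBER resolves to "number"):
// generateForeignKey({ fieldName: "category" }, { name: "categories" })
//   -> "fk_categories_category"
// generateJunctionTableName(
//   { name: "stock", fieldName: "category" },
//   { name: "products" },
//   { name: "categories" }
// ) -> "jt_products_categories_stock_category"
// foreignKeyStructure("fk_categories_category", { toTable: "categories", toKey: "id" })
//   -> { type: "number", constraints: {}, name: "fk_categories_category",
//        meta: { toTable: "categories", toKey: "id" } }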

View File

@ -62,6 +62,9 @@ exports.DataSourceOperation = {
READ: "READ",
UPDATE: "UPDATE",
DELETE: "DELETE",
CREATE_TABLE: "CREATE_TABLE",
UPDATE_TABLE: "UPDATE_TABLE",
DELETE_TABLE: "DELETE_TABLE",
}
exports.SortDirection = {

View File

@ -17,6 +17,11 @@ export interface FieldSchema {
autocolumn?: boolean
throughFrom?: string
throughTo?: string
main?: boolean
meta?: {
toTable: string
toKey: string
}
constraints?: {
type?: string
email?: boolean
@ -36,11 +41,12 @@ export interface TableSchema {
export interface Table extends Base {
type?: string
views?: {}
name?: string
name: string
primary?: string[]
schema: TableSchema
primaryDisplay?: string
sourceId?: string
constrained?: string[]
}
export interface Row extends Base {

View File

@ -5,6 +5,9 @@ export enum Operation {
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
}
export enum SortDirection {
@ -142,8 +145,10 @@ export interface QueryJson {
sort?: SortJson
paginate?: PaginationJson
body?: object
table?: Table
meta?: {
table?: Table
tables?: Record<string, Table>
}
extra?: {
idFilter?: SearchFilters

View File

@ -1,19 +1,24 @@
import { Knex, knex } from "knex"
const BASE_LIMIT = 5000
import {
QueryJson,
SearchFilters,
QueryOptions,
SortDirection,
Operation,
QueryJson,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortDirection,
} from "../../definitions/datasource"
import { isIsoDateString } from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
const BASE_LIMIT = 5000
type KnexQuery = Knex.QueryBuilder | Knex
function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
if (Array.isArray(value)) {
body[key] = JSON.stringify(value)
}
if (typeof value !== "string") {
continue
}
@ -243,23 +248,14 @@ function buildDelete(
}
}
class SqlQueryBuilder {
private readonly sqlClient: string
class SqlQueryBuilder extends SqlTableQueryBuilder {
private readonly limit: number
// pass through client to get flavour of SQL
constructor(client: string, limit: number = BASE_LIMIT) {
this.sqlClient = client
super(client)
this.limit = limit
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}
/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@ -267,7 +263,8 @@ class SqlQueryBuilder {
* @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
*/
_query(json: QueryJson, opts: QueryOptions = {}) {
const client = knex({ client: this.sqlClient })
const sqlClient = this.getSqlClient()
const client = knex({ client: sqlClient })
let query
switch (this._operation(json)) {
case Operation.CREATE:
@ -282,6 +279,8 @@ class SqlQueryBuilder {
case Operation.DELETE:
query = buildDelete(client, json, opts)
break
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw `Operation type is not supported by SQL query builder`
}

View File

@ -0,0 +1,167 @@
import { Knex, knex } from "knex"
import { Table } from "../../definitions/common"
import { Operation, QueryJson } from "../../definitions/datasource"
import { breakExternalTableId } from "../utils"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
const { FieldTypes, RelationshipTypes } = require("../../constants")
function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema)
// all columns in a junction table will be meta
let metaCols = columns.filter(col => col.meta)
let isJunction = metaCols.length === columns.length
// can't change the primary key once it's set, for now
if (primaryKey && !oldTable && !isJunction) {
schema.increments(primaryKey).primary()
} else if (!oldTable && isJunction) {
schema.primary(metaCols.map(col => col.name))
}
// check if any columns need to be added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
if ((oldColumn && oldColumn.type === column.type) || (primaryKey === key && !isJunction)) {
continue
}
switch (column.type) {
case FieldTypes.STRING:
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
schema.string(key)
break
case FieldTypes.NUMBER:
// if meta is specified then this is a junction table entry
if (column.meta && column.meta.toKey && column.meta.toTable) {
const { toKey, toTable } = column.meta
schema.integer(key).unsigned()
schema.foreign(key).references(`${toTable}.${toKey}`)
} else if (foreignKeys.indexOf(key) === -1) {
schema.float(key)
}
break
case FieldTypes.BOOLEAN:
schema.boolean(key)
break
case FieldTypes.DATETIME:
schema.datetime(key)
break
case FieldTypes.ARRAY:
schema.json(key)
break
case FieldTypes.LINK:
// this side of the relationship doesn't need any SQL work
if (
column.relationshipType !== RelationshipTypes.MANY_TO_ONE &&
column.relationshipType !== RelationshipTypes.MANY_TO_MANY
) {
if (!column.foreignKey || !column.tableId) {
throw "Invalid relationship schema"
}
const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore
const relatedTable = tables[tableName]
if (!relatedTable) {
throw "Referenced table doesn't exist"
}
schema.integer(column.foreignKey).unsigned()
schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
}
break
}
}
// need to check if any columns have been deleted
if (oldTable) {
const deletedColumns = Object.entries(oldTable.schema)
.filter(([key, schema]) => schema.type !== FieldTypes.LINK && table.schema[key] == null)
.map(([key]) => key)
deletedColumns.forEach(key => {
if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
schema.dropForeign(key)
}
schema.dropColumn(key)
})
}
return schema
}
function buildCreateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
generateSchema(schema, table, tables)
})
}
function buildUpdateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}
function buildDeleteTable(
knex: Knex,
table: Table,
): SchemaBuilder {
return knex.schema.dropTable(table.name)
}
class SqlTableQueryBuilder {
private readonly sqlClient: string
// pass through client to get flavour of SQL
constructor(client: string) {
this.sqlClient = client
}
getSqlClient(): string {
return this.sqlClient
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return {string} the operation that was found in the JSON.
*/
_operation(json: QueryJson): Operation {
return json.endpoint.operation
}
_tableQuery(json: QueryJson): any {
const client = knex({ client: this.sqlClient })
let query
if (!json.table || !json.meta || !json.meta.tables) {
throw "Cannot execute without table being specified"
}
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
throw "Must specify old table for update"
}
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
break
default:
throw "Table operation is of unknown type"
}
return query.toSQL()
}
}
export default SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder
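A minimal usage sketch of the new table query builder (the require path, client name and table shape are assumptions for illustration, not taken from this diff); it compiles a CREATE_TABLE request into SQL through knex without needing a live connection:
const SqlTableQueryBuilder = require("./sqlTable") // path is an assumption
const builder = new SqlTableQueryBuilder("pg") // any knex client flavour works for compiling
const compiled = builder._tableQuery({
  endpoint: { datasourceId: "ds_id", entityId: "people", operation: "CREATE_TABLE" },
  table: {
    _id: "ds_id__people", // illustrative external table id
    name: "people",
    primary: ["id"],
    schema: {
      id: { type: "number", autocolumn: true },
      name: { type: "string" },
    },
  },
  meta: { tables: {} },
})
// compiled is knex's toSQL() output, e.g. the CREATE TABLE statement for "people"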

View File

@ -0,0 +1,19 @@
import { QueryJson } from "../../definitions/datasource"
import { Datasource } from "../../definitions/common"
module DatasourceUtils {
const { integrations } = require("../index")
export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {
const integration = new Integration(datasource.config)
return integration.query(json)
} else {
throw "Datasource does not support query."
}
}
module.exports.makeExternalQuery = makeExternalQuery
}

View File

@ -262,6 +262,13 @@ module MySQLModule {
const operation = this._operation(json)
this.client.connect()
const input = this._query(json, { disableReturning: true })
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query, false))
}
return responses
}
let row
// need to manage returning, a feature mySQL can't do
if (operation === Operation.DELETE) {

View File

@ -263,8 +263,16 @@ module PostgresModule {
async query(json: QueryJson) {
const operation = this._operation(json).toLowerCase()
const input = this._query(json)
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
if (Array.isArray(input)) {
const responses = []
for (let query of input) {
responses.push(await internalQuery(this.client, query))
}
return responses
} else {
const response = await internalQuery(this.client, input)
return response.rows.length ? response.rows : [{ [operation]: true }]
}
}
}

View File

@ -124,7 +124,10 @@ function copyExistingPropsOver(
return table
}
export function finaliseExternalTables(tables: { [key: string]: any }, entities: { [key: string]: any }) {
export function finaliseExternalTables(
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
const finalTables: { [key: string]: any } = {}
const errors: { [key: string]: string } = {}
for (let [name, table] of Object.entries(tables)) {

File diff suppressed because it is too large

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",

View File

@ -31,7 +31,7 @@ const HELPERS = [
}
// null/undefined values produce bad results
if (value == null || typeof value !== "string") {
return value || ""
return value == null ? "" : value
}
if (value && value.string) {
value = value.string

View File

@ -125,6 +125,18 @@ describe("check the utility functions", () => {
})
})
describe("check falsy values", () => {
it("should get a zero out when context contains it", async () => {
const output = await processString("{{ number }}", { number: 0 })
expect(output).toEqual("0")
})
it("should get false out when context contains it", async () => {
const output = await processString("{{ bool }}", { bool: false })
expect(output).toEqual("false")
})
})
describe("check manifest", () => {
it("should be able to retrieve the manifest", () => {
const manifest = getManifest()

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "0.9.173-alpha.4",
"version": "0.9.176-alpha.3",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@ -29,8 +29,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
"@budibase/auth": "^0.9.173-alpha.4",
"@budibase/string-templates": "^0.9.173-alpha.4",
"@budibase/auth": "^0.9.176-alpha.3",
"@budibase/string-templates": "^0.9.176-alpha.3",
"@koa/router": "^8.0.0",
"@sentry/node": "^6.0.0",
"@techpass/passport-openidconnect": "^0.3.0",