Merge branch 'develop' into postgres-schema

commit 2b6c6d7aaa
@@ -3,6 +3,7 @@ builder/*
 .temp/
 packages/server/runtime_apps/
 .idea/
+bb-airgapped.tar.gz
 
 # Logs
 logs
@@ -0,0 +1,51 @@
+const fs = require("fs")
+const { execSync } = require("child_process")
+const path = require("path")
+
+const IMAGES = {
+  worker: "budibase/worker",
+  apps: "budibase/apps",
+  proxy: "envoyproxy/envoy:v1.16-latest",
+  minio: "minio/minio",
+  couch: "ibmcom/couchdb3",
+  curl: "curlimages/curl",
+  redis: "redis",
+  watchtower: "containrrr/watchtower"
+}
+
+const FILES = {
+  COMPOSE: "docker-compose.yaml",
+  ENVOY: "envoy.yaml",
+  PROPERTIES: "hosting.properties"
+}
+
+const OUTPUT_DIR = path.join(__dirname, "../", "bb-airgapped")
+
+function copyFile(file) {
+  fs.copyFileSync(
+    path.join(__dirname, "../", "../", file),
+    path.join(OUTPUT_DIR, file)
+  )
+}
+
+// create output dir
+console.log(`Creating ${OUTPUT_DIR} for build..`)
+fs.rmdirSync(OUTPUT_DIR, { recursive: true })
+fs.mkdirSync(OUTPUT_DIR)
+
+// package images into tar files
+for (let image in IMAGES) {
+  console.log(`Creating tar for ${image}..`)
+  execSync(`docker save ${IMAGES[image]} -o ${OUTPUT_DIR}/${image}.tar`)
+}
+
+// copy config files
+copyFile(FILES.COMPOSE)
+copyFile(FILES.ENVOY)
+copyFile(FILES.PROPERTIES)
+
+// compress
+execSync(`tar -czf bb-airgapped.tar.gz hosting/scripts/bb-airgapped`)
+
+// clean up
+fs.rmdirSync(OUTPUT_DIR, { recursive: true })
@@ -1,5 +1,5 @@
 {
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

@@ -46,6 +46,7 @@
     "build:docker": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
     "build:docker:production": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION release && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
+    "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
     "release:helm": "./scripts/release_helm_chart.sh",
     "env:multi:enable": "lerna run env:multi:enable",
     "env:multi:disable": "lerna run env:multi:disable",

@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",
@@ -9,6 +9,8 @@ const { createASession } = require("../../security/sessions")
 const { getTenantId } = require("../../tenancy")
 
 const INVALID_ERR = "Invalid Credentials"
+const SSO_NO_PASSWORD = "SSO user does not have a password set"
+const EXPIRED = "This account has expired. Please reset your password"
 
 exports.options = {
   passReqToCallback: true,
@@ -36,6 +38,19 @@ exports.authenticate = async function (ctx, email, password, done) {
     return authError(done, INVALID_ERR)
   }
 
+  // check that the user has a stored password before proceeding
+  if (!dbUser.password) {
+    if (
+      (dbUser.account && dbUser.account.authType === "sso") || // root account sso
+      dbUser.thirdPartyProfile // internal sso
+    ) {
+      return authError(done, SSO_NO_PASSWORD)
+    }
+
+    console.error("Non SSO user has no password set", dbUser)
+    return authError(done, EXPIRED)
+  }
+
   // authenticate
   if (await compare(password, dbUser.password)) {
     const sessionId = newid()
@@ -181,8 +181,8 @@ exports.saveUser = async (
 
   // check budibase users in other tenants
   if (env.MULTI_TENANCY) {
-    dbUser = await getTenantUser(email)
-    if (dbUser != null && dbUser.tenantId !== tenantId) {
+    const tenantUser = await getTenantUser(email)
+    if (tenantUser != null && tenantUser.tenantId !== tenantId) {
       throw `Email address ${email} already in use.`
     }
   }

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -51,6 +51,7 @@
     "@spectrum-css/fieldlabel": "^3.0.1",
     "@spectrum-css/icon": "^3.0.1",
     "@spectrum-css/illustratedmessage": "^3.0.2",
+    "@spectrum-css/inlinealert": "^2.0.1",
     "@spectrum-css/inputgroup": "^3.0.2",
     "@spectrum-css/label": "^2.0.10",
     "@spectrum-css/link": "^3.1.1",
@@ -0,0 +1,51 @@
+<script>
+  import "@spectrum-css/inlinealert/dist/index-vars.css"
+  import Button from "../Button/Button.svelte"
+
+  export let type = "info"
+  export let header = ""
+  export let message = ""
+  export let onConfirm = undefined
+
+  $: icon = selectIcon(type)
+
+  function selectIcon(alertType) {
+    switch (alertType) {
+      case "error":
+      case "negative":
+        return "Alert"
+      case "success":
+        return "CheckmarkCircle"
+      case "help":
+        return "Help"
+      default:
+        return "Info"
+    }
+  }
+</script>
+
+<div class="spectrum-InLineAlert spectrum-InLineAlert--{type}">
+  <svg
+    class="spectrum-Icon spectrum-Icon--sizeM spectrum-InLineAlert-icon"
+    focusable="false"
+    aria-hidden="true"
+  >
+    <use xlink:href="#spectrum-icon-18-{icon}" />
+  </svg>
+  <div class="spectrum-InLineAlert-header">{header}</div>
+  <div class="spectrum-InLineAlert-content">{message}</div>
+  {#if onConfirm}
+    <div class="spectrum-InLineAlert-footer">
+      <Button secondary on:click={onConfirm}>OK</Button>
+    </div>
+  {/if}
+</div>
+
+<style>
+  .spectrum-InLineAlert {
+    --spectrum-semantic-negative-border-color: #e34850;
+    --spectrum-semantic-positive-border-color: #2d9d78;
+    --spectrum-semantic-positive-icon-color: #2d9d78;
+    --spectrum-semantic-negative-icon-color: #e34850;
+  }
+</style>
@@ -58,6 +58,7 @@ export { default as Pagination } from "./Pagination/Pagination.svelte"
 export { default as Badge } from "./Badge/Badge.svelte"
 export { default as StatusLight } from "./StatusLight/StatusLight.svelte"
 export { default as ColorPicker } from "./ColorPicker/ColorPicker.svelte"
+export { default as InlineAlert } from "./InlineAlert/InlineAlert.svelte"
 
 // Typography
 export { default as Body } from "./Typography/Body.svelte"

@@ -136,6 +136,11 @@
   resolved "https://registry.yarnpkg.com/@spectrum-css/illustratedmessage/-/illustratedmessage-3.0.2.tgz#6a480be98b027e050b086e7899e40d87adb0a8c0"
   integrity sha512-dqnE8X27bGcO0HN8+dYx8O4o0dNNIAqeivOzDHhe2El+V4dTzMrNIerF6G0NLm3GjVf6XliwmitsZK+K6FmbtA==
 
+"@spectrum-css/inlinealert@^2.0.1":
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/@spectrum-css/inlinealert/-/inlinealert-2.0.1.tgz#7521f88f6c845806403cc7d925773c7414e204a2"
+  integrity sha512-Xy5RCOwgurqUXuGQCsEDUduDd5408bmEpmFg+feynG7VFUgLFZWBeylSENB/OqjlFtO76PHXNVdHkhDscPIHTA==
+
 "@spectrum-css/inputgroup@^3.0.2":
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/@spectrum-css/inputgroup/-/inputgroup-3.0.2.tgz#f1b13603832cbd22394f3d898af13203961f8691"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {

@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.173-alpha.3",
-    "@budibase/client": "^0.9.173-alpha.3",
+    "@budibase/bbui": "^0.9.176-alpha.3",
+    "@budibase/client": "^0.9.176-alpha.3",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^0.9.173-alpha.3",
+    "@budibase/string-templates": "^0.9.176-alpha.3",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
@@ -4,6 +4,7 @@
   import CreateRowButton from "./buttons/CreateRowButton.svelte"
   import CreateColumnButton from "./buttons/CreateColumnButton.svelte"
   import CreateViewButton from "./buttons/CreateViewButton.svelte"
+  import ExistingRelationshipButton from "./buttons/ExistingRelationshipButton.svelte"
   import ExportButton from "./buttons/ExportButton.svelte"
   import EditRolesButton from "./buttons/EditRolesButton.svelte"
   import ManageAccessButton from "./buttons/ManageAccessButton.svelte"

@@ -98,9 +99,7 @@
   on:updatecolumns={onUpdateColumns}
   on:updaterows={onUpdateRows}
 >
-  {#if isInternal}
-    <CreateColumnButton on:updatecolumns={onUpdateColumns} />
-  {/if}
+  <CreateColumnButton on:updatecolumns={onUpdateColumns} />
   {#if schema && Object.keys(schema).length > 0}
     {#if !isUsersTable}
       <CreateRowButton

@@ -116,6 +115,12 @@
     {#if isUsersTable}
       <EditRolesButton />
     {/if}
+    {#if !isInternal}
+      <ExistingRelationshipButton
+        table={$tables.selected}
+        on:updatecolumns={onUpdateColumns}
+      />
+    {/if}
     <HideAutocolumnButton bind:hideAutocolumns />
     <!-- always have the export last -->
     <ExportButton view={$tables.selected?._id} />
@@ -16,8 +16,8 @@
   export let value = defaultValue || (meta.type === "boolean" ? false : "")
   export let readonly
 
-  $: type = meta.type
-  $: label = capitalise(meta.name)
+  $: type = meta?.type
+  $: label = meta.name ? capitalise(meta.name) : ""
 </script>
 
 {#if type === "options"}

@@ -129,7 +129,7 @@
   bind:selectedRows
   allowSelectRows={allowEditing && !isUsersTable}
   allowEditRows={allowEditing}
-  allowEditColumns={allowEditing && isInternal}
+  allowEditColumns={allowEditing}
   showAutoColumns={!hideAutocolumns}
   on:editcolumn={e => editColumn(e.detail)}
   on:editrow={e => editRow(e.detail)}
@@ -0,0 +1,54 @@
+<script>
+  import { ActionButton, Modal, notifications } from "@budibase/bbui"
+  import CreateEditRelationship from "../../Datasources/CreateEditRelationship.svelte"
+  import { datasources, tables } from "../../../../stores/backend"
+  import { createEventDispatcher } from "svelte"
+
+  export let table
+  const dispatch = createEventDispatcher()
+
+  $: plusTables = datasource?.plus
+    ? Object.values(datasource?.entities || {})
+    : []
+  $: datasource = $datasources.list.find(
+    source => source._id === table?.sourceId
+  )
+
+  let modal
+
+  async function saveRelationship() {
+    try {
+      // Create datasource
+      await datasources.save(datasource)
+      notifications.success(`Relationship information saved.`)
+      const tableList = await tables.fetch()
+      await tables.select(tableList.find(tbl => tbl._id === table._id))
+      dispatch("updatecolumns")
+    } catch (err) {
+      notifications.error(`Error saving relationship info: ${err}`)
+    }
+  }
+</script>
+
+{#if table.sourceId}
+  <div>
+    <ActionButton
+      icon="DataCorrelated"
+      primary
+      size="S"
+      quiet
+      on:click={modal.show}
+    >
+      Define existing relationship
+    </ActionButton>
+  </div>
+  <Modal bind:this={modal}>
+    <CreateEditRelationship
+      {datasource}
+      save={saveRelationship}
+      close={modal.hide}
+      {plusTables}
+      selectedFromTable={table}
+    />
+  </Modal>
+{/if}
@@ -31,6 +31,9 @@
   const AUTO_TYPE = "auto"
   const FORMULA_TYPE = FIELDS.FORMULA.type
   const LINK_TYPE = FIELDS.LINK.type
+  const STRING_TYPE = FIELDS.STRING.type
+  const NUMBER_TYPE = FIELDS.NUMBER.type
 
   const dispatch = createEventDispatcher()
   const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
   const { hide } = getContext(Context.Modal)

@@ -55,8 +58,9 @@
   let confirmDeleteDialog
   let deletion
 
+  $: checkConstraints(field)
   $: tableOptions = $tables.list.filter(
-    table => table._id !== $tables.draft._id && table.type !== "external"
+    opt => opt._id !== $tables.draft._id && opt.type === table.type
   )
   $: required = !!field?.constraints?.presence || primaryDisplay
   $: uneditable =

@@ -83,6 +87,7 @@
   $: canBeRequired =
     field.type !== LINK_TYPE && !uneditable && field.type !== AUTO_TYPE
   $: relationshipOptions = getRelationshipOptions(field)
+  $: external = table.type === "external"
 
   async function saveColumn() {
     if (field.type === AUTO_TYPE) {
@@ -193,6 +198,45 @@
       },
     ]
   }
+
+  function getAllowedTypes() {
+    if (!external) {
+      return [
+        ...Object.values(fieldDefinitions),
+        { name: "Auto Column", type: AUTO_TYPE },
+      ]
+    } else {
+      return [
+        FIELDS.STRING,
+        FIELDS.LONGFORM,
+        FIELDS.OPTIONS,
+        FIELDS.DATETIME,
+        FIELDS.NUMBER,
+        FIELDS.BOOLEAN,
+        FIELDS.ARRAY,
+        FIELDS.FORMULA,
+        FIELDS.LINK,
+      ]
+    }
+  }
+
+  function checkConstraints(fieldToCheck) {
+    // most types need this, just make sure its always present
+    if (fieldToCheck && !fieldToCheck.constraints) {
+      fieldToCheck.constraints = {}
+    }
+    // some string types may have been built by server, may not always have constraints
+    if (fieldToCheck.type === STRING_TYPE && !fieldToCheck.constraints.length) {
+      fieldToCheck.constraints.length = {}
+    }
+    // some number types made server-side will be missing constraints
+    if (
+      fieldToCheck.type === NUMBER_TYPE &&
+      !fieldToCheck.constraints.numericality
+    ) {
+      fieldToCheck.constraints.numericality = {}
+    }
+  }
 </script>
 
 <ModalContent
@@ -215,10 +259,7 @@
     label="Type"
     bind:value={field.type}
     on:change={handleTypeChange}
-    options={[
-      ...Object.values(fieldDefinitions),
-      { name: "Auto Column", type: AUTO_TYPE },
-    ]}
+    options={getAllowedTypes()}
     getOptionLabel={field => field.name}
     getOptionValue={field => field.type}
   />

@@ -245,7 +286,7 @@
     </div>
   {/if}
 
-  {#if canBeSearched}
+  {#if canBeSearched && !external}
     <div>
       <Label grey small>Search Indexes</Label>
       <Toggle
@@ -18,10 +18,19 @@
   export let fromRelationship = {}
   export let toRelationship = {}
   export let close
+  export let selectedFromTable
 
   let originalFromName = fromRelationship.name,
     originalToName = toRelationship.name
+
+  if (fromRelationship && !fromRelationship.relationshipType) {
+    fromRelationship.relationshipType = RelationshipTypes.MANY_TO_ONE
+  }
+
+  if (toRelationship && selectedFromTable) {
+    toRelationship.tableId = selectedFromTable._id
+  }
 
   function inSchema(table, prop, ogName) {
     if (!table || !prop || prop === ogName) {
       return false

@@ -114,6 +123,7 @@
     },
   ]
   $: updateRelationshipType(fromRelationship?.relationshipType)
+  $: tableChanged(fromTable, toTable)
 
   function updateRelationshipType(fromType) {
     if (fromType === RelationshipTypes.MANY_TO_MANY) {

@@ -205,7 +215,6 @@
     originalToName = toRelationship.name
     originalFromName = fromRelationship.name
     await save()
-    await tables.fetch()
   }
 
   async function deleteRelationship() {
@@ -215,10 +224,26 @@
     await tables.fetch()
     close()
   }
+
+  function tableChanged(fromTbl, toTbl) {
+    fromRelationship.name = toTbl?.name || ""
+    errors.fromCol = ""
+    toRelationship.name = fromTbl?.name || ""
+    errors.toCol = ""
+    if (toTbl || fromTbl) {
+      checkForErrors(
+        fromTable,
+        toTable,
+        through,
+        fromRelationship,
+        toRelationship
+      )
+    }
+  }
 </script>
 
 <ModalContent
-  title="Create Relationship"
+  title="Define Relationship"
   confirmText="Save"
   onConfirm={saveRelationship}
   disabled={!valid}

@@ -234,6 +259,7 @@
   <Select
     label="Select from table"
     options={tableOptions}
+    disabled={!!selectedFromTable}
     on:change={() => ($touched.from = true)}
     bind:error={errors.from}
     bind:value={toRelationship.tableId}
@@ -1,7 +1,7 @@
 <script>
   import { goto } from "@roxi/routify"
   import { allScreens, store } from "builderStore"
-  import { tables } from "stores/backend"
+  import { tables, datasources } from "stores/backend"
   import {
     ActionMenu,
     Icon,

@@ -40,7 +40,10 @@
     store.actions.screens.delete(templateScreens)
     await tables.fetch()
     notifications.success("Table deleted")
-    if (wasSelectedTable._id === table._id) {
+    if (table.type === "external") {
+      await datasources.fetch()
+    }
+    if (wasSelectedTable && wasSelectedTable._id === table._id) {
       $goto("./table")
     }
   }

@@ -64,9 +67,7 @@
     <Icon s hoverable name="MoreSmallList" />
   </div>
   <MenuItem icon="Edit" on:click={editorModal.show}>Edit</MenuItem>
-  {#if !external}
-    <MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
-  {/if}
+  <MenuItem icon="Delete" on:click={showDeleteModal}>Delete</MenuItem>
 </ActionMenu>
 
 <Modal bind:this={editorModal}>
@@ -32,6 +32,16 @@
     .component("@budibase/standard-components/screenslot")
     .instanceName("Content Placeholder")
     .json()
 
+  // Messages that can be sent from the iframe preview to the builder
+  // Budibase events and initialisation events
+  const MessageTypes = {
+    IFRAME_LOADED: "iframe-loaded",
+    READY: "ready",
+    ERROR: "error",
+    BUDIBASE: "type",
+    KEYDOWN: "keydown"
+  }
+
   // Construct iframe template
   $: template = iframeTemplate.replace(
@@ -80,46 +90,44 @@
   // Refresh the preview when required
   $: refreshContent(strippedJson)
 
-  onMount(() => {
-    // Initialise the app when mounted
-    iframe.contentWindow.addEventListener(
-      "ready",
-      () => {
-        // Display preview immediately if the intelligent loading feature
-        // is not supported
-        if (!$store.clientFeatures.intelligentLoading) {
-          loading = false
-        }
-        refreshContent(strippedJson)
-      },
-      { once: true }
-    )
+  function receiveMessage(message) {
+    const handlers = {
+      [MessageTypes.READY]: () => {
+        // Initialise the app when mounted
+        // Display preview immediately if the intelligent loading feature
+        // is not supported
+        if (!loading) return
+
+        if (!$store.clientFeatures.intelligentLoading) {
+          loading = false
+        }
+        refreshContent(strippedJson)
+      },
+      [MessageTypes.ERROR]: event => {
+        // Catch any app errors
+        loading = false
+        error = event.error || "An unknown error occurred"
+      },
+      [MessageTypes.KEYDOWN]: handleKeydownEvent
+    }
 
-    // Catch any app errors
-    iframe.contentWindow.addEventListener(
-      "error",
-      event => {
-        loading = false
-        error = event.detail || "An unknown error occurred"
-      },
-      { once: true }
-    )
+    const messageHandler = handlers[message.data.type] || handleBudibaseEvent
+    messageHandler(message)
+  }
 
-    // Add listener for events sent by client library in preview
-    iframe.contentWindow.addEventListener("bb-event", handleBudibaseEvent)
-    iframe.contentWindow.addEventListener("keydown", handleKeydownEvent)
+  onMount(() => {
+    window.addEventListener("message", receiveMessage)
   })
 
   // Remove all iframe event listeners on component destroy
   onDestroy(() => {
     if (iframe.contentWindow) {
-      iframe.contentWindow.removeEventListener("bb-event", handleBudibaseEvent)
-      iframe.contentWindow.removeEventListener("keydown", handleKeydownEvent)
+      window.removeEventListener("message", receiveMessage)
     }
   })
 
   const handleBudibaseEvent = event => {
-    const { type, data } = event.detail
+    const { type, data } = event.data
     if (type === "select-component" && data.id) {
       store.actions.components.select({ _id: data.id })
     } else if (type === "update-prop") {
@@ -151,13 +159,14 @@
         store.actions.components.paste(destination, data.mode)
       }
     } else {
-      console.warning(`Client sent unknown event type: ${type}`)
+      console.warn(`Client sent unknown event type: ${type}`)
     }
   }
 
   const handleKeydownEvent = event => {
+    const { key } = event.data
     if (
-      (event.key === "Delete" || event.key === "Backspace") &&
+      (key === "Delete" || key === "Backspace") &&
       selectedComponentId &&
       ["input", "textarea"].indexOf(
         iframe.contentWindow.document.activeElement?.tagName.toLowerCase()
@@ -54,7 +54,7 @@ export default `
       if (!parsed) {
         return
       }
 
       // Extract data from message
       const {
         selectedComponentId,

@@ -84,17 +84,20 @@ export default `
         if (window.loadBudibase) {
           window.loadBudibase()
           document.documentElement.classList.add("loaded")
-          window.dispatchEvent(new Event("iframe-loaded"))
+          window.parent.postMessage({ type: "iframe-loaded" })
         } else {
           throw "The client library couldn't be loaded"
         }
       } catch (error) {
-        window.dispatchEvent(new CustomEvent("error", { detail: error }))
+        window.parent.postMessage({ type: "error", error })
       }
     }
 
     window.addEventListener("message", receiveMessage)
-    window.dispatchEvent(new Event("ready"))
+    window.addEventListener("keydown", evt => {
+      window.parent.postMessage({ type: "keydown", key: event.key })
+    })
+    window.parent.postMessage({ type: "ready" })
   </script>
 </head>
 <body/>
@@ -1,16 +1,25 @@
 <script>
   import { goto } from "@roxi/routify"
-  import { Button, Heading, Body, Divider, Layout, Modal } from "@budibase/bbui"
+  import {
+    Button,
+    Heading,
+    Body,
+    Divider,
+    Layout,
+    Modal,
+    InlineAlert,
+    ActionButton,
+  } from "@budibase/bbui"
   import { datasources, integrations, queries, tables } from "stores/backend"
   import { notifications } from "@budibase/bbui"
   import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
-  import CreateEditRelationship from "./CreateEditRelationship/CreateEditRelationship.svelte"
-  import DisplayColumnModal from "./modals/EditDisplayColumnsModal.svelte"
+  import CreateEditRelationship from "components/backend/Datasources/CreateEditRelationship.svelte"
+  import CreateExternalTableModal from "./modals/CreateExternalTableModal.svelte"
   import ICONS from "components/backend/DatasourceNavigator/icons"
   import { capitalise } from "helpers"
 
   let relationshipModal
-  let displayColumnModal
+  let createExternalTableModal
   let selectedFromRelationship, selectedToRelationship
 
   $: datasource = $datasources.list.find(ds => ds._id === $datasources.selected)
@@ -19,6 +28,7 @@
     ? Object.values(datasource.entities || {})
     : []
   $: relationships = getRelationships(plusTables)
+  $: schemaError = $datasources.schemaError
 
   function getRelationships(tables) {
     if (!tables || !Array.isArray(tables)) {

@@ -101,8 +111,8 @@
     relationshipModal.show()
   }
 
-  function openDisplayColumnModal() {
-    displayColumnModal.show()
+  function createNewTable() {
+    createExternalTableModal.show()
   }
 </script>
 

@@ -117,8 +127,8 @@
   />
 </Modal>
 
-<Modal bind:this={displayColumnModal}>
-  <DisplayColumnModal {datasource} {plusTables} save={saveDatasource} />
+<Modal bind:this={createExternalTableModal}>
+  <CreateExternalTableModal {datasource} />
 </Modal>
 
 {#if datasource && integration}
@@ -154,15 +164,15 @@
       <div class="query-header">
         <Heading size="S">Tables</Heading>
         <div class="table-buttons">
-          {#if plusTables && plusTables.length !== 0}
-            <Button primary on:click={openDisplayColumnModal}>
-              Update display columns
-            </Button>
-          {/if}
           <div>
-            <Button primary on:click={updateDatasourceSchema}>
+            <ActionButton
+              size="S"
+              quiet
+              icon="DataRefresh"
+              on:click={updateDatasourceSchema}
+            >
               Fetch tables from database
-            </Button>
+            </ActionButton>
           </div>
         </div>
       </div>
@@ -171,6 +181,14 @@
         your tables directly from the database and you can use them without
         having to write any queries at all.
       </Body>
+      {#if schemaError}
+        <InlineAlert
+          type="error"
+          header="Error fetching tables"
+          message={schemaError}
+          onConfirm={datasources.removeSchemaError}
+        />
+      {/if}
       <div class="query-list">
         {#each plusTables as table}
           <div class="query-list-item" on:click={() => onClickTable(table)}>

@@ -179,14 +197,23 @@
             <p>→</p>
           </div>
         {/each}
+        <div class="add-table">
+          <Button cta on:click={createNewTable}>Create new table</Button>
+        </div>
       </div>
       {#if plusTables?.length !== 0}
         <Divider />
         <div class="query-header">
           <Heading size="S">Relationships</Heading>
-          <Button primary on:click={() => openRelationshipModal()}
-            >Create relationship</Button
+          <ActionButton
+            icon="DataCorrelated"
+            primary
+            size="S"
+            quiet
+            on:click={openRelationshipModal}
           >
+            Define existing relationship
+          </ActionButton>
         </div>
         <Body>
           Tell budibase how your tables are related to get even more smart
@@ -301,11 +328,14 @@
 
   .table-buttons {
     display: grid;
-    grid-gap: var(--spacing-l);
     grid-template-columns: 1fr 1fr;
   }
 
   .table-buttons div {
     grid-column-end: -1;
   }
+
+  .add-table {
+    margin-top: var(--spacing-m);
+  }
 </style>
@@ -0,0 +1,45 @@
+<script>
+  import { ModalContent, Body, Input } from "@budibase/bbui"
+  import { tables, datasources } from "stores/backend"
+  import { goto } from "@roxi/routify"
+
+  export let datasource
+
+  let name = ""
+  $: valid = name && name.length > 0 && !datasource?.entities[name]
+  $: error =
+    name && datasource?.entities[name] ? "Table name already in use." : null
+
+  function buildDefaultTable(tableName, datasourceId) {
+    return {
+      name: tableName,
+      type: "external",
+      primary: ["id"],
+      sourceId: datasourceId,
+      schema: {
+        id: {
+          autocolumn: true,
+          type: "number",
+        },
+      },
+    }
+  }
+
+  async function saveTable() {
+    const table = await tables.save(buildDefaultTable(name, datasource._id))
+    await datasources.fetch()
+    $goto(`../../table/${table._id}`)
+  }
+</script>
+
+<ModalContent
+  title="Create new table"
+  confirmText="Create"
+  onConfirm={saveTable}
+  disabled={!valid}
+>
+  <Body
+    >Provide a name for your new table; you can add columns once it is created.</Body
+  >
+  <Input label="Table Name" bind:error bind:value={name} />
+</ModalContent>
@@ -1,43 +0,0 @@
-<script>
-  import { ModalContent, Select, Body } from "@budibase/bbui"
-  import { tables } from "stores/backend"
-
-  export let datasource
-  export let plusTables
-  export let save
-
-  async function saveDisplayColumns() {
-    // be explicit about copying over
-    for (let table of plusTables) {
-      datasource.entities[table.name].primaryDisplay = table.primaryDisplay
-    }
-    save()
-    await tables.fetch()
-  }
-
-  function getColumnOptions(table) {
-    if (!table || !table.schema) {
-      return []
-    }
-    return Object.entries(table.schema)
-      .filter(field => field[1].type !== "link")
-      .map(([fieldName]) => fieldName)
-  }
-</script>
-
-<ModalContent
-  title="Edit display columns"
-  confirmText="Save"
-  onConfirm={saveDisplayColumns}
->
-  <Body
-    >Select the columns that will be shown when displaying relationships.</Body
-  >
-  {#each plusTables as table}
-    <Select
-      label={table.name}
-      options={getColumnOptions(table)}
-      bind:value={table.primaryDisplay}
-    />
-  {/each}
-</ModalContent>
@@ -44,7 +44,7 @@
     }
   } catch (err) {
     console.error(err)
-    notifications.error("Invalid credentials")
+    notifications.error(err.message ? err.message : "Invalid Credentials")
   }
 }
@@ -5,12 +5,35 @@ import api from "../../builderStore/api"
 export const INITIAL_DATASOURCE_VALUES = {
   list: [],
   selected: null,
+  schemaError: null,
 }
 
 export function createDatasourcesStore() {
   const store = writable(INITIAL_DATASOURCE_VALUES)
   const { subscribe, update, set } = store
+
+  async function updateDatasource(response) {
+    if (response.status !== 200) {
+      throw new Error(await response.text())
+    }
+
+    const { datasource, error } = await response.json()
+    update(state => {
+      const currentIdx = state.list.findIndex(ds => ds._id === datasource._id)
+
+      const sources = state.list
+
+      if (currentIdx >= 0) {
+        sources.splice(currentIdx, 1, datasource)
+      } else {
+        sources.push(datasource)
+      }
+
+      return { list: sources, selected: datasource._id, schemaError: error }
+    })
+    return datasource
+  }
 
   return {
     subscribe,
     update,
@@ -46,61 +69,20 @@ export function createDatasourcesStore() {
       let url = `/api/datasources/${datasource._id}/schema`
 
       const response = await api.post(url)
-      const json = await response.json()
-
-      if (response.status !== 200) {
-        throw new Error(json.message)
-      }
-
-      update(state => {
-        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
-
-        const sources = state.list
-
-        if (currentIdx >= 0) {
-          sources.splice(currentIdx, 1, json)
-        } else {
-          sources.push(json)
-        }
-
-        return { list: sources, selected: json._id }
-      })
-      return json
+      return updateDatasource(response)
     },
-    save: async (datasource, fetchSchema = false) => {
+    save: async (body, fetchSchema = false) => {
       let response
-      if (datasource._id) {
-        response = await api.put(
-          `/api/datasources/${datasource._id}`,
-          datasource
-        )
+      if (body._id) {
+        response = await api.put(`/api/datasources/${body._id}`, body)
       } else {
         response = await api.post("/api/datasources", {
-          datasource: datasource,
+          datasource: body,
           fetchSchema,
         })
       }
 
-      const json = await response.json()
-
-      if (response.status !== 200) {
-        throw new Error(json.message)
-      }
-
-      update(state => {
-        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
-
-        const sources = state.list
-
-        if (currentIdx >= 0) {
-          sources.splice(currentIdx, 1, json)
-        } else {
-          sources.push(json)
-        }
-
-        return { list: sources, selected: json._id }
-      })
-      return json
+      return updateDatasource(response)
     },
     delete: async datasource => {
       const response = await api.delete(
@@ -115,6 +97,11 @@ export function createDatasourcesStore() {
 
       return response
     },
+    removeSchemaError: () => {
+      update(state => {
+        return { ...state, schemaError: null }
+      })
+    },
   }
 }

@@ -11,6 +11,7 @@ export function createTablesStore() {
     const tablesResponse = await api.get(`/api/tables`)
     const tables = await tablesResponse.json()
     update(state => ({ ...state, list: tables }))
+    return tables
   }
 
   async function select(table) {
@@ -62,6 +63,9 @@ export function createTablesStore() {
     const response = await api.post(`/api/tables`, updatedTable)
     const savedTable = await response.json()
     await fetch()
+    if (table.type === "external") {
+      await datasources.fetch()
+    }
     await select(savedTable)
     return savedTable
   }

@@ -53,7 +53,7 @@ describe("Datasources Store", () => {
 
     })
 
-    expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE]))
+    expect(get(store).list).toEqual(expect.arrayContaining([SAVE_DATASOURCE.datasource]))
   })
   it("deletes a datasource, updates the store and returns status message", async () => {
     api.get.mockReturnValue({ json: () => SOME_DATASOURCE})
@@ -13,13 +13,15 @@ export const SOME_DATASOURCE = [
 ]
 
 export const SAVE_DATASOURCE = {
-  type: "datasource",
-  name: "CoolDB",
-  source: "REST",
-  config: {
-    url: "localhost",
-    defaultHeaders: {},
+  datasource: {
+    type: "datasource",
+    name: "CoolDB",
+    source: "REST",
+    config: {
+      url: "localhost",
+      defaultHeaders: {},
+    },
+    _id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
+    _rev: "1-4e72002f1011e9392e655948469b7908",
   },
-  _id: "datasource_04b003a7b4a8428eadd3bb2f7eae0255",
-  _rev: "1-4e72002f1011e9392e655948469b7908",
 }
@@ -112,7 +112,7 @@ export function createAuthStore() {
       if (response.status === 200) {
         setUser(json.user)
       } else {
-        throw "Invalid credentials"
+        throw new Error(json.message ? json.message : "Invalid credentials")
       }
       return json
     },
(File diff suppressed because it is too large.)
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {

@@ -19,7 +19,7 @@ The object key is the name of the component, as exported by `index.js`.
 - **bindable** - whether the components provides a bindable value or not
 - **settings** - array of settings displayed in the builder
 
-###Settings Definitions
+### Settings Definitions
 
 The `type` field in each setting is used by the builder to know which component to use to display
 the setting, so it's important that this field is correct. The valid options are:
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",

@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.173-alpha.3",
+    "@budibase/bbui": "^0.9.176-alpha.3",
     "@budibase/standard-components": "^0.9.139",
-    "@budibase/string-templates": "^0.9.173-alpha.3",
+    "@budibase/string-templates": "^0.9.176-alpha.3",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"
@@ -8,6 +8,12 @@
   import { Modal, ModalContent, ActionButton } from "@budibase/bbui"
   import { onDestroy } from "svelte"
 
+  const MessageTypes = {
+    NOTIFICATION: "notification",
+    CLOSE_SCREEN_MODAL: "close-screen-modal",
+    INVALIDATE_DATASOURCE: "invalidate-datasource",
+  }
+
   let iframe
   let listenersAttached = false
@@ -21,32 +27,33 @@
     notificationStore.actions.send(message, type, icon)
   }
 
+  function receiveMessage(message) {
+    const handlers = {
+      [MessageTypes.NOTIFICATION]: () => {
+        proxyNotification(message.data)
+      },
+      [MessageTypes.CLOSE_SCREEN_MODAL]: peekStore.actions.hidePeek,
+      [MessageTypes.INVALIDATE_DATASOURCE]: () => {
+        invalidateDataSource(message.data)
+      },
+    }
+
+    const messageHandler = handlers[message.data.type]
+    if (messageHandler) {
+      messageHandler(message)
+    } else {
+      console.warning("Unknown event type", message?.data?.type)
+    }
+  }
+
   const attachListeners = () => {
     // Mirror datasource invalidation to keep the parent window up to date
-    iframe.contentWindow.addEventListener(
-      "invalidate-datasource",
-      invalidateDataSource
-    )
-    // Listen for a close event to close the screen peek
-    iframe.contentWindow.addEventListener(
-      "close-screen-modal",
-      peekStore.actions.hidePeek
-    )
-    // Proxy notifications back to the parent window instead of iframe
-    iframe.contentWindow.addEventListener("notification", proxyNotification)
+    window.addEventListener("message", receiveMessage)
   }
 
   const handleCancel = () => {
     peekStore.actions.hidePeek()
-    iframe.contentWindow.removeEventListener(
-      "invalidate-datasource",
-      invalidateDataSource
-    )
-    iframe.contentWindow.removeEventListener(
-      "close-screen-modal",
-      peekStore.actions.hidePeek
-    )
-    iframe.contentWindow.removeEventListener("notification", proxyNotification)
+    window.removeEventListener("message", receiveMessage)
   }
 
   const handleFullscreen = () => {
@@ -4,11 +4,7 @@ import { findComponentById, findComponentPathById } from "../utils/components"
 import { pingEndUser } from "../api"
 
 const dispatchEvent = (type, data = {}) => {
-  window.dispatchEvent(
-    new CustomEvent("bb-event", {
-      detail: { type, data },
-    })
-  )
+  window.parent.postMessage({ type, data })
 }
 
 const createBuilderStore = () => {

@@ -26,11 +26,19 @@ const createNotificationStore = () => {
 
   // If peeking, pass notifications back to parent window
   if (get(routeStore).queryParams?.peek) {
-    window.dispatchEvent(
-      new CustomEvent("notification", {
-        detail: { message, type, icon },
-      })
-    )
+    window.parent.postMessage({
+      type: "notification",
+      detail: {
+        message,
+        type,
+        icon,
+      },
+    })
+    // window.dispatchEvent(
+    //   new CustomEvent("notification", {
+    //     detail: { message, type, icon },
+    //   })
+    // )
     return
   }
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "description": "Budibase Web Server",
   "main": "src/index.js",
   "repository": {

@@ -68,9 +68,9 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.173-alpha.3",
-    "@budibase/client": "^0.9.173-alpha.3",
-    "@budibase/string-templates": "^0.9.173-alpha.3",
+    "@budibase/auth": "^0.9.176-alpha.3",
+    "@budibase/client": "^0.9.176-alpha.3",
+    "@budibase/string-templates": "^0.9.176-alpha.3",
     "@elastic/elasticsearch": "7.10.0",
     "@koa/router": "8.0.0",
     "@sendgrid/mail": "7.1.1",
@@ -7,8 +7,9 @@ const {
   BudibaseInternalDB,
   getTableParams,
 } = require("../../db/utils")
+const { BuildSchemaErrors } = require("../../constants")
 const { integrations } = require("../../integrations")
-const { makeExternalQuery } = require("./row/utils")
+const { getDatasourceAndQuery } = require("./row/utils")
 
 exports.fetch = async function (ctx) {
   const database = new CouchDB(ctx.appId)
@@ -43,13 +44,17 @@ exports.buildSchemaFromDb = async function (ctx) {
   const db = new CouchDB(ctx.appId)
   const datasource = await db.get(ctx.params.datasourceId)

-  const tables = await buildSchemaHelper(datasource)
+  const { tables, error } = await buildSchemaHelper(datasource)
   datasource.entities = tables

-  const response = await db.put(datasource)
-  datasource._rev = response.rev
+  const dbResp = await db.put(datasource)
+  datasource._rev = dbResp.rev

-  ctx.body = datasource
+  const response = { datasource }
+  if (error) {
+    response.error = error
+  }
+  ctx.body = response
 }

 exports.update = async function (ctx) {
@@ -71,7 +76,7 @@ exports.update = async function (ctx) {

   ctx.status = 200
   ctx.message = "Datasource saved successfully."
-  ctx.body = datasource
+  ctx.body = { datasource }
 }

 exports.save = async function (ctx) {
@@ -85,13 +90,15 @@ exports.save = async function (ctx) {
     ...ctx.request.body.datasource,
   }

+  let schemaError = null
   if (fetchSchema) {
-    let tables = await buildSchemaHelper(datasource)
+    const { tables, error } = await buildSchemaHelper(datasource)
+    schemaError = error
     datasource.entities = tables
   }

-  const response = await db.put(datasource)
-  datasource._rev = response.rev
+  const dbResp = await db.put(datasource)
+  datasource._rev = dbResp.rev

   // Drain connection pools when configuration is changed
   if (datasource.source) {
@@ -101,9 +108,11 @@ exports.save = async function (ctx) {
     }
   }

-  ctx.status = 200
-  ctx.message = "Datasource saved successfully."
-  ctx.body = datasource
+  const response = { datasource }
+  if (schemaError) {
+    response.error = schemaError
+  }
+  ctx.body = response
 }

 exports.destroy = async function (ctx) {
@@ -129,7 +138,7 @@ exports.find = async function (ctx) {
 exports.query = async function (ctx) {
   const queryJson = ctx.request.body
   try {
-    ctx.body = await makeExternalQuery(ctx.appId, queryJson)
+    ctx.body = await getDatasourceAndQuery(ctx.appId, queryJson)
   } catch (err) {
     ctx.throw(400, err)
   }
@@ -143,5 +152,28 @@ const buildSchemaHelper = async datasource => {
   await connector.buildSchema(datasource._id, datasource.entities)
   datasource.entities = connector.tables

-  return connector.tables
+  // make sure they all have a display name selected
+  for (let entity of Object.values(datasource.entities)) {
+    if (entity.primaryDisplay) {
+      continue
+    }
+    const notAutoColumn = Object.values(entity.schema).find(
+      schema => !schema.autocolumn
+    )
+    if (notAutoColumn) {
+      entity.primaryDisplay = notAutoColumn.name
+    }
+  }
+
+  const errors = connector.schemaErrors
+  let error = null
+  if (errors && Object.keys(errors).length > 0) {
+    const noKeyTables = Object.entries(errors)
+      .filter(entry => entry[1] === BuildSchemaErrors.NO_KEY)
+      .map(([name]) => name)
+    error = `No primary key constraint found for the following: ${noKeyTables.join(
+      ", "
+    )}`
+  }
+  return { tables: connector.tables, error }
 }
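The datasource endpoints above now wrap their result as { datasource } and attach an error string when schema building found tables without a primary key. A sketch of how a caller might consume the new shape; the endpoint path and the fetch wrapper are assumptions, the response fields and the error wording come from the controller code above.

// Sketch of consuming the new response shape.
interface SaveDatasourceResponse {
  datasource: { _id: string; _rev: string; entities?: Record<string, any> }
  error?: string // e.g. "No primary key constraint found for the following: orders, audit_log"
}

async function saveDatasource(body: object): Promise<SaveDatasourceResponse> {
  const res = await fetch("/api/datasources", {
    // hypothetical path, shown only to make the sketch complete
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })
  const json = (await res.json()) as SaveDatasourceResponse
  if (json.error) {
    // tables without a key were skipped rather than failing the whole save
    console.warn(json.error)
  }
  return json
}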
@@ -101,7 +101,9 @@ async function enrichQueryFields(fields, parameters = {}) {
       enrichedQuery[key] = await enrichQueryFields(fields[key], parameters)
     } else if (typeof fields[key] === "string") {
       // enrich string value as normal
-      enrichedQuery[key] = await processString(fields[key], parameters)
+      enrichedQuery[key] = await processString(fields[key], parameters, {
+        noHelpers: true,
+      })
     } else {
       enrichedQuery[key] = fields[key]
     }
@@ -36,7 +36,7 @@ interface RunConfig {
 }

 module External {
-  const { makeExternalQuery } = require("./utils")
+  const { getDatasourceAndQuery } = require("./utils")
   const {
     DataSourceOperation,
     FieldTypes,
@@ -46,6 +46,7 @@ module External {
   const { processObjectSync } = require("@budibase/string-templates")
   const { cloneDeep } = require("lodash/fp")
   const CouchDB = require("../../../db")
+  const { processFormulas } = require("../../../utilities/rowProcessor/utils")

   function buildFilters(
     id: string | undefined,
@@ -225,7 +226,7 @@ module External {
       manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if ((!row[key] && row[key] !== "") || newRow[key] || field.autocolumn) {
+      if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
        continue
      }
      // if its an empty string then it means return the column to null (if possible)
@@ -361,7 +362,7 @@ module External {
         relationships
       )
     }
-    return Object.values(finalRows)
+    return processFormulas(table, Object.values(finalRows))
   }

   /**
@@ -428,7 +429,7 @@ module External {
       const tableId = isMany ? field.through : field.tableId
       const manyKey = field.throughFrom || primaryKey
       const fieldName = isMany ? manyKey : field.fieldName
-      const response = await makeExternalQuery(this.appId, {
+      const response = await getDatasourceAndQuery(this.appId, {
         endpoint: getEndpoint(tableId, DataSourceOperation.READ),
         filters: {
           equal: {
@@ -479,7 +480,7 @@ module External {
           : DataSourceOperation.CREATE
         if (!found) {
           promises.push(
-            makeExternalQuery(appId, {
+            getDatasourceAndQuery(appId, {
               endpoint: getEndpoint(tableId, operation),
               // if we're doing many relationships then we're writing, only one response
               body,
@@ -509,7 +510,7 @@ module External {
           : DataSourceOperation.UPDATE
         const body = isMany ? null : { [colName]: null }
         promises.push(
-          makeExternalQuery(this.appId, {
+          getDatasourceAndQuery(this.appId, {
             endpoint: getEndpoint(tableId, op),
             body,
             filters,
@@ -532,16 +533,17 @@ module External {
     table: Table,
     includeRelations: IncludeRelationships = IncludeRelationships.INCLUDE
   ) {
-    function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
+    function extractRealFields(table: Table, existing: string[] = []) {
       return Object.entries(table.schema)
         .filter(
           column =>
             column[1].type !== FieldTypes.LINK &&
+            column[1].type !== FieldTypes.FORMULA &&
             !existing.find((field: string) => field === column[0])
         )
         .map(column => `${table.name}.${column[0]}`)
     }
-    let fields = extractNonLinkFieldNames(table)
+    let fields = extractRealFields(table)
     for (let field of Object.values(table.schema)) {
       if (field.type !== FieldTypes.LINK || !includeRelations) {
         continue
@@ -549,7 +551,7 @@ module External {
       const { tableName: linkTableName } = breakExternalTableId(field.tableId)
       const linkTable = this.tables[linkTableName]
       if (linkTable) {
-        const linkedFields = extractNonLinkFieldNames(linkTable, fields)
+        const linkedFields = extractRealFields(linkTable, fields)
         fields = fields.concat(linkedFields)
       }
     }
@@ -609,7 +611,7 @@ module External {
       },
     }
     // can't really use response right now
-    const response = await makeExternalQuery(appId, json)
+    const response = await getDatasourceAndQuery(appId, json)
     // handle many to many relationships now if we know the ID (could be auto increment)
     if (
       operation !== DataSourceOperation.READ &&
@@ -4,8 +4,8 @@ const CouchDB = require("../../../db")
 const { InternalTables } = require("../../../db/utils")
 const userController = require("../user")
 const { FieldTypes } = require("../../../constants")
-const { integrations } = require("../../../integrations")
 const { processStringSync } = require("@budibase/string-templates")
+const { makeExternalQuery } = require("../../../integrations/base/utils")

 validateJs.extend(validateJs.validators.datetime, {
   parse: function (value) {
@@ -17,18 +17,11 @@ validateJs.extend(validateJs.validators.datetime, {
   },
 })

-exports.makeExternalQuery = async (appId, json) => {
+exports.getDatasourceAndQuery = async (appId, json) => {
   const datasourceId = json.endpoint.datasourceId
   const db = new CouchDB(appId)
   const datasource = await db.get(datasourceId)
-  const Integration = integrations[datasource.source]
-  // query is the opinionated function
-  if (Integration.prototype.query) {
-    const integration = new Integration(datasource.config)
-    return integration.query(json)
-  } else {
-    throw "Datasource does not support query."
-  }
+  return makeExternalQuery(datasource, json)
 }

 exports.findRow = async (ctx, db, tableId, rowId) => {
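The renamed helper above now only resolves the datasource document and hands off to the shared makeExternalQuery in integrations/base/utils (added further down in this diff). A sketch of the resulting call shapes, with the CouchDB lookup stubbed; only the two function names and their argument orders are taken from the diff, everything else is illustrative.

type SketchQueryJson = { endpoint: { datasourceId: string; entityId: string; operation: string } }

async function makeExternalQuery(datasource: any, json: SketchQueryJson) {
  // the integrations layer picks the integration by datasource.source and calls query(json)
  return { ran: json.endpoint.operation, against: datasource.source }
}

async function getDatasourceAndQuery(appId: string, json: SketchQueryJson) {
  // the row utility only resolves the datasource document, then defers to makeExternalQuery
  const datasource = await fakeCouchGet(appId, json.endpoint.datasourceId)
  return makeExternalQuery(datasource, json)
}

async function fakeCouchGet(appId: string, datasourceId: string) {
  // stubbed CouchDB lookup; a real one would use new CouchDB(appId).get(datasourceId)
  return { _id: datasourceId, source: "POSTGRES", config: {} }
}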
@ -0,0 +1,276 @@
|
||||||
|
const CouchDB = require("../../../db")
|
||||||
|
const {
|
||||||
|
buildExternalTableId,
|
||||||
|
breakExternalTableId,
|
||||||
|
} = require("../../../integrations/utils")
|
||||||
|
const {
|
||||||
|
getTable,
|
||||||
|
generateForeignKey,
|
||||||
|
generateJunctionTableName,
|
||||||
|
foreignKeyStructure,
|
||||||
|
} = require("./utils")
|
||||||
|
const {
|
||||||
|
DataSourceOperation,
|
||||||
|
FieldTypes,
|
||||||
|
RelationshipTypes,
|
||||||
|
} = require("../../../constants")
|
||||||
|
const { makeExternalQuery } = require("../../../integrations/base/utils")
|
||||||
|
const { cloneDeep } = require("lodash/fp")
|
||||||
|
|
||||||
|
async function makeTableRequest(
|
||||||
|
datasource,
|
||||||
|
operation,
|
||||||
|
table,
|
||||||
|
tables,
|
||||||
|
oldTable = null
|
||||||
|
) {
|
||||||
|
const json = {
|
||||||
|
endpoint: {
|
||||||
|
datasourceId: datasource._id,
|
||||||
|
entityId: table._id,
|
||||||
|
operation,
|
||||||
|
},
|
||||||
|
meta: {
|
||||||
|
tables,
|
||||||
|
},
|
||||||
|
table,
|
||||||
|
}
|
||||||
|
if (oldTable) {
|
||||||
|
json.meta.table = oldTable
|
||||||
|
}
|
||||||
|
return makeExternalQuery(datasource, json)
|
||||||
|
}
|
||||||
|
|
||||||
|
function cleanupRelationships(table, tables, oldTable = null) {
|
||||||
|
const tableToIterate = oldTable ? oldTable : table
|
||||||
|
// clean up relationships in couch table schemas
|
||||||
|
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
|
||||||
|
if (
|
||||||
|
schema.type === FieldTypes.LINK &&
|
||||||
|
(!oldTable || table.schema[key] == null)
|
||||||
|
) {
|
||||||
|
const relatedTable = Object.values(tables).find(
|
||||||
|
table => table._id === schema.tableId
|
||||||
|
)
|
||||||
|
const foreignKey = schema.foreignKey
|
||||||
|
if (!relatedTable || !foreignKey) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for (let [relatedKey, relatedSchema] of Object.entries(
|
||||||
|
relatedTable.schema
|
||||||
|
)) {
|
||||||
|
if (
|
||||||
|
relatedSchema.type === FieldTypes.LINK &&
|
||||||
|
relatedSchema.fieldName === foreignKey
|
||||||
|
) {
|
||||||
|
delete relatedTable.schema[relatedKey]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDatasourceId(table) {
|
||||||
|
if (!table) {
|
||||||
|
throw "No table supplied"
|
||||||
|
}
|
||||||
|
if (table.sourceId) {
|
||||||
|
return table.sourceId
|
||||||
|
}
|
||||||
|
return breakExternalTableId(table._id).datasourceId
|
||||||
|
}
|
||||||
|
|
||||||
|
function otherRelationshipType(type) {
|
||||||
|
if (type === RelationshipTypes.MANY_TO_MANY) {
|
||||||
|
return RelationshipTypes.MANY_TO_MANY
|
||||||
|
}
|
||||||
|
return type === RelationshipTypes.ONE_TO_MANY
|
||||||
|
? RelationshipTypes.MANY_TO_ONE
|
||||||
|
: RelationshipTypes.ONE_TO_MANY
|
||||||
|
}
|
||||||
|
|
||||||
|
function generateManyLinkSchema(datasource, column, table, relatedTable) {
|
||||||
|
const primary = table.name + table.primary[0]
|
||||||
|
const relatedPrimary = relatedTable.name + relatedTable.primary[0]
|
||||||
|
const jcTblName = generateJunctionTableName(column, table, relatedTable)
|
||||||
|
// first create the new table
|
||||||
|
const junctionTable = {
|
||||||
|
_id: buildExternalTableId(datasource._id, jcTblName),
|
||||||
|
name: jcTblName,
|
||||||
|
primary: [primary, relatedPrimary],
|
||||||
|
constrained: [primary, relatedPrimary],
|
||||||
|
schema: {
|
||||||
|
[primary]: foreignKeyStructure(primary, {
|
||||||
|
toTable: table.name,
|
||||||
|
toKey: table.primary[0],
|
||||||
|
}),
|
||||||
|
[relatedPrimary]: foreignKeyStructure(relatedPrimary, {
|
||||||
|
toTable: relatedTable.name,
|
||||||
|
toKey: relatedTable.primary[0],
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
column.through = junctionTable._id
|
||||||
|
column.throughFrom = primary
|
||||||
|
column.throughTo = relatedPrimary
|
||||||
|
column.fieldName = relatedPrimary
|
||||||
|
return junctionTable
|
||||||
|
}
|
||||||
|
|
||||||
|
function generateLinkSchema(column, table, relatedTable, type) {
|
||||||
|
const isOneSide = type === RelationshipTypes.ONE_TO_MANY
|
||||||
|
const primary = isOneSide ? relatedTable.primary[0] : table.primary[0]
|
||||||
|
// generate a foreign key
|
||||||
|
const foreignKey = generateForeignKey(column, relatedTable)
|
||||||
|
column.relationshipType = type
|
||||||
|
column.foreignKey = isOneSide ? foreignKey : primary
|
||||||
|
column.fieldName = isOneSide ? primary : foreignKey
|
||||||
|
return foreignKey
|
||||||
|
}
|
||||||
|
|
||||||
|
function generateRelatedSchema(linkColumn, table, relatedTable, columnName) {
|
||||||
|
// generate column for other table
|
||||||
|
const relatedSchema = cloneDeep(linkColumn)
|
||||||
|
// swap them from the main link
|
||||||
|
if (linkColumn.foreignKey) {
|
||||||
|
relatedSchema.fieldName = linkColumn.foreignKey
|
||||||
|
relatedSchema.foreignKey = linkColumn.fieldName
|
||||||
|
}
|
||||||
|
// is many to many
|
||||||
|
else {
|
||||||
|
// don't need to copy through, already got it
|
||||||
|
relatedSchema.fieldName = linkColumn.throughFrom
|
||||||
|
relatedSchema.throughTo = linkColumn.throughFrom
|
||||||
|
relatedSchema.throughFrom = linkColumn.throughTo
|
||||||
|
}
|
||||||
|
relatedSchema.relationshipType = otherRelationshipType(
|
||||||
|
linkColumn.relationshipType
|
||||||
|
)
|
||||||
|
relatedSchema.tableId = relatedTable._id
|
||||||
|
relatedSchema.name = columnName
|
||||||
|
table.schema[columnName] = relatedSchema
|
||||||
|
}
|
||||||
|
|
||||||
|
function isRelationshipSetup(column) {
|
||||||
|
return column.foreignKey || column.through
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.save = async function (ctx) {
|
||||||
|
const appId = ctx.appId
|
||||||
|
const table = ctx.request.body
|
||||||
|
// can't do this
|
||||||
|
delete table.dataImport
|
||||||
|
const datasourceId = getDatasourceId(ctx.request.body)
|
||||||
|
let tableToSave = {
|
||||||
|
type: "table",
|
||||||
|
_id: buildExternalTableId(datasourceId, table.name),
|
||||||
|
...table,
|
||||||
|
}
|
||||||
|
|
||||||
|
let oldTable
|
||||||
|
if (ctx.request.body && ctx.request.body._id) {
|
||||||
|
oldTable = await getTable(appId, ctx.request.body._id)
|
||||||
|
}
|
||||||
|
|
||||||
|
const db = new CouchDB(appId)
|
||||||
|
const datasource = await db.get(datasourceId)
|
||||||
|
const oldTables = cloneDeep(datasource.entities)
|
||||||
|
const tables = datasource.entities
|
||||||
|
|
||||||
|
const extraTablesToUpdate = []
|
||||||
|
|
||||||
|
// check if relations need setup
|
||||||
|
for (let schema of Object.values(tableToSave.schema)) {
|
||||||
|
if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const relatedTable = Object.values(tables).find(
|
||||||
|
table => table._id === schema.tableId
|
||||||
|
)
|
||||||
|
const relatedColumnName = schema.fieldName
|
||||||
|
const relationType = schema.relationshipType
|
||||||
|
if (relationType === RelationshipTypes.MANY_TO_MANY) {
|
||||||
|
const junctionTable = generateManyLinkSchema(
|
||||||
|
datasource,
|
||||||
|
schema,
|
||||||
|
table,
|
||||||
|
relatedTable
|
||||||
|
)
|
||||||
|
if (tables[junctionTable.name]) {
|
||||||
|
throw "Junction table already exists, cannot create another relationship."
|
||||||
|
}
|
||||||
|
tables[junctionTable.name] = junctionTable
|
||||||
|
extraTablesToUpdate.push(junctionTable)
|
||||||
|
} else {
|
||||||
|
const fkTable =
|
||||||
|
relationType === RelationshipTypes.ONE_TO_MANY ? table : relatedTable
|
||||||
|
const foreignKey = generateLinkSchema(
|
||||||
|
schema,
|
||||||
|
table,
|
||||||
|
relatedTable,
|
||||||
|
relationType
|
||||||
|
)
|
||||||
|
fkTable.schema[foreignKey] = foreignKeyStructure(foreignKey)
|
||||||
|
if (fkTable.constrained == null) {
|
||||||
|
fkTable.constrained = []
|
||||||
|
}
|
||||||
|
if (fkTable.constrained.indexOf(foreignKey) === -1) {
|
||||||
|
fkTable.constrained.push(foreignKey)
|
||||||
|
}
|
||||||
|
// foreign key is in other table, need to save it to external
|
||||||
|
if (fkTable._id !== table._id) {
|
||||||
|
extraTablesToUpdate.push(fkTable)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
generateRelatedSchema(schema, relatedTable, table, relatedColumnName)
|
||||||
|
schema.main = true
|
||||||
|
}
|
||||||
|
|
||||||
|
cleanupRelationships(tableToSave, tables, oldTable)
|
||||||
|
|
||||||
|
const operation = oldTable
|
||||||
|
? DataSourceOperation.UPDATE_TABLE
|
||||||
|
: DataSourceOperation.CREATE_TABLE
|
||||||
|
await makeTableRequest(datasource, operation, tableToSave, tables, oldTable)
|
||||||
|
// update any extra tables (like foreign keys in other tables)
|
||||||
|
for (let extraTable of extraTablesToUpdate) {
|
||||||
|
const oldExtraTable = oldTables[extraTable.name]
|
||||||
|
let op = oldExtraTable
|
||||||
|
? DataSourceOperation.UPDATE_TABLE
|
||||||
|
: DataSourceOperation.CREATE_TABLE
|
||||||
|
await makeTableRequest(datasource, op, extraTable, tables, oldExtraTable)
|
||||||
|
}
|
||||||
|
|
||||||
|
// make sure the constrained list, all still exist
|
||||||
|
if (Array.isArray(tableToSave.constrained)) {
|
||||||
|
tableToSave.constrained = tableToSave.constrained.filter(constraint =>
|
||||||
|
Object.keys(tableToSave.schema).includes(constraint)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// store it into couch now for budibase reference
|
||||||
|
datasource.entities[tableToSave.name] = tableToSave
|
||||||
|
await db.put(datasource)
|
||||||
|
|
||||||
|
return tableToSave
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.destroy = async function (ctx) {
|
||||||
|
const appId = ctx.appId
|
||||||
|
const tableToDelete = await getTable(appId, ctx.params.tableId)
|
||||||
|
const datasourceId = getDatasourceId(tableToDelete)
|
||||||
|
|
||||||
|
const db = new CouchDB(appId)
|
||||||
|
const datasource = await db.get(datasourceId)
|
||||||
|
const tables = datasource.entities
|
||||||
|
|
||||||
|
const operation = DataSourceOperation.DELETE_TABLE
|
||||||
|
await makeTableRequest(datasource, operation, tableToDelete, tables)
|
||||||
|
|
||||||
|
cleanupRelationships(tableToDelete, tables)
|
||||||
|
|
||||||
|
delete datasource.entities[tableToDelete.name]
|
||||||
|
await db.put(datasource)
|
||||||
|
|
||||||
|
return tableToDelete
|
||||||
|
}
|
|
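A worked example of how the relationship helpers used by the new external table controller above name their artifacts, using two hypothetical tables. The template strings are the ones defined by generateJunctionTableName, generateForeignKey and generateManyLinkSchema in this diff; the table and column values are made up.

// Hypothetical tables "students" and "subjects" linked many-to-many.
const table = { name: "students", primary: ["id"] }
const relatedTable = { name: "subjects", primary: ["id"] }
const column = { name: "subjects", fieldName: "students" }

// jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}
const junctionName = `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
// -> "jt_students_subjects_subjects_students"

// generateManyLinkSchema keys the junction table by "<table name><primary key>" on each side
const primary = table.name + table.primary[0]                      // "studentsid"
const relatedPrimary = relatedTable.name + relatedTable.primary[0] // "subjectsid"
// it then points column.through at the junction table id and sets
// column.throughFrom / column.throughTo to these two keys.
console.log(junctionName, primary, relatedPrimary)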
@@ -1,16 +1,28 @@
 const CouchDB = require("../../../db")
-const linkRows = require("../../../db/linkedRows")
+const internal = require("./internal")
+const external = require("./external")
 const csvParser = require("../../../utilities/csvParser")
+const { isExternalTable } = require("../../../integrations/utils")
 const {
-  getRowParams,
   getTableParams,
-  generateTableID,
   getDatasourceParams,
   BudibaseInternalDB,
 } = require("../../../db/utils")
-const { FieldTypes } = require("../../../constants")
-const { TableSaveFunctions, getTable } = require("./utils")
+const { getTable } = require("./utils")

+function pickApi({ tableId, table }) {
+  if (table && !tableId) {
+    tableId = table._id
+  }
+  if (table && table.type === "external") {
+    return external
+  } else if (tableId && isExternalTable(tableId)) {
+    return external
+  }
+  return internal
+}
+
+// covers both internal and external
 exports.fetch = async function (ctx) {
   const db = new CouchDB(ctx.appId)
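The save and destroy handlers later in this file delegate through pickApi, so internal (CouchDB) and external (SQL) tables share one HTTP surface while getting different implementations. A usage sketch with both implementations stubbed; the external table id prefix checked by isExternalTable is an assumption here, only the pickApi logic and the pickApi({ table }).save(ctx) / pickApi({ tableId }).destroy(ctx) call shapes come from the diff.

// Stubbed implementations so the dispatch is runnable in isolation.
const internalApi = { save: async (_ctx: any) => ({ saved: "couch" }) }
const externalApi = { save: async (_ctx: any) => ({ saved: "sql" }) }
// assumed prefix; the real check lives in integrations/utils.isExternalTable
const isExternalTable = (tableId: string) => tableId.startsWith("datasource_plus_")

function pickApiSketch({ tableId, table }: { tableId?: string; table?: any }) {
  if (table && !tableId) {
    tableId = table._id
  }
  if ((table && table.type === "external") || (tableId && isExternalTable(tableId))) {
    return externalApi
  }
  return internalApi
}

// a Budibase-internal table is routed to the Couch implementation...
pickApiSketch({ table: { _id: "ta_1234", type: "table" } }).save({}).then(console.log)
// ...while an external (SQL) table goes to the new external controller
pickApiSketch({ tableId: "datasource_plus_abc__orders" }).save({}).then(console.log)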
@ -50,143 +62,23 @@ exports.find = async function (ctx) {
|
||||||
|
|
||||||
exports.save = async function (ctx) {
|
exports.save = async function (ctx) {
|
||||||
const appId = ctx.appId
|
const appId = ctx.appId
|
||||||
const db = new CouchDB(appId)
|
const table = ctx.request.body
|
||||||
const { dataImport, ...rest } = ctx.request.body
|
const savedTable = await pickApi({ table }).save(ctx)
|
||||||
let tableToSave = {
|
|
||||||
type: "table",
|
|
||||||
_id: generateTableID(),
|
|
||||||
views: {},
|
|
||||||
...rest,
|
|
||||||
}
|
|
||||||
|
|
||||||
// if the table obj had an _id then it will have been retrieved
|
|
||||||
let oldTable
|
|
||||||
if (ctx.request.body && ctx.request.body._id) {
|
|
||||||
oldTable = await db.get(ctx.request.body._id)
|
|
||||||
}
|
|
||||||
|
|
||||||
// saving a table is a complex operation, involving many different steps, this
|
|
||||||
// has been broken out into a utility to make it more obvious/easier to manipulate
|
|
||||||
const tableSaveFunctions = new TableSaveFunctions({
|
|
||||||
db,
|
|
||||||
ctx,
|
|
||||||
oldTable,
|
|
||||||
dataImport,
|
|
||||||
})
|
|
||||||
tableToSave = await tableSaveFunctions.before(tableToSave)
|
|
||||||
|
|
||||||
// make sure that types don't change of a column, have to remove
|
|
||||||
// the column if you want to change the type
|
|
||||||
if (oldTable && oldTable.schema) {
|
|
||||||
for (let propKey of Object.keys(tableToSave.schema)) {
|
|
||||||
let column = tableToSave.schema[propKey]
|
|
||||||
let oldColumn = oldTable.schema[propKey]
|
|
||||||
if (oldColumn && oldColumn.type === "internal") {
|
|
||||||
oldColumn.type = "auto"
|
|
||||||
}
|
|
||||||
if (oldColumn && oldColumn.type !== column.type) {
|
|
||||||
ctx.throw(400, "Cannot change the type of a column")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Don't rename if the name is the same
|
|
||||||
let { _rename } = tableToSave
|
|
||||||
/* istanbul ignore next */
|
|
||||||
if (_rename && _rename.old === _rename.updated) {
|
|
||||||
_rename = null
|
|
||||||
delete tableToSave._rename
|
|
||||||
}
|
|
||||||
|
|
||||||
// rename row fields when table column is renamed
|
|
||||||
/* istanbul ignore next */
|
|
||||||
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
|
|
||||||
ctx.throw(400, "Cannot rename a linked column.")
|
|
||||||
}
|
|
||||||
|
|
||||||
tableToSave = await tableSaveFunctions.mid(tableToSave)
|
|
||||||
|
|
||||||
// update schema of non-statistics views when new columns are added
|
|
||||||
for (let view in tableToSave.views) {
|
|
||||||
const tableView = tableToSave.views[view]
|
|
||||||
if (!tableView) continue
|
|
||||||
|
|
||||||
if (tableView.schema.group || tableView.schema.field) continue
|
|
||||||
tableView.schema = tableToSave.schema
|
|
||||||
}
|
|
||||||
|
|
||||||
// update linked rows
|
|
||||||
try {
|
|
||||||
const linkResp = await linkRows.updateLinks({
|
|
||||||
appId,
|
|
||||||
eventType: oldTable
|
|
||||||
? linkRows.EventType.TABLE_UPDATED
|
|
||||||
: linkRows.EventType.TABLE_SAVE,
|
|
||||||
table: tableToSave,
|
|
||||||
oldTable: oldTable,
|
|
||||||
})
|
|
||||||
if (linkResp != null && linkResp._rev) {
|
|
||||||
tableToSave._rev = linkResp._rev
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
ctx.throw(400, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
// don't perform any updates until relationships have been
|
|
||||||
// checked by the updateLinks function
|
|
||||||
const updatedRows = tableSaveFunctions.getUpdatedRows()
|
|
||||||
if (updatedRows && updatedRows.length !== 0) {
|
|
||||||
await db.bulkDocs(updatedRows)
|
|
||||||
}
|
|
||||||
const result = await db.put(tableToSave)
|
|
||||||
tableToSave._rev = result.rev
|
|
||||||
|
|
||||||
tableToSave = await tableSaveFunctions.after(tableToSave)
|
|
||||||
|
|
||||||
ctx.eventEmitter &&
|
|
||||||
ctx.eventEmitter.emitTable(`table:save`, appId, tableToSave)
|
|
||||||
|
|
||||||
ctx.status = 200
|
ctx.status = 200
|
||||||
ctx.message = `Table ${ctx.request.body.name} saved successfully.`
|
ctx.message = `Table ${table.name} saved successfully.`
|
||||||
ctx.body = tableToSave
|
ctx.eventEmitter &&
|
||||||
|
ctx.eventEmitter.emitTable(`table:save`, appId, savedTable)
|
||||||
|
ctx.body = savedTable
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.destroy = async function (ctx) {
|
exports.destroy = async function (ctx) {
|
||||||
const appId = ctx.appId
|
const appId = ctx.appId
|
||||||
const db = new CouchDB(appId)
|
const tableId = ctx.params.tableId
|
||||||
const tableToDelete = await db.get(ctx.params.tableId)
|
const deletedTable = await pickApi({ tableId }).destroy(ctx)
|
||||||
|
|
||||||
// Delete all rows for that table
|
|
||||||
const rows = await db.allDocs(
|
|
||||||
getRowParams(ctx.params.tableId, null, {
|
|
||||||
include_docs: true,
|
|
||||||
})
|
|
||||||
)
|
|
||||||
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
|
|
||||||
|
|
||||||
// update linked rows
|
|
||||||
await linkRows.updateLinks({
|
|
||||||
appId,
|
|
||||||
eventType: linkRows.EventType.TABLE_DELETE,
|
|
||||||
table: tableToDelete,
|
|
||||||
})
|
|
||||||
|
|
||||||
// don't remove the table itself until very end
|
|
||||||
await db.remove(tableToDelete)
|
|
||||||
|
|
||||||
// remove table search index
|
|
||||||
const currentIndexes = await db.getIndexes()
|
|
||||||
const existingIndex = currentIndexes.indexes.find(
|
|
||||||
existing => existing.name === `search:${ctx.params.tableId}`
|
|
||||||
)
|
|
||||||
if (existingIndex) {
|
|
||||||
await db.deleteIndex(existingIndex)
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx.eventEmitter &&
|
ctx.eventEmitter &&
|
||||||
ctx.eventEmitter.emitTable(`table:delete`, appId, tableToDelete)
|
ctx.eventEmitter.emitTable(`table:delete`, appId, deletedTable)
|
||||||
ctx.status = 200
|
ctx.status = 200
|
||||||
ctx.body = { message: `Table ${ctx.params.tableId} deleted.` }
|
ctx.body = { message: `Table ${tableId} deleted.` }
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.validateCSVSchema = async function (ctx) {
|
exports.validateCSVSchema = async function (ctx) {
|
||||||
|
|
|
@ -0,0 +1,138 @@
|
||||||
|
const CouchDB = require("../../../db")
|
||||||
|
const linkRows = require("../../../db/linkedRows")
|
||||||
|
const { getRowParams, generateTableID } = require("../../../db/utils")
|
||||||
|
const { FieldTypes } = require("../../../constants")
|
||||||
|
const { TableSaveFunctions } = require("./utils")
|
||||||
|
|
||||||
|
exports.save = async function (ctx) {
|
||||||
|
const appId = ctx.appId
|
||||||
|
const db = new CouchDB(appId)
|
||||||
|
const { dataImport, ...rest } = ctx.request.body
|
||||||
|
let tableToSave = {
|
||||||
|
type: "table",
|
||||||
|
_id: generateTableID(),
|
||||||
|
views: {},
|
||||||
|
...rest,
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the table obj had an _id then it will have been retrieved
|
||||||
|
let oldTable
|
||||||
|
if (ctx.request.body && ctx.request.body._id) {
|
||||||
|
oldTable = await db.get(ctx.request.body._id)
|
||||||
|
}
|
||||||
|
|
||||||
|
// saving a table is a complex operation, involving many different steps, this
|
||||||
|
// has been broken out into a utility to make it more obvious/easier to manipulate
|
||||||
|
const tableSaveFunctions = new TableSaveFunctions({
|
||||||
|
db,
|
||||||
|
ctx,
|
||||||
|
oldTable,
|
||||||
|
dataImport,
|
||||||
|
})
|
||||||
|
tableToSave = await tableSaveFunctions.before(tableToSave)
|
||||||
|
|
||||||
|
// make sure that types don't change of a column, have to remove
|
||||||
|
// the column if you want to change the type
|
||||||
|
if (oldTable && oldTable.schema) {
|
||||||
|
for (let propKey of Object.keys(tableToSave.schema)) {
|
||||||
|
let column = tableToSave.schema[propKey]
|
||||||
|
let oldColumn = oldTable.schema[propKey]
|
||||||
|
if (oldColumn && oldColumn.type === "internal") {
|
||||||
|
oldColumn.type = "auto"
|
||||||
|
}
|
||||||
|
if (oldColumn && oldColumn.type !== column.type) {
|
||||||
|
ctx.throw(400, "Cannot change the type of a column")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't rename if the name is the same
|
||||||
|
let { _rename } = tableToSave
|
||||||
|
/* istanbul ignore next */
|
||||||
|
if (_rename && _rename.old === _rename.updated) {
|
||||||
|
_rename = null
|
||||||
|
delete tableToSave._rename
|
||||||
|
}
|
||||||
|
|
||||||
|
// rename row fields when table column is renamed
|
||||||
|
/* istanbul ignore next */
|
||||||
|
if (_rename && tableToSave.schema[_rename.updated].type === FieldTypes.LINK) {
|
||||||
|
ctx.throw(400, "Cannot rename a linked column.")
|
||||||
|
}
|
||||||
|
|
||||||
|
tableToSave = await tableSaveFunctions.mid(tableToSave)
|
||||||
|
|
||||||
|
// update schema of non-statistics views when new columns are added
|
||||||
|
for (let view in tableToSave.views) {
|
||||||
|
const tableView = tableToSave.views[view]
|
||||||
|
if (!tableView) continue
|
||||||
|
|
||||||
|
if (tableView.schema.group || tableView.schema.field) continue
|
||||||
|
tableView.schema = tableToSave.schema
|
||||||
|
}
|
||||||
|
|
||||||
|
// update linked rows
|
||||||
|
try {
|
||||||
|
const linkResp = await linkRows.updateLinks({
|
||||||
|
appId,
|
||||||
|
eventType: oldTable
|
||||||
|
? linkRows.EventType.TABLE_UPDATED
|
||||||
|
: linkRows.EventType.TABLE_SAVE,
|
||||||
|
table: tableToSave,
|
||||||
|
oldTable: oldTable,
|
||||||
|
})
|
||||||
|
if (linkResp != null && linkResp._rev) {
|
||||||
|
tableToSave._rev = linkResp._rev
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
ctx.throw(400, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// don't perform any updates until relationships have been
|
||||||
|
// checked by the updateLinks function
|
||||||
|
const updatedRows = tableSaveFunctions.getUpdatedRows()
|
||||||
|
if (updatedRows && updatedRows.length !== 0) {
|
||||||
|
await db.bulkDocs(updatedRows)
|
||||||
|
}
|
||||||
|
const result = await db.put(tableToSave)
|
||||||
|
tableToSave._rev = result.rev
|
||||||
|
|
||||||
|
tableToSave = await tableSaveFunctions.after(tableToSave)
|
||||||
|
|
||||||
|
return tableToSave
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.destroy = async function (ctx) {
|
||||||
|
const appId = ctx.appId
|
||||||
|
const db = new CouchDB(appId)
|
||||||
|
const tableToDelete = await db.get(ctx.params.tableId)
|
||||||
|
|
||||||
|
// Delete all rows for that table
|
||||||
|
const rows = await db.allDocs(
|
||||||
|
getRowParams(ctx.params.tableId, null, {
|
||||||
|
include_docs: true,
|
||||||
|
})
|
||||||
|
)
|
||||||
|
await db.bulkDocs(rows.rows.map(row => ({ ...row.doc, _deleted: true })))
|
||||||
|
|
||||||
|
// update linked rows
|
||||||
|
await linkRows.updateLinks({
|
||||||
|
appId,
|
||||||
|
eventType: linkRows.EventType.TABLE_DELETE,
|
||||||
|
table: tableToDelete,
|
||||||
|
})
|
||||||
|
|
||||||
|
// don't remove the table itself until very end
|
||||||
|
await db.remove(tableToDelete)
|
||||||
|
|
||||||
|
// remove table search index
|
||||||
|
const currentIndexes = await db.getIndexes()
|
||||||
|
const existingIndex = currentIndexes.indexes.find(
|
||||||
|
existing => existing.name === `search:${ctx.params.tableId}`
|
||||||
|
)
|
||||||
|
if (existingIndex) {
|
||||||
|
await db.deleteIndex(existingIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
return tableToDelete
|
||||||
|
}
|
|
@@ -315,4 +315,24 @@ exports.checkForViewUpdates = async (db, table, rename, deletedColumns) => {
   }
 }

+exports.generateForeignKey = (column, relatedTable) => {
+  return `fk_${relatedTable.name}_${column.fieldName}`
+}
+
+exports.generateJunctionTableName = (column, table, relatedTable) => {
+  return `jt_${table.name}_${relatedTable.name}_${column.name}_${column.fieldName}`
+}
+
+exports.foreignKeyStructure = (keyName, meta = null) => {
+  const structure = {
+    type: FieldTypes.NUMBER,
+    constraints: {},
+    name: keyName,
+  }
+  if (meta) {
+    structure.meta = meta
+  }
+  return structure
+}
+
 exports.TableSaveFunctions = TableSaveFunctions
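foreignKeyStructure above is the Budibase-side column definition written for generated foreign keys. A hypothetical example of its output for a link between orders and customers; the fk_ naming and the meta shape come from the utilities above, while the table names and the literal string value assumed for FieldTypes.NUMBER are illustrative.

// generateForeignKey(column, relatedTable) with made-up names
const foreignKey = `fk_customers_orders`

// what foreignKeyStructure(foreignKey, { toTable: "customers", toKey: "id" }) would return,
// assuming FieldTypes.NUMBER resolves to "number"; meta is only passed for junction-table columns
const fkColumn = {
  type: "number",
  constraints: {},
  name: foreignKey,
  meta: { toTable: "customers", toKey: "id" },
}
console.log(fkColumn)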
@@ -26,8 +26,8 @@ describe("/datasources", () => {
         .expect('Content-Type', /json/)
         .expect(200)

-      expect(res.res.statusMessage).toEqual("Datasource saved successfully.")
-      expect(res.body.name).toEqual("Test")
+      expect(res.body.datasource.name).toEqual("Test")
+      expect(res.body.errors).toBeUndefined()
     })
   })
@@ -1,7 +1,6 @@
 // mock out postgres for this
 jest.mock("pg")

-const { findLastKey } = require("lodash/fp")
 const setup = require("./utilities")
 const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
 const { basicQuery, basicDatasource } = setup.structures
@@ -62,6 +62,9 @@ exports.DataSourceOperation = {
   READ: "READ",
   UPDATE: "UPDATE",
   DELETE: "DELETE",
+  CREATE_TABLE: "CREATE_TABLE",
+  UPDATE_TABLE: "UPDATE_TABLE",
+  DELETE_TABLE: "DELETE_TABLE",
 }

 exports.SortDirection = {
@@ -152,5 +155,9 @@ exports.MetadataTypes = {
   AUTOMATION_TEST_HISTORY: "automationTestHistory",
 }

+exports.BuildSchemaErrors = {
+  NO_KEY: "no_key",
+}
+
 // pass through the list from the auth/core lib
 exports.ObjectStoreBuckets = ObjectStoreBuckets
@@ -17,6 +17,11 @@ export interface FieldSchema {
   autocolumn?: boolean
   throughFrom?: string
   throughTo?: string
+  main?: boolean
+  meta?: {
+    toTable: string
+    toKey: string
+  }
   constraints?: {
     type?: string
     email?: boolean
@@ -36,11 +41,12 @@ export interface TableSchema {
 export interface Table extends Base {
   type?: string
   views?: {}
-  name?: string
+  name: string
   primary?: string[]
   schema: TableSchema
   primaryDisplay?: string
   sourceId?: string
+  constrained?: string[]
 }

 export interface Row extends Base {
@@ -5,6 +5,9 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  CREATE_TABLE = "CREATE_TABLE",
+  UPDATE_TABLE = "UPDATE_TABLE",
+  DELETE_TABLE = "DELETE_TABLE",
 }

 export enum SortDirection {
@@ -142,8 +145,10 @@ export interface QueryJson {
   sort?: SortJson
   paginate?: PaginationJson
   body?: object
+  table?: Table
   meta?: {
     table?: Table
+    tables?: Record<string, Table>
   }
   extra?: {
     idFilter?: SearchFilters
@@ -0,0 +1,8 @@
+import { Table } from "../../definitions/common"
+
+export interface DatasourcePlus {
+  tables: Record<string, Table>
+  schemaErrors: Record<string, string>
+
+  buildSchema(datasourceId: string, entities: Record<string, Table>): any
+}
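DatasourcePlus is the new contract for integrations that can introspect and rebuild an external schema; the MySQL and Postgres hunks further down implement it. A minimal illustrative implementer is sketched below, reusing import paths that appear elsewhere in this diff; a real integration also extends Sql and talks to a live database, and reports keyless tables through schemaErrors using BuildSchemaErrors.NO_KEY.

import { Table } from "../definitions/common"
import { DatasourcePlus } from "./base/datasourcePlus"

// Illustrative only: no real database behind it.
class InMemoryDatasource implements DatasourcePlus {
  tables: Record<string, Table> = {}
  schemaErrors: Record<string, string> = {}

  buildSchema(datasourceId: string, entities: Record<string, Table>) {
    // a real implementation introspects the database identified by datasourceId here
    this.tables = entities || {}
    this.schemaErrors = {}
  }
}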
@@ -1,19 +1,24 @@
 import { Knex, knex } from "knex"
-const BASE_LIMIT = 5000
 import {
-  QueryJson,
-  SearchFilters,
-  QueryOptions,
-  SortDirection,
   Operation,
+  QueryJson,
+  QueryOptions,
   RelationshipsJson,
+  SearchFilters,
+  SortDirection,
 } from "../../definitions/datasource"
 import { isIsoDateString } from "../utils"
+import SqlTableQueryBuilder from "./sqlTable"
+
+const BASE_LIMIT = 5000

 type KnexQuery = Knex.QueryBuilder | Knex

 function parseBody(body: any) {
   for (let [key, value] of Object.entries(body)) {
+    if (Array.isArray(value)) {
+      body[key] = JSON.stringify(value)
+    }
     if (typeof value !== "string") {
       continue
     }
@@ -243,23 +248,14 @@ function buildDelete(
   }
 }

-class SqlQueryBuilder {
-  private readonly sqlClient: string
+class SqlQueryBuilder extends SqlTableQueryBuilder {
   private readonly limit: number
   // pass through client to get flavour of SQL
   constructor(client: string, limit: number = BASE_LIMIT) {
-    this.sqlClient = client
+    super(client)
     this.limit = limit
   }

-  /**
-   * @param json the input JSON structure from which an SQL query will be built.
-   * @return {string} the operation that was found in the JSON.
-   */
-  _operation(json: QueryJson): Operation {
-    return json.endpoint.operation
-  }
-
   /**
    * @param json The JSON query DSL which is to be converted to SQL.
    * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@@ -267,7 +263,8 @@ class SqlQueryBuilder {
    * @return {{ sql: string, bindings: object }} the query ready to be passed to the driver.
    */
   _query(json: QueryJson, opts: QueryOptions = {}) {
-    const client = knex({ client: this.sqlClient })
+    const sqlClient = this.getSqlClient()
+    const client = knex({ client: sqlClient })
     let query
     switch (this._operation(json)) {
       case Operation.CREATE:
@@ -282,6 +279,8 @@ class SqlQueryBuilder {
       case Operation.DELETE:
         query = buildDelete(client, json, opts)
         break
+      case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE:
+        return this._tableQuery(json)
       default:
         throw `Operation type is not supported by SQL query builder`
     }
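For the three new table operations, _query now short-circuits into _tableQuery from the sqlTable module added below, which requires json.table plus json.meta.tables. A sketch of such a payload follows; the field names (endpoint.operation, table, meta.tables) come from the QueryJson changes earlier in this diff, while the concrete ids and columns are invented.

// Illustrative CREATE_TABLE payload.
const createTableJson = {
  endpoint: {
    datasourceId: "datasource_abc",      // hypothetical datasource id
    entityId: "people",
    operation: "CREATE_TABLE",           // Operation.CREATE_TABLE
  },
  table: {
    _id: "datasource_abc__people",       // hypothetical external table id
    name: "people",
    primary: ["id"],
    schema: {
      name: { type: "string", name: "name", constraints: {} },
    },
  },
  meta: {
    tables: {},                          // every known table for the datasource, keyed by name
  },
}
// Passing this through SqlQueryBuilder._query() falls through to _tableQuery(), which builds
// the DDL with knex's schema builder and returns its toSQL() output (a list of statements)
// rather than a single { sql, bindings } object.
console.log(createTableJson.endpoint.operation)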
@ -0,0 +1,167 @@
|
||||||
|
import { Knex, knex } from "knex"
|
||||||
|
import { Table } from "../../definitions/common"
|
||||||
|
import { Operation, QueryJson } from "../../definitions/datasource"
|
||||||
|
import { breakExternalTableId } from "../utils"
|
||||||
|
import SchemaBuilder = Knex.SchemaBuilder
|
||||||
|
import CreateTableBuilder = Knex.CreateTableBuilder
|
||||||
|
const { FieldTypes, RelationshipTypes } = require("../../constants")
|
||||||
|
|
||||||
|
function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
|
||||||
|
let primaryKey = table && table.primary ? table.primary[0] : null
|
||||||
|
const columns = Object.values(table.schema)
|
||||||
|
// all columns in a junction table will be meta
|
||||||
|
let metaCols = columns.filter(col => col.meta)
|
||||||
|
let isJunction = metaCols.length === columns.length
|
||||||
|
// can't change primary once its set for now
|
||||||
|
if (primaryKey && !oldTable && !isJunction) {
|
||||||
|
schema.increments(primaryKey).primary()
|
||||||
|
} else if (!oldTable && isJunction) {
|
||||||
|
schema.primary(metaCols.map(col => col.name))
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// check if any columns need added
|
||||||
|
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
|
||||||
|
for (let [key, column] of Object.entries(table.schema)) {
|
||||||
|
// skip things that are already correct
|
||||||
|
const oldColumn = oldTable ? oldTable.schema[key] : null
|
||||||
|
if ((oldColumn && oldColumn.type === column.type) || (primaryKey === key && !isJunction)) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
switch (column.type) {
|
||||||
|
case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM:
|
||||||
|
schema.string(key)
|
||||||
|
break
|
||||||
|
case FieldTypes.NUMBER:
|
||||||
|
// if meta is specified then this is a junction table entry
|
||||||
|
if (column.meta && column.meta.toKey && column.meta.toTable) {
|
||||||
|
const { toKey, toTable } = column.meta
|
||||||
|
schema.integer(key).unsigned()
|
||||||
|
schema.foreign(key).references(`${toTable}.${toKey}`)
|
||||||
|
} else if (foreignKeys.indexOf(key) === -1) {
|
||||||
|
schema.float(key)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
case FieldTypes.BOOLEAN:
|
||||||
|
schema.boolean(key)
|
||||||
|
break
|
||||||
|
case FieldTypes.DATETIME:
|
||||||
|
schema.datetime(key)
|
||||||
|
break
|
||||||
|
case FieldTypes.ARRAY:
|
||||||
|
schema.json(key)
|
||||||
|
break
|
||||||
|
case FieldTypes.LINK:
|
||||||
|
// this side of the relationship doesn't need any SQL work
|
||||||
|
if (
|
||||||
|
column.relationshipType !== RelationshipTypes.MANY_TO_ONE &&
|
||||||
|
column.relationshipType !== RelationshipTypes.MANY_TO_MANY
|
||||||
|
) {
|
||||||
|
if (!column.foreignKey || !column.tableId) {
|
||||||
|
throw "Invalid relationship schema"
|
||||||
|
}
|
||||||
|
const { tableName } = breakExternalTableId(column.tableId)
|
||||||
|
// @ts-ignore
|
||||||
|
const relatedTable = tables[tableName]
|
||||||
|
if (!relatedTable) {
|
||||||
|
throw "Referenced table doesn't exist"
|
||||||
|
}
|
||||||
|
schema.integer(column.foreignKey).unsigned()
|
||||||
|
schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// need to check if any columns have been deleted
|
||||||
|
if (oldTable) {
|
||||||
|
const deletedColumns = Object.entries(oldTable.schema)
|
||||||
|
.filter(([key, schema]) => schema.type !== FieldTypes.LINK && table.schema[key] == null)
|
||||||
|
.map(([key]) => key)
|
||||||
|
deletedColumns.forEach(key => {
|
||||||
|
if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
|
||||||
|
schema.dropForeign(key)
|
||||||
|
}
|
||||||
|
schema.dropColumn(key)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return schema
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildCreateTable(
|
||||||
|
knex: Knex,
|
||||||
|
table: Table,
|
||||||
|
tables: Record<string, Table>,
|
||||||
|
): SchemaBuilder {
|
||||||
|
return knex.schema.createTable(table.name, schema => {
|
||||||
|
generateSchema(schema, table, tables)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildUpdateTable(
|
||||||
|
knex: Knex,
|
||||||
|
table: Table,
|
||||||
|
tables: Record<string, Table>,
|
||||||
|
oldTable: Table,
|
||||||
|
): SchemaBuilder {
|
||||||
|
return knex.schema.alterTable(table.name, schema => {
|
||||||
|
generateSchema(schema, table, tables, oldTable)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildDeleteTable(
|
||||||
|
knex: Knex,
|
||||||
|
table: Table,
|
||||||
|
): SchemaBuilder {
|
||||||
|
return knex.schema.dropTable(table.name)
|
||||||
|
}
|
||||||
|
|
||||||
|
class SqlTableQueryBuilder {
|
||||||
|
private readonly sqlClient: string
|
||||||
|
|
||||||
|
// pass through client to get flavour of SQL
|
||||||
|
constructor(client: string) {
|
||||||
|
this.sqlClient = client
|
||||||
|
}
|
||||||
|
|
||||||
|
getSqlClient(): string {
|
||||||
|
return this.sqlClient
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param json the input JSON structure from which an SQL query will be built.
|
||||||
|
* @return {string} the operation that was found in the JSON.
|
||||||
|
*/
|
||||||
|
_operation(json: QueryJson): Operation {
|
||||||
|
return json.endpoint.operation
|
||||||
|
}
|
||||||
|
|
||||||
|
_tableQuery(json: QueryJson): any {
|
||||||
|
const client = knex({ client: this.sqlClient })
|
||||||
|
let query
|
||||||
|
if (!json.table || !json.meta || !json.meta.tables) {
|
||||||
|
throw "Cannot execute without table being specified"
|
||||||
|
}
|
||||||
|
switch (this._operation(json)) {
|
||||||
|
case Operation.CREATE_TABLE:
|
||||||
|
query = buildCreateTable(client, json.table, json.meta.tables)
|
||||||
|
break
|
||||||
|
case Operation.UPDATE_TABLE:
|
||||||
|
if (!json.meta || !json.meta.table) {
|
||||||
|
throw "Must specify old table for update"
|
||||||
|
}
|
||||||
|
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
|
||||||
|
break
|
||||||
|
case Operation.DELETE_TABLE:
|
||||||
|
query = buildDeleteTable(client, json.table)
|
||||||
|
break
|
||||||
|
default:
|
||||||
|
throw "Table operation is of unknown type"
|
||||||
|
}
|
||||||
|
return query.toSQL()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default SqlTableQueryBuilder
|
||||||
|
module.exports = SqlTableQueryBuilder
|
|
@@ -0,0 +1,19 @@
+import { QueryJson } from "../../definitions/datasource"
+import { Datasource } from "../../definitions/common"
+
+module DatasourceUtils {
+  const { integrations } = require("../index")
+
+  export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
+    const Integration = integrations[datasource.source]
+    // query is the opinionated function
+    if (Integration.prototype.query) {
+      const integration = new Integration(datasource.config)
+      return integration.query(json)
+    } else {
+      throw "Datasource does not support query."
+    }
+  }
+
+  module.exports.makeExternalQuery = makeExternalQuery
+}
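makeExternalQuery picks an integration class by datasource.source and prefers its opinionated query(json) method. A stubbed registration and usage sketch; the FakeIntegration class and the source key are illustrative, only the query(json) contract and the thrown message come from the module above.

class FakeIntegration {
  constructor(private config: object) {}
  async query(json: object) {
    // a real integration (Postgres, MySQL, ...) compiles the QueryJson to SQL here
    return [{ ok: true, received: json }]
  }
}

const integrationRegistry: Record<string, any> = { FAKE: FakeIntegration }

async function runExternalQuery(datasource: { source: string; config: object }, json: object) {
  const Integration = integrationRegistry[datasource.source]
  if (Integration.prototype.query) {
    return new Integration(datasource.config).query(json)
  }
  throw "Datasource does not support query."
}

runExternalQuery({ source: "FAKE", config: {} }, { endpoint: {} }).then(console.log)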
@ -8,6 +8,7 @@ import {
|
||||||
} from "../definitions/datasource"
|
} from "../definitions/datasource"
|
||||||
import { Table, TableSchema } from "../definitions/common"
|
import { Table, TableSchema } from "../definitions/common"
|
||||||
import { getSqlQuery } from "./utils"
|
import { getSqlQuery } from "./utils"
|
||||||
|
import { DatasourcePlus } from "./base/datasourcePlus"
|
||||||
|
|
||||||
module MySQLModule {
|
module MySQLModule {
|
||||||
const mysql = require("mysql2")
|
const mysql = require("mysql2")
|
||||||
|
@ -15,7 +16,7 @@ module MySQLModule {
|
||||||
const {
|
const {
|
||||||
buildExternalTableId,
|
buildExternalTableId,
|
||||||
convertType,
|
convertType,
|
||||||
copyExistingPropsOver,
|
finaliseExternalTables,
|
||||||
} = require("./utils")
|
} = require("./utils")
|
||||||
const { FieldTypes } = require("../constants")
|
const { FieldTypes } = require("../constants")
|
||||||
|
|
||||||
|
@ -131,9 +132,11 @@ module MySQLModule {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
class MySQLIntegration extends Sql {
|
class MySQLIntegration extends Sql implements DatasourcePlus {
|
||||||
private config: MySQLConfig
|
private config: MySQLConfig
|
||||||
private readonly client: any
|
private readonly client: any
|
||||||
|
public tables: Record<string, Table> = {}
|
||||||
|
public schemaErrors: Record<string, string> = {}
|
||||||
|
|
||||||
constructor(config: MySQLConfig) {
|
constructor(config: MySQLConfig) {
|
||||||
super("mysql")
|
super("mysql")
|
||||||
|
@ -185,10 +188,6 @@ module MySQLModule {
|
||||||
constraints,
|
constraints,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// for now just default to first column
|
|
||||||
if (primaryKeys.length === 0) {
|
|
||||||
primaryKeys.push(descResp[0].Field)
|
|
||||||
}
|
|
||||||
if (!tables[tableName]) {
|
if (!tables[tableName]) {
|
||||||
tables[tableName] = {
|
tables[tableName] = {
|
||||||
_id: buildExternalTableId(datasourceId, tableName),
|
_id: buildExternalTableId(datasourceId, tableName),
|
||||||
|
@ -197,12 +196,12 @@ module MySQLModule {
|
||||||
schema,
|
schema,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
copyExistingPropsOver(tableName, tables, entities)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
this.client.end()
|
this.client.end()
|
||||||
this.tables = tables
|
const final = finaliseExternalTables(tables, entities)
|
||||||
|
this.tables = final.tables
|
||||||
|
this.schemaErrors = final.errors
|
||||||
}
|
}
|
||||||
|
|
||||||
async create(query: SqlQuery | string) {
|
async create(query: SqlQuery | string) {
|
||||||
|
@@ -263,6 +262,13 @@ module MySQLModule {
       const operation = this._operation(json)
       this.client.connect()
       const input = this._query(json, { disableReturning: true })
+      if (Array.isArray(input)) {
+        const responses = []
+        for (let query of input) {
+          responses.push(await internalQuery(this.client, query, false))
+        }
+        return responses
+      }
       let row
       // need to manage returning, a feature mySQL can't do
       if (operation === operation.DELETE) {

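A note on the new branch in query() above: the SQL builder can now hand back an array of statements instead of a single one (the hunk's own comment notes MySQL cannot do RETURNING, so a write is presumably paired with a follow-up read, though the builder output itself is not part of this diff), and the integration simply runs them in order and returns every result. A minimal standalone sketch of that control flow; fakeQuery stands in for internalQuery(this.client, query, false) and the SQL strings are hypothetical:

// Standalone sketch of the sequential execution added above; fakeQuery is a
// stand-in for internalQuery and the statements are hypothetical examples.
const fakeQuery = async sql => ({ sql, rows: [] })

async function runQueries(input) {
  if (Array.isArray(input)) {
    const responses = []
    for (let query of input) {
      responses.push(await fakeQuery(query))
    }
    return responses
  }
  return fakeQuery(input)
}

runQueries([
  "UPDATE people SET name = 'John' WHERE id = 1",
  "SELECT * FROM people WHERE id = 1",
]).then(responses => console.log(responses.length)) // 2, one result per statement
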
@@ -7,6 +7,7 @@ import {
 } from "../definitions/datasource"
 import { Table } from "../definitions/common"
 import { getSqlQuery } from "./utils"
+import { DatasourcePlus } from "./base/datasourcePlus"
 
 module PostgresModule {
   const { Pool } = require("pg")
@@ -15,7 +16,7 @@ module PostgresModule {
   const {
     buildExternalTableId,
     convertType,
-    copyExistingPropsOver,
+    finaliseExternalTables,
   } = require("./utils")
   const { escapeDangerousCharacters } = require("../utilities")
 
@@ -138,10 +139,12 @@ module PostgresModule {
     }
   }
 
-  class PostgresIntegration extends Sql {
+  class PostgresIntegration extends Sql implements DatasourcePlus {
     static pool: any
     private readonly client: any
     private readonly config: PostgresConfig
+    public tables: Record<string, Table> = {}
+    public schemaErrors: Record<string, string> = {}
 
     COLUMNS_SQL!: string
 
@@ -223,7 +226,7 @@ module PostgresModule {
       if (!tables[tableName] || !tables[tableName].schema) {
         tables[tableName] = {
           _id: buildExternalTableId(datasourceId, tableName),
-          primary: tableKeys[tableName] || ["id"],
+          primary: tableKeys[tableName] || [],
           name: tableName,
           schema: {},
         }
@@ -248,10 +251,9 @@ module PostgresModule {
         }
       }
 
-      for (let tableName of Object.keys(tables)) {
-        copyExistingPropsOver(tableName, tables, entities)
-      }
-      this.tables = tables
+      const final = finaliseExternalTables(tables, entities)
+      this.tables = final.tables
+      this.schemaErrors = final.errors
     }
 
     async create(query: SqlQuery | string) {
@@ -277,8 +279,16 @@ module PostgresModule {
     async query(json: QueryJson) {
       const operation = this._operation(json).toLowerCase()
       const input = this._query(json)
-      const response = await internalQuery(this.client, input)
-      return response.rows.length ? response.rows : [{ [operation]: true }]
+      if (Array.isArray(input)) {
+        const responses = []
+        for (let query of input) {
+          responses.push(await internalQuery(this.client, query))
+        }
+        return responses
+      } else {
+        const response = await internalQuery(this.client, input)
+        return response.rows.length ? response.rows : [{ [operation]: true }]
+      }
     }
   }
 

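The Postgres query() above gains the same array handling as MySQL, while the single-statement path keeps its existing response rule: return the rows if the statement produced any, otherwise a one-element marker for the operation so callers can still tell the write ran. A small standalone sketch of just that rule:

// Sketch of the response-shape rule kept in the else branch above.
function shapeResponse(operation, response) {
  return response.rows.length ? response.rows : [{ [operation]: true }]
}

console.log(shapeResponse("update", { rows: [] }))        // [ { update: true } ]
console.log(shapeResponse("read", { rows: [{ id: 1 }] })) // [ { id: 1 } ]
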
@@ -1,8 +1,8 @@
 import { SqlQuery } from "../definitions/datasource"
-import { Datasource } from "../definitions/common"
+import { Datasource, Table } from "../definitions/common"
 import { SourceNames } from "../definitions/datasource"
 const { DocumentTypes, SEPARATOR } = require("../db/utils")
-const { FieldTypes } = require("../constants")
+const { FieldTypes, BuildSchemaErrors } = require("../constants")
 
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 const ROW_ID_REGEX = /^\[.*]$/g
@@ -102,14 +102,14 @@ export function isIsoDateString(str: string) {
 }
 
 // add the existing relationships from the entities if they exist, to prevent them from being overridden
-export function copyExistingPropsOver(
+function copyExistingPropsOver(
   tableName: string,
-  tables: { [key: string]: any },
+  table: Table,
   entities: { [key: string]: any }
 ) {
   if (entities && entities[tableName]) {
     if (entities[tableName].primaryDisplay) {
-      tables[tableName].primaryDisplay = entities[tableName].primaryDisplay
+      table.primaryDisplay = entities[tableName].primaryDisplay
     }
     const existingTableSchema = entities[tableName].schema
     for (let key in existingTableSchema) {
@@ -117,8 +117,27 @@ export function copyExistingPropsOver(
         continue
       }
       if (existingTableSchema[key].type === "link") {
-        tables[tableName].schema[key] = existingTableSchema[key]
+        table.schema[key] = existingTableSchema[key]
       }
     }
   }
+  return table
+}
+
+export function finaliseExternalTables(
+  tables: { [key: string]: any },
+  entities: { [key: string]: any }
+) {
+  const finalTables: { [key: string]: any } = {}
+  const errors: { [key: string]: string } = {}
+  for (let [name, table] of Object.entries(tables)) {
+    // make sure every table has a key
+    if (table.primary == null || table.primary.length === 0) {
+      errors[name] = BuildSchemaErrors.NO_KEY
+      continue
+    }
+    // make sure all previous props have been added back
+    finalTables[name] = copyExistingPropsOver(name, table, entities)
+  }
+  return { tables: finalTables, errors }
 }

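finaliseExternalTables, added above, is what both integrations now call at the end of buildSchema: it drops any table that arrived without a primary key, records it in an errors map keyed by table name (surfaced as schemaErrors on the integration), and re-applies the existing display column and link fields via copyExistingPropsOver. A simplified standalone sketch of the returned shape; the BuildSchemaErrors constant lives in ../constants and is not shown in this diff, so its value is stubbed here, and copyExistingPropsOver is omitted:

// Simplified sketch of finaliseExternalTables' output shape.
const BuildSchemaErrors = { NO_KEY: "no_key" } // stubbed value, not from this diff

function finalise(tables) {
  const finalTables = {}
  const errors = {}
  for (let [name, table] of Object.entries(tables)) {
    // a table without a key is excluded from the final schema and reported
    if (table.primary == null || table.primary.length === 0) {
      errors[name] = BuildSchemaErrors.NO_KEY
      continue
    }
    finalTables[name] = table
  }
  return { tables: finalTables, errors }
}

console.log(
  finalise({
    people: { primary: ["id"], schema: {} },
    notes: { primary: [], schema: {} },
  })
)
// -> tables contains only "people"; errors is { notes: "no_key" }
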
@@ -311,7 +311,8 @@ class TestConfiguration {
 
   async createDatasource(config = null) {
     config = config || basicDatasource()
-    this.datasource = await this._req(config, null, controllers.datasource.save)
+    const response = await this._req(config, null, controllers.datasource.save)
+    this.datasource = response.datasource
     return this.datasource
   }
 

File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",

@@ -31,7 +31,7 @@ const HELPERS = [
   }
   // null/undefined values produce bad results
   if (value == null || typeof value !== "string") {
-    return value || ""
+    return value == null ? "" : value
   }
   if (value && value.string) {
     value = value.string

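The one-line change above matters for falsy bindings: value || "" collapsed legitimate values such as 0 and false into an empty string, whereas the ternary only substitutes for null and undefined. The tests added later in this diff cover exactly these cases; the difference in plain JavaScript:

// Why the guard changed: || swallows every falsy value, the ternary only null/undefined.
console.log(0 || "")                    // ""    (old behaviour lost the zero)
console.log(0 == null ? "" : 0)         // 0
console.log(false == null ? "" : false) // false
console.log(null == null ? "" : null)   // ""    (still blanked, as intended)
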
@@ -6,6 +6,7 @@ const manifest = require("../manifest.json")
 
 const hbsInstance = handlebars.create()
 registerAll(hbsInstance)
+const hbsInstanceNoHelpers = handlebars.create()
 
 /**
  * utility function to check if the object is valid
@@ -24,17 +25,30 @@ function testObject(object) {
  * @param {object|array} object The input structure which is to be recursed, it is important to note that
  * if the structure contains any cycles then this will fail.
  * @param {object} context The context that handlebars should fill data from.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {Promise<object|array>} The structure input, as fully updated as possible.
  */
-module.exports.processObject = async (object, context) => {
+module.exports.processObject = async (
+  object,
+  context,
+  opts = { noHelpers: false }
+) => {
   testObject(object)
   for (let key of Object.keys(object || {})) {
     if (object[key] != null) {
       let val = object[key]
       if (typeof val === "string") {
-        object[key] = await module.exports.processString(object[key], context)
+        object[key] = await module.exports.processString(
+          object[key],
+          context,
+          opts
+        )
       } else if (typeof val === "object") {
-        object[key] = await module.exports.processObject(object[key], context)
+        object[key] = await module.exports.processObject(
+          object[key],
+          context,
+          opts
+        )
       }
     }
   }
@@ -46,11 +60,16 @@ module.exports.processObject = async (object, context) => {
  * then nothing will occur.
  * @param {string} string The template string which is the filled from the context object.
  * @param {object} context An object of information which will be used to enrich the string.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {Promise<string>} The enriched string, all templates should have been replaced if they can be.
  */
-module.exports.processString = async (string, context) => {
+module.exports.processString = async (
+  string,
+  context,
+  opts = { noHelpers: false }
+) => {
   // TODO: carry out any async calls before carrying out async call
-  return module.exports.processStringSync(string, context)
+  return module.exports.processStringSync(string, context, opts)
 }
 
 /**
@@ -59,16 +78,21 @@ module.exports.processString = async (string, context) => {
  * @param {object|array} object The input structure which is to be recursed, it is important to note that
  * if the structure contains any cycles then this will fail.
  * @param {object} context The context that handlebars should fill data from.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {object|array} The structure input, as fully updated as possible.
  */
-module.exports.processObjectSync = (object, context) => {
+module.exports.processObjectSync = (
+  object,
+  context,
+  opts = { noHelpers: false }
+) => {
   testObject(object)
   for (let key of Object.keys(object || {})) {
     let val = object[key]
     if (typeof val === "string") {
-      object[key] = module.exports.processStringSync(object[key], context)
+      object[key] = module.exports.processStringSync(object[key], context, opts)
     } else if (typeof val === "object") {
-      object[key] = module.exports.processObjectSync(object[key], context)
+      object[key] = module.exports.processObjectSync(object[key], context, opts)
     }
   }
   return object
@@ -79,9 +103,14 @@ module.exports.processObjectSync = (object, context) => {
  * then nothing will occur. This is a pure sync call and therefore does not have the full functionality of the async call.
  * @param {string} string The template string which is the filled from the context object.
  * @param {object} context An object of information which will be used to enrich the string.
+ * @param {object} opts optional - specify some options for processing.
  * @returns {string} The enriched string, all templates should have been replaced if they can be.
  */
-module.exports.processStringSync = (string, context) => {
+module.exports.processStringSync = (
+  string,
+  context,
+  opts = { noHelpers: false }
+) => {
   if (!exports.isValid(string)) {
     return string
   }
@@ -91,9 +120,13 @@ module.exports.processStringSync = (string, context) => {
     throw "Cannot process non-string types."
   }
   try {
-    string = processors.preprocess(string)
+    const noHelpers = opts && opts.noHelpers
+    // finalising adds a helper, can't do this with no helpers
+    const shouldFinalise = !noHelpers
+    string = processors.preprocess(string, shouldFinalise)
     // this does not throw an error when template can't be fulfilled, have to try correct beforehand
-    const template = hbsInstance.compile(string, {
+    const instance = noHelpers ? hbsInstanceNoHelpers : hbsInstance
+    const template = instance.compile(string, {
       strict: false,
     })
     return processors.postprocess(
@@ -119,9 +152,10 @@ module.exports.makePropSafe = property => {
 /**
  * Checks whether or not a template string contains totally valid syntax (simply tries running it)
  * @param string The string to test for valid syntax - this may contain no templates and will be considered valid.
+ * @param opts optional - specify some options for processing.
  * @returns {boolean} Whether or not the input string is valid.
  */
-module.exports.isValid = string => {
+module.exports.isValid = (string, opts = { noHelpers: false }) => {
   const validCases = [
     "string",
     "number",
@@ -135,7 +169,8 @@ module.exports.isValid = string => {
   // don't really need a real context to check if its valid
   const context = {}
   try {
-    hbsInstance.compile(processors.preprocess(string, false))(context)
+    const instance = opts && opts.noHelpers ? hbsInstanceNoHelpers : hbsInstance
+    instance.compile(processors.preprocess(string, false))(context)
     return true
   } catch (err) {
     const msg = err && err.message ? err.message : err

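All of the string-templates entry points above now take an optional opts object; { noHelpers: true } compiles against hbsInstanceNoHelpers, a second Handlebars instance with no Budibase helpers registered, so helper-style bindings are rejected rather than executed. A usage sketch mirroring the test added later in this diff, assuming the package is consumed as @budibase/string-templates:

// Usage sketch of the new opts argument; the package name is assumed and the
// expected outputs mirror the test added in this diff.
const { processString } = require("@budibase/string-templates")

async function demo() {
  // default instance has the Budibase helpers registered
  console.log(await processString("{{ avg 1 1 1 }}", {})) // "1"
  // noHelpers compiles against hbsInstanceNoHelpers, so the helper binding fails
  console.log(
    await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
  ) // "Invalid Binding"
}

demo()
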
@@ -125,6 +125,18 @@ describe("check the utility functions", () => {
     })
   })
 
+describe("check falsy values", () => {
+  it("should get a zero out when context contains it", async () => {
+    const output = await processString("{{ number }}", { number: 0 })
+    expect(output).toEqual("0")
+  })
+
+  it("should get false out when context contains it", async () => {
+    const output = await processString("{{ bool }}", { bool: false })
+    expect(output).toEqual("false")
+  })
+})
+
 describe("check manifest", () => {
   it("should be able to retrieve the manifest", () => {
     const manifest = getManifest()

@@ -11,6 +11,15 @@ describe("test the custom helpers we have applied", () => {
   })
 })
 
+describe("test that it can run without helpers", () => {
+  it("should be able to run without helpers", async () => {
+    const output = await processString("{{ avg 1 1 1 }}", {}, { noHelpers: true })
+    const valid = await processString("{{ avg 1 1 1 }}", {})
+    expect(valid).toBe("1")
+    expect(output).toBe("Invalid Binding")
+  })
+})
+
 describe("test the math helpers", () => {
   it("should be able to produce an absolute", async () => {
     const output = await processString("{{abs a}}", {
@@ -267,6 +276,7 @@ describe("test the comparison helpers", () => {
     )
     expect(output).toBe("Success")
   }
+
   it("should allow use of the lt helper", async () => {
     await compare("lt", 10, 15)
   })

@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "0.9.173-alpha.3",
+  "version": "0.9.176-alpha.3",
   "description": "Budibase background service",
   "main": "src/index.js",
   "repository": {
@@ -29,8 +29,8 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.173-alpha.3",
-    "@budibase/string-templates": "^0.9.173-alpha.3",
+    "@budibase/auth": "^0.9.176-alpha.3",
+    "@budibase/string-templates": "^0.9.176-alpha.3",
     "@koa/router": "^8.0.0",
     "@sentry/node": "^6.0.0",
     "@techpass/passport-openidconnect": "^0.3.0",