Merge branch 'develop' of github.com:Budibase/budibase into spectrum-apps
commit dcaf02ecee
@@ -1,5 +1,5 @@
{
-"version": "0.9.69",
+"version": "0.9.70",
"npmClient": "yarn",
"packages": [
"packages/*"
@@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
-"version": "0.9.69",
+"version": "0.9.70",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",
@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
-"version": "0.9.69",
+"version": "0.9.70",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
-"version": "0.9.69",
+"version": "0.9.70",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@@ -65,10 +65,10 @@
}
},
"dependencies": {
-"@budibase/bbui": "^0.9.69",
-"@budibase/client": "^0.9.69",
+"@budibase/bbui": "^0.9.70",
+"@budibase/client": "^0.9.70",
"@budibase/colorpicker": "1.1.2",
-"@budibase/string-templates": "^0.9.69",
+"@budibase/string-templates": "^0.9.70",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
@@ -86,7 +86,6 @@ const createScreen = table => {
valueType: "Binding",
},
],
limit: 1,
paginate: false,
})

@@ -53,7 +53,7 @@
let deletion

$: tableOptions = $tables.list.filter(
-table => table._id !== $tables.draft._id
+table => table._id !== $tables.draft._id && table.type !== "external"
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =
@@ -172,11 +172,6 @@
alt: `Many ${table.name} rows → many ${linkTable.name} rows`,
value: RelationshipTypes.MANY_TO_MANY,
},
{
name: `One ${linkName} row → many ${thisName} rows`,
alt: `One ${linkTable.name} rows → many ${table.name} rows`,
value: RelationshipTypes.ONE_TO_MANY,
},
{
name: `One ${thisName} row → many ${linkName} rows`,
alt: `One ${table.name} rows → many ${linkTable.name} rows`,
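For reference, the screens above and the relationship modal added later in this commit all lean on three RelationshipTypes constants. A minimal sketch, with the string values inferred from how isMany() splits them later in this diff, not copied from constants/backend:

// Inferred sketch of RelationshipTypes; the real constant lives in constants/backend.
const RelationshipTypes = {
  MANY_TO_MANY: "many-to-many",
  MANY_TO_ONE: "many-to-one",
  ONE_TO_MANY: "one-to-many",
}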
@@ -6,9 +6,14 @@
import EditViewPopover from "./popovers/EditViewPopover.svelte"
import NavItem from "components/common/NavItem.svelte"

+const alphabetical = (a, b) => a.name?.toLowerCase() > b.name?.toLowerCase()
+
export let sourceId

$: selectedView = $views.selected && $views.selected.name
+$: sortedTables = $tables.list
+  .filter(table => table.sourceId === sourceId)
+  .sort(alphabetical)

function selectTable(table) {
tables.select(table)
@@ -33,7 +38,7 @@

{#if $database?._id}
<div class="hierarchy-items-container">
-{#each $tables.list.filter(table => table.sourceId === sourceId) as table, idx}
+{#each sortedTables as table, idx}
<NavItem
indentLevel={1}
border={idx > 0}
@@ -46,7 +51,7 @@
<EditTablePopover {table} />
{/if}
</NavItem>
-{#each Object.keys(table.views || {}) as viewName, idx (idx)}
+{#each [...Object.keys(table.views || {})].sort() as viewName, idx (idx)}
<NavItem
indentLevel={2}
icon="Remove"
@ -0,0 +1,194 @@
|
|||
<script>
|
||||
import { RelationshipTypes } from "constants/backend"
|
||||
import { Button, Input, ModalContent, Select, Detail } from "@budibase/bbui"
|
||||
import { tables } from "stores/backend"
|
||||
import { uuid } from "builderStore/uuid"
|
||||
|
||||
export let save
|
||||
export let datasource
|
||||
export let plusTables = []
|
||||
export let fromRelationship = {}
|
||||
export let toRelationship = {}
|
||||
export let close
|
||||
|
||||
let originalFromName = fromRelationship.name,
|
||||
originalToName = toRelationship.name
|
||||
|
||||
function isValid(relationship) {
|
||||
if (
|
||||
relationship.relationshipType === RelationshipTypes.MANY_TO_MANY &&
|
||||
!relationship.through
|
||||
) {
|
||||
return false
|
||||
}
|
||||
return (
|
||||
relationship.name && relationship.tableId && relationship.relationshipType
|
||||
)
|
||||
}
|
||||
|
||||
$: tableOptions = plusTables.map(table => ({
|
||||
label: table.name,
|
||||
value: table._id,
|
||||
}))
|
||||
$: fromTable = plusTables.find(table => table._id === toRelationship?.tableId)
|
||||
$: toTable = plusTables.find(table => table._id === fromRelationship?.tableId)
|
||||
$: through = plusTables.find(table => table._id === fromRelationship?.through)
|
||||
$: valid = toTable && fromTable && isValid(fromRelationship)
|
||||
$: linkTable = through || toTable
|
||||
$: relationshipTypes = [
|
||||
{
|
||||
label: "Many",
|
||||
value: RelationshipTypes.MANY_TO_MANY,
|
||||
},
|
||||
{
|
||||
label: "One",
|
||||
value: RelationshipTypes.MANY_TO_ONE,
|
||||
},
|
||||
]
|
||||
$: updateRelationshipType(fromRelationship?.relationshipType)
|
||||
|
||||
function updateRelationshipType(fromType) {
|
||||
if (fromType === RelationshipTypes.MANY_TO_MANY) {
|
||||
toRelationship.relationshipType = RelationshipTypes.MANY_TO_MANY
|
||||
} else {
|
||||
toRelationship.relationshipType = RelationshipTypes.MANY_TO_ONE
|
||||
}
|
||||
}
|
||||
|
||||
function buildRelationships() {
|
||||
// if any to many only need to check from
|
||||
const manyToMany =
|
||||
fromRelationship.relationshipType === RelationshipTypes.MANY_TO_MANY
|
||||
// main is simply used to know this is the side the user configured it from
|
||||
const id = uuid()
|
||||
if (!manyToMany) {
|
||||
delete fromRelationship.through
|
||||
delete toRelationship.through
|
||||
}
|
||||
let relateFrom = {
|
||||
...fromRelationship,
|
||||
type: "link",
|
||||
main: true,
|
||||
_id: id,
|
||||
}
|
||||
let relateTo = {
|
||||
...toRelationship,
|
||||
type: "link",
|
||||
_id: id,
|
||||
}
|
||||
|
||||
// [0] is because we don't support composite keys for relationships right now
|
||||
if (manyToMany) {
|
||||
relateFrom = {
|
||||
...relateFrom,
|
||||
through: through._id,
|
||||
fieldName: toTable.primary[0],
|
||||
}
|
||||
relateTo = {
|
||||
...relateTo,
|
||||
through: through._id,
|
||||
fieldName: fromTable.primary[0],
|
||||
}
|
||||
} else {
|
||||
relateFrom = {
|
||||
...relateFrom,
|
||||
foreignKey: relateFrom.fieldName,
|
||||
fieldName: fromTable.primary[0],
|
||||
}
|
||||
relateTo = {
|
||||
...relateTo,
|
||||
relationshipType: RelationshipTypes.ONE_TO_MANY,
|
||||
foreignKey: relateFrom.fieldName,
|
||||
fieldName: fromTable.primary[0],
|
||||
}
|
||||
}
|
||||
|
||||
fromRelationship = relateFrom
|
||||
toRelationship = relateTo
|
||||
}
|
||||
|
||||
// save the relationship on to the datasource
|
||||
async function saveRelationship() {
|
||||
buildRelationships()
|
||||
// source of relationship
|
||||
datasource.entities[fromTable.name].schema[fromRelationship.name] =
|
||||
fromRelationship
|
||||
// save other side of relationship in the other schema
|
||||
datasource.entities[toTable.name].schema[toRelationship.name] =
|
||||
toRelationship
|
||||
|
||||
// If relationship has been renamed
|
||||
if (originalFromName !== fromRelationship.name) {
|
||||
delete datasource.entities[fromTable.name].schema[originalFromName]
|
||||
}
|
||||
if (originalToName !== toRelationship.name) {
|
||||
delete datasource.entities[toTable.name].schema[originalToName]
|
||||
}
|
||||
|
||||
await save()
|
||||
await tables.fetch()
|
||||
}
|
||||
|
||||
async function deleteRelationship() {
|
||||
delete datasource.entities[fromTable.name].schema[fromRelationship.name]
|
||||
delete datasource.entities[toTable.name].schema[toRelationship.name]
|
||||
await save()
|
||||
await tables.fetch()
|
||||
close()
|
||||
}
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
title="Create Relationship"
|
||||
confirmText="Save"
|
||||
onConfirm={saveRelationship}
|
||||
disabled={!valid}
|
||||
>
|
||||
<Select
|
||||
label="Relationship type"
|
||||
options={relationshipTypes}
|
||||
bind:value={fromRelationship.relationshipType}
|
||||
/>
|
||||
<div class="headings">
|
||||
<Detail>Tables</Detail>
|
||||
</div>
|
||||
<Select
|
||||
label="Select from table"
|
||||
options={tableOptions}
|
||||
bind:value={toRelationship.tableId}
|
||||
/>
|
||||
<Select
|
||||
label={"Select to table"}
|
||||
options={tableOptions}
|
||||
bind:value={fromRelationship.tableId}
|
||||
/>
|
||||
{#if fromRelationship?.relationshipType === RelationshipTypes.MANY_TO_MANY}
|
||||
<Select
|
||||
label={"Through"}
|
||||
options={tableOptions}
|
||||
bind:value={fromRelationship.through}
|
||||
/>
|
||||
{:else if toTable}
|
||||
<Select
|
||||
label={`Foreign Key (${toTable?.name})`}
|
||||
options={Object.keys(toTable?.schema)}
|
||||
bind:value={fromRelationship.fieldName}
|
||||
/>
|
||||
{/if}
|
||||
<div class="headings">
|
||||
<Detail>Column names</Detail>
|
||||
</div>
|
||||
<Input label="From table column" bind:value={fromRelationship.name} />
|
||||
<Input label="To table column" bind:value={toRelationship.name} />
|
||||
<div slot="footer">
|
||||
{#if originalFromName != null}
|
||||
<Button warning text on:click={deleteRelationship}>Delete</Button>
|
||||
{/if}
|
||||
</div>
|
||||
</ModalContent>
|
||||
|
||||
<style>
|
||||
.headings {
|
||||
margin-top: var(--spacing-s);
|
||||
}
|
||||
</style>
|
|
@@ -0,0 +1,21 @@
+<script>
+  import { Menu, Icon, MenuSection, MenuItem } from "@budibase/bbui"
+
+  export let heading
+  export let tables
+  export let selected = false
+  export let select
+</script>
+
+<Menu>
+  <MenuSection {heading}>
+    {#each tables as table}
+      <MenuItem noClose icon="Table" on:click={() => select(table)}>
+        {table.name}
+        {#if selected}
+          <Icon size="S" name="Checkmark" />
+        {/if}
+      </MenuItem>
+    {/each}
+  </MenuSection>
+</Menu>
@ -1,16 +1,70 @@
|
|||
<script>
|
||||
import { goto, beforeUrlChange } from "@roxi/routify"
|
||||
import { Button, Heading, Body, Divider, Layout } from "@budibase/bbui"
|
||||
import { Button, Heading, Body, Divider, Layout, Modal } from "@budibase/bbui"
|
||||
import { datasources, integrations, queries, tables } from "stores/backend"
|
||||
import { notifications } from "@budibase/bbui"
|
||||
import IntegrationConfigForm from "components/backend/DatasourceNavigator/TableIntegrationMenu/IntegrationConfigForm.svelte"
|
||||
import CreateEditRelationship from "./CreateEditRelationship/CreateEditRelationship.svelte"
|
||||
import DisplayColumnModal from "./modals/EditDisplayColumnsModal.svelte"
|
||||
import ICONS from "components/backend/DatasourceNavigator/icons"
|
||||
import { capitalise } from "helpers"
|
||||
|
||||
let unsaved = false
|
||||
let relationshipModal
|
||||
let displayColumnModal
|
||||
let selectedFromRelationship, selectedToRelationship
|
||||
|
||||
$: datasource = $datasources.list.find(ds => ds._id === $datasources.selected)
|
||||
$: integration = datasource && $integrations[datasource.source]
|
||||
$: plusTables = datasource?.plus
|
||||
? Object.values(datasource.entities || {})
|
||||
: []
|
||||
$: relationships = getRelationships(plusTables)
|
||||
|
||||
function getRelationships(tables) {
|
||||
if (!tables || !Array.isArray(tables)) {
|
||||
return {}
|
||||
}
|
||||
let pairs = {}
|
||||
for (let table of tables) {
|
||||
for (let column of Object.values(table.schema)) {
|
||||
if (column.type !== "link") {
|
||||
continue
|
||||
}
|
||||
// these relationships have an id to pair them to each other
|
||||
// one has a main for the from side
|
||||
const key = column.main ? "from" : "to"
|
||||
pairs[column._id] = {
|
||||
...pairs[column._id],
|
||||
[key]: column,
|
||||
}
|
||||
}
|
||||
}
|
||||
return pairs
|
||||
}
|
||||
|
||||
function buildRelationshipDisplayString(fromCol, toCol) {
|
||||
function getTableName(tableId) {
|
||||
if (!tableId || typeof tableId !== "string") {
|
||||
return null
|
||||
}
|
||||
return plusTables.find(table => table._id === tableId)?.name || "Unknown"
|
||||
}
|
||||
if (!toCol || !fromCol) {
|
||||
return "Cannot build name"
|
||||
}
|
||||
const fromTableName = getTableName(toCol.tableId)
|
||||
const toTableName = getTableName(fromCol.tableId)
|
||||
const throughTableName = getTableName(fromCol.through)
|
||||
|
||||
let displayString
|
||||
if (throughTableName) {
|
||||
displayString = `${fromTableName} through ${throughTableName} → ${toTableName}`
|
||||
} else {
|
||||
displayString = `${fromTableName} → ${toTableName}`
|
||||
}
|
||||
return displayString
|
||||
}
|
||||
|
||||
async function saveDatasource() {
|
||||
try {
|
||||
|
@ -48,6 +102,16 @@
|
|||
unsaved = true
|
||||
}
|
||||
|
||||
function openRelationshipModal(fromRelationship, toRelationship) {
|
||||
selectedFromRelationship = fromRelationship || {}
|
||||
selectedToRelationship = toRelationship || {}
|
||||
relationshipModal.show()
|
||||
}
|
||||
|
||||
function openDisplayColumnModal() {
|
||||
displayColumnModal.show()
|
||||
}
|
||||
|
||||
$beforeUrlChange(() => {
|
||||
if (unsaved) {
|
||||
notifications.error(
|
||||
|
@ -59,6 +123,21 @@
|
|||
})
|
||||
</script>
|
||||
|
||||
<Modal bind:this={relationshipModal}>
|
||||
<CreateEditRelationship
|
||||
{datasource}
|
||||
save={saveDatasource}
|
||||
close={relationshipModal.hide}
|
||||
{plusTables}
|
||||
fromRelationship={selectedFromRelationship}
|
||||
toRelationship={selectedToRelationship}
|
||||
/>
|
||||
</Modal>
|
||||
|
||||
<Modal bind:this={displayColumnModal}>
|
||||
<DisplayColumnModal {datasource} {plusTables} save={saveDatasource} />
|
||||
</Modal>
|
||||
|
||||
{#if datasource && integration}
|
||||
<section>
|
||||
<Layout>
|
||||
|
@ -92,9 +171,18 @@
|
|||
<Divider />
|
||||
<div class="query-header">
|
||||
<Heading size="S">Tables</Heading>
|
||||
<Button primary on:click={updateDatasourceSchema}
|
||||
>Fetch Tables From Database</Button
|
||||
>
|
||||
<div class="table-buttons">
|
||||
{#if plusTables && plusTables.length !== 0}
|
||||
<Button primary on:click={openDisplayColumnModal}>
|
||||
Update display columns
|
||||
</Button>
|
||||
{/if}
|
||||
<div>
|
||||
<Button primary on:click={updateDatasourceSchema}>
|
||||
Fetch tables from database
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<Body>
|
||||
This datasource can determine tables automatically. Budibase can fetch
|
||||
|
@ -102,18 +190,44 @@
|
|||
having to write any queries at all.
|
||||
</Body>
|
||||
<div class="query-list">
|
||||
{#if datasource.entities}
|
||||
{#each Object.keys(datasource.entities) as entity}
|
||||
<div
|
||||
class="query-list-item"
|
||||
on:click={() => onClickTable(datasource.entities[entity])}
|
||||
>
|
||||
<p class="query-name">{entity}</p>
|
||||
<p>Primary Key: {datasource.entities[entity].primary}</p>
|
||||
{#each plusTables as table}
|
||||
<div class="query-list-item" on:click={() => onClickTable(table)}>
|
||||
<p class="query-name">{table.name}</p>
|
||||
<p>Primary Key: {table.primary}</p>
|
||||
<p>→</p>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{#if plusTables?.length !== 0}
|
||||
<Divider />
|
||||
<div class="query-header">
|
||||
<Heading size="S">Relationships</Heading>
|
||||
<Button primary on:click={() => openRelationshipModal()}
|
||||
>Create relationship</Button
|
||||
>
|
||||
</div>
|
||||
<Body>
|
||||
Tell budibase how your tables are related to get even more smart
|
||||
features.
|
||||
</Body>
|
||||
{/if}
|
||||
<div class="query-list">
|
||||
{#each Object.values(relationships) as relationship}
|
||||
<div
|
||||
class="query-list-item"
|
||||
on:click={() =>
|
||||
openRelationshipModal(relationship.from, relationship.to)}
|
||||
>
|
||||
<p class="query-name">
|
||||
{buildRelationshipDisplayString(
|
||||
relationship.from,
|
||||
relationship.to
|
||||
)}
|
||||
</p>
|
||||
<p>{relationship.from?.name} to {relationship.to?.name}</p>
|
||||
<p>→</p>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
<Divider />
|
||||
|
@ -202,4 +316,14 @@
|
|||
text-overflow: ellipsis;
|
||||
font-size: var(--font-size-s);
|
||||
}
|
||||
|
||||
.table-buttons {
|
||||
display: grid;
|
||||
grid-gap: var(--spacing-l);
|
||||
grid-template-columns: 1fr 1fr;
|
||||
}
|
||||
|
||||
.table-buttons div {
|
||||
grid-column-end: -1;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
<script>
|
||||
import { ModalContent, Select, Body } from "@budibase/bbui"
|
||||
import { tables } from "stores/backend"
|
||||
|
||||
export let datasource
|
||||
export let plusTables
|
||||
export let save
|
||||
|
||||
async function saveDisplayColumns() {
|
||||
// be explicit about copying over
|
||||
for (let table of plusTables) {
|
||||
datasource.entities[table.name].primaryDisplay = table.primaryDisplay
|
||||
}
|
||||
save()
|
||||
await tables.fetch()
|
||||
}
|
||||
|
||||
function getColumnOptions(table) {
|
||||
if (!table || !table.schema) {
|
||||
return []
|
||||
}
|
||||
return Object.entries(table.schema)
|
||||
.filter(field => field[1].type !== "link")
|
||||
.map(([fieldName]) => fieldName)
|
||||
}
|
||||
</script>
|
||||
|
||||
<ModalContent
|
||||
title="Edit display columns"
|
||||
confirmText="Save"
|
||||
onConfirm={saveDisplayColumns}
|
||||
>
|
||||
<Body
|
||||
>Select the columns that will be shown when displaying relationships.</Body
|
||||
>
|
||||
{#each plusTables as table}
|
||||
<Select
|
||||
label={table.name}
|
||||
options={getColumnOptions(table)}
|
||||
bind:value={table.primaryDisplay}
|
||||
/>
|
||||
{/each}
|
||||
</ModalContent>
|
|
@@ -60,6 +60,16 @@

let toggleDisabled = false

+async function updateUserFirstName(evt) {
+  await users.save({ ...$userFetch?.data, firstName: evt.target.value })
+  await userFetch.refresh()
+}
+
+async function updateUserLastName(evt) {
+  await users.save({ ...$userFetch?.data, lastName: evt.target.value })
+  await userFetch.refresh()
+}
+
async function toggleFlag(flagName, detail) {
toggleDisabled = true
await users.save({ ...$userFetch?.data, [flagName]: { global: detail } })
@@ -113,11 +123,19 @@
</div>
<div class="field">
<Label size="L">First name</Label>
-<Input disabled thin value={$userFetch?.data?.firstName} />
+<Input
+  thin
+  value={$userFetch?.data?.firstName}
+  on:blur={updateUserFirstName}
+/>
</div>
<div class="field">
<Label size="L">Last name</Label>
-<Input disabled thin value={$userFetch?.data?.lastName} />
+<Input
+  thin
+  value={$userFetch?.data?.lastName}
+  on:blur={updateUserLastName}
+/>
</div>
<!-- don't let a user remove the privileges that let them be here -->
{#if userId !== $auth.user._id}
@@ -9,7 +9,7 @@
const dispatch = createEventDispatcher()

const roles = app.roles
-let options = roles.map(role => role._id)
+let options = roles.map(role => ({ value: role._id, label: role.name }))
let selectedRole = user?.roles?.[app?._id]

async function updateUserRoles() {
@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
-"version": "0.9.69",
+"version": "0.9.70",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
-"version": "0.9.69",
+"version": "0.9.70",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -18,9 +18,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
-"@budibase/bbui": "^0.9.69",
-"@budibase/standard-components": "^0.9.69",
-"@budibase/string-templates": "^0.9.69",
+"@budibase/bbui": "^0.9.70",
+"@budibase/standard-components": "^0.9.70",
+"@budibase/string-templates": "^0.9.70",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"
@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
-"version": "0.9.69",
+"version": "0.9.70",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@@ -59,9 +59,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
-"@budibase/auth": "^0.9.69",
-"@budibase/client": "^0.9.69",
-"@budibase/string-templates": "^0.9.69",
+"@budibase/auth": "^0.9.70",
+"@budibase/client": "^0.9.70",
+"@budibase/string-templates": "^0.9.70",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
@@ -114,7 +114,7 @@
"devDependencies": {
"@babel/core": "^7.14.3",
"@babel/preset-env": "^7.14.4",
-"@budibase/standard-components": "^0.9.69",
+"@budibase/standard-components": "^0.9.70",
"@jest/test-sequencer": "^24.8.0",
"@types/bull": "^3.15.1",
"@types/jest": "^26.0.23",
@@ -15,3 +15,28 @@ CREATE TABLE Tasks (
FOREIGN KEY(PersonID)
REFERENCES Persons(PersonID)
);
+CREATE TABLE Products (
+  ProductID INT NOT NULL PRIMARY KEY,
+  ProductName varchar(255)
+);
+CREATE TABLE Products_Tasks (
+  ProductID INT NOT NULL,
+  TaskID INT NOT NULL,
+  CONSTRAINT fkProducts
+    FOREIGN KEY(ProductID)
+    REFERENCES Products(ProductID),
+  CONSTRAINT fkTasks
+    FOREIGN KEY(TaskID)
+    REFERENCES Tasks(TaskID),
+  PRIMARY KEY (ProductID, TaskID)
+);
+INSERT INTO Persons (PersonID, FirstName, LastName, Address, City) VALUES (1, 'Mike', 'Hughes', '123 Fake Street', 'Belfast');
+INSERT INTO Tasks (TaskID, PersonID, TaskName) VALUES (1, 1, 'assembling');
+INSERT INTO Tasks (TaskID, PersonID, TaskName) VALUES (2, 1, 'processing');
+INSERT INTO Products (ProductID, ProductName) VALUES (1, 'Computers');
+INSERT INTO Products (ProductID, ProductName) VALUES (2, 'Laptops');
+INSERT INTO Products (ProductID, ProductName) VALUES (3, 'Chairs');
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2);
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
@@ -48,7 +48,7 @@ exports.buildSchemaFromDb = async function (ctx) {

// Connect to the DB and build the schema
const connector = new Connector(datasource.config)
-await connector.buildSchema(datasource._id)
+await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables

const response = await db.post(datasource)
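Passing the existing datasource.entities into buildSchema is what lets a connector re-fetch tables without losing user-configured metadata. A hedged sketch of that idea, using a hypothetical copyExistingPropsOver helper rather than the real connector code:

// Hypothetical helper: keep display columns and link fields the user already
// configured when a table definition is re-read from the database.
function copyExistingPropsOver(tableName, newTables, oldTables = {}) {
  const existing = oldTables[tableName]
  if (existing) {
    newTables[tableName].primaryDisplay = existing.primaryDisplay
    for (let [name, field] of Object.entries(existing.schema || {})) {
      if (field.type === "link") {
        newTables[tableName].schema[name] = field
      }
    }
  }
  return newTables
}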
@ -0,0 +1,525 @@
|
|||
import {
|
||||
Operation,
|
||||
SearchFilters,
|
||||
SortJson,
|
||||
PaginationJson,
|
||||
RelationshipsJson,
|
||||
} from "../../../definitions/datasource"
|
||||
import {
|
||||
Row,
|
||||
Table,
|
||||
FieldSchema,
|
||||
Datasource,
|
||||
} from "../../../definitions/common"
|
||||
import {
|
||||
breakRowIdField,
|
||||
generateRowIdField,
|
||||
} from "../../../integrations/utils"
|
||||
|
||||
interface ManyRelationship {
|
||||
tableId?: string
|
||||
id?: string
|
||||
isUpdate?: boolean
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface RunConfig {
|
||||
id: string
|
||||
row: Row
|
||||
filters: SearchFilters
|
||||
sort: SortJson
|
||||
paginate: PaginationJson
|
||||
}
|
||||
|
||||
module External {
|
||||
const { makeExternalQuery } = require("./utils")
|
||||
const { DataSourceOperation, FieldTypes } = require("../../../constants")
|
||||
const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
|
||||
const { processObjectSync } = require("@budibase/string-templates")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const { isEqual } = require("lodash")
|
||||
const CouchDB = require("../../../db")
|
||||
|
||||
function buildFilters(
|
||||
id: string | undefined,
|
||||
filters: SearchFilters,
|
||||
table: Table
|
||||
) {
|
||||
const primary = table.primary
|
||||
// if passed in array need to copy for shifting etc
|
||||
let idCopy = cloneDeep(id)
|
||||
if (filters) {
|
||||
// need to map over the filters and make sure the _id field isn't present
|
||||
for (let filter of Object.values(filters)) {
|
||||
if (filter._id && primary) {
|
||||
const parts = breakRowIdField(filter._id)
|
||||
for (let field of primary) {
|
||||
filter[field] = parts.shift()
|
||||
}
|
||||
}
|
||||
// make sure this field doesn't exist on any filter
|
||||
delete filter._id
|
||||
}
|
||||
}
|
||||
// there is no id, just use the user provided filters
|
||||
if (!idCopy || !table) {
|
||||
return filters
|
||||
}
|
||||
// if used as URL parameter it will have been joined
|
||||
if (!Array.isArray(idCopy)) {
|
||||
idCopy = breakRowIdField(idCopy)
|
||||
}
|
||||
const equal: any = {}
|
||||
if (primary && idCopy) {
|
||||
for (let field of primary) {
|
||||
// work through the ID and get the parts
|
||||
equal[field] = idCopy.shift()
|
||||
}
|
||||
}
|
||||
return {
|
||||
equal,
|
||||
}
|
||||
}
|
||||
|
||||
function generateIdForRow(row: Row, table: Table): string {
|
||||
const primary = table.primary
|
||||
if (!row || !primary) {
|
||||
return ""
|
||||
}
|
||||
// build id array
|
||||
let idParts = []
|
||||
for (let field of primary) {
|
||||
if (row[field]) {
|
||||
idParts.push(row[field])
|
||||
}
|
||||
}
|
||||
if (idParts.length === 0) {
|
||||
return ""
|
||||
}
|
||||
return generateRowIdField(idParts)
|
||||
}
|
||||
|
||||
function getEndpoint(tableId: string | undefined, operation: string) {
|
||||
if (!tableId) {
|
||||
return {}
|
||||
}
|
||||
const { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
return {
|
||||
datasourceId,
|
||||
entityId: tableName,
|
||||
operation,
|
||||
}
|
||||
}
|
||||
|
||||
function basicProcessing(row: Row, table: Table) {
|
||||
const thisRow: { [key: string]: any } = {}
|
||||
// filter the row down to what is actually the row (not joined)
|
||||
for (let fieldName of Object.keys(table.schema)) {
|
||||
thisRow[fieldName] = row[fieldName]
|
||||
}
|
||||
thisRow._id = generateIdForRow(row, table)
|
||||
thisRow.tableId = table._id
|
||||
thisRow._rev = "rev"
|
||||
return thisRow
|
||||
}
|
||||
|
||||
function isMany(field: FieldSchema) {
|
||||
return (
|
||||
field.relationshipType && field.relationshipType.split("-")[0] === "many"
|
||||
)
|
||||
}
|
||||
|
||||
class ExternalRequest {
|
||||
private readonly appId: string
|
||||
private operation: Operation
|
||||
private tableId: string
|
||||
private datasource: Datasource
|
||||
private tables: { [key: string]: Table } = {}
|
||||
|
||||
constructor(
|
||||
appId: string,
|
||||
operation: Operation,
|
||||
tableId: string,
|
||||
datasource: Datasource
|
||||
) {
|
||||
this.appId = appId
|
||||
this.operation = operation
|
||||
this.tableId = tableId
|
||||
this.datasource = datasource
|
||||
if (datasource && datasource.entities) {
|
||||
this.tables = datasource.entities
|
||||
}
|
||||
}
|
||||
|
||||
inputProcessing(row: Row, table: Table) {
|
||||
if (!row) {
|
||||
return { row, manyRelationships: [] }
|
||||
}
|
||||
// we don't really support composite keys for relationships, this is why [0] is used
|
||||
// @ts-ignore
|
||||
const tablePrimary: string = table.primary[0]
|
||||
let newRow: Row = {},
|
||||
manyRelationships: ManyRelationship[] = []
|
||||
for (let [key, field] of Object.entries(table.schema)) {
|
||||
// if set already, or not set just skip it
|
||||
if (!row[key] || newRow[key]) {
|
||||
continue
|
||||
}
|
||||
// if its not a link then just copy it over
|
||||
if (field.type !== FieldTypes.LINK) {
|
||||
newRow[key] = row[key]
|
||||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
// table has to exist for many to many
|
||||
if (!this.tables[linkTableName]) {
|
||||
continue
|
||||
}
|
||||
const linkTable = this.tables[linkTableName]
|
||||
// @ts-ignore
|
||||
const linkTablePrimary = linkTable.primary[0]
|
||||
if (!isMany(field)) {
|
||||
newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(
|
||||
row[key][0]
|
||||
)[0]
|
||||
} else {
|
||||
// we're not inserting a doc, will be a bunch of update calls
|
||||
const isUpdate = !field.through
|
||||
const thisKey: string = isUpdate ? "id" : linkTablePrimary
|
||||
// @ts-ignore
|
||||
const otherKey: string = isUpdate ? field.foreignKey : tablePrimary
|
||||
row[key].map((relationship: any) => {
|
||||
// we don't really support composite keys for relationships, this is why [0] is used
|
||||
manyRelationships.push({
|
||||
tableId: field.through || field.tableId,
|
||||
isUpdate,
|
||||
[thisKey]: breakRowIdField(relationship)[0],
|
||||
// leave the ID for enrichment later
|
||||
[otherKey]: `{{ literal ${tablePrimary} }}`,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
// we return the relationships that may need to be created in the through table
|
||||
// we do this so that if the ID is generated by the DB it can be inserted
|
||||
// after the fact
|
||||
return { row: newRow, manyRelationships }
|
||||
}
|
||||
|
||||
/**
|
||||
* This iterates through the returned rows and works out what elements of the rows
|
||||
* actually match up to another row (based on primary keys) - this is pretty specific
|
||||
* to SQL and the way that SQL relationships are returned based on joins.
|
||||
*/
|
||||
updateRelationshipColumns(
|
||||
row: Row,
|
||||
rows: { [key: string]: Row },
|
||||
relationships: RelationshipsJson[]
|
||||
) {
|
||||
const columns: { [key: string]: any } = {}
|
||||
for (let relationship of relationships) {
|
||||
const linkedTable = this.tables[relationship.tableName]
|
||||
if (!linkedTable) {
|
||||
continue
|
||||
}
|
||||
let linked = basicProcessing(row, linkedTable)
|
||||
if (!linked._id) {
|
||||
continue
|
||||
}
|
||||
// if not returning full docs then get the minimal links out
|
||||
const display = linkedTable.primaryDisplay
|
||||
linked = {
|
||||
primaryDisplay: display ? linked[display] : undefined,
|
||||
_id: linked._id,
|
||||
}
|
||||
columns[relationship.column] = linked
|
||||
}
|
||||
for (let [column, related] of Object.entries(columns)) {
|
||||
if (!row._id) {
|
||||
continue
|
||||
}
|
||||
const rowId: string = row._id
|
||||
if (!Array.isArray(rows[rowId][column])) {
|
||||
rows[rowId][column] = []
|
||||
}
|
||||
// make sure relationship hasn't been found already
|
||||
if (
|
||||
!rows[rowId][column].find(
|
||||
(relation: Row) => relation._id === related._id
|
||||
)
|
||||
) {
|
||||
rows[rowId][column].push(related)
|
||||
}
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
outputProcessing(
|
||||
rows: Row[],
|
||||
table: Table,
|
||||
relationships: RelationshipsJson[]
|
||||
) {
|
||||
if (rows[0].read === true) {
|
||||
return []
|
||||
}
|
||||
let finalRows: { [key: string]: Row } = {}
|
||||
for (let row of rows) {
|
||||
const rowId = generateIdForRow(row, table)
|
||||
row._id = rowId
|
||||
// this is a relationship of some sort
|
||||
if (finalRows[rowId]) {
|
||||
finalRows = this.updateRelationshipColumns(
|
||||
row,
|
||||
finalRows,
|
||||
relationships
|
||||
)
|
||||
continue
|
||||
}
|
||||
const thisRow = basicProcessing(row, table)
|
||||
finalRows[thisRow._id] = thisRow
|
||||
// do this at end once its been added to the final rows
|
||||
finalRows = this.updateRelationshipColumns(
|
||||
row,
|
||||
finalRows,
|
||||
relationships
|
||||
)
|
||||
}
|
||||
return Object.values(finalRows)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the list of relationship JSON structures based on the columns in the table,
|
||||
* this will be used by the underlying library to build whatever relationship mechanism
|
||||
* it has (e.g. SQL joins).
|
||||
*/
|
||||
buildRelationships(table: Table): RelationshipsJson[] {
|
||||
const relationships = []
|
||||
for (let [fieldName, field] of Object.entries(table.schema)) {
|
||||
if (field.type !== FieldTypes.LINK) {
|
||||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
// no table to link to, this is not a valid relationships
|
||||
if (!this.tables[linkTableName]) {
|
||||
continue
|
||||
}
|
||||
const linkTable = this.tables[linkTableName]
|
||||
if (!table.primary || !linkTable.primary) {
|
||||
continue
|
||||
}
|
||||
const definition = {
|
||||
// if no foreign key specified then use the name of the field in other table
|
||||
from: field.foreignKey || table.primary[0],
|
||||
to: field.fieldName,
|
||||
tableName: linkTableName,
|
||||
through: undefined,
|
||||
// need to specify where to put this back into
|
||||
column: fieldName,
|
||||
}
|
||||
if (field.through) {
|
||||
const { tableName: throughTableName } = breakExternalTableId(
|
||||
field.through
|
||||
)
|
||||
definition.through = throughTableName
|
||||
// don't support composite keys for relationships
|
||||
definition.from = table.primary[0]
|
||||
definition.to = linkTable.primary[0]
|
||||
}
|
||||
relationships.push(definition)
|
||||
}
|
||||
return relationships
|
||||
}
|
||||
|
||||
/**
|
||||
* This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
|
||||
* information.
|
||||
*/
|
||||
async lookup(
|
||||
row: Row,
|
||||
relationship: ManyRelationship,
|
||||
cache: { [key: string]: Row[] } = {}
|
||||
) {
|
||||
const { tableId, isUpdate, id, ...rest } = relationship
|
||||
const { tableName } = breakExternalTableId(tableId)
|
||||
const table = this.tables[tableName]
|
||||
if (isUpdate) {
|
||||
return { rows: [], table }
|
||||
}
|
||||
// if not updating need to make sure we have a list of all possible options
|
||||
let fullKey: string = tableId + "/",
|
||||
rowKey: string = ""
|
||||
for (let key of Object.keys(rest)) {
|
||||
if (row[key]) {
|
||||
fullKey += key
|
||||
rowKey = key
|
||||
}
|
||||
}
|
||||
if (cache[fullKey] == null) {
|
||||
cache[fullKey] = await makeExternalQuery(this.appId, {
|
||||
endpoint: getEndpoint(tableId, DataSourceOperation.READ),
|
||||
filters: {
|
||||
equal: {
|
||||
[rowKey]: row[rowKey],
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
return { rows: cache[fullKey], table }
|
||||
}
|
||||
|
||||
/**
|
||||
* Once a row has been written we may need to update a many field, e.g. updating foreign keys
|
||||
* in a bunch of rows in another table, or inserting/deleting rows from a junction table (many to many).
|
||||
* This is quite a complex process and is handled by this function, there are a few things going on here:
|
||||
* 1. If updating foreign keys its relatively simple, just create a filter for the row that needs updated
|
||||
* and write the various components.
|
||||
* 2. If junction table, then we lookup what exists already, write what doesn't exist, work out what
|
||||
* isn't supposed to exist anymore and delete those. This is better than the usual method of delete them
|
||||
* all and then re-create, as theres no chance of losing data (e.g. delete succeed, but write fail).
|
||||
*/
|
||||
async handleManyRelationships(row: Row, relationships: ManyRelationship[]) {
|
||||
const { appId } = this
|
||||
if (relationships.length === 0) {
|
||||
return
|
||||
}
|
||||
// if we're creating (in a through table) need to wipe the existing ones first
|
||||
const promises = []
|
||||
const cache: { [key: string]: Row[] } = {}
|
||||
for (let relationship of relationships) {
|
||||
const { tableId, isUpdate, id, ...rest } = relationship
|
||||
const body = processObjectSync(rest, row)
|
||||
const { table, rows } = await this.lookup(row, relationship, cache)
|
||||
const found = rows.find(row => isEqual(body, row))
|
||||
const operation = isUpdate
|
||||
? DataSourceOperation.UPDATE
|
||||
: DataSourceOperation.CREATE
|
||||
if (!found) {
|
||||
promises.push(
|
||||
makeExternalQuery(appId, {
|
||||
endpoint: getEndpoint(tableId, operation),
|
||||
// if we're doing many relationships then we're writing, only one response
|
||||
body,
|
||||
filters: buildFilters(id, {}, table),
|
||||
})
|
||||
)
|
||||
} else {
|
||||
// remove the relationship from the rows
|
||||
rows.splice(rows.indexOf(found), 1)
|
||||
}
|
||||
}
|
||||
// finally if creating, cleanup any rows that aren't supposed to be here
|
||||
for (let [key, rows] of Object.entries(cache)) {
|
||||
// @ts-ignore
|
||||
const tableId: string = key.split("/").shift()
|
||||
const { tableName } = breakExternalTableId(tableId)
|
||||
const table = this.tables[tableName]
|
||||
for (let row of rows) {
|
||||
promises.push(
|
||||
makeExternalQuery(this.appId, {
|
||||
endpoint: getEndpoint(tableId, DataSourceOperation.DELETE),
|
||||
filters: buildFilters(generateIdForRow(row, table), {}, table),
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
|
||||
* you have column overlap in relationships, e.g. we join a few different tables and they all have the
|
||||
* concept of an ID, but for some of them it will be null (if they say don't have a relationship).
|
||||
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us
|
||||
* is more performant and has the added benefit of protecting against this scenario.
|
||||
*/
|
||||
buildFields(table: Table) {
|
||||
function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
|
||||
return Object.entries(table.schema)
|
||||
.filter(
|
||||
column =>
|
||||
column[1].type !== FieldTypes.LINK &&
|
||||
!existing.find((field: string) => field.includes(column[0]))
|
||||
)
|
||||
.map(column => `${table.name}.${column[0]}`)
|
||||
}
|
||||
let fields = extractNonLinkFieldNames(table)
|
||||
for (let field of Object.values(table.schema)) {
|
||||
if (field.type !== FieldTypes.LINK) {
|
||||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
const linkTable = this.tables[linkTableName]
|
||||
if (linkTable) {
|
||||
const linkedFields = extractNonLinkFieldNames(linkTable, fields)
|
||||
fields = fields.concat(linkedFields)
|
||||
}
|
||||
}
|
||||
return fields
|
||||
}
|
||||
|
||||
async run({ id, row, filters, sort, paginate }: RunConfig) {
|
||||
const { appId, operation, tableId } = this
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
if (!this.datasource) {
|
||||
const db = new CouchDB(appId)
|
||||
this.datasource = await db.get(datasourceId)
|
||||
if (!this.datasource || !this.datasource.entities) {
|
||||
throw "No tables found, fetch tables before query."
|
||||
}
|
||||
this.tables = this.datasource.entities
|
||||
}
|
||||
const table = this.tables[tableName]
|
||||
let isSql = isSQL(this.datasource)
|
||||
if (!table) {
|
||||
throw `Unable to process query, table "${tableName}" not defined.`
|
||||
}
|
||||
// clean up row on ingress using schema
|
||||
filters = buildFilters(id, filters, table)
|
||||
const relationships = this.buildRelationships(table)
|
||||
const processed = this.inputProcessing(row, table)
|
||||
row = processed.row
|
||||
if (
|
||||
operation === DataSourceOperation.DELETE &&
|
||||
(filters == null || Object.keys(filters).length === 0)
|
||||
) {
|
||||
throw "Deletion must be filtered"
|
||||
}
|
||||
let json = {
|
||||
endpoint: {
|
||||
datasourceId,
|
||||
entityId: tableName,
|
||||
operation,
|
||||
},
|
||||
resource: {
|
||||
// have to specify the fields to avoid column overlap (for SQL)
|
||||
fields: isSql ? this.buildFields(table) : [],
|
||||
},
|
||||
filters,
|
||||
sort,
|
||||
paginate,
|
||||
relationships,
|
||||
body: row,
|
||||
// pass an id filter into extra, purely for mysql/returning
|
||||
extra: {
|
||||
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
|
||||
},
|
||||
}
|
||||
// can't really use response right now
|
||||
const response = await makeExternalQuery(appId, json)
|
||||
// handle many to many relationships now if we know the ID (could be auto increment)
|
||||
if (processed.manyRelationships) {
|
||||
await this.handleManyRelationships(
|
||||
response[0],
|
||||
processed.manyRelationships
|
||||
)
|
||||
}
|
||||
const output = this.outputProcessing(response, table, relationships)
|
||||
// if reading it'll just be an array of rows, return whole thing
|
||||
return operation === DataSourceOperation.READ && Array.isArray(response)
|
||||
? output
|
||||
: { row: output[0], table }
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ExternalRequest
|
||||
}
|
|
@ -1,136 +1,19 @@
|
|||
const { makeExternalQuery } = require("./utils")
|
||||
const { DataSourceOperation, SortDirection } = require("../../../constants")
|
||||
const { getExternalTable } = require("../table/utils")
|
||||
const {
|
||||
DataSourceOperation,
|
||||
SortDirection,
|
||||
FieldTypes,
|
||||
} = require("../../../constants")
|
||||
const {
|
||||
breakExternalTableId,
|
||||
generateRowIdField,
|
||||
breakRowIdField,
|
||||
} = require("../../../integrations/utils")
|
||||
const { cloneDeep } = require("lodash/fp")
|
||||
const ExternalRequest = require("./ExternalRequest")
|
||||
const CouchDB = require("../../../db")
|
||||
|
||||
function inputProcessing(row, table) {
|
||||
if (!row) {
|
||||
return row
|
||||
}
|
||||
let newRow = {}
|
||||
for (let key of Object.keys(table.schema)) {
|
||||
// currently excludes empty strings
|
||||
if (row[key]) {
|
||||
newRow[key] = row[key]
|
||||
}
|
||||
}
|
||||
return newRow
|
||||
}
|
||||
|
||||
function generateIdForRow(row, table) {
|
||||
if (!row) {
|
||||
return
|
||||
}
|
||||
const primary = table.primary
|
||||
// build id array
|
||||
let idParts = []
|
||||
for (let field of primary) {
|
||||
idParts.push(row[field])
|
||||
}
|
||||
return generateRowIdField(idParts)
|
||||
}
|
||||
|
||||
function outputProcessing(rows, table) {
|
||||
// if no rows this is what is returned? Might be PG only
|
||||
if (rows[0].read === true) {
|
||||
return []
|
||||
}
|
||||
for (let row of rows) {
|
||||
row._id = generateIdForRow(row, table)
|
||||
row.tableId = table._id
|
||||
row._rev = "rev"
|
||||
}
|
||||
return rows
|
||||
}
|
||||
|
||||
function buildFilters(id, filters, table) {
|
||||
const primary = table.primary
|
||||
// if passed in array need to copy for shifting etc
|
||||
let idCopy = cloneDeep(id)
|
||||
if (filters) {
|
||||
// need to map over the filters and make sure the _id field isn't present
|
||||
for (let filter of Object.values(filters)) {
|
||||
if (filter._id) {
|
||||
const parts = breakRowIdField(filter._id)
|
||||
for (let field of primary) {
|
||||
filter[field] = parts.shift()
|
||||
}
|
||||
}
|
||||
// make sure this field doesn't exist on any filter
|
||||
delete filter._id
|
||||
}
|
||||
}
|
||||
// there is no id, just use the user provided filters
|
||||
if (!idCopy || !table) {
|
||||
return filters
|
||||
}
|
||||
// if used as URL parameter it will have been joined
|
||||
if (typeof idCopy === "string") {
|
||||
idCopy = breakRowIdField(idCopy)
|
||||
}
|
||||
const equal = {}
|
||||
for (let field of primary) {
|
||||
// work through the ID and get the parts
|
||||
equal[field] = idCopy.shift()
|
||||
}
|
||||
return {
|
||||
equal,
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRequest(
|
||||
appId,
|
||||
operation,
|
||||
tableId,
|
||||
{ id, row, filters, sort, paginate } = {}
|
||||
) {
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const table = await getExternalTable(appId, datasourceId, tableName)
|
||||
if (!table) {
|
||||
throw `Unable to process query, table "${tableName}" not defined.`
|
||||
}
|
||||
// clean up row on ingress using schema
|
||||
filters = buildFilters(id, filters, table)
|
||||
row = inputProcessing(row, table)
|
||||
if (
|
||||
operation === DataSourceOperation.DELETE &&
|
||||
(filters == null || Object.keys(filters).length === 0)
|
||||
) {
|
||||
throw "Deletion must be filtered"
|
||||
}
|
||||
let json = {
|
||||
endpoint: {
|
||||
datasourceId,
|
||||
entityId: tableName,
|
||||
operation,
|
||||
},
|
||||
resource: {
|
||||
// not specifying any fields means "*"
|
||||
fields: [],
|
||||
},
|
||||
filters,
|
||||
sort,
|
||||
paginate,
|
||||
body: row,
|
||||
// pass an id filter into extra, purely for mysql/returning
|
||||
extra: {
|
||||
idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
|
||||
},
|
||||
}
|
||||
// can't really use response right now
|
||||
const response = await makeExternalQuery(appId, json)
|
||||
// we searched for rows in someway
|
||||
if (operation === DataSourceOperation.READ && Array.isArray(response)) {
|
||||
return outputProcessing(response, table)
|
||||
} else {
|
||||
row = outputProcessing(response, table)[0]
|
||||
return { row, table }
|
||||
}
|
||||
async function handleRequest(appId, operation, tableId, opts = {}) {
|
||||
return new ExternalRequest(appId, operation, tableId, opts.datasource).run(
|
||||
opts
|
||||
)
|
||||
}
|
||||
|
||||
exports.patch = async ctx => {
|
||||
|
@ -172,9 +55,15 @@ exports.find = async ctx => {
|
|||
const appId = ctx.appId
|
||||
const id = ctx.params.rowId
|
||||
const tableId = ctx.params.tableId
|
||||
return handleRequest(appId, DataSourceOperation.READ, tableId, {
|
||||
const response = await handleRequest(
|
||||
appId,
|
||||
DataSourceOperation.READ,
|
||||
tableId,
|
||||
{
|
||||
id,
|
||||
})
|
||||
}
|
||||
)
|
||||
return response ? response[0] : response
|
||||
}
|
||||
|
||||
exports.destroy = async ctx => {
|
||||
|
@ -270,7 +159,56 @@ exports.validate = async () => {
|
|||
return { valid: true }
|
||||
}
|
||||
|
||||
exports.fetchEnrichedRow = async () => {
|
||||
// TODO: How does this work
|
||||
throw "Not Implemented"
|
||||
exports.fetchEnrichedRow = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const id = ctx.params.rowId
|
||||
const tableId = ctx.params.tableId
|
||||
const { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const db = new CouchDB(appId)
|
||||
const datasource = await db.get(datasourceId)
|
||||
if (!datasource || !datasource.entities) {
|
||||
ctx.throw(400, "Datasource has not been configured for plus API.")
|
||||
}
|
||||
const tables = datasource.entities
|
||||
const response = await handleRequest(
|
||||
appId,
|
||||
DataSourceOperation.READ,
|
||||
tableId,
|
||||
{
|
||||
id,
|
||||
datasource,
|
||||
}
|
||||
)
|
||||
const table = tables[tableName]
|
||||
const row = response[0]
|
||||
// this seems like a lot of work, but basically we need to dig deeper for the enrich
|
||||
// for a single row, there is probably a better way to do this with some smart multi-layer joins
|
||||
for (let [fieldName, field] of Object.entries(table.schema)) {
|
||||
if (
|
||||
field.type !== FieldTypes.LINK ||
|
||||
!row[fieldName] ||
|
||||
row[fieldName].length === 0
|
||||
) {
|
||||
continue
|
||||
}
|
||||
const links = row[fieldName]
|
||||
const linkedTableId = field.tableId
|
||||
const linkedTable = tables[breakExternalTableId(linkedTableId).tableName]
|
||||
// don't support composite keys right now
|
||||
const linkedIds = links.map(link => breakRowIdField(link._id)[0])
|
||||
row[fieldName] = await handleRequest(
|
||||
appId,
|
||||
DataSourceOperation.READ,
|
||||
linkedTableId,
|
||||
{
|
||||
tables,
|
||||
filters: {
|
||||
oneOf: {
|
||||
[linkedTable.primary]: linkedIds,
|
||||
},
|
||||
},
|
||||
}
|
||||
)
|
||||
}
|
||||
return row
|
||||
}
|
||||
|
|
|
@@ -204,15 +204,18 @@ class TableSaveFunctions {
}
}

-exports.getExternalTable = async (appId, datasourceId, tableName) => {
+exports.getAllExternalTables = async (appId, datasourceId) => {
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
throw "Datasource is not configured fully."
}
-return Object.values(datasource.entities).find(
-entity => entity.name === tableName
-)
+return datasource.entities
}

+exports.getExternalTable = async (appId, datasourceId, tableName) => {
+  const entities = await exports.getAllExternalTables(appId, datasourceId)
+  return entities[tableName]
+}
+
exports.TableSaveFunctions = TableSaveFunctions
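A hedged usage sketch of the split helpers: new code can pull the whole entity map while existing callers keep the single-table lookup (the require path and the "Persons" table name are illustrative only):

// Illustrative only - not part of the diff.
const { getAllExternalTables, getExternalTable } = require("./utils")

async function example(appId, datasourceId) {
  const entities = await getAllExternalTables(appId, datasourceId) // { [tableName]: Table }
  const persons = await getExternalTable(appId, datasourceId, "Persons") // entities["Persons"]
  return { entities, persons }
}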
@@ -94,7 +94,7 @@ describe("/datasources", () => {
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
-expect(pg.queryMock).toHaveBeenCalledWith(`select "name", "age" from "users" where "name" like $1 limit $2`, ["John%", 5000])
+expect(pg.queryMock).toHaveBeenCalledWith(`select "name", "age" from "users" where "users"."name" like $1 limit $2`, ["John%", 5000])
})
})

@@ -26,3 +26,17 @@ export interface Table {
primaryDisplay?: string
sourceId?: string
}
+
+export interface BudibaseAppMetadata {
+  _id: string
+  _rev?: string
+  appId: string
+  type: string
+  version: string
+  componentlibraries: string[]
+  name: string
+  url: string
+  instance: { _id: string }
+  updatedAt: Date
+  createdAt: Date
+}
@ -0,0 +1,100 @@
|
|||
import { SourceNames } from "./datasource"
|
||||
|
||||
interface Base {
|
||||
_id?: string
|
||||
_rev?: string
|
||||
}
|
||||
|
||||
export interface FieldSchema {
|
||||
// TODO: replace with field types enum when done
|
||||
type: string
|
||||
fieldName?: string
|
||||
name: string
|
||||
tableId?: string
|
||||
relationshipType?: string
|
||||
through?: string
|
||||
foreignKey?: string
|
||||
constraints?: {
|
||||
type?: string
|
||||
email?: boolean
|
||||
inclusion?: string[]
|
||||
length?: {
|
||||
minimum?: string | number
|
||||
maximum?: string | number
|
||||
}
|
||||
presence?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
export interface TableSchema {
|
||||
[key: string]: FieldSchema
|
||||
}
|
||||
|
||||
export interface Table extends Base {
|
||||
type?: string
|
||||
views?: {}
|
||||
name?: string
|
||||
primary?: string[]
|
||||
schema: TableSchema
|
||||
primaryDisplay?: string
|
||||
sourceId?: string
|
||||
}
|
||||
|
||||
export interface Row extends Base {
|
||||
type?: string
|
||||
tableId?: string
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface JsonSchemaField {
|
||||
properties: {
|
||||
[key: string]: {
|
||||
type: string
|
||||
title: string
|
||||
customType?: string
|
||||
}
|
||||
}
|
||||
required?: string[]
|
||||
}
|
||||
|
||||
export interface AutomationStep {
|
||||
description: string
|
||||
event?: string
|
||||
icon: string
|
||||
id: string
|
||||
inputs: {
|
||||
[key: string]: any
|
||||
}
|
||||
name: string
|
||||
schema: {
|
||||
inputs: JsonSchemaField
|
||||
outputs: JsonSchemaField
|
||||
}
|
||||
stepId: string
|
||||
tagline: string
|
||||
type: string
|
||||
}
|
||||
|
||||
export interface Automation extends Base {
|
||||
name: string
|
||||
type: string
|
||||
appId?: string
|
||||
definition: {
|
||||
steps: AutomationStep[]
|
||||
trigger?: AutomationStep
|
||||
}
|
||||
}
|
||||
|
||||
export interface Datasource extends Base {
|
||||
type: string
|
||||
name: string
|
||||
source: SourceNames
|
||||
// the config is defined by the schema
|
||||
config: {
|
||||
[key: string]: string | number | boolean
|
||||
}
|
||||
plus: boolean
|
||||
entities?: {
|
||||
[key: string]: Table
|
||||
}
|
||||
}
|
|
@@ -26,6 +26,20 @@ export enum DatasourceFieldTypes {
JSON = "json",
}

+export enum SourceNames {
+  POSTGRES = "POSTGRES",
+  DYNAMODB = "DYNAMODB",
+  MONGODB = "MONGODB",
+  ELASTICSEARCH = "ELASTICSEARCH",
+  COUCHDB = "COUCHDB",
+  SQL_SERVER = "SQL_SERVER",
+  S3 = "S3",
+  AIRTABLE = "AIRTABLE",
+  MYSQL = "MYSQL",
+  ARANGODB = "ARANGODB",
+  REST = "REST",
+}
+
export interface QueryDefinition {
type: QueryTypes
displayName?: string
@@ -47,7 +61,7 @@
}

export interface SearchFilters {
-allOr: boolean
+allOr?: boolean
string?: {
[key: string]: string
}
@@ -72,6 +86,26 @@
notEmpty?: {
[key: string]: any
}
+oneOf?: {
+  [key: string]: any[]
+}
}

+export interface SortJson {
+  [key: string]: SortDirection
+}
+
+export interface PaginationJson {
+  limit: number
+  page: string | number
+}
+
+export interface RelationshipsJson {
+  through?: string
+  from?: string
+  to?: string
+  tableName: string
+  column: string
+}
+
export interface QueryJson {
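For example, a filters value using the new optional allOr and oneOf shapes could look like this (the field names are made up):

// Illustrative SearchFilters value: PersonID must be one of a set,
// FirstName must start with "Mi"; allOr can now simply be omitted.
const filters = {
  oneOf: {
    PersonID: [1, 2, 3],
  },
  string: {
    FirstName: "Mi",
  },
}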
@@ -84,17 +118,13 @@ export interface QueryJson {
fields: string[]
}
filters?: SearchFilters
-sort?: {
-  [key: string]: SortDirection
-}
-paginate?: {
-  limit: number
-  page: string | number
-}
+sort?: SortJson
+paginate?: PaginationJson
body?: object
-extra: {
+extra?: {
idFilter?: SearchFilters
}
+relationships?: RelationshipsJson[]
}

export interface SqlQuery {
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"

module AirtableModule {
const Airtable = require("airtable")
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"

module ArangoModule {
const { Database, aql } = require("arangojs")
@@ -6,18 +6,23 @@ import {
QueryOptions,
SortDirection,
Operation,
-} from "./definitions"
+RelationshipsJson,
+} from "../../definitions/datasource"

+type KnexQuery = Knex.QueryBuilder | Knex
+
// right now we only do filters on the specific table being queried
function addFilters(
-query: any,
+tableName: string,
+query: KnexQuery,
filters: SearchFilters | undefined
-): Knex.QueryBuilder {
+): KnexQuery {
function iterate(
structure: { [key: string]: any },
fn: (key: string, value: any) => void
) {
for (let [key, value] of Object.entries(structure)) {
-fn(key, value)
+fn(`${tableName}.${key}`, value)
}
}
if (!filters) {
@@ -25,6 +30,12 @@ function addFilters(
}
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
+if (filters.oneOf) {
+  iterate(filters.oneOf, (key, array) => {
+    const fnc = allOr ? "orWhereIn" : "whereIn"
+    query = query[fnc](key, array)
+  })
+}
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
@@ -67,9 +78,47 @@
return query
}

-function buildCreate(knex: Knex, json: QueryJson, opts: QueryOptions) {
+function addRelationships(
+  query: KnexQuery,
+  fromTable: string,
+  relationships: RelationshipsJson[] | undefined
+): KnexQuery {
+  if (!relationships) {
+    return query
+  }
+  for (let relationship of relationships) {
+    const from = relationship.from,
+      to = relationship.to,
+      toTable = relationship.tableName
+    if (!relationship.through) {
+      // @ts-ignore
+      query = query.leftJoin(
+        toTable,
+        `${fromTable}.${from}`,
+        `${relationship.tableName}.${to}`
+      )
+    } else {
+      const throughTable = relationship.through
+      query = query
+        // @ts-ignore
+        .leftJoin(
+          throughTable,
+          `${fromTable}.${from}`,
+          `${throughTable}.${from}`
+        )
+        .leftJoin(toTable, `${toTable}.${to}`, `${throughTable}.${to}`)
+    }
+  }
+  return query
+}
+
+function buildCreate(
+  knex: Knex,
+  json: QueryJson,
+  opts: QueryOptions
+): KnexQuery {
const { endpoint, body } = json
-let query = knex(endpoint.entityId)
+let query: KnexQuery = knex(endpoint.entityId)
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(body)
@ -78,9 +127,10 @@ function buildCreate(knex: Knex, json: QueryJson, opts: QueryOptions) {
|
|||
}
|
||||
}
|
||||
|
||||
function buildRead(knex: Knex, json: QueryJson, limit: number) {
|
||||
let { endpoint, resource, filters, sort, paginate } = json
|
||||
let query: Knex.QueryBuilder = knex(endpoint.entityId)
|
||||
function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
|
||||
let { endpoint, resource, filters, sort, paginate, relationships } = json
|
||||
const tableName = endpoint.entityId
|
||||
let query: KnexQuery = knex(tableName)
|
||||
// select all if not specified
|
||||
if (!resource) {
|
||||
resource = { fields: [] }
|
||||
|
@ -92,7 +142,9 @@ function buildRead(knex: Knex, json: QueryJson, limit: number) {
|
|||
query = query.select("*")
|
||||
}
|
||||
// handle where
|
||||
query = addFilters(query, filters)
|
||||
query = addFilters(tableName, query, filters)
|
||||
// handle join
|
||||
query = addRelationships(query, tableName, relationships)
|
||||
// handle sorting
|
||||
if (sort) {
|
||||
for (let [key, value] of Object.entries(sort)) {
|
||||
|
@ -114,10 +166,14 @@ function buildRead(knex: Knex, json: QueryJson, limit: number) {
|
|||
return query
|
||||
}
|
||||
|
||||
function buildUpdate(knex: Knex, json: QueryJson, opts: QueryOptions) {
|
||||
function buildUpdate(
|
||||
knex: Knex,
|
||||
json: QueryJson,
|
||||
opts: QueryOptions
|
||||
): KnexQuery {
|
||||
const { endpoint, body, filters } = json
|
||||
let query = knex(endpoint.entityId)
|
||||
query = addFilters(query, filters)
|
||||
let query: KnexQuery = knex(endpoint.entityId)
|
||||
query = addFilters(endpoint.entityId, query, filters)
|
||||
// mysql can't use returning
|
||||
if (opts.disableReturning) {
|
||||
return query.update(body)
|
||||
|
@ -126,10 +182,14 @@ function buildUpdate(knex: Knex, json: QueryJson, opts: QueryOptions) {
|
|||
}
|
||||
}
|
||||
|
||||
function buildDelete(knex: Knex, json: QueryJson, opts: QueryOptions) {
|
||||
function buildDelete(
|
||||
knex: Knex,
|
||||
json: QueryJson,
|
||||
opts: QueryOptions
|
||||
): KnexQuery {
|
||||
const { endpoint, filters } = json
|
||||
let query = knex(endpoint.entityId)
|
||||
query = addFilters(query, filters)
|
||||
let query: KnexQuery = knex(endpoint.entityId)
|
||||
query = addFilters(endpoint.entityId, query, filters)
|
||||
// mysql can't use returning
|
||||
if (opts.disableReturning) {
|
||||
return query.delete()
|
||||
|
@ -180,6 +240,8 @@ class SqlQueryBuilder {
|
|||
default:
|
||||
throw `Operation type is not supported by SQL query builder`
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
return query.toSQL().toNative()
|
||||
}
|
||||
}
|
||||
|
|
|
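To make the new behaviour concrete, here is a small, self-contained knex sketch of what the updated read path (addFilters plus addRelationships) roughly produces for a table with one plain relationship. The table and column names are invented and the SQL in the comment is approximate; none of this is part of the commit.

// Hypothetical sketch, not part of the commit.
import knex from "knex"

const db = knex({ client: "pg" })

const query = db("products")
  .select("*")
  // filters are now qualified with the source table name
  .where("products.name", "like", "John%")
  // a RelationshipsJson entry without `through` becomes a single left join
  .leftJoin("categories", "products.category_id", "categories.id")
  .limit(10)

console.log(query.toSQL().toNative())
// roughly: select * from "products"
//   left join "categories" on "products"."category_id" = "categories"."id"
//   where "products"."name" like $1 limit $2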
@@ -2,7 +2,7 @@ import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "./base/definitions"
} from "../definitions/datasource"

module CouchDBModule {
  const PouchDB = require("pouchdb")

@@ -2,7 +2,7 @@ import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "./base/definitions"
} from "../definitions/datasource"

module DynamoModule {
  const AWS = require("aws-sdk")

@@ -2,7 +2,7 @@ import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "./base/definitions"
} from "../definitions/datasource"

module ElasticsearchModule {
  const { Client } = require("@elastic/elasticsearch")
@@ -9,33 +9,34 @@ const airtable = require("./airtable")
const mysql = require("./mysql")
const arangodb = require("./arangodb")
const rest = require("./rest")
const { SourceNames } = require("../definitions/datasource")

const DEFINITIONS = {
  POSTGRES: postgres.schema,
  DYNAMODB: dynamodb.schema,
  MONGODB: mongodb.schema,
  ELASTICSEARCH: elasticsearch.schema,
  COUCHDB: couchdb.schema,
  SQL_SERVER: sqlServer.schema,
  S3: s3.schema,
  AIRTABLE: airtable.schema,
  MYSQL: mysql.schema,
  ARANGODB: arangodb.schema,
  REST: rest.schema,
  [SourceNames.POSTGRES]: postgres.schema,
  [SourceNames.DYNAMODB]: dynamodb.schema,
  [SourceNames.MONGODB]: mongodb.schema,
  [SourceNames.ELASTICSEARCH]: elasticsearch.schema,
  [SourceNames.COUCHDB]: couchdb.schema,
  [SourceNames.SQL_SERVER]: sqlServer.schema,
  [SourceNames.S3]: s3.schema,
  [SourceNames.AIRTABLE]: airtable.schema,
  [SourceNames.MYSQL]: mysql.schema,
  [SourceNames.ARANGODB]: arangodb.schema,
  [SourceNames.REST]: rest.schema,
}

const INTEGRATIONS = {
  POSTGRES: postgres.integration,
  DYNAMODB: dynamodb.integration,
  MONGODB: mongodb.integration,
  ELASTICSEARCH: elasticsearch.integration,
  COUCHDB: couchdb.integration,
  S3: s3.integration,
  SQL_SERVER: sqlServer.integration,
  AIRTABLE: airtable.integration,
  MYSQL: mysql.integration,
  ARANGODB: arangodb.integration,
  REST: rest.integration,
  [SourceNames.POSTGRES]: postgres.integration,
  [SourceNames.DYNAMODB]: dynamodb.integration,
  [SourceNames.MONGODB]: mongodb.integration,
  [SourceNames.ELASTICSEARCH]: elasticsearch.integration,
  [SourceNames.COUCHDB]: couchdb.integration,
  [SourceNames.SQL_SERVER]: sqlServer.integration,
  [SourceNames.S3]: s3.integration,
  [SourceNames.AIRTABLE]: airtable.integration,
  [SourceNames.MYSQL]: mysql.integration,
  [SourceNames.ARANGODB]: arangodb.integration,
  [SourceNames.REST]: rest.integration,
}

module.exports = {
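A brief, hypothetical usage sketch (not part of this commit) of why keying both maps by SourceNames is convenient: a stored datasource document's `source` field can index the maps directly. The document shape and the constructor call below are assumptions.

// Hypothetical, illustrative only.
const datasource = { source: SourceNames.POSTGRES, config: { host: "localhost" } }
const schema = DEFINITIONS[datasource.source]
const IntegrationClass = INTEGRATIONS[datasource.source]
const connector = new IntegrationClass(datasource.config) // constructor signature assumed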
@@ -4,7 +4,7 @@ import {
  QueryTypes,
  QueryJson,
  SqlQuery,
} from "./base/definitions"
} from "../definitions/datasource"
import { getSqlQuery } from "./utils"

module MSSQLModule {

@@ -2,7 +2,7 @@ import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "./base/definitions"
} from "../definitions/datasource"

module MongoDBModule {
  const { MongoClient } = require("mongodb")
@@ -5,7 +5,8 @@ import {
  Operation,
  QueryJson,
  SqlQuery,
} from "./base/definitions"
} from "../definitions/datasource"
import { Table, TableSchema } from "../definitions/common"
import { getSqlQuery } from "./utils"

module MySQLModule {

@@ -139,7 +140,7 @@ module MySQLModule {
    }

    async buildSchema(datasourceId: string) {
      const tables: any = {}
      const tables: { [key: string]: Table } = {}
      const database = this.config.database
      this.client.connect()

@@ -154,7 +155,7 @@ module MySQLModule {
      )
      for (let tableName of tableNames) {
        const primaryKeys = []
        const schema: any = {}
        const schema: TableSchema = {}
        const descResp = await internalQuery(
          this.client,
          { sql: `DESCRIBE ${tableName};` },

@@ -166,7 +167,7 @@ module MySQLModule {
          primaryKeys.push(columnName)
        }
        const constraints = {
          required: column.Null !== "YES",
          presence: column.Null !== "YES",
        }
        schema[columnName] = {
          name: columnName,

@@ -212,7 +213,7 @@ module MySQLModule {
    }

    async getReturningRow(json: QueryJson) {
      if (!json.extra.idFilter) {
      if (!json.extra || !json.extra.idFilter) {
        return {}
      }
      const input = this._query({
@@ -4,8 +4,8 @@ import {
  QueryTypes,
  QueryJson,
  SqlQuery,
} from "./base/definitions"
import { Table } from "../constants/definitions"
} from "../definitions/datasource"
import { Table } from "../definitions/common"
import { getSqlQuery } from "./utils"

module PostgresModule {

@@ -134,8 +134,9 @@ module PostgresModule {
    /**
     * Fetches the tables from the postgres database and assigns them to the datasource.
     * @param {*} datasourceId - datasourceId to fetch
     * @param entities - the tables that are to be built
     */
    async buildSchema(datasourceId: string) {
    async buildSchema(datasourceId: string, entities: Record<string, Table>) {
      let tableKeys: { [key: string]: string[] } = {}
      try {
        const primaryKeysResponse = await this.client.query(

@@ -167,6 +168,19 @@ module PostgresModule {
            name: tableName,
            schema: {},
          }

          // add the existing relationships from the entities if they exist, to prevent them from being overridden
          if (entities && entities[tableName]) {
            const existingTableSchema = entities[tableName].schema
            for (let key in existingTableSchema) {
              if (!existingTableSchema.hasOwnProperty(key)) {
                continue
              }
              if (existingTableSchema[key].type === "link") {
                tables[tableName].schema[key] = existingTableSchema[key]
              }
            }
          }
        }

        const type: string = convertType(column.data_type, TYPE_MAP)
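For context (not part of the commit), a small sketch of the effect of the new entities argument: relationship columns a user has already defined survive a schema refresh, while ordinary columns are rebuilt from the database. The table and column shapes below are assumptions for illustration.

// Hypothetical input, illustrative only.
const existingEntities: Record<string, Table> = {
  products: {
    name: "products",
    schema: {
      // user-defined relationship column saved on the table previously
      category: { type: "link", name: "category" },
      // ordinary columns are rebuilt from information_schema and are not copied over
      name: { type: "string", name: "name" },
    },
  },
}
// await postgres.buildSchema(datasourceId, existingEntities)
// -> the refreshed "products" schema keeps the "category" link column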
@@ -2,7 +2,7 @@ import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
} from "./base/definitions"
} from "../definitions/datasource"

module RestModule {
  const fetch = require("node-fetch")

@@ -1,4 +1,4 @@
import { Integration, QueryTypes } from "./base/definitions"
import { Integration, QueryTypes } from "../definitions/datasource"

module S3Module {
  const AWS = require("aws-sdk")
@@ -81,7 +81,7 @@ describe("SQL query builder", () => {
    }))
    expect(query).toEqual({
      bindings: ["John%", limit],
      sql: `select * from "${TABLE_NAME}" where "name" like $1 limit $2`
      sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" like $1 limit $2`
    })
  })

@@ -98,7 +98,7 @@ describe("SQL query builder", () => {
    }))
    expect(query).toEqual({
      bindings: [2, 10, limit],
      sql: `select * from "${TABLE_NAME}" where "age" between $1 and $2 limit $3`
      sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3`
    })
  })

@@ -114,7 +114,7 @@ describe("SQL query builder", () => {
    }))
    expect(query).toEqual({
      bindings: [10, "John", limit],
      sql: `select * from "${TABLE_NAME}" where ("age" = $1) or ("name" = $2) limit $3`
      sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3`
    })
  })

@@ -139,7 +139,7 @@ describe("SQL query builder", () => {
    }))
    expect(query).toEqual({
      bindings: ["John", 1001],
      sql: `update "${TABLE_NAME}" set "name" = $1 where "id" = $2 returning *`
      sql: `update "${TABLE_NAME}" set "name" = $1 where "${TABLE_NAME}"."id" = $2 returning *`
    })
  })

@@ -151,7 +151,7 @@ describe("SQL query builder", () => {
    }))
    expect(query).toEqual({
      bindings: [1001],
      sql: `delete from "${TABLE_NAME}" where "id" = $1 returning *`
      sql: `delete from "${TABLE_NAME}" where "${TABLE_NAME}"."id" = $1 returning *`
    })
  })
@@ -1,4 +1,6 @@
import { SqlQuery } from "./base/definitions"
import { SqlQuery } from "../definitions/datasource"
import { Datasource } from "../definitions/common"
import { SourceNames } from "../definitions/datasource"
const { DocumentTypes, SEPARATOR } = require("../db/utils")
const { FieldTypes } = require("../constants")

@@ -25,15 +27,21 @@ export function generateRowIdField(keyProps: any[] = []) {
    keyProps = [keyProps]
  }
  // this conserves order and types
  // we have to swap the double quotes to single quotes for use in HBS statements
  // when using the literal helper the double quotes can break things
  return encodeURIComponent(JSON.stringify(keyProps).replace(/"/g, "'"))
}

// should always return an array
export function breakRowIdField(_id: string) {
export function breakRowIdField(_id: string): any[] {
  if (!_id) {
    return null
    return []
  }
  return JSON.parse(decodeURIComponent(_id))
  // have to replace on the way back as we swapped out the double quotes
  // when encoding, but JSON can't handle the single quotes
  const decoded: string = decodeURIComponent(_id).replace(/'/g, '"')
  const parsed = JSON.parse(decoded)
  return Array.isArray(parsed) ? parsed : [parsed]
}

export function convertType(type: string, map: { [key: string]: any }) {

@@ -52,3 +60,11 @@ export function getSqlQuery(query: SqlQuery | string): SqlQuery {
    return query
  }
}

export function isSQL(datasource: Datasource): boolean {
  if (!datasource || !datasource.source) {
    return false
  }
  const SQL = [SourceNames.POSTGRES, SourceNames.SQL_SERVER, SourceNames.MYSQL]
  return SQL.indexOf(datasource.source) !== -1
}
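As a quick illustration of the new encode/decode round trip (the key values are invented, not taken from the commit): double quotes are swapped for single quotes so the encoded id is safe inside handlebars literal helpers, and decode always returns an array.

// Hypothetical key values, illustrative only.
const rowId = generateRowIdField([1, "2021-07-21"])
// JSON.stringify -> [1,"2021-07-21"]; quotes swapped and URI-encoded -> %5B1%2C'2021-07-21'%5D
breakRowIdField(rowId) // => [1, "2021-07-21"]
breakRowIdField("")    // => [] (previously returned null)
breakRowIdField(encodeURIComponent("'single-key'")) // => ["single-key"], non-array input is wrapped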
@@ -8,7 +8,8 @@
    "strict": true,
    "noImplicitAny": true,
    "esModuleInterop": true,
    "resolveJsonModule": true
    "resolveJsonModule": true,
    "incremental": true
  },
  "include": [
    "./src/**/*"
@@ -29,11 +29,11 @@
  "keywords": [
    "svelte"
  ],
  "version": "0.9.69",
  "version": "0.9.70",
  "license": "MIT",
  "gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc",
  "dependencies": {
    "@budibase/bbui": "^0.9.69",
    "@budibase/bbui": "^0.9.70",
    "@spectrum-css/card": "^3.0.3",
    "@spectrum-css/link": "^3.1.3",
    "@spectrum-css/page": "^3.0.1",
@@ -1,6 +1,6 @@
{
  "name": "@budibase/string-templates",
  "version": "0.9.69",
  "version": "0.9.70",
  "description": "Handlebars wrapper for Budibase templating.",
  "main": "src/index.cjs",
  "module": "dist/bundle.mjs",
@@ -1,12 +1,7 @@
const handlebars = require("handlebars")
const { registerAll } = require("./helpers/index")
const processors = require("./processors")
const { cloneDeep } = require("lodash/fp")
const {
  removeNull,
  updateContext,
  removeHandlebarsStatements,
} = require("./utilities")
const { removeHandlebarsStatements } = require("./utilities")
const manifest = require("../manifest.json")

const hbsInstance = handlebars.create()
@@ -1,7 +1,7 @@
{
  "name": "@budibase/worker",
  "email": "hi@budibase.com",
  "version": "0.9.69",
  "version": "0.9.70",
  "description": "Budibase background service",
  "main": "src/index.js",
  "repository": {

@@ -21,8 +21,8 @@
  "author": "Budibase",
  "license": "AGPL-3.0-or-later",
  "dependencies": {
    "@budibase/auth": "^0.9.69",
    "@budibase/string-templates": "^0.9.69",
    "@budibase/auth": "^0.9.70",
    "@budibase/string-templates": "^0.9.70",
    "@koa/router": "^8.0.0",
    "aws-sdk": "^2.811.0",
    "bcryptjs": "^2.4.3",