diff --git a/lerna.json b/lerna.json
index f5c82b9cb7..c35a2dbdc9 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "0.9.69",
+ "version": "0.9.70",
"npmClient": "yarn",
"packages": [
"packages/*"
diff --git a/packages/auth/package.json b/packages/auth/package.json
index 9581a4ce1f..650d41b633 100644
--- a/packages/auth/package.json
+++ b/packages/auth/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/auth",
- "version": "0.9.69",
+ "version": "0.9.70",
"description": "Authentication middlewares for budibase builder and apps",
"main": "src/index.js",
"author": "Budibase",
diff --git a/packages/bbui/package.json b/packages/bbui/package.json
index f7bb019803..9b2b2823ef 100644
--- a/packages/bbui/package.json
+++ b/packages/bbui/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
- "version": "0.9.69",
+ "version": "0.9.70",
"license": "AGPL-3.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
diff --git a/packages/builder/package.json b/packages/builder/package.json
index e2712c6bed..c13f883a3d 100644
--- a/packages/builder/package.json
+++ b/packages/builder/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
- "version": "0.9.69",
+ "version": "0.9.70",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@@ -65,10 +65,10 @@
}
},
"dependencies": {
- "@budibase/bbui": "^0.9.69",
- "@budibase/client": "^0.9.69",
+ "@budibase/bbui": "^0.9.70",
+ "@budibase/client": "^0.9.70",
"@budibase/colorpicker": "1.1.2",
- "@budibase/string-templates": "^0.9.69",
+ "@budibase/string-templates": "^0.9.70",
"@sentry/browser": "5.19.1",
"@spectrum-css/page": "^3.0.1",
"@spectrum-css/vars": "^3.0.1",
diff --git a/packages/builder/src/builderStore/store/screenTemplates/rowDetailScreen.js b/packages/builder/src/builderStore/store/screenTemplates/rowDetailScreen.js
index e219e27dba..21be879b14 100644
--- a/packages/builder/src/builderStore/store/screenTemplates/rowDetailScreen.js
+++ b/packages/builder/src/builderStore/store/screenTemplates/rowDetailScreen.js
@@ -86,7 +86,6 @@ const createScreen = table => {
valueType: "Binding",
},
],
- limit: 1,
paginate: false,
})
diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
index 306c1cc90a..7f02b3c215 100644
--- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
+++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte
@@ -53,7 +53,7 @@
let deletion
$: tableOptions = $tables.list.filter(
- table => table._id !== $tables.draft._id
+ table => table._id !== $tables.draft._id && table.type !== "external"
)
$: required = !!field?.constraints?.presence || primaryDisplay
$: uneditable =
@@ -172,11 +172,6 @@
alt: `Many ${table.name} rows → many ${linkTable.name} rows`,
value: RelationshipTypes.MANY_TO_MANY,
},
- {
- name: `One ${linkName} row → many ${thisName} rows`,
- alt: `One ${linkTable.name} rows → many ${table.name} rows`,
- value: RelationshipTypes.ONE_TO_MANY,
- },
{
name: `One ${thisName} row → many ${linkName} rows`,
alt: `One ${table.name} rows → many ${linkTable.name} rows`,
diff --git a/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte b/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte
index 4291738d3d..ac1d82bf67 100644
--- a/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/TableNavigator.svelte
@@ -6,9 +6,14 @@
import EditViewPopover from "./popovers/EditViewPopover.svelte"
import NavItem from "components/common/NavItem.svelte"
+ const alphabetical = (a, b) => (a.name?.toLowerCase() > b.name?.toLowerCase() ? 1 : -1)
+
export let sourceId
$: selectedView = $views.selected && $views.selected.name
+ $: sortedTables = $tables.list
+ .filter(table => table.sourceId === sourceId)
+ .sort(alphabetical)
function selectTable(table) {
tables.select(table)
@@ -33,7 +38,7 @@
{#if $database?._id}
- {#each $tables.list.filter(table => table.sourceId === sourceId) as table, idx}
+ {#each sortedTables as table, idx}
0}
@@ -46,7 +51,7 @@
{/if}
- {#each Object.keys(table.views || {}) as viewName, idx (idx)}
+ {#each [...Object.keys(table.views || {})].sort() as viewName, idx (idx)}
+ import { RelationshipTypes } from "constants/backend"
+ import { Button, Input, ModalContent, Select, Detail } from "@budibase/bbui"
+ import { tables } from "stores/backend"
+ import { uuid } from "builderStore/uuid"
+
+ export let save
+ export let datasource
+ export let plusTables = []
+ export let fromRelationship = {}
+ export let toRelationship = {}
+ export let close
+
+ let originalFromName = fromRelationship.name,
+ originalToName = toRelationship.name
+
+ function isValid(relationship) {
+ if (
+ relationship.relationshipType === RelationshipTypes.MANY_TO_MANY &&
+ !relationship.through
+ ) {
+ return false
+ }
+ return (
+ relationship.name && relationship.tableId && relationship.relationshipType
+ )
+ }
+
+ $: tableOptions = plusTables.map(table => ({
+ label: table.name,
+ value: table._id,
+ }))
+ $: fromTable = plusTables.find(table => table._id === toRelationship?.tableId)
+ $: toTable = plusTables.find(table => table._id === fromRelationship?.tableId)
+ $: through = plusTables.find(table => table._id === fromRelationship?.through)
+ $: valid = toTable && fromTable && isValid(fromRelationship)
+ $: linkTable = through || toTable
+ $: relationshipTypes = [
+ {
+ label: "Many",
+ value: RelationshipTypes.MANY_TO_MANY,
+ },
+ {
+ label: "One",
+ value: RelationshipTypes.MANY_TO_ONE,
+ },
+ ]
+ $: updateRelationshipType(fromRelationship?.relationshipType)
+
+ function updateRelationshipType(fromType) {
+ if (fromType === RelationshipTypes.MANY_TO_MANY) {
+ toRelationship.relationshipType = RelationshipTypes.MANY_TO_MANY
+ } else {
+ toRelationship.relationshipType = RelationshipTypes.MANY_TO_ONE
+ }
+ }
+
+ function buildRelationships() {
+ // if many to many we only need to check the from side
+ const manyToMany =
+ fromRelationship.relationshipType === RelationshipTypes.MANY_TO_MANY
+ // main is simply used to know this is the side the user configured it from
+ const id = uuid()
+ if (!manyToMany) {
+ delete fromRelationship.through
+ delete toRelationship.through
+ }
+ let relateFrom = {
+ ...fromRelationship,
+ type: "link",
+ main: true,
+ _id: id,
+ }
+ let relateTo = {
+ ...toRelationship,
+ type: "link",
+ _id: id,
+ }
+
+ // [0] is because we don't support composite keys for relationships right now
+ if (manyToMany) {
+ relateFrom = {
+ ...relateFrom,
+ through: through._id,
+ fieldName: toTable.primary[0],
+ }
+ relateTo = {
+ ...relateTo,
+ through: through._id,
+ fieldName: fromTable.primary[0],
+ }
+ } else {
+ relateFrom = {
+ ...relateFrom,
+ foreignKey: relateFrom.fieldName,
+ fieldName: fromTable.primary[0],
+ }
+ relateTo = {
+ ...relateTo,
+ relationshipType: RelationshipTypes.ONE_TO_MANY,
+ foreignKey: relateFrom.fieldName,
+ fieldName: fromTable.primary[0],
+ }
+ }
+
+ fromRelationship = relateFrom
+ toRelationship = relateTo
+ }
+
+ // save the relationship onto the datasource
+ async function saveRelationship() {
+ buildRelationships()
+ // source of relationship
+ datasource.entities[fromTable.name].schema[fromRelationship.name] =
+ fromRelationship
+ // save other side of relationship in the other schema
+ datasource.entities[toTable.name].schema[toRelationship.name] =
+ toRelationship
+
+ // If relationship has been renamed
+ if (originalFromName !== fromRelationship.name) {
+ delete datasource.entities[fromTable.name].schema[originalFromName]
+ }
+ if (originalToName !== toRelationship.name) {
+ delete datasource.entities[toTable.name].schema[originalToName]
+ }
+
+ await save()
+ await tables.fetch()
+ }
+
+ async function deleteRelationship() {
+ delete datasource.entities[fromTable.name].schema[fromRelationship.name]
+ delete datasource.entities[toTable.name].schema[toRelationship.name]
+ await save()
+ await tables.fetch()
+ close()
+ }
+
+
+
+
+
+ Tables
+
+
+
+ {#if fromRelationship?.relationshipType === RelationshipTypes.MANY_TO_MANY}
+
+ {:else if toTable}
+
+ {/if}
+
+ Column names
+
+
+
+
+ {#if originalFromName != null}
+
+ {/if}
+
+
+
+
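For context on what buildRelationships() above writes into each table's schema, here is a minimal sketch of the two entries produced for a many-to-many link, using the Products/Tasks/Products_Tasks schema from the init.sql script further down. The table ids, the shared _id and the exact RelationshipTypes string values are placeholders, not values confirmed by this diff.

// Hypothetical output of buildRelationships() when Products is the configured ("from") side.
const fromRelationship = {
  name: "Tasks",                        // column added to the Products schema
  tableId: "<tasks table id>",
  relationshipType: "many-to-many",     // assumed RelationshipTypes.MANY_TO_MANY value
  type: "link",
  main: true,                           // the side the user configured it from
  _id: "<shared relationship id>",      // same uuid() used on both sides
  through: "<products_tasks table id>",
  fieldName: "TaskID",                  // primary key of the opposite table
}

const toRelationship = {
  name: "Products",                     // column added to the Tasks schema
  tableId: "<products table id>",
  relationshipType: "many-to-many",
  type: "link",
  _id: "<shared relationship id>",
  through: "<products_tasks table id>",
  fieldName: "ProductID",
}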
diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/CreateEditRelationship/TableSelect.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/CreateEditRelationship/TableSelect.svelte
new file mode 100644
index 0000000000..5595d5d3ab
--- /dev/null
+++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/CreateEditRelationship/TableSelect.svelte
@@ -0,0 +1,21 @@
+
+
+
diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte
index 5d8e2ca100..b0377d2f27 100644
--- a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte
+++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/index.svelte
@@ -1,16 +1,70 @@
+
+
+
+
+
+
+
+
{#if datasource && integration}
@@ -92,9 +171,18 @@
This datasource can determine tables automatically. Budibase can fetch
@@ -102,18 +190,44 @@
having to write any queries at all.
- {#if datasource.entities}
- {#each Object.keys(datasource.entities) as entity}
-
onClickTable(datasource.entities[entity])}
- >
-
{entity}
-
Primary Key: {datasource.entities[entity].primary}
-
→
-
- {/each}
- {/if}
+ {#each plusTables as table}
+
onClickTable(table)}>
+
{table.name}
+
Primary Key: {table.primary}
+
→
+
+ {/each}
+
+ {#if plusTables?.length !== 0}
+
+
+
+ Tell Budibase how your tables are related to get even smarter
+ features.
+
+ {/if}
+
+ {#each Object.values(relationships) as relationship}
+
+ openRelationshipModal(relationship.from, relationship.to)}
+ >
+
+ {buildRelationshipDisplayString(
+ relationship.from,
+ relationship.to
+ )}
+
+
{relationship.from?.name} to {relationship.to?.name}
+
→
+
+ {/each}
{/if}
@@ -202,4 +316,14 @@
text-overflow: ellipsis;
font-size: var(--font-size-s);
}
+
+ .table-buttons {
+ display: grid;
+ grid-gap: var(--spacing-l);
+ grid-template-columns: 1fr 1fr;
+ }
+
+ .table-buttons div {
+ grid-column-end: -1;
+ }
diff --git a/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/EditDisplayColumnsModal.svelte b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/EditDisplayColumnsModal.svelte
new file mode 100644
index 0000000000..ffb6b3c58e
--- /dev/null
+++ b/packages/builder/src/pages/builder/app/[application]/data/datasource/[selectedDatasource]/modals/EditDisplayColumnsModal.svelte
@@ -0,0 +1,43 @@
+
+
+
+ Select the columns that will be shown when displaying relationships.
+ {#each plusTables as table}
+
+ {/each}
+
diff --git a/packages/builder/src/pages/builder/portal/manage/users/[userId].svelte b/packages/builder/src/pages/builder/portal/manage/users/[userId].svelte
index ac5b569411..912506d0cf 100644
--- a/packages/builder/src/pages/builder/portal/manage/users/[userId].svelte
+++ b/packages/builder/src/pages/builder/portal/manage/users/[userId].svelte
@@ -60,6 +60,16 @@
let toggleDisabled = false
+ async function updateUserFirstName(evt) {
+ await users.save({ ...$userFetch?.data, firstName: evt.target.value })
+ await userFetch.refresh()
+ }
+
+ async function updateUserLastName(evt) {
+ await users.save({ ...$userFetch?.data, lastName: evt.target.value })
+ await userFetch.refresh()
+ }
+
async function toggleFlag(flagName, detail) {
toggleDisabled = true
await users.save({ ...$userFetch?.data, [flagName]: { global: detail } })
@@ -113,11 +123,19 @@
-
+
-
+
{#if userId !== $auth.user._id}
diff --git a/packages/builder/src/pages/builder/portal/manage/users/_components/UpdateRolesModal.svelte b/packages/builder/src/pages/builder/portal/manage/users/_components/UpdateRolesModal.svelte
index 08e4a2ec8b..e881fa37d2 100644
--- a/packages/builder/src/pages/builder/portal/manage/users/_components/UpdateRolesModal.svelte
+++ b/packages/builder/src/pages/builder/portal/manage/users/_components/UpdateRolesModal.svelte
@@ -9,7 +9,7 @@
const dispatch = createEventDispatcher()
const roles = app.roles
- let options = roles.map(role => role._id)
+ let options = roles.map(role => ({ value: role._id, label: role.name }))
let selectedRole = user?.roles?.[app?._id]
async function updateUserRoles() {
diff --git a/packages/cli/package.json b/packages/cli/package.json
index 084c83b5cd..4873ef5ec0 100644
--- a/packages/cli/package.json
+++ b/packages/cli/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
- "version": "0.9.69",
+ "version": "0.9.70",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js",
"bin": {
diff --git a/packages/client/package.json b/packages/client/package.json
index 2ac54c9b7b..917c8c10bd 100644
--- a/packages/client/package.json
+++ b/packages/client/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/client",
- "version": "0.9.69",
+ "version": "0.9.70",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@@ -18,9 +18,9 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
- "@budibase/bbui": "^0.9.69",
- "@budibase/standard-components": "^0.9.69",
- "@budibase/string-templates": "^0.9.69",
+ "@budibase/bbui": "^0.9.70",
+ "@budibase/standard-components": "^0.9.70",
+ "@budibase/string-templates": "^0.9.70",
"regexparam": "^1.3.0",
"shortid": "^2.2.15",
"svelte-spa-router": "^3.0.5"
diff --git a/packages/server/package.json b/packages/server/package.json
index ad758e9bf2..48bbe9d647 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
- "version": "0.9.69",
+ "version": "0.9.70",
"description": "Budibase Web Server",
"main": "src/index.js",
"repository": {
@@ -59,9 +59,9 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.69",
- "@budibase/client": "^0.9.69",
- "@budibase/string-templates": "^0.9.69",
+ "@budibase/auth": "^0.9.70",
+ "@budibase/client": "^0.9.70",
+ "@budibase/string-templates": "^0.9.70",
"@elastic/elasticsearch": "7.10.0",
"@koa/router": "8.0.0",
"@sendgrid/mail": "7.1.1",
@@ -114,7 +114,7 @@
"devDependencies": {
"@babel/core": "^7.14.3",
"@babel/preset-env": "^7.14.4",
- "@budibase/standard-components": "^0.9.69",
+ "@budibase/standard-components": "^0.9.70",
"@jest/test-sequencer": "^24.8.0",
"@types/bull": "^3.15.1",
"@types/jest": "^26.0.23",
diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql
index 8d76f54a10..37835af4a7 100644
--- a/packages/server/scripts/integrations/postgres/init.sql
+++ b/packages/server/scripts/integrations/postgres/init.sql
@@ -15,3 +15,28 @@ CREATE TABLE Tasks (
FOREIGN KEY(PersonID)
REFERENCES Persons(PersonID)
);
+CREATE TABLE Products (
+ ProductID INT NOT NULL PRIMARY KEY,
+ ProductName varchar(255)
+);
+CREATE TABLE Products_Tasks (
+ ProductID INT NOT NULL,
+ TaskID INT NOT NULL,
+ CONSTRAINT fkProducts
+ FOREIGN KEY(ProductID)
+ REFERENCES Products(ProductID),
+ CONSTRAINT fkTasks
+ FOREIGN KEY(TaskID)
+ REFERENCES Tasks(TaskID),
+ PRIMARY KEY (ProductID, TaskID)
+);
+INSERT INTO Persons (PersonID, FirstName, LastName, Address, City) VALUES (1, 'Mike', 'Hughes', '123 Fake Street', 'Belfast');
+INSERT INTO Tasks (TaskID, PersonID, TaskName) VALUES (1, 1, 'assembling');
+INSERT INTO Tasks (TaskID, PersonID, TaskName) VALUES (2, 1, 'processing');
+INSERT INTO Products (ProductID, ProductName) VALUES (1, 'Computers');
+INSERT INTO Products (ProductID, ProductName) VALUES (2, 'Laptops');
+INSERT INTO Products (ProductID, ProductName) VALUES (3, 'Chairs');
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (2, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (3, 1);
+INSERT INTO Products_Tasks (ProductID, TaskID) VALUES (1, 2);
diff --git a/packages/server/scripts/integrations/postgres/reset.sh b/packages/server/scripts/integrations/postgres/reset.sh
new file mode 100755
index 0000000000..32778bd11f
--- /dev/null
+++ b/packages/server/scripts/integrations/postgres/reset.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+docker-compose down
+docker volume prune -f
diff --git a/packages/server/src/api/controllers/datasource.js b/packages/server/src/api/controllers/datasource.js
index b6d25b7b83..dabd5a6e96 100644
--- a/packages/server/src/api/controllers/datasource.js
+++ b/packages/server/src/api/controllers/datasource.js
@@ -48,7 +48,7 @@ exports.buildSchemaFromDb = async function (ctx) {
// Connect to the DB and build the schema
const connector = new Connector(datasource.config)
- await connector.buildSchema(datasource._id)
+ await connector.buildSchema(datasource._id, datasource.entities)
datasource.entities = connector.tables
const response = await db.post(datasource)
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
new file mode 100644
index 0000000000..855c64e4c1
--- /dev/null
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -0,0 +1,525 @@
+import {
+ Operation,
+ SearchFilters,
+ SortJson,
+ PaginationJson,
+ RelationshipsJson,
+} from "../../../definitions/datasource"
+import {
+ Row,
+ Table,
+ FieldSchema,
+ Datasource,
+} from "../../../definitions/common"
+import {
+ breakRowIdField,
+ generateRowIdField,
+} from "../../../integrations/utils"
+
+interface ManyRelationship {
+ tableId?: string
+ id?: string
+ isUpdate?: boolean
+ [key: string]: any
+}
+
+interface RunConfig {
+ id: string
+ row: Row
+ filters: SearchFilters
+ sort: SortJson
+ paginate: PaginationJson
+}
+
+module External {
+ const { makeExternalQuery } = require("./utils")
+ const { DataSourceOperation, FieldTypes } = require("../../../constants")
+ const { breakExternalTableId, isSQL } = require("../../../integrations/utils")
+ const { processObjectSync } = require("@budibase/string-templates")
+ const { cloneDeep } = require("lodash/fp")
+ const { isEqual } = require("lodash")
+ const CouchDB = require("../../../db")
+
+ function buildFilters(
+ id: string | undefined,
+ filters: SearchFilters,
+ table: Table
+ ) {
+ const primary = table.primary
+ // if passed in array need to copy for shifting etc
+ let idCopy = cloneDeep(id)
+ if (filters) {
+ // need to map over the filters and make sure the _id field isn't present
+ for (let filter of Object.values(filters)) {
+ if (filter._id && primary) {
+ const parts = breakRowIdField(filter._id)
+ for (let field of primary) {
+ filter[field] = parts.shift()
+ }
+ }
+ // make sure this field doesn't exist on any filter
+ delete filter._id
+ }
+ }
+ // there is no id, just use the user provided filters
+ if (!idCopy || !table) {
+ return filters
+ }
+ // if used as URL parameter it will have been joined
+ if (!Array.isArray(idCopy)) {
+ idCopy = breakRowIdField(idCopy)
+ }
+ const equal: any = {}
+ if (primary && idCopy) {
+ for (let field of primary) {
+ // work through the ID and get the parts
+ equal[field] = idCopy.shift()
+ }
+ }
+ return {
+ equal,
+ }
+ }
+
+ function generateIdForRow(row: Row, table: Table): string {
+ const primary = table.primary
+ if (!row || !primary) {
+ return ""
+ }
+ // build id array
+ let idParts = []
+ for (let field of primary) {
+ if (row[field]) {
+ idParts.push(row[field])
+ }
+ }
+ if (idParts.length === 0) {
+ return ""
+ }
+ return generateRowIdField(idParts)
+ }
+
+ function getEndpoint(tableId: string | undefined, operation: string) {
+ if (!tableId) {
+ return {}
+ }
+ const { datasourceId, tableName } = breakExternalTableId(tableId)
+ return {
+ datasourceId,
+ entityId: tableName,
+ operation,
+ }
+ }
+
+ function basicProcessing(row: Row, table: Table) {
+ const thisRow: { [key: string]: any } = {}
+ // filter the row down to what is actually the row (not joined)
+ for (let fieldName of Object.keys(table.schema)) {
+ thisRow[fieldName] = row[fieldName]
+ }
+ thisRow._id = generateIdForRow(row, table)
+ thisRow.tableId = table._id
+ thisRow._rev = "rev"
+ return thisRow
+ }
+
+ function isMany(field: FieldSchema) {
+ return (
+ field.relationshipType && field.relationshipType.split("-")[0] === "many"
+ )
+ }
+
+ class ExternalRequest {
+ private readonly appId: string
+ private operation: Operation
+ private tableId: string
+ private datasource: Datasource
+ private tables: { [key: string]: Table } = {}
+
+ constructor(
+ appId: string,
+ operation: Operation,
+ tableId: string,
+ datasource: Datasource
+ ) {
+ this.appId = appId
+ this.operation = operation
+ this.tableId = tableId
+ this.datasource = datasource
+ if (datasource && datasource.entities) {
+ this.tables = datasource.entities
+ }
+ }
+
+ inputProcessing(row: Row, table: Table) {
+ if (!row) {
+ return { row, manyRelationships: [] }
+ }
+ // we don't really support composite keys for relationships, this is why [0] is used
+ // @ts-ignore
+ const tablePrimary: string = table.primary[0]
+ let newRow: Row = {},
+ manyRelationships: ManyRelationship[] = []
+ for (let [key, field] of Object.entries(table.schema)) {
+ // skip if the value isn't set on the row, or has already been set on the new row
+ if (!row[key] || newRow[key]) {
+ continue
+ }
+ // if it's not a link then just copy it over
+ if (field.type !== FieldTypes.LINK) {
+ newRow[key] = row[key]
+ continue
+ }
+ const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+ // table has to exist for many to many
+ if (!this.tables[linkTableName]) {
+ continue
+ }
+ const linkTable = this.tables[linkTableName]
+ // @ts-ignore
+ const linkTablePrimary = linkTable.primary[0]
+ if (!isMany(field)) {
+ newRow[field.foreignKey || linkTablePrimary] = breakRowIdField(
+ row[key][0]
+ )[0]
+ } else {
+ // we're not inserting a doc, will be a bunch of update calls
+ const isUpdate = !field.through
+ const thisKey: string = isUpdate ? "id" : linkTablePrimary
+ // @ts-ignore
+ const otherKey: string = isUpdate ? field.foreignKey : tablePrimary
+ row[key].map((relationship: any) => {
+ // we don't really support composite keys for relationships, this is why [0] is used
+ manyRelationships.push({
+ tableId: field.through || field.tableId,
+ isUpdate,
+ [thisKey]: breakRowIdField(relationship)[0],
+ // leave the ID for enrichment later
+ [otherKey]: `{{ literal ${tablePrimary} }}`,
+ })
+ })
+ }
+ }
+ // we return the relationships that may need to be created in the through table
+ // we do this so that if the ID is generated by the DB it can be inserted
+ // after the fact
+ return { row: newRow, manyRelationships }
+ }
+
+ /**
+ * This iterates through the returned rows and works out what elements of the rows
+ * actually match up to another row (based on primary keys) - this is pretty specific
+ * to SQL and the way that SQL relationships are returned based on joins.
+ */
+ updateRelationshipColumns(
+ row: Row,
+ rows: { [key: string]: Row },
+ relationships: RelationshipsJson[]
+ ) {
+ const columns: { [key: string]: any } = {}
+ for (let relationship of relationships) {
+ const linkedTable = this.tables[relationship.tableName]
+ if (!linkedTable) {
+ continue
+ }
+ let linked = basicProcessing(row, linkedTable)
+ if (!linked._id) {
+ continue
+ }
+ // if not returning full docs then get the minimal links out
+ const display = linkedTable.primaryDisplay
+ linked = {
+ primaryDisplay: display ? linked[display] : undefined,
+ _id: linked._id,
+ }
+ columns[relationship.column] = linked
+ }
+ for (let [column, related] of Object.entries(columns)) {
+ if (!row._id) {
+ continue
+ }
+ const rowId: string = row._id
+ if (!Array.isArray(rows[rowId][column])) {
+ rows[rowId][column] = []
+ }
+ // make sure relationship hasn't been found already
+ if (
+ !rows[rowId][column].find(
+ (relation: Row) => relation._id === related._id
+ )
+ ) {
+ rows[rowId][column].push(related)
+ }
+ }
+ return rows
+ }
+
+ outputProcessing(
+ rows: Row[],
+ table: Table,
+ relationships: RelationshipsJson[]
+ ) {
+ if (rows[0].read === true) {
+ return []
+ }
+ let finalRows: { [key: string]: Row } = {}
+ for (let row of rows) {
+ const rowId = generateIdForRow(row, table)
+ row._id = rowId
+ // this is a relationship of some sort
+ if (finalRows[rowId]) {
+ finalRows = this.updateRelationshipColumns(
+ row,
+ finalRows,
+ relationships
+ )
+ continue
+ }
+ const thisRow = basicProcessing(row, table)
+ finalRows[thisRow._id] = thisRow
+ // do this at the end, once it's been added to the final rows
+ finalRows = this.updateRelationshipColumns(
+ row,
+ finalRows,
+ relationships
+ )
+ }
+ return Object.values(finalRows)
+ }
+
+ /**
+ * Gets the list of relationship JSON structures based on the columns in the table,
+ * this will be used by the underlying library to build whatever relationship mechanism
+ * it has (e.g. SQL joins).
+ */
+ buildRelationships(table: Table): RelationshipsJson[] {
+ const relationships = []
+ for (let [fieldName, field] of Object.entries(table.schema)) {
+ if (field.type !== FieldTypes.LINK) {
+ continue
+ }
+ const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+ // no table to link to, this is not a valid relationship
+ if (!this.tables[linkTableName]) {
+ continue
+ }
+ const linkTable = this.tables[linkTableName]
+ if (!table.primary || !linkTable.primary) {
+ continue
+ }
+ const definition = {
+ // if no foreign key is specified then use the name of the field in the other table
+ from: field.foreignKey || table.primary[0],
+ to: field.fieldName,
+ tableName: linkTableName,
+ through: undefined,
+ // need to specify where to put this back into
+ column: fieldName,
+ }
+ if (field.through) {
+ const { tableName: throughTableName } = breakExternalTableId(
+ field.through
+ )
+ definition.through = throughTableName
+ // don't support composite keys for relationships
+ definition.from = table.primary[0]
+ definition.to = linkTable.primary[0]
+ }
+ relationships.push(definition)
+ }
+ return relationships
+ }
+
+ /**
+ * This is a cached lookup of relationship records, mainly used when creating/deleting junction
+ * table information.
+ */
+ async lookup(
+ row: Row,
+ relationship: ManyRelationship,
+ cache: { [key: string]: Row[] } = {}
+ ) {
+ const { tableId, isUpdate, id, ...rest } = relationship
+ const { tableName } = breakExternalTableId(tableId)
+ const table = this.tables[tableName]
+ if (isUpdate) {
+ return { rows: [], table }
+ }
+ // if not updating need to make sure we have a list of all possible options
+ let fullKey: string = tableId + "/",
+ rowKey: string = ""
+ for (let key of Object.keys(rest)) {
+ if (row[key]) {
+ fullKey += key
+ rowKey = key
+ }
+ }
+ if (cache[fullKey] == null) {
+ cache[fullKey] = await makeExternalQuery(this.appId, {
+ endpoint: getEndpoint(tableId, DataSourceOperation.READ),
+ filters: {
+ equal: {
+ [rowKey]: row[rowKey],
+ },
+ },
+ })
+ }
+ return { rows: cache[fullKey], table }
+ }
+
+ /**
+ * Once a row has been written we may need to update a many field, e.g. updating foreign keys
+ * in a bunch of rows in another table, or inserting/deleting rows from a junction table (many to many).
+ * This is quite a complex process handled by this function; there are a few things going on here:
+ * 1. If updating foreign keys it's relatively simple: just create a filter for the row that needs
+ * to be updated and write the various components.
+ * 2. If it's a junction table, then we look up what exists already, write what doesn't exist, work out
+ * what isn't supposed to exist anymore and delete those. This is better than the usual method of deleting
+ * them all and then re-creating, as there's no chance of losing data (e.g. the delete succeeds but the write fails).
+ */
+ async handleManyRelationships(row: Row, relationships: ManyRelationship[]) {
+ const { appId } = this
+ if (relationships.length === 0) {
+ return
+ }
+ // if we're creating (in a through table) need to wipe the existing ones first
+ const promises = []
+ const cache: { [key: string]: Row[] } = {}
+ for (let relationship of relationships) {
+ const { tableId, isUpdate, id, ...rest } = relationship
+ const body = processObjectSync(rest, row)
+ const { table, rows } = await this.lookup(row, relationship, cache)
+ const found = rows.find(row => isEqual(body, row))
+ const operation = isUpdate
+ ? DataSourceOperation.UPDATE
+ : DataSourceOperation.CREATE
+ if (!found) {
+ promises.push(
+ makeExternalQuery(appId, {
+ endpoint: getEndpoint(tableId, operation),
+ // if we're doing many relationships then we're writing, only one response
+ body,
+ filters: buildFilters(id, {}, table),
+ })
+ )
+ } else {
+ // remove the relationship from the rows
+ rows.splice(rows.indexOf(found), 1)
+ }
+ }
+ // finally, if creating, clean up any rows that aren't supposed to be here
+ for (let [key, rows] of Object.entries(cache)) {
+ // @ts-ignore
+ const tableId: string = key.split("/").shift()
+ const { tableName } = breakExternalTableId(tableId)
+ const table = this.tables[tableName]
+ for (let row of rows) {
+ promises.push(
+ makeExternalQuery(this.appId, {
+ endpoint: getEndpoint(tableId, DataSourceOperation.DELETE),
+ filters: buildFilters(generateIdForRow(row, table), {}, table),
+ })
+ )
+ }
+ }
+ await Promise.all(promises)
+ }
+
+ /**
+ * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
+ * you have column overlap in relationships, e.g. we join a few different tables and they all have the
+ * concept of an ID, but for some of them it will be null (if they don't have a relationship).
+ * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
+ * is more performant and has the added benefit of protecting against this scenario.
+ */
+ buildFields(table: Table) {
+ function extractNonLinkFieldNames(table: Table, existing: string[] = []) {
+ return Object.entries(table.schema)
+ .filter(
+ column =>
+ column[1].type !== FieldTypes.LINK &&
+ !existing.find((field: string) => field.includes(column[0]))
+ )
+ .map(column => `${table.name}.${column[0]}`)
+ }
+ let fields = extractNonLinkFieldNames(table)
+ for (let field of Object.values(table.schema)) {
+ if (field.type !== FieldTypes.LINK) {
+ continue
+ }
+ const { tableName: linkTableName } = breakExternalTableId(field.tableId)
+ const linkTable = this.tables[linkTableName]
+ if (linkTable) {
+ const linkedFields = extractNonLinkFieldNames(linkTable, fields)
+ fields = fields.concat(linkedFields)
+ }
+ }
+ return fields
+ }
+
+ async run({ id, row, filters, sort, paginate }: RunConfig) {
+ const { appId, operation, tableId } = this
+ let { datasourceId, tableName } = breakExternalTableId(tableId)
+ if (!this.datasource) {
+ const db = new CouchDB(appId)
+ this.datasource = await db.get(datasourceId)
+ if (!this.datasource || !this.datasource.entities) {
+ throw "No tables found, fetch tables before query."
+ }
+ this.tables = this.datasource.entities
+ }
+ const table = this.tables[tableName]
+ let isSql = isSQL(this.datasource)
+ if (!table) {
+ throw `Unable to process query, table "${tableName}" not defined.`
+ }
+ // clean up row on ingress using schema
+ filters = buildFilters(id, filters, table)
+ const relationships = this.buildRelationships(table)
+ const processed = this.inputProcessing(row, table)
+ row = processed.row
+ if (
+ operation === DataSourceOperation.DELETE &&
+ (filters == null || Object.keys(filters).length === 0)
+ ) {
+ throw "Deletion must be filtered"
+ }
+ let json = {
+ endpoint: {
+ datasourceId,
+ entityId: tableName,
+ operation,
+ },
+ resource: {
+ // have to specify the fields to avoid column overlap (for SQL)
+ fields: isSql ? this.buildFields(table) : [],
+ },
+ filters,
+ sort,
+ paginate,
+ relationships,
+ body: row,
+ // pass an id filter into extra, purely for mysql/returning
+ extra: {
+ idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
+ },
+ }
+ // can't really use response right now
+ const response = await makeExternalQuery(appId, json)
+ // handle many to many relationships now if we know the ID (could be auto increment)
+ if (processed.manyRelationships) {
+ await this.handleManyRelationships(
+ response[0],
+ processed.manyRelationships
+ )
+ }
+ const output = this.outputProcessing(response, table, relationships)
+ // if reading it'll just be an array of rows, return whole thing
+ return operation === DataSourceOperation.READ && Array.isArray(response)
+ ? output
+ : { row: output[0], table }
+ }
+ }
+
+ module.exports = ExternalRequest
+}
diff --git a/packages/server/src/api/controllers/row/external.js b/packages/server/src/api/controllers/row/external.js
index 896f5a78e2..3a96064a9f 100644
--- a/packages/server/src/api/controllers/row/external.js
+++ b/packages/server/src/api/controllers/row/external.js
@@ -1,136 +1,19 @@
-const { makeExternalQuery } = require("./utils")
-const { DataSourceOperation, SortDirection } = require("../../../constants")
-const { getExternalTable } = require("../table/utils")
+const {
+ DataSourceOperation,
+ SortDirection,
+ FieldTypes,
+} = require("../../../constants")
const {
breakExternalTableId,
- generateRowIdField,
breakRowIdField,
} = require("../../../integrations/utils")
-const { cloneDeep } = require("lodash/fp")
+const ExternalRequest = require("./ExternalRequest")
+const CouchDB = require("../../../db")
-function inputProcessing(row, table) {
- if (!row) {
- return row
- }
- let newRow = {}
- for (let key of Object.keys(table.schema)) {
- // currently excludes empty strings
- if (row[key]) {
- newRow[key] = row[key]
- }
- }
- return newRow
-}
-
-function generateIdForRow(row, table) {
- if (!row) {
- return
- }
- const primary = table.primary
- // build id array
- let idParts = []
- for (let field of primary) {
- idParts.push(row[field])
- }
- return generateRowIdField(idParts)
-}
-
-function outputProcessing(rows, table) {
- // if no rows this is what is returned? Might be PG only
- if (rows[0].read === true) {
- return []
- }
- for (let row of rows) {
- row._id = generateIdForRow(row, table)
- row.tableId = table._id
- row._rev = "rev"
- }
- return rows
-}
-
-function buildFilters(id, filters, table) {
- const primary = table.primary
- // if passed in array need to copy for shifting etc
- let idCopy = cloneDeep(id)
- if (filters) {
- // need to map over the filters and make sure the _id field isn't present
- for (let filter of Object.values(filters)) {
- if (filter._id) {
- const parts = breakRowIdField(filter._id)
- for (let field of primary) {
- filter[field] = parts.shift()
- }
- }
- // make sure this field doesn't exist on any filter
- delete filter._id
- }
- }
- // there is no id, just use the user provided filters
- if (!idCopy || !table) {
- return filters
- }
- // if used as URL parameter it will have been joined
- if (typeof idCopy === "string") {
- idCopy = breakRowIdField(idCopy)
- }
- const equal = {}
- for (let field of primary) {
- // work through the ID and get the parts
- equal[field] = idCopy.shift()
- }
- return {
- equal,
- }
-}
-
-async function handleRequest(
- appId,
- operation,
- tableId,
- { id, row, filters, sort, paginate } = {}
-) {
- let { datasourceId, tableName } = breakExternalTableId(tableId)
- const table = await getExternalTable(appId, datasourceId, tableName)
- if (!table) {
- throw `Unable to process query, table "${tableName}" not defined.`
- }
- // clean up row on ingress using schema
- filters = buildFilters(id, filters, table)
- row = inputProcessing(row, table)
- if (
- operation === DataSourceOperation.DELETE &&
- (filters == null || Object.keys(filters).length === 0)
- ) {
- throw "Deletion must be filtered"
- }
- let json = {
- endpoint: {
- datasourceId,
- entityId: tableName,
- operation,
- },
- resource: {
- // not specifying any fields means "*"
- fields: [],
- },
- filters,
- sort,
- paginate,
- body: row,
- // pass an id filter into extra, purely for mysql/returning
- extra: {
- idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
- },
- }
- // can't really use response right now
- const response = await makeExternalQuery(appId, json)
- // we searched for rows in someway
- if (operation === DataSourceOperation.READ && Array.isArray(response)) {
- return outputProcessing(response, table)
- } else {
- row = outputProcessing(response, table)[0]
- return { row, table }
- }
+async function handleRequest(appId, operation, tableId, opts = {}) {
+ return new ExternalRequest(appId, operation, tableId, opts.datasource).run(
+ opts
+ )
}
exports.patch = async ctx => {
@@ -172,9 +55,15 @@ exports.find = async ctx => {
const appId = ctx.appId
const id = ctx.params.rowId
const tableId = ctx.params.tableId
- return handleRequest(appId, DataSourceOperation.READ, tableId, {
- id,
- })
+ const response = await handleRequest(
+ appId,
+ DataSourceOperation.READ,
+ tableId,
+ {
+ id,
+ }
+ )
+ return response ? response[0] : response
}
exports.destroy = async ctx => {
@@ -270,7 +159,56 @@ exports.validate = async () => {
return { valid: true }
}
-exports.fetchEnrichedRow = async () => {
- // TODO: How does this work
- throw "Not Implemented"
+exports.fetchEnrichedRow = async ctx => {
+ const appId = ctx.appId
+ const id = ctx.params.rowId
+ const tableId = ctx.params.tableId
+ const { datasourceId, tableName } = breakExternalTableId(tableId)
+ const db = new CouchDB(appId)
+ const datasource = await db.get(datasourceId)
+ if (!datasource || !datasource.entities) {
+ ctx.throw(400, "Datasource has not been configured for plus API.")
+ }
+ const tables = datasource.entities
+ const response = await handleRequest(
+ appId,
+ DataSourceOperation.READ,
+ tableId,
+ {
+ id,
+ datasource,
+ }
+ )
+ const table = tables[tableName]
+ const row = response[0]
+ // this seems like a lot of work, but basically we need to dig deeper to enrich a single row;
+ // there is probably a better way to do this with some smart multi-layer joins
+ for (let [fieldName, field] of Object.entries(table.schema)) {
+ if (
+ field.type !== FieldTypes.LINK ||
+ !row[fieldName] ||
+ row[fieldName].length === 0
+ ) {
+ continue
+ }
+ const links = row[fieldName]
+ const linkedTableId = field.tableId
+ const linkedTable = tables[breakExternalTableId(linkedTableId).tableName]
+ // don't support composite keys right now
+ const linkedIds = links.map(link => breakRowIdField(link._id)[0])
+ row[fieldName] = await handleRequest(
+ appId,
+ DataSourceOperation.READ,
+ linkedTableId,
+ {
+ tables,
+ filters: {
+ oneOf: {
+ [linkedTable.primary]: linkedIds,
+ },
+ },
+ }
+ )
+ }
+ return row
}
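To make the fetchEnrichedRow() flow above concrete, a hypothetical before/after for a Products row (init.sql schema): the minimal link stubs from a plain read are replaced by full rows read from the linked table via the new oneOf filter on its primary key. Column names and ids here are placeholders, not values confirmed by this diff.

// Hypothetical shapes only - the "tasks" column name and ids are made up.
const plainRead = {
  ProductID: 1,
  ProductName: "Computers",
  tasks: [{ _id: "<encoded [1]>", primaryDisplay: "assembling" }],
}

const enrichedRead = {
  ProductID: 1,
  ProductName: "Computers",
  tasks: [
    {
      TaskID: 1,
      PersonID: 1,
      TaskName: "assembling",
      _id: "<encoded [1]>",
      _rev: "rev",
      tableId: "<tasks table id>",
    },
  ],
}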
diff --git a/packages/server/src/api/controllers/table/utils.js b/packages/server/src/api/controllers/table/utils.js
index cdfd390027..78dae60ab1 100644
--- a/packages/server/src/api/controllers/table/utils.js
+++ b/packages/server/src/api/controllers/table/utils.js
@@ -204,15 +204,18 @@ class TableSaveFunctions {
}
}
-exports.getExternalTable = async (appId, datasourceId, tableName) => {
+exports.getAllExternalTables = async (appId, datasourceId) => {
const db = new CouchDB(appId)
const datasource = await db.get(datasourceId)
if (!datasource || !datasource.entities) {
throw "Datasource is not configured fully."
}
- return Object.values(datasource.entities).find(
- entity => entity.name === tableName
- )
+ return datasource.entities
+}
+
+exports.getExternalTable = async (appId, datasourceId, tableName) => {
+ const entities = await exports.getAllExternalTables(appId, datasourceId)
+ return entities[tableName]
}
exports.TableSaveFunctions = TableSaveFunctions
diff --git a/packages/server/src/api/routes/tests/datasource.spec.js b/packages/server/src/api/routes/tests/datasource.spec.js
index d53001b06e..a041de4310 100644
--- a/packages/server/src/api/routes/tests/datasource.spec.js
+++ b/packages/server/src/api/routes/tests/datasource.spec.js
@@ -94,7 +94,7 @@ describe("/datasources", () => {
.expect(200)
// this is mock data, can't test it
expect(res.body).toBeDefined()
- expect(pg.queryMock).toHaveBeenCalledWith(`select "name", "age" from "users" where "name" like $1 limit $2`, ["John%", 5000])
+ expect(pg.queryMock).toHaveBeenCalledWith(`select "name", "age" from "users" where "users"."name" like $1 limit $2`, ["John%", 5000])
})
})
diff --git a/packages/server/src/constants/definitions.ts b/packages/server/src/constants/definitions.ts
index 685c2a9824..8ab995adc4 100644
--- a/packages/server/src/constants/definitions.ts
+++ b/packages/server/src/constants/definitions.ts
@@ -26,3 +26,17 @@ export interface Table {
primaryDisplay?: string
sourceId?: string
}
+
+export interface BudibaseAppMetadata {
+ _id: string
+ _rev?: string
+ appId: string
+ type: string
+ version: string
+ componentlibraries: string[]
+ name: string
+ url: string
+ instance: { _id: string }
+ updatedAt: Date
+ createdAt: Date
+}
diff --git a/packages/server/src/definitions/common.ts b/packages/server/src/definitions/common.ts
new file mode 100644
index 0000000000..497f8f68f2
--- /dev/null
+++ b/packages/server/src/definitions/common.ts
@@ -0,0 +1,100 @@
+import { SourceNames } from "./datasource"
+
+interface Base {
+ _id?: string
+ _rev?: string
+}
+
+export interface FieldSchema {
+ // TODO: replace with field types enum when done
+ type: string
+ fieldName?: string
+ name: string
+ tableId?: string
+ relationshipType?: string
+ through?: string
+ foreignKey?: string
+ constraints?: {
+ type?: string
+ email?: boolean
+ inclusion?: string[]
+ length?: {
+ minimum?: string | number
+ maximum?: string | number
+ }
+ presence?: boolean
+ }
+}
+
+export interface TableSchema {
+ [key: string]: FieldSchema
+}
+
+export interface Table extends Base {
+ type?: string
+ views?: {}
+ name?: string
+ primary?: string[]
+ schema: TableSchema
+ primaryDisplay?: string
+ sourceId?: string
+}
+
+export interface Row extends Base {
+ type?: string
+ tableId?: string
+ [key: string]: any
+}
+
+interface JsonSchemaField {
+ properties: {
+ [key: string]: {
+ type: string
+ title: string
+ customType?: string
+ }
+ }
+ required?: string[]
+}
+
+export interface AutomationStep {
+ description: string
+ event?: string
+ icon: string
+ id: string
+ inputs: {
+ [key: string]: any
+ }
+ name: string
+ schema: {
+ inputs: JsonSchemaField
+ outputs: JsonSchemaField
+ }
+ stepId: string
+ tagline: string
+ type: string
+}
+
+export interface Automation extends Base {
+ name: string
+ type: string
+ appId?: string
+ definition: {
+ steps: AutomationStep[]
+ trigger?: AutomationStep
+ }
+}
+
+export interface Datasource extends Base {
+ type: string
+ name: string
+ source: SourceNames
+ // the config is defined by the schema
+ config: {
+ [key: string]: string | number | boolean
+ }
+ plus: boolean
+ entities?: {
+ [key: string]: Table
+ }
+}
diff --git a/packages/server/src/integrations/base/definitions.ts b/packages/server/src/definitions/datasource.ts
similarity index 69%
rename from packages/server/src/integrations/base/definitions.ts
rename to packages/server/src/definitions/datasource.ts
index 9d5567b6c8..22f1998601 100644
--- a/packages/server/src/integrations/base/definitions.ts
+++ b/packages/server/src/definitions/datasource.ts
@@ -26,6 +26,20 @@ export enum DatasourceFieldTypes {
JSON = "json",
}
+export enum SourceNames {
+ POSTGRES = "POSTGRES",
+ DYNAMODB = "DYNAMODB",
+ MONGODB = "MONGODB",
+ ELASTICSEARCH = "ELASTICSEARCH",
+ COUCHDB = "COUCHDB",
+ SQL_SERVER = "SQL_SERVER",
+ S3 = "S3",
+ AIRTABLE = "AIRTABLE",
+ MYSQL = "MYSQL",
+ ARANGODB = "ARANGODB",
+ REST = "REST",
+}
+
export interface QueryDefinition {
type: QueryTypes
displayName?: string
@@ -47,7 +61,7 @@ export interface Integration {
}
export interface SearchFilters {
- allOr: boolean
+ allOr?: boolean
string?: {
[key: string]: string
}
@@ -72,6 +86,26 @@ export interface SearchFilters {
notEmpty?: {
[key: string]: any
}
+ oneOf?: {
+ [key: string]: any[]
+ }
+}
+
+export interface SortJson {
+ [key: string]: SortDirection
+}
+
+export interface PaginationJson {
+ limit: number
+ page: string | number
+}
+
+export interface RelationshipsJson {
+ through?: string
+ from?: string
+ to?: string
+ tableName: string
+ column: string
}
export interface QueryJson {
@@ -84,17 +118,13 @@ export interface QueryJson {
fields: string[]
}
filters?: SearchFilters
- sort?: {
- [key: string]: SortDirection
- }
- paginate?: {
- limit: number
- page: string | number
- }
+ sort?: SortJson
+ paginate?: PaginationJson
body?: object
- extra: {
+ extra?: {
idFilter?: SearchFilters
}
+ relationships?: RelationshipsJson[]
}
export interface SqlQuery {
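For reference, a sketch of a QueryJson a caller could now assemble with the widened types above (oneOf filters, the SortJson/PaginationJson aliases and a relationships entry). The endpoint ids, the Operation/SortDirection member names and the import path are assumptions for illustration, not values confirmed by this diff.

import {
  QueryJson,
  Operation,
  SortDirection,
} from "../definitions/datasource" // path as seen from src/integrations

const exampleRead: QueryJson = {
  endpoint: {
    datasourceId: "<datasource id>",
    entityId: "Products",
    operation: Operation.READ, // assumed enum member
  },
  resource: {
    // fields are table-prefixed to avoid column overlap across joins
    fields: ["Products.ProductID", "Products.ProductName", "Tasks.TaskID", "Tasks.TaskName"],
  },
  filters: {
    oneOf: { ProductID: [1, 2, 3] },
  },
  sort: { ProductName: SortDirection.ASCENDING }, // assumed enum member
  paginate: { limit: 10, page: 1 },
  relationships: [
    {
      from: "ProductID",
      to: "TaskID",
      through: "Products_Tasks",
      tableName: "Tasks",
      column: "tasks",
    },
  ],
}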
diff --git a/packages/server/src/integrations/airtable.ts b/packages/server/src/integrations/airtable.ts
index a99dfc7c72..7a80f51bd0 100644
--- a/packages/server/src/integrations/airtable.ts
+++ b/packages/server/src/integrations/airtable.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module AirtableModule {
const Airtable = require("airtable")
diff --git a/packages/server/src/integrations/arangodb.ts b/packages/server/src/integrations/arangodb.ts
index 7741b8be94..c5eac32892 100644
--- a/packages/server/src/integrations/arangodb.ts
+++ b/packages/server/src/integrations/arangodb.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module ArangoModule {
const { Database, aql } = require("arangojs")
diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts
index 6573a0c47c..e1c065dd26 100644
--- a/packages/server/src/integrations/base/sql.ts
+++ b/packages/server/src/integrations/base/sql.ts
@@ -6,18 +6,23 @@ import {
QueryOptions,
SortDirection,
Operation,
-} from "./definitions"
+ RelationshipsJson,
+} from "../../definitions/datasource"
+type KnexQuery = Knex.QueryBuilder | Knex
+
+// right now we only do filters on the specific table being queried
function addFilters(
- query: any,
+ tableName: string,
+ query: KnexQuery,
filters: SearchFilters | undefined
-): Knex.QueryBuilder {
+): KnexQuery {
function iterate(
structure: { [key: string]: any },
fn: (key: string, value: any) => void
) {
for (let [key, value] of Object.entries(structure)) {
- fn(key, value)
+ fn(`${tableName}.${key}`, value)
}
}
if (!filters) {
@@ -25,6 +30,12 @@ function addFilters(
}
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
+ if (filters.oneOf) {
+ iterate(filters.oneOf, (key, array) => {
+ const fnc = allOr ? "orWhereIn" : "whereIn"
+ query = query[fnc](key, array)
+ })
+ }
if (filters.string) {
iterate(filters.string, (key, value) => {
const fnc = allOr ? "orWhere" : "where"
@@ -67,9 +78,47 @@ function addFilters(
return query
}
-function buildCreate(knex: Knex, json: QueryJson, opts: QueryOptions) {
+function addRelationships(
+ query: KnexQuery,
+ fromTable: string,
+ relationships: RelationshipsJson[] | undefined
+): KnexQuery {
+ if (!relationships) {
+ return query
+ }
+ for (let relationship of relationships) {
+ const from = relationship.from,
+ to = relationship.to,
+ toTable = relationship.tableName
+ if (!relationship.through) {
+ // @ts-ignore
+ query = query.leftJoin(
+ toTable,
+ `${fromTable}.${from}`,
+ `${relationship.tableName}.${to}`
+ )
+ } else {
+ const throughTable = relationship.through
+ query = query
+ // @ts-ignore
+ .leftJoin(
+ throughTable,
+ `${fromTable}.${from}`,
+ `${throughTable}.${from}`
+ )
+ .leftJoin(toTable, `${toTable}.${to}`, `${throughTable}.${to}`)
+ }
+ }
+ return query
+}
+
+function buildCreate(
+ knex: Knex,
+ json: QueryJson,
+ opts: QueryOptions
+): KnexQuery {
const { endpoint, body } = json
- let query = knex(endpoint.entityId)
+ let query: KnexQuery = knex(endpoint.entityId)
// mysql can't use returning
if (opts.disableReturning) {
return query.insert(body)
@@ -78,9 +127,10 @@ function buildCreate(knex: Knex, json: QueryJson, opts: QueryOptions) {
}
}
-function buildRead(knex: Knex, json: QueryJson, limit: number) {
- let { endpoint, resource, filters, sort, paginate } = json
- let query: Knex.QueryBuilder = knex(endpoint.entityId)
+function buildRead(knex: Knex, json: QueryJson, limit: number): KnexQuery {
+ let { endpoint, resource, filters, sort, paginate, relationships } = json
+ const tableName = endpoint.entityId
+ let query: KnexQuery = knex(tableName)
// select all if not specified
if (!resource) {
resource = { fields: [] }
@@ -92,7 +142,9 @@ function buildRead(knex: Knex, json: QueryJson, limit: number) {
query = query.select("*")
}
// handle where
- query = addFilters(query, filters)
+ query = addFilters(tableName, query, filters)
+ // handle join
+ query = addRelationships(query, tableName, relationships)
// handle sorting
if (sort) {
for (let [key, value] of Object.entries(sort)) {
@@ -114,10 +166,14 @@ function buildRead(knex: Knex, json: QueryJson, limit: number) {
return query
}
-function buildUpdate(knex: Knex, json: QueryJson, opts: QueryOptions) {
+function buildUpdate(
+ knex: Knex,
+ json: QueryJson,
+ opts: QueryOptions
+): KnexQuery {
const { endpoint, body, filters } = json
- let query = knex(endpoint.entityId)
- query = addFilters(query, filters)
+ let query: KnexQuery = knex(endpoint.entityId)
+ query = addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.update(body)
@@ -126,10 +182,14 @@ function buildUpdate(knex: Knex, json: QueryJson, opts: QueryOptions) {
}
}
-function buildDelete(knex: Knex, json: QueryJson, opts: QueryOptions) {
+function buildDelete(
+ knex: Knex,
+ json: QueryJson,
+ opts: QueryOptions
+): KnexQuery {
const { endpoint, filters } = json
- let query = knex(endpoint.entityId)
- query = addFilters(query, filters)
+ let query: KnexQuery = knex(endpoint.entityId)
+ query = addFilters(endpoint.entityId, query, filters)
// mysql can't use returning
if (opts.disableReturning) {
return query.delete()
@@ -180,6 +240,8 @@ class SqlQueryBuilder {
default:
throw `Operation type is not supported by SQL query builder`
}
+
+ // @ts-ignore
return query.toSQL().toNative()
}
}
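For a sense of what the new addRelationships() and the table-prefixed addFilters() produce together, here is a rough rendering of a read over the Products/Tasks schema from init.sql with a through relationship; it is an approximation in the style of the updated test expectations below, not an exact knex output.

// Approximate SQL only - knex's actual identifier quoting and clause ordering may differ.
const approximateReadSql = `
  select "Products"."ProductID", "Products"."ProductName", "Tasks"."TaskID", "Tasks"."TaskName"
  from "Products"
  left join "Products_Tasks" on "Products"."ProductID" = "Products_Tasks"."ProductID"
  left join "Tasks" on "Tasks"."TaskID" = "Products_Tasks"."TaskID"
  where "Products"."ProductName" like $1
  limit $2`
const bindings = ["Comp%", 5000]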
diff --git a/packages/server/src/integrations/couchdb.ts b/packages/server/src/integrations/couchdb.ts
index a813cf2385..983e6cdac2 100644
--- a/packages/server/src/integrations/couchdb.ts
+++ b/packages/server/src/integrations/couchdb.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module CouchDBModule {
const PouchDB = require("pouchdb")
diff --git a/packages/server/src/integrations/dynamodb.ts b/packages/server/src/integrations/dynamodb.ts
index 0baf09a866..6b99ba04cc 100644
--- a/packages/server/src/integrations/dynamodb.ts
+++ b/packages/server/src/integrations/dynamodb.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module DynamoModule {
const AWS = require("aws-sdk")
diff --git a/packages/server/src/integrations/elasticsearch.ts b/packages/server/src/integrations/elasticsearch.ts
index 2562ca0dcd..147858c8dd 100644
--- a/packages/server/src/integrations/elasticsearch.ts
+++ b/packages/server/src/integrations/elasticsearch.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module ElasticsearchModule {
const { Client } = require("@elastic/elasticsearch")
diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts
index 4999f0c867..c0acd6b225 100644
--- a/packages/server/src/integrations/index.ts
+++ b/packages/server/src/integrations/index.ts
@@ -9,33 +9,34 @@ const airtable = require("./airtable")
const mysql = require("./mysql")
const arangodb = require("./arangodb")
const rest = require("./rest")
+const { SourceNames } = require("../definitions/datasource")
const DEFINITIONS = {
- POSTGRES: postgres.schema,
- DYNAMODB: dynamodb.schema,
- MONGODB: mongodb.schema,
- ELASTICSEARCH: elasticsearch.schema,
- COUCHDB: couchdb.schema,
- SQL_SERVER: sqlServer.schema,
- S3: s3.schema,
- AIRTABLE: airtable.schema,
- MYSQL: mysql.schema,
- ARANGODB: arangodb.schema,
- REST: rest.schema,
+ [SourceNames.POSTGRES]: postgres.schema,
+ [SourceNames.DYNAMODB]: dynamodb.schema,
+ [SourceNames.MONGODB]: mongodb.schema,
+ [SourceNames.ELASTICSEARCH]: elasticsearch.schema,
+ [SourceNames.COUCHDB]: couchdb.schema,
+ [SourceNames.SQL_SERVER]: sqlServer.schema,
+ [SourceNames.S3]: s3.schema,
+ [SourceNames.AIRTABLE]: airtable.schema,
+ [SourceNames.MYSQL]: mysql.schema,
+ [SourceNames.ARANGODB]: arangodb.schema,
+ [SourceNames.REST]: rest.schema,
}
const INTEGRATIONS = {
- POSTGRES: postgres.integration,
- DYNAMODB: dynamodb.integration,
- MONGODB: mongodb.integration,
- ELASTICSEARCH: elasticsearch.integration,
- COUCHDB: couchdb.integration,
- S3: s3.integration,
- SQL_SERVER: sqlServer.integration,
- AIRTABLE: airtable.integration,
- MYSQL: mysql.integration,
- ARANGODB: arangodb.integration,
- REST: rest.integration,
+ [SourceNames.POSTGRES]: postgres.integration,
+ [SourceNames.DYNAMODB]: dynamodb.integration,
+ [SourceNames.MONGODB]: mongodb.integration,
+ [SourceNames.ELASTICSEARCH]: elasticsearch.integration,
+ [SourceNames.COUCHDB]: couchdb.integration,
+ [SourceNames.SQL_SERVER]: sqlServer.integration,
+ [SourceNames.S3]: s3.integration,
+ [SourceNames.AIRTABLE]: airtable.integration,
+ [SourceNames.MYSQL]: mysql.integration,
+ [SourceNames.ARANGODB]: arangodb.integration,
+ [SourceNames.REST]: rest.integration,
}
module.exports = {
diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts
index ff428eacff..f5a9d73b09 100644
--- a/packages/server/src/integrations/microsoftSqlServer.ts
+++ b/packages/server/src/integrations/microsoftSqlServer.ts
@@ -4,7 +4,7 @@ import {
QueryTypes,
QueryJson,
SqlQuery,
-} from "./base/definitions"
+} from "../definitions/datasource"
import { getSqlQuery } from "./utils"
module MSSQLModule {
diff --git a/packages/server/src/integrations/mongodb.ts b/packages/server/src/integrations/mongodb.ts
index b248be84c4..af7b49153d 100644
--- a/packages/server/src/integrations/mongodb.ts
+++ b/packages/server/src/integrations/mongodb.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module MongoDBModule {
const { MongoClient } = require("mongodb")
diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts
index 6940f1e3c6..1cfe96986f 100644
--- a/packages/server/src/integrations/mysql.ts
+++ b/packages/server/src/integrations/mysql.ts
@@ -5,7 +5,8 @@ import {
Operation,
QueryJson,
SqlQuery,
-} from "./base/definitions"
+} from "../definitions/datasource"
+import { Table, TableSchema } from "../definitions/common"
import { getSqlQuery } from "./utils"
module MySQLModule {
@@ -139,7 +140,7 @@ module MySQLModule {
}
async buildSchema(datasourceId: string) {
- const tables: any = {}
+ const tables: { [key: string]: Table } = {}
const database = this.config.database
this.client.connect()
@@ -154,7 +155,7 @@ module MySQLModule {
)
for (let tableName of tableNames) {
const primaryKeys = []
- const schema: any = {}
+ const schema: TableSchema = {}
const descResp = await internalQuery(
this.client,
{ sql: `DESCRIBE ${tableName};` },
@@ -166,7 +167,7 @@ module MySQLModule {
primaryKeys.push(columnName)
}
const constraints = {
- required: column.Null !== "YES",
+ presence: column.Null !== "YES",
}
schema[columnName] = {
name: columnName,
@@ -212,7 +213,7 @@ module MySQLModule {
}
async getReturningRow(json: QueryJson) {
- if (!json.extra.idFilter) {
+ if (!json.extra || !json.extra.idFilter) {
return {}
}
const input = this._query({
diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts
index 27286ebd02..935bfbeeea 100644
--- a/packages/server/src/integrations/postgres.ts
+++ b/packages/server/src/integrations/postgres.ts
@@ -4,8 +4,8 @@ import {
QueryTypes,
QueryJson,
SqlQuery,
-} from "./base/definitions"
-import { Table } from "../constants/definitions"
+} from "../definitions/datasource"
+import { Table } from "../definitions/common"
import { getSqlQuery } from "./utils"
module PostgresModule {
@@ -134,8 +134,9 @@ module PostgresModule {
/**
* Fetches the tables from the postgres table and assigns them to the datasource.
* @param {*} datasourceId - datasourceId to fetch
+ * @param entities - the existing table entities, used to preserve user-defined (link) columns when the schema is rebuilt
*/
- async buildSchema(datasourceId: string) {
+ async buildSchema(datasourceId: string, entities: Record<string, Table>) {
let tableKeys: { [key: string]: string[] } = {}
try {
const primaryKeysResponse = await this.client.query(
@@ -167,6 +168,19 @@ module PostgresModule {
name: tableName,
schema: {},
}
+
+ // add the existing relationships from the entities if they exist, to prevent them from being overridden
+ if (entities && entities[tableName]) {
+ const existingTableSchema = entities[tableName].schema
+ for (let key in existingTableSchema) {
+ if (!existingTableSchema.hasOwnProperty(key)) {
+ continue
+ }
+ if (existingTableSchema[key].type === "link") {
+ tables[tableName].schema[key] = existingTableSchema[key]
+ }
+ }
+ }
}
const type: string = convertType(column.data_type, TYPE_MAP)
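
The guard added above only copies columns whose type is "link", i.e. relationships defined inside Budibase rather than columns discovered from postgres, so refreshing the schema no longer wipes them. A condensed sketch with hypothetical table and column names:

    // Hypothetical saved entities passed into buildSchema(datasourceId, entities)
    const entities = {
      users: {
        name: "users",
        schema: {
          name: { name: "name", type: "string" },   // discovered column
          orders: { name: "orders", type: "link" }, // user-defined relationship
        },
      },
    }

    // After the rebuild, tables.users.schema still contains the "orders" link
    // column, while plain columns such as "name" are re-read from postgres.
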
diff --git a/packages/server/src/integrations/rest.ts b/packages/server/src/integrations/rest.ts
index 8b6d0e70da..c55e991980 100644
--- a/packages/server/src/integrations/rest.ts
+++ b/packages/server/src/integrations/rest.ts
@@ -2,7 +2,7 @@ import {
Integration,
DatasourceFieldTypes,
QueryTypes,
-} from "./base/definitions"
+} from "../definitions/datasource"
module RestModule {
const fetch = require("node-fetch")
diff --git a/packages/server/src/integrations/s3.ts b/packages/server/src/integrations/s3.ts
index 58d58be443..691f3a05c0 100644
--- a/packages/server/src/integrations/s3.ts
+++ b/packages/server/src/integrations/s3.ts
@@ -1,4 +1,4 @@
-import { Integration, QueryTypes } from "./base/definitions"
+import { Integration, QueryTypes } from "../definitions/datasource"
module S3Module {
const AWS = require("aws-sdk")
diff --git a/packages/server/src/integrations/tests/sql.spec.js b/packages/server/src/integrations/tests/sql.spec.js
index 2b6badd92d..fb57fe79e7 100644
--- a/packages/server/src/integrations/tests/sql.spec.js
+++ b/packages/server/src/integrations/tests/sql.spec.js
@@ -81,7 +81,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: ["John%", limit],
- sql: `select * from "${TABLE_NAME}" where "name" like $1 limit $2`
+ sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" like $1 limit $2`
})
})
@@ -98,7 +98,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [2, 10, limit],
- sql: `select * from "${TABLE_NAME}" where "age" between $1 and $2 limit $3`
+ sql: `select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3`
})
})
@@ -114,7 +114,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [10, "John", limit],
- sql: `select * from "${TABLE_NAME}" where ("age" = $1) or ("name" = $2) limit $3`
+ sql: `select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3`
})
})
@@ -139,7 +139,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: ["John", 1001],
- sql: `update "${TABLE_NAME}" set "name" = $1 where "id" = $2 returning *`
+ sql: `update "${TABLE_NAME}" set "name" = $1 where "${TABLE_NAME}"."id" = $2 returning *`
})
})
@@ -151,7 +151,7 @@ describe("SQL query builder", () => {
}))
expect(query).toEqual({
bindings: [1001],
- sql: `delete from "${TABLE_NAME}" where "id" = $1 returning *`
+ sql: `delete from "${TABLE_NAME}" where "${TABLE_NAME}"."id" = $1 returning *`
})
})
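
The updated expectations qualify every filtered column with its table name. Assuming the builder under test is knex-based (the $1/$2 bindings suggest the postgres dialect), the prefixing likely comes from passing dot-separated identifiers, which knex quotes per segment; a hypothetical illustration, not the builder's actual code:

    import knex from "knex"

    const client = knex({ client: "pg" })
    const { sql, bindings } = client("test")
      .where("test.name", "like", "John%")
      .limit(500)
      .toSQL()
      .toNative()
    // sql is roughly: select * from "test" where "test"."name" like $1 limit $2
    // bindings: ["John%", 500]
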
diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts
index 968d9da58e..d0af0e99a9 100644
--- a/packages/server/src/integrations/utils.ts
+++ b/packages/server/src/integrations/utils.ts
@@ -1,4 +1,6 @@
-import { SqlQuery } from "./base/definitions"
+import { SqlQuery } from "../definitions/datasource"
+import { Datasource } from "../definitions/common"
+import { SourceNames } from "../definitions/datasource"
const { DocumentTypes, SEPARATOR } = require("../db/utils")
const { FieldTypes } = require("../constants")
@@ -25,15 +27,21 @@ export function generateRowIdField(keyProps: any[] = []) {
keyProps = [keyProps]
}
// this conserves order and types
+ // we have to swap the double quotes to single quotes for use in HBS statements
+ // when using the literal helper the double quotes can break things
return encodeURIComponent(JSON.stringify(keyProps).replace(/"/g, "'"))
}
// should always return an array
-export function breakRowIdField(_id: string) {
+export function breakRowIdField(_id: string): any[] {
if (!_id) {
- return null
+ return []
}
- return JSON.parse(decodeURIComponent(_id))
+ // have to replace on the way back as we swapped out the double quotes
+ // when encoding, but JSON can't handle the single quotes
+ const decoded: string = decodeURIComponent(_id).replace(/'/g, '"')
+ const parsed = JSON.parse(decoded)
+ return Array.isArray(parsed) ? parsed : [parsed]
}
export function convertType(type: string, map: { [key: string]: any }) {
@@ -52,3 +60,11 @@ export function getSqlQuery(query: SqlQuery | string): SqlQuery {
return query
}
}
+
+export function isSQL(datasource: Datasource): boolean {
+ if (!datasource || !datasource.source) {
+ return false
+ }
+ const SQL = [SourceNames.POSTGRES, SourceNames.SQL_SERVER, SourceNames.MYSQL]
+ return SQL.indexOf(datasource.source) !== -1
+}
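
The two row-id helpers above are meant to round-trip: double quotes are swapped for single quotes on encode so the value is safe inside handlebars literal statements, then swapped back before parsing, and the parse result is now normalised to an array. A small usage sketch (the import assumes a sibling module inside the integrations folder):

    import { generateRowIdField, breakRowIdField } from "./utils"

    const encoded = generateRowIdField(["orders", 42])
    // -> URI-encoded "['orders',42]", safe to embed in an HBS literal helper

    const decoded = breakRowIdField(encoded)
    // -> ["orders", 42], quotes restored before JSON.parse

    breakRowIdField("")
    // -> [] instead of null, so callers can iterate without a guard
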
diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json
index bd6dbc1686..934d0bdd2b 100644
--- a/packages/server/tsconfig.json
+++ b/packages/server/tsconfig.json
@@ -8,7 +8,8 @@
"strict": true,
"noImplicitAny": true,
"esModuleInterop": true,
- "resolveJsonModule": true
+ "resolveJsonModule": true,
+ "incremental": true
},
"include": [
"./src/**/*"
diff --git a/packages/standard-components/package.json b/packages/standard-components/package.json
index a1a18c0afc..9c7d48edf7 100644
--- a/packages/standard-components/package.json
+++ b/packages/standard-components/package.json
@@ -29,11 +29,11 @@
"keywords": [
"svelte"
],
- "version": "0.9.69",
+ "version": "0.9.70",
"license": "MIT",
"gitHead": "d1836a898cab3f8ab80ee6d8f42be1a9eed7dcdc",
"dependencies": {
- "@budibase/bbui": "^0.9.69",
+ "@budibase/bbui": "^0.9.70",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/link": "^3.1.3",
"@spectrum-css/page": "^3.0.1",
diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json
index fcfb2ff9b3..78f822028c 100644
--- a/packages/string-templates/package.json
+++ b/packages/string-templates/package.json
@@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
- "version": "0.9.69",
+ "version": "0.9.70",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",
diff --git a/packages/string-templates/src/index.cjs b/packages/string-templates/src/index.cjs
index 7dbd788819..573ea76f12 100644
--- a/packages/string-templates/src/index.cjs
+++ b/packages/string-templates/src/index.cjs
@@ -1,12 +1,7 @@
const handlebars = require("handlebars")
const { registerAll } = require("./helpers/index")
const processors = require("./processors")
-const { cloneDeep } = require("lodash/fp")
-const {
- removeNull,
- updateContext,
- removeHandlebarsStatements,
-} = require("./utilities")
+const { removeHandlebarsStatements } = require("./utilities")
const manifest = require("../manifest.json")
const hbsInstance = handlebars.create()
diff --git a/packages/worker/package.json b/packages/worker/package.json
index e7f6cc5a59..d6ce2edce1 100644
--- a/packages/worker/package.json
+++ b/packages/worker/package.json
@@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
- "version": "0.9.69",
+ "version": "0.9.70",
"description": "Budibase background service",
"main": "src/index.js",
"repository": {
@@ -21,8 +21,8 @@
"author": "Budibase",
"license": "AGPL-3.0-or-later",
"dependencies": {
- "@budibase/auth": "^0.9.69",
- "@budibase/string-templates": "^0.9.69",
+ "@budibase/auth": "^0.9.70",
+ "@budibase/string-templates": "^0.9.70",
"@koa/router": "^8.0.0",
"aws-sdk": "^2.811.0",
"bcryptjs": "^2.4.3",