Merge branch 'feature/opinionated-sql' of github.com:Budibase/budibase into feature/opinionated-sql
commit 190e17cc4c
@@ -22,6 +22,7 @@ async function activate() {
   if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
   if (posthogConfigured) {
     posthog.init(process.env.POSTHOG_TOKEN, {
       autocapture: false,
       api_host: process.env.POSTHOG_URL,
     })
+    posthog.set_config({ persistence: "cookie" })
@@ -18,13 +18,18 @@
   }

   function selectIntegration(integrationType) {
-    schema = integrations[integrationType].datasource
+    const selected = integrations[integrationType]
+
+    // build the schema
+    const schema = {}
+    for (let key in selected.datasource) {
+      schema[key] = selected.datasource[key].default
+    }

     integration = {
       type: integrationType,
-      ...Object.keys(schema).reduce(
-        (acc, next) => ({ ...acc, [next]: schema[next].default }),
-        {}
-      ),
+      plus: selected.plus,
+      ...schema
     }
   }
@@ -23,16 +23,17 @@
   }

   async function saveDatasource() {
-    const { type, ...config } = integration
+    const { type, plus, ...config } = integration

     // Create datasource
     const response = await datasources.save({
      name,
      source: type,
      config,
+     plus
    })
    notifications.success(`Datasource ${name} created successfully.`)
-   analytics.captureEvent("Datasource Created", { name })
+   analytics.captureEvent("Datasource Created", { name, type })

    // Navigate to new datasource
    $goto(`./datasource/${response._id}`)
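Note (not part of the diff): with this change the client-side save call carries the integration's plus flag alongside the config. A minimal sketch of the payload saveDatasource() now builds, with illustrative values:

  // Hypothetical example payload passed to datasources.save() above
  const payload = {
    name: "My Postgres",                        // user-supplied datasource name
    source: "POSTGRES_PLUS",                    // the selected integration type
    config: { host: "localhost", port: 5432 },  // remaining integration fields
    plus: true,                                 // forwarded so the store can request a schema refresh
  }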
@@ -4,7 +4,6 @@
  import { store } from "builderStore"
  import api from "builderStore/api"
  import analytics from "analytics"
  import FeedbackIframe from "components/feedback/FeedbackIframe.svelte"

  const DeploymentStatus = {
    SUCCESS: "SUCCESS",
@@ -30,10 +29,6 @@
    } else {
      notifications.success(`Application published successfully`)
    }

    if (analytics.requestFeedbackOnDeploy()) {
      feedbackModal.show()
    }
  } catch (err) {
    analytics.captureException(err)
    notifications.error(`Error publishing app: ${err}`)
@@ -88,7 +88,8 @@

  onMount(() => {
    fetchDeployments()
-   poll = setInterval(fetchDeployments, POLL_INTERVAL)
+   // TODO: fix
+   // poll = setInterval(fetchDeployments, POLL_INTERVAL)
  })

  onDestroy(() => clearInterval(poll))
@@ -0,0 +1,14 @@
+<script>
+  import { Body } from "@budibase/bbui"
+</script>
+
+<div class="root">
+  <Body size="S">This action doesn't require any additional settings.</Body>
+</div>
+
+<style>
+  .root {
+    max-width: 800px;
+    margin: 0 auto;
+  }
+</style>
@@ -4,6 +4,7 @@ import DeleteRow from "./DeleteRow.svelte"
 import ExecuteQuery from "./ExecuteQuery.svelte"
 import TriggerAutomation from "./TriggerAutomation.svelte"
 import ValidateForm from "./ValidateForm.svelte"
+import LogOut from "./LogOut.svelte"

 // Defines which actions are available to configure in the front end.
 // Unfortunately the "name" property is used as the identifier so please don't
@@ -37,4 +38,8 @@ export default [
    name: "Validate Form",
    component: ValidateForm,
  },
+ {
+   name: "Log Out",
+   component: LogOut,
+ },
]
@@ -2,12 +2,12 @@
  import { params } from "@roxi/routify"
  import { queries } from "stores/backend"

- if ($params.query) {
-   const query = $queries.list.find(m => m._id === $params.query)
-   if (query) {
-     queries.select(query)
-   }
- }
+ // if ($params.query) {
+ //   const query = $queries.list.find(m => m._id === $params.query)
+ //   if (query) {
+ //     queries.select(query)
+ //   }
+ // }
</script>

<slot />
@@ -13,10 +13,14 @@
  $: integration = datasource && $integrations[datasource.source]

  async function saveDatasource() {
+   try {
      // Create datasource
      await datasources.save(datasource)
      notifications.success(`Datasource ${name} saved successfully.`)
      unsaved = false
+   } catch (err) {
+     notifications.error(`Error saving datasource: ${err}`)
+   }
  }

  function onClickQuery(query) {
@@ -66,6 +70,7 @@
        on:change={setUnsaved}
      />
    </div>
+   {#if !integration.plus}
    <Divider />
    <div class="query-header">
      <Heading size="S">Queries</Heading>
@@ -80,6 +85,7 @@
        </div>
      {/each}
    </div>
+   {/if}
  </Layout>
</section>
{/if}
@@ -29,9 +29,19 @@ export function createDatasourcesStore() {
      queries.update(state => ({ ...state, selected: null }))
    },
    save: async datasource => {
-     const response = await api.post("/api/datasources", datasource)
+     let url = "/api/datasources"
+
+     if (datasource.plus) {
+       url += "?refresh=1"
+     }
+
+     const response = await api.post(url, datasource)
      const json = await response.json()

      if (response.status !== 200) {
        throw new Error(json.message)
      }

      update(state => {
        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
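For context (not part of the diff): the store only appends ?refresh=1 for plus datasources, which matches the ctx.query.refresh check added to the server-side save handler further down. A rough equivalent of the branching above, condensed into one call (values illustrative):

  // plus datasource    -> POST /api/datasources?refresh=1
  // regular datasource -> POST /api/datasources
  const response = await api.post(
    datasource.plus ? "/api/datasources?refresh=1" : "/api/datasources",
    datasource
  )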
File diff suppressed because it is too large
@@ -3,6 +3,7 @@ const {
  generateDatasourceID,
  getDatasourceParams,
  getQueryParams,
+ DocumentTypes,
} = require("../../db/utils")
const { integrations } = require("../../integrations")
const plusIntegrations = require("../../integrations/plus")
@@ -20,13 +21,24 @@ exports.fetch = async function (ctx) {

exports.save = async function (ctx) {
  const db = new CouchDB(ctx.appId)
+ const plus = ctx.request.body.plus
+
  const datasource = {
-   _id: generateDatasourceID(),
-   type: "datasource",
+   _id: generateDatasourceID({ plus }),
+   type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
    ...ctx.request.body,
  }

+ // update the schema
+ if (ctx.query.refresh) {
+   const PlusConnector = plusIntegrations[datasource.source].integration
+
+   const connector = new PlusConnector(ctx.request.body.config)
+   await connector.init()
+
+   datasource.entities = connector.tables
+ }

  const response = await db.post(datasource)
  datasource._rev = response.rev
@@ -77,26 +89,3 @@ exports.query = async function (ctx) {
    ctx.throw(400, "Datasource does not support query.")
  }
}
-
- // TODO: merge endpoint with main datasource endpoint
- exports.plus = async function (ctx) {
-   const db = new CouchDB(ctx.appId)
-
-   const PlusConnector = plusIntegrations[ctx.request.body.source].integration
-
-   const connector = new PlusConnector(ctx.request.body)
-   await connector.init()
-
-   const datasource = {
-     _id: generateDatasourceID({ plus: true }),
-     type: "datasource_plus",
-     relationships: [],
-     ...ctx.request.body,
-     entities: connector.tables,
-   }
-
-   const response = await db.post(datasource)
-   datasource._rev = response.rev
-
-   ctx.body = datasource
- }
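For context (not part of the diff): with the removal of exports.plus above, the single save handler now covers both regular and plus datasources. Roughly, the document it writes for a plus datasource looks like the sketch below; the exact _id format and entity shape are assumptions, not shown in this diff:

  // Hypothetical document persisted by exports.save for a plus datasource
  const doc = {
    _id: "datasource_plus_<uuid>",   // from generateDatasourceID({ plus })
    type: "datasource_plus",         // DocumentTypes.DATASOURCE_PLUS
    source: "POSTGRES_PLUS",
    config: { host: "localhost", port: 5432 },
    plus: true,
    entities: { users: { name: "users", schema: { /* columns */ } } }, // connector.tables
    _rev: "1-abc",                   // set after db.post()
  }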
@@ -5,6 +5,10 @@
  export let appId
  export let production
  export let clientLibPath
+
+ function initialise() {
+   alert("Yeet")
+ }
</script>

<svelte:head>
@@ -5,18 +5,30 @@ const {
  getRowParams,
  getTableParams,
  generateTableID,
+ getDatasourceParams,
} = require("../../../db/utils")
const { FieldTypes } = require("../../../constants")
const { TableSaveFunctions } = require("./utils")

exports.fetch = async function (ctx) {
  const db = new CouchDB(ctx.appId)
- const body = await db.allDocs(
+ const internalTables = await db.allDocs(
    getTableParams(null, {
      include_docs: true,
    })
  )
- ctx.body = body.rows.map(row => row.doc)
+ const internal = internalTables.rows.map(row => row.doc)
+
+ const externalTables = await db.allDocs(
+   getDatasourceParams("plus", {
+     include_docs: true,
+   })
+ )
+ const external = externalTables.rows.flatMap(row =>
+   Object.values(row.doc.entities)
+ )
+
+ ctx.body = [...internal, ...external]
}

exports.find = async function (ctx) {
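For context (not part of the diff): fetch now returns internal tables and the entities of every plus datasource in one flat list. A small illustration, with assumed document shapes:

  // One internal table doc and one plus datasource doc holding two entities
  const internal = [{ _id: "ta_abc", name: "Clients" }]
  const external = [{ name: "users", schema: {} }, { name: "orders", schema: {} }]
  // ctx.body = [...internal, ...external]
  // -> [{ _id: "ta_abc", name: "Clients" }, { name: "users", ... }, { name: "orders", ... }]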
@@ -8,26 +8,26 @@ const {
  PermissionTypes,
} = require("@budibase/auth/permissions")
const Joi = require("joi")
- const { FieldTypes, DataSourceOperation, SortDirection } = require("../../constants")
+ const { DataSourceOperation } = require("../../constants")

const router = Router()

- function generatePlusDatasourceSchema() {
+ function generateDatasourceSchema() {
  // prettier-ignore
  return joiValidator.body(Joi.object({
    _id: Joi.string(),
    _rev: Joi.string(),
-   source: Joi.string().valid("postgres"),
-   type: Joi.string().valid("datasource_plus"),
-   relationships: Joi.array().required().items(Joi.object({
+   source: Joi.string().valid("POSTGRES_PLUS"),
+   type: Joi.string().allow("datasource_plus"),
+   relationships: Joi.array().items(Joi.object({
      from: Joi.string().required(),
      to: Joi.string().required(),
      cardinality: Joi.valid("1:N", "1:1", "N:N").required()
    })),
-   entities: Joi.array().required().items(Joi.object({
-     type: Joi.string().valid(...Object.values(FieldTypes)).required(),
-     name: Joi.string().required(),
-   })),
+   // entities: Joi.array().items(Joi.object({
+   //   type: Joi.string().valid(...Object.values(FieldTypes)).required(),
+   //   name: Joi.string().required(),
+   // })),
  }).unknown(true))
}
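For context (not part of the diff): the validator is now keyed off the registry name and no longer requires relationships or entities. A request body like the sketch below should pass generateDatasourceSchema(); the field values are illustrative only:

  const body = {
    source: "POSTGRES_PLUS",
    type: "datasource_plus",
    relationships: [{ from: "users.id", to: "orders.user_id", cardinality: "1:N" }],
    config: { host: "localhost", port: 5432, database: "main" }, // extra keys allowed by .unknown(true)
    plus: true,
  }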
@@ -59,7 +59,6 @@ function generateQueryDatasourceSchema() {
  }))
}

-
router
  .get("/api/datasources", authorized(BUILDER), datasourceController.fetch)
  .get(
@@ -67,19 +66,18 @@ router
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    datasourceController.find
  )
- .post(
-   "/api/datasources/plus",
-   authorized(PermissionTypes.TABLE, PermissionLevels.READ),
-   generatePlusDatasourceSchema(),
-   datasourceController.plus
- )
  .post(
    "/api/datasources/query",
    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
    generateQueryDatasourceSchema(),
    datasourceController.query
  )
- .post("/api/datasources", authorized(BUILDER), datasourceController.save)
+ .post(
+   "/api/datasources",
+   authorized(BUILDER),
+   generateDatasourceSchema(),
+   datasourceController.save
+ )
  .delete(
    "/api/datasources/:datasourceId/:revId",
    authorized(BUILDER),
@@ -39,6 +39,7 @@ const INTEGRATIONS = {
  MYSQL: mysql.integration,
  ARANGODB: arangodb.integration,
  REST: rest.integration,
+ POSTGRES_PLUS: postgresPlus.integration,
}

module.exports = {
@@ -1,5 +1,5 @@
const postgres = require("./postgres")

module.exports = {
- postgres,
+ POSTGRES_PLUS: postgres,
}
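For context (not part of the diff): keying this module by POSTGRES_PLUS lets the controller resolve the connector straight from datasource.source, as the save handler above does. A sketch of the lookup, assuming the module shapes shown in this diff:

  const plusIntegrations = require("../../integrations/plus")
  // datasource.source === "POSTGRES_PLUS"
  const PlusConnector = plusIntegrations[datasource.source].integration
  const connector = new PlusConnector(datasource.config)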
@@ -1,3 +1,4 @@
+ const Sql = require("../base/sql")
const { Pool } = require("pg")
const { FieldTypes } = require("../../constants")
const { FIELD_TYPES } = require("../Integration")
@@ -18,6 +19,7 @@ const SCHEMA = {
  friendlyName: "PostgreSQL",
  description:
    "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
+ plus: true,
  datasource: {
    host: {
      type: FIELD_TYPES.STRING,
@@ -52,12 +54,13 @@ const SCHEMA = {
    },
  }

- class PostgresPlus {
+ class PostgresPlus extends Sql {
  static pool
  COLUMNS_SQL =
    "select * from information_schema.columns where table_schema = 'public'"

  constructor(config) {
+   super("pg")
    this.config = config
    if (!this.pool) {
      this.pool = new Pool(this.config)
@@ -71,24 +74,32 @@ class PostgresPlus {

    const tables = {}
    for (let column of response.rows) {
+     const tableName = column.table_name
+     const columnName = column.column_name
+
      // table key doesn't exist yet
-     if (!tables[column.table_name]) {
-       tables[column.table_name] = []
+     if (!tables[tableName]) {
+       tables[tableName] = {
+         _id: "something",
+         name: tableName,
+         schema: {},
+       }
      }

      // Add the new column
-     const columnData = {
-       type: TYPE_MAP[column.data_type] || "unknown",
-       table: column.table_name,
-       name: column.column_name,
-       updateable: column.is_updatable,
-       precision: column.numeric_precision,
-       nullable: column.is_nullable === "YES",
+     tables[tableName].schema[columnName] = {
+       name: columnName,
+       type: TYPE_MAP[column.data_type],
      }
-     tables[column.table_name].push(columnData)
    }
    this.tables = tables
  }

+ async query(json) {
+   const operation = this._operation(json).toLowerCase()
+   const sql = this._query(json)
+   const response = await this.client.query(sql)
+   return response.rows.length ? response.rows : [{ [operation]: true }]
+ }
}

module.exports = {
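For context (not part of the diff): init() groups information_schema.columns rows into per-table objects keyed by table name. A worked sketch with made-up rows; the concrete TYPE_MAP values are not shown in this diff and are assumed:

  // Given rows like:
  //   { table_name: "users", column_name: "id",   data_type: "integer" }
  //   { table_name: "users", column_name: "name", data_type: "text" }
  // this.tables would roughly become:
  const tables = {
    users: {
      _id: "something",            // still a placeholder in this diff
      name: "users",
      schema: {
        id:   { name: "id",   type: TYPE_MAP["integer"] },  // e.g. "number" (assumed)
        name: { name: "name", type: TYPE_MAP["text"] },     // e.g. "string" (assumed)
      },
    },
  }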
File diff suppressed because it is too large