external/internal tables and datasources merged, removed autocapture, re-added logout action
This commit is contained in:
parent a44d59541d
commit 114eac4134
@@ -22,6 +22,7 @@ async function activate() {
   if (sentryConfigured) Sentry.init({ dsn: process.env.SENTRY_DSN })
   if (posthogConfigured) {
     posthog.init(process.env.POSTHOG_TOKEN, {
+      autocapture: false,
       api_host: process.env.POSTHOG_URL,
     })
     posthog.set_config({ persistence: "cookie" })
@@ -17,7 +17,7 @@
       return
     }
     queries.select(query)
-    $goto(`./datasource/${query.datasourceId}/${query._id}`)
+    $goto(`./datasource/${query.datasourceId}/query/${query._id}`)
   }

   onMount(() => {
@@ -18,13 +18,18 @@
   }

   function selectIntegration(integrationType) {
-    schema = integrations[integrationType].datasource
+    const selected = integrations[integrationType]
+
+    // build the schema
+    const schema = {}
+    for (let key in selected.datasource) {
+      schema[key] = selected.datasource[key].default
+    }
+
     integration = {
       type: integrationType,
-      ...Object.keys(schema).reduce(
-        (acc, next) => ({ ...acc, [next]: schema[next].default }),
-        {}
-      ),
+      plus: selected.plus,
+      ...schema
     }
   }

@@ -23,16 +23,17 @@
   }

   async function saveDatasource() {
-    const { type, ...config } = integration
+    const { type, plus, ...config } = integration

     // Create datasource
     const response = await datasources.save({
       name,
       source: type,
       config,
+      plus
     })
     notifications.success(`Datasource ${name} created successfully.`)
-    analytics.captureEvent("Datasource Created", { name })
+    analytics.captureEvent("Datasource Created", { name, type })

     // Navigate to new datasource
     $goto(`./datasource/${response._id}`)
@@ -4,7 +4,6 @@
   import { store } from "builderStore"
   import api from "builderStore/api"
   import analytics from "analytics"
-  import FeedbackIframe from "components/feedback/FeedbackIframe.svelte"

   const DeploymentStatus = {
     SUCCESS: "SUCCESS",
@@ -30,10 +29,6 @@
       } else {
         notifications.success(`Application published successfully`)
       }
-
-      if (analytics.requestFeedbackOnDeploy()) {
-        feedbackModal.show()
-      }
     } catch (err) {
       analytics.captureException(err)
       notifications.error(`Error publishing app: ${err}`)
@@ -88,7 +88,8 @@

   onMount(() => {
     fetchDeployments()
-    poll = setInterval(fetchDeployments, POLL_INTERVAL)
+    // TODO: fix
+    // poll = setInterval(fetchDeployments, POLL_INTERVAL)
   })

   onDestroy(() => clearInterval(poll))
@@ -0,0 +1,14 @@
+<script>
+  import { Body } from "@budibase/bbui"
+</script>
+
+<div class="root">
+  <Body size="S">This action doesn't require any additional settings.</Body>
+</div>
+
+<style>
+  .root {
+    max-width: 800px;
+    margin: 0 auto;
+  }
+</style>
@@ -4,6 +4,7 @@ import DeleteRow from "./DeleteRow.svelte"
 import ExecuteQuery from "./ExecuteQuery.svelte"
 import TriggerAutomation from "./TriggerAutomation.svelte"
 import ValidateForm from "./ValidateForm.svelte"
+import LogOut from "./LogOut.svelte"

 // Defines which actions are available to configure in the front end.
 // Unfortunately the "name" property is used as the identifier so please don't
@@ -37,4 +38,8 @@ export default [
     name: "Validate Form",
     component: ValidateForm,
   },
+  {
+    name: "Log Out",
+    component: LogOut,
+  },
 ]
@@ -0,0 +1,62 @@
+<script>
+  import { params } from "@roxi/routify"
+  import { notifications } from "@budibase/bbui"
+  import { database, queries } from "stores/backend"
+  import api from "builderStore/api"
+  import Table from "components/backend/DataTable/Table.svelte"
+
+  let loading = false
+  let data = []
+  let schema = {}
+
+  async function fetchData() {
+    try {
+      const query = {
+        endpoint: {
+          datasourceId: $params.selectedDatasource,
+          operation: "READ",
+          // table name below
+          entityId: $params.entity,
+        },
+        resource: {
+          // fields: ["name", "age"],
+        },
+        filters: {
+          // string: {
+          //   name: "John",
+          // },
+        },
+      }
+      const response = await api.post(`/api/datasources/query`, query)
+      const json = await response.json()
+      console.log(json)
+    } catch (err) {
+      notifications.error("Error fetching data")
+      console.error(err)
+    }
+  }
+
+  // $: selectedQuery = $queries.list.find(
+  //   query => query._id === $queries.selected
+  // ) || {
+  //   datasourceId: $params.selectedDatasource,
+  //   parameters: [],
+  //   fields: {},
+  //   queryVerb: "read",
+  // }
+
+  fetchData()
+</script>
+
+<section>
+  <Table
+    {title}
+    {schema}
+    {data}
+    allowEditing={true}
+    {loading}
+  />
+</section>
+
+<style>
+</style>
@@ -13,10 +13,14 @@
   $: integration = datasource && $integrations[datasource.source]

   async function saveDatasource() {
-    // Create datasource
-    await datasources.save(datasource)
-    notifications.success(`Datasource ${name} saved successfully.`)
-    unsaved = false
+    try {
+      // Create datasource
+      await datasources.save(datasource)
+      notifications.success(`Datasource ${name} saved successfully.`)
+      unsaved = false
+    } catch (err) {
+      notifications.error(`Error saving datasource: ${err}`)
+    }
   }

   function onClickQuery(query) {
@@ -66,20 +70,22 @@
         on:change={setUnsaved}
       />
     </div>
-    <Divider />
-    <div class="query-header">
-      <Heading size="S">Queries</Heading>
-      <Button secondary on:click={() => $goto("./new")}>Add Query</Button>
-    </div>
-    <div class="query-list">
-      {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
-        <div class="query-list-item" on:click={() => onClickQuery(query)}>
-          <p class="query-name">{query.name}</p>
-          <p>{capitalise(query.queryVerb)}</p>
-          <p>→</p>
-        </div>
-      {/each}
-    </div>
+    {#if !integration.plus}
+      <Divider />
+      <div class="query-header">
+        <Heading size="S">Queries</Heading>
+        <Button secondary on:click={() => $goto("./new")}>Add Query</Button>
+      </div>
+      <div class="query-list">
+        {#each $queries.list.filter(query => query.datasourceId === datasource._id) as query}
+          <div class="query-list-item" on:click={() => onClickQuery(query)}>
+            <p class="query-name">{query.name}</p>
+            <p>{capitalise(query.queryVerb)}</p>
+            <p>→</p>
+          </div>
+        {/each}
+      </div>
+    {/if}
   </Layout>
 </section>
 {/if}
@@ -0,0 +1,13 @@
+<script>
+  import { params } from "@roxi/routify"
+  import { queries } from "stores/backend"
+
+  // if ($params.query) {
+  //   const query = $queries.list.find(m => m._id === $params.query)
+  //   if (query) {
+  //     queries.select(query)
+  //   }
+  // }
+</script>
+
+<slot />
@@ -29,9 +29,19 @@ export function createDatasourcesStore() {
       queries.update(state => ({ ...state, selected: null }))
     },
     save: async datasource => {
-      const response = await api.post("/api/datasources", datasource)
+      let url = "/api/datasources"
+
+      if (datasource.plus) {
+        url += "?refresh=1"
+      }
+
+      const response = await api.post(url, datasource)
       const json = await response.json()
+
+      if (response.status !== 200) {
+        throw new Error(json.message)
+      }
+
       update(state => {
         const currentIdx = state.list.findIndex(ds => ds._id === json._id)
File diff suppressed because it is too large
@@ -3,6 +3,7 @@ const {
   generateDatasourceID,
   getDatasourceParams,
   getQueryParams,
+  DocumentTypes,
 } = require("../../db/utils")
 const { integrations } = require("../../integrations")
 const plusIntegrations = require("../../integrations/plus")
@@ -20,13 +21,24 @@ exports.fetch = async function (ctx) {

 exports.save = async function (ctx) {
   const db = new CouchDB(ctx.appId)
+  const plus = ctx.request.body.plus
+
   const datasource = {
-    _id: generateDatasourceID(),
-    type: "datasource",
+    _id: generateDatasourceID({ plus }),
+    type: plus ? DocumentTypes.DATASOURCE_PLUS : DocumentTypes.DATASOURCE,
     ...ctx.request.body,
   }

+  // update the schema
+  if (ctx.query.refresh) {
+    const PlusConnector = plusIntegrations[datasource.source].integration
+
+    const connector = new PlusConnector(ctx.request.body.config)
+    await connector.init()
+
+    datasource.entities = connector.tables
+  }
+
   const response = await db.post(datasource)
   datasource._rev = response.rev
@@ -77,26 +89,3 @@ exports.query = async function (ctx) {
     ctx.throw(400, "Datasource does not support query.")
   }
 }
-
-// TODO: merge endpoint with main datasource endpoint
-exports.plus = async function (ctx) {
-  const db = new CouchDB(ctx.appId)
-
-  const PlusConnector = plusIntegrations[ctx.request.body.source].integration
-
-  const connector = new PlusConnector(ctx.request.body)
-  await connector.init()
-
-  const datasource = {
-    _id: generateDatasourceID({ plus: true }),
-    type: "datasource_plus",
-    relationships: [],
-    ...ctx.request.body,
-    entities: connector.tables,
-  }
-
-  const response = await db.post(datasource)
-  datasource._rev = response.rev
-
-  ctx.body = datasource
-}
@@ -5,6 +5,10 @@
   export let appId
   export let production
   export let clientLibPath
+
+  function initialise() {
+    alert("Yeet")
+  }
 </script>

 <svelte:head>
@@ -5,18 +5,30 @@ const {
   getRowParams,
   getTableParams,
   generateTableID,
+  getDatasourceParams,
 } = require("../../../db/utils")
 const { FieldTypes } = require("../../../constants")
 const { TableSaveFunctions } = require("./utils")

 exports.fetch = async function (ctx) {
   const db = new CouchDB(ctx.appId)
-  const body = await db.allDocs(
+  const internalTables = await db.allDocs(
     getTableParams(null, {
       include_docs: true,
     })
   )
-  ctx.body = body.rows.map(row => row.doc)
+  const internal = internalTables.rows.map(row => row.doc)
+
+  const externalTables = await db.allDocs(
+    getDatasourceParams("plus", {
+      include_docs: true,
+    })
+  )
+  const external = externalTables.rows.flatMap(row =>
+    Object.values(row.doc.entities)
+  )
+
+  ctx.body = [...internal, ...external]
 }

 exports.find = async function (ctx) {
@@ -8,26 +8,26 @@ const {
   PermissionTypes,
 } = require("@budibase/auth/permissions")
 const Joi = require("joi")
-const { FieldTypes, DataSourceOperation, SortDirection } = require("../../constants")
+const { DataSourceOperation } = require("../../constants")

 const router = Router()

-function generatePlusDatasourceSchema() {
+function generateDatasourceSchema() {
   // prettier-ignore
   return joiValidator.body(Joi.object({
     _id: Joi.string(),
     _rev: Joi.string(),
-    source: Joi.string().valid("postgres"),
-    type: Joi.string().valid("datasource_plus"),
-    relationships: Joi.array().required().items(Joi.object({
+    source: Joi.string().valid("POSTGRES_PLUS"),
+    type: Joi.string().allow("datasource_plus"),
+    relationships: Joi.array().items(Joi.object({
       from: Joi.string().required(),
       to: Joi.string().required(),
       cardinality: Joi.valid("1:N", "1:1", "N:N").required()
     })),
-    entities: Joi.array().required().items(Joi.object({
-      type: Joi.string().valid(...Object.values(FieldTypes)).required(),
-      name: Joi.string().required(),
-    })),
+    // entities: Joi.array().items(Joi.object({
+    //   type: Joi.string().valid(...Object.values(FieldTypes)).required(),
+    //   name: Joi.string().required(),
+    // })),
   }).unknown(true))
 }

@@ -59,7 +59,6 @@ function generateQueryDatasourceSchema() {
   }))
 }

-
 router
   .get("/api/datasources", authorized(BUILDER), datasourceController.fetch)
   .get(
@@ -67,19 +66,18 @@ router
     authorized(PermissionTypes.TABLE, PermissionLevels.READ),
     datasourceController.find
   )
-  .post(
-    "/api/datasources/plus",
-    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
-    generatePlusDatasourceSchema(),
-    datasourceController.plus
-  )
   .post(
     "/api/datasources/query",
     authorized(PermissionTypes.TABLE, PermissionLevels.READ),
     generateQueryDatasourceSchema(),
    datasourceController.query
   )
-  .post("/api/datasources", authorized(BUILDER), datasourceController.save)
+  .post(
+    "/api/datasources",
+    authorized(BUILDER),
+    generateDatasourceSchema(),
+    datasourceController.save
+  )
   .delete(
     "/api/datasources/:datasourceId/:revId",
     authorized(BUILDER),
@@ -39,6 +39,7 @@ const INTEGRATIONS = {
   MYSQL: mysql.integration,
   ARANGODB: arangodb.integration,
   REST: rest.integration,
+  POSTGRES_PLUS: postgresPlus.integration,
 }

 module.exports = {
@@ -1,5 +1,5 @@
 const postgres = require("./postgres")

 module.exports = {
-  postgres,
+  POSTGRES_PLUS: postgres,
 }
@@ -1,3 +1,4 @@
+const Sql = require("../base/sql")
 const { Pool } = require("pg")
 const { FieldTypes } = require("../../constants")
 const { FIELD_TYPES } = require("../Integration")
@@ -18,6 +19,7 @@ const SCHEMA = {
   friendlyName: "PostgreSQL",
   description:
     "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
+  plus: true,
   datasource: {
     host: {
       type: FIELD_TYPES.STRING,
@@ -52,12 +54,13 @@ const SCHEMA = {
   },
 }

-class PostgresPlus {
+class PostgresPlus extends Sql {
   static pool
   COLUMNS_SQL =
     "select * from information_schema.columns where table_schema = 'public'"

   constructor(config) {
+    super("pg")
     this.config = config
     if (!this.pool) {
       this.pool = new Pool(this.config)
@@ -71,24 +74,32 @@ class PostgresPlus {

     const tables = {}
     for (let column of response.rows) {
+      const tableName = column.table_name
+      const columnName = column.column_name
+
       // table key doesn't exist yet
-      if (!tables[column.table_name]) {
-        tables[column.table_name] = []
+      if (!tables[tableName]) {
+        tables[tableName] = {
+          _id: "something",
+          name: tableName,
+          schema: {},
+        }
       }

-      // Add the new column
-      const columnData = {
-        type: TYPE_MAP[column.data_type] || "unknown",
-        table: column.table_name,
-        name: column.column_name,
-        updateable: column.is_updatable,
-        precision: column.numeric_precision,
-        nullable: column.is_nullable === "YES",
+      tables[tableName].schema[columnName] = {
+        name: columnName,
+        type: TYPE_MAP[column.data_type],
       }
-      tables[column.table_name].push(columnData)
     }
     this.tables = tables
   }
+
+  async query(json) {
+    const operation = this._operation(json).toLowerCase()
+    const sql = this._query(json)
+    const response = await this.client.query(sql)
+    return response.rows.length ? response.rows : [{ [operation]: true }]
+  }
 }

 module.exports = {
File diff suppressed because it is too large