consolidate postgres data sources
This commit is contained in:
parent 2598af16bf
commit aabbbdecfe
@@ -15,12 +15,22 @@
   async function saveDatasource() {
     try {
       // Create datasource
-      await datasources.save(datasource, { refresh: true })
+      await datasources.save(datasource)
       notifications.success(`Datasource ${name} saved successfully.`)
       unsaved = false
+    } catch (err) {
+      notifications.error(`Error saving datasource: ${err}`)
+    }
+  }
+
+  async function updateDatasourceSchema() {
+    try {
+      await datasources.updateSchema(datasource)
+      notifications.success(`Datasource ${name} schema saved successfully.`)
+      unsaved = false
+      await tables.fetch()
     } catch (err) {
-      notifications.error(`Error saving datasource: ${err}`)
+      notifications.error(`Error updating datasource schema: ${err}`)
     }
   }

@@ -71,7 +81,6 @@
           on:change={setUnsaved}
         />
       </div>
-      {#if !integration.plus}
      <Divider />
      <div class="query-header">
        <Heading size="S">Queries</Heading>
@@ -86,6 +95,8 @@
         </div>
       {/each}
     </div>
+    {#if datasource.plus}
+      <Button cta on:click={updateDatasourceSchema}>Fetch Tables From Database</Button>
     {/if}
   </Layout>
 </section>
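The component change above removes the implicit `{ refresh: true }` option from `save` and turns schema fetching into an explicit user action. A minimal sketch of the resulting call pattern, assuming the store methods from the next file and the `entities` shape from the controller change further down:

```js
// Saving and refreshing are now two explicit steps rather than one
// overloaded save call.
await datasources.save(datasource) // persists config only
const updated = await datasources.updateSchema(datasource) // POST .../schema
// entities is keyed by table name once the schema has been fetched
console.log(Object.keys(updated.entities || {}))
```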
@@ -28,14 +28,34 @@ export function createDatasourcesStore() {
       update(state => ({ ...state, selected: datasourceId }))
       queries.update(state => ({ ...state, selected: null }))
     },
-    save: async (datasource, opts = {}) => {
-      let url = "/api/datasources"
+    updateSchema: async (datasource) => {
+      let url = `/api/datasources/${datasource._id}/schema`

-      if (datasource.plus && opts.refresh) {
-        // Pull the latest tables from the datasource
-        url += "?refresh=1"
+      const response = await api.post(url)
+      const json = await response.json()
+
+      if (response.status !== 200) {
+        throw new Error(json.message)
+      }
+
+      update(state => {
+        const currentIdx = state.list.findIndex(ds => ds._id === json._id)
+
+        const sources = state.list
+
+        if (currentIdx >= 0) {
+          sources.splice(currentIdx, 1, json)
+        } else {
+          sources.push(json)
+        }
+
+        return { list: sources, selected: json._id }
+      })
+      return json
+    },
+    save: async (datasource) => {
+      let url = "/api/datasources"

       const response = await api.post(url, datasource)
       const json = await response.json()
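A consequence of the store change worth noting: `updateSchema` builds its URL from `datasource._id` and posts no body, so it only works for a datasource that has already been saved. A hedged sketch of the required ordering, with a hypothetical config:

```js
// Sketch: save first so the server assigns an _id, then refresh the schema.
let datasource = {
  source: "POSTGRES", // hypothetical document
  config: { host: "localhost", port: 5432, database: "postgres" },
}
datasource = await datasources.save(datasource)
datasource = await datasources.updateSchema(datasource) // uses datasource._id
```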
@@ -8,7 +8,6 @@ const {
   getTableParams,
 } = require("../../db/utils")
 const { integrations } = require("../../integrations")
-const plusIntegrations = require("../../integrations/plus")
 const { makeExternalQuery } = require("./row/utils")

 exports.fetch = async function (ctx) {
@@ -40,6 +39,24 @@ exports.fetch = async function (ctx) {
   ctx.body = [bbInternalDb, ...datasources]
 }

+exports.buildSchemaFromDb = async function (ctx) {
+  const db = new CouchDB(ctx.appId)
+  const datasourceId = ctx.params.datasourceId
+  const datasource = await db.get(datasourceId)
+
+  const Connector = integrations[datasource.source]
+
+  // Connect to the DB and build the schema
+  const connector = new Connector(datasource.config)
+  await connector.buildSchema(datasource._id)
+  datasource.entities = connector.tables
+
+  const response = await db.post(datasource)
+  datasource._rev = response.rev
+
+  ctx.body = datasource
+}
+
 exports.save = async function (ctx) {
   const db = new CouchDB(ctx.appId)
   const plus = ctx.request.body.plus
@@ -50,16 +67,6 @@ exports.save = async function (ctx) {
     ...ctx.request.body,
   }

-  // update the schema
-  if (ctx.query.refresh) {
-    const PlusConnector = plusIntegrations[datasource.source].integration
-
-    const connector = new PlusConnector(ctx.request.body.config)
-    await connector.init(datasource._id)
-
-    datasource.entities = connector.tables
-  }
-
   const response = await db.post(datasource)
   datasource._rev = response.rev
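`buildSchemaFromDb` replaces the old `?refresh=1` branch in `save`: it loads the stored datasource, resolves a connector by its `source` key, and persists whatever `buildSchema` left on `connector.tables`. A sketch of the document it writes back, with hypothetical values (the `_id` template comes from the integration code at the bottom of this diff):

```js
// Hypothetical shape of the document buildSchemaFromDb persists:
const datasource = {
  _id: "datasource_abc123", // hypothetical id
  source: "POSTGRES",
  plus: true,
  config: { host: "localhost", port: 5432 },
  entities: {
    // one entry per table; _id is `${datasourceId}${SEPARATOR}${tableName}`
    users: {
      primary: ["id"],
      name: "users",
      schema: { id: { name: "id", type: "number" } }, // via TYPE_MAP
    },
  },
}
```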
@@ -33,7 +33,7 @@ exports.fetch = async function (ctx) {
   )

   const external = externalTables.rows.flatMap(row => {
-    return Object.values(row.doc.entities).map(entity => ({
+    return Object.values(row.doc.entities || {}).map(entity => ({
       ...entity,
       sourceId: row.doc._id,
     }))
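The `|| {}` guard matters because, after this commit, a datasource can exist before its schema has ever been fetched — `entities` is only populated once `buildSchemaFromDb` runs. Without the fallback, `Object.values(undefined)` throws. A minimal illustration:

```js
// A freshly saved datasource has no entities yet (hypothetical doc):
const row = { doc: { _id: "datasource_abc123" } }
// Object.values(row.doc.entities)    -> TypeError
Object.values(row.doc.entities || {}) // -> [], safe for unfetched schemas
```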
@@ -72,6 +72,11 @@ router
     generateQueryDatasourceSchema(),
     datasourceController.query
   )
+  .post(
+    "/api/datasources/:datasourceId/schema",
+    authorized(BUILDER),
+    datasourceController.buildSchemaFromDb
+  )
   .post(
     "/api/datasources",
     authorized(BUILDER),
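The new route is a plain builder-authorized POST with no request body. A hedged sketch of exercising it directly (datasource ID hypothetical, builder auth assumed):

```js
// Sketch: trigger a schema rebuild for a saved datasource.
const response = await fetch("/api/datasources/datasource_abc123/schema", {
  method: "POST",
})
const withSchema = await response.json() // the updated datasource document
```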
@@ -24,7 +24,6 @@ const DEFINITIONS = {
   MYSQL: mysql.schema,
   ARANGODB: arangodb.schema,
   REST: rest.schema,
-  POSTGRES_PLUS: postgresPlus.schema,
 }

 const INTEGRATIONS = {
@@ -39,7 +38,6 @@ const INTEGRATIONS = {
   MYSQL: mysql.integration,
   ARANGODB: arangodb.integration,
   REST: rest.integration,
-  POSTGRES_PLUS: postgresPlus.integration,
 }

 module.exports = {
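Dropping the `POSTGRES_PLUS` keys is fine for new datasources, but `buildSchemaFromDb` resolves connectors by `datasource.source`, so a document saved under the old key would now resolve to `undefined`. A sketch of that failure mode; the guard is an assumption, not part of this commit:

```js
const { integrations } = require("../../integrations") // path as in the diff

function resolveConnector(datasource) {
  const Connector = integrations[datasource.source] // "POSTGRES" resolves; "POSTGRES_PLUS" no longer does
  if (!Connector) {
    // hypothetical guard; the controller in this commit does not add one
    throw new Error(`No integration found for source: ${datasource.source}`)
  }
  return new Connector(datasource.config)
}
```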
@@ -1,135 +1,135 @@
-const Sql = require("../base/sql")
-const { Pool } = require("pg")
-const { FieldTypes } = require("../../constants")
-const { FIELD_TYPES } = require("../Integration")
-const { SEPARATOR } = require("@budibase/auth/db")
+// const Sql = require("../base/sql")
+// const { Pool } = require("pg")
+// const { FieldTypes } = require("../../constants")
+// const { FIELD_TYPES } = require("../Integration")
+// const { SEPARATOR } = require("@budibase/auth/db")

-const TYPE_MAP = {
-  text: FieldTypes.LONGFORM,
-  varchar: FieldTypes.STRING,
-  integer: FieldTypes.NUMBER,
-  bigint: FieldTypes.NUMBER,
-  decimal: FieldTypes.NUMBER,
-  smallint: FieldTypes.NUMBER,
-  timestamp: FieldTypes.DATETIME,
-  time: FieldTypes.DATETIME,
-  boolean: FieldTypes.BOOLEAN,
-  json: FIELD_TYPES.JSON,
-}
+// const TYPE_MAP = {
+//   text: FieldTypes.LONGFORM,
+//   varchar: FieldTypes.STRING,
+//   integer: FieldTypes.NUMBER,
+//   bigint: FieldTypes.NUMBER,
+//   decimal: FieldTypes.NUMBER,
+//   smallint: FieldTypes.NUMBER,
+//   timestamp: FieldTypes.DATETIME,
+//   time: FieldTypes.DATETIME,
+//   boolean: FieldTypes.BOOLEAN,
+//   json: FIELD_TYPES.JSON,
+// }

-const SCHEMA = {
-  friendlyName: "PostgreSQL",
-  description:
-    "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
-  plus: true,
-  datasource: {
-    host: {
-      type: FIELD_TYPES.STRING,
-      default: "localhost",
-      required: true,
-    },
-    port: {
-      type: FIELD_TYPES.NUMBER,
-      required: true,
-      default: 5432,
-    },
-    database: {
-      type: FIELD_TYPES.STRING,
-      default: "postgres",
-      required: true,
-    },
-    user: {
-      type: FIELD_TYPES.STRING,
-      default: "root",
-      required: true,
-    },
-    password: {
-      type: FIELD_TYPES.PASSWORD,
-      default: "root",
-      required: true,
-    },
-    ssl: {
-      type: FIELD_TYPES.BOOLEAN,
-      default: false,
-      required: false,
-    },
-  },
-}
+// const SCHEMA = {
+//   friendlyName: "PostgreSQL",
+//   description:
+//     "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
+//   plus: true,
+//   datasource: {
+//     host: {
+//       type: FIELD_TYPES.STRING,
+//       default: "localhost",
+//       required: true,
+//     },
+//     port: {
+//       type: FIELD_TYPES.NUMBER,
+//       required: true,
+//       default: 5432,
+//     },
+//     database: {
+//       type: FIELD_TYPES.STRING,
+//       default: "postgres",
+//       required: true,
+//     },
+//     user: {
+//       type: FIELD_TYPES.STRING,
+//       default: "root",
+//       required: true,
+//     },
+//     password: {
+//       type: FIELD_TYPES.PASSWORD,
+//       default: "root",
+//       required: true,
+//     },
+//     ssl: {
+//       type: FIELD_TYPES.BOOLEAN,
+//       default: false,
+//       required: false,
+//     },
+//   },
+// }

-class PostgresPlus extends Sql {
-  static pool
-  COLUMNS_SQL =
-    "select * from information_schema.columns where table_schema = 'public'"
+// class PostgresPlus extends Sql {
+//   static pool
+//   COLUMNS_SQL =
+//     "select * from information_schema.columns where table_schema = 'public'"

-  PRIMARY_KEYS_SQL = `
-  select tc.table_schema, tc.table_name, kc.column_name as primary_key
-  from information_schema.table_constraints tc
-  join
-    information_schema.key_column_usage kc on kc.table_name = tc.table_name
-    and kc.table_schema = tc.table_schema
-    and kc.constraint_name = tc.constraint_name
-  where tc.constraint_type = 'PRIMARY KEY';
-  `
+//   PRIMARY_KEYS_SQL = `
+//   select tc.table_schema, tc.table_name, kc.column_name as primary_key
+//   from information_schema.table_constraints tc
+//   join
+//     information_schema.key_column_usage kc on kc.table_name = tc.table_name
+//     and kc.table_schema = tc.table_schema
+//     and kc.constraint_name = tc.constraint_name
+//   where tc.constraint_type = 'PRIMARY KEY';
+//   `

-  constructor(config, datasource) {
-    super("pg")
-    this.config = config
-    this.datasource = datasource
+//   constructor(config, datasource) {
+//     super("pg")
+//     this.config = config
+//     this.datasource = datasource

-    if (!this.pool) {
-      this.pool = new Pool(this.config)
-    }
+//     if (!this.pool) {
+//       this.pool = new Pool(this.config)
+//     }

-    this.client = this.pool
-  }
+//     this.client = this.pool
+//   }

-  async init(datasourceId) {
-    let keys = []
-    try {
-      const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
-      for (let table of primaryKeysResponse.rows) {
-        keys.push(table.column_name || table.primary_key)
-      }
-    } catch (err) {
-      // TODO: this try catch method isn't right
-      keys = ["id"]
-    }
+//   async init(datasourceId) {
+//     let keys = []
+//     try {
+//       const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
+//       for (let table of primaryKeysResponse.rows) {
+//         keys.push(table.column_name || table.primary_key)
+//       }
+//     } catch (err) {
+//       // TODO: this try catch method isn't right
+//       keys = ["id"]
+//     }

-    const columnsResponse = await this.client.query(this.COLUMNS_SQL)
-    const tables = {}
+//     const columnsResponse = await this.client.query(this.COLUMNS_SQL)
+//     const tables = {}

-    for (let column of columnsResponse.rows) {
-      const tableName = column.table_name
-      const columnName = column.column_name
+//     for (let column of columnsResponse.rows) {
+//       const tableName = column.table_name
+//       const columnName = column.column_name

-      // table key doesn't exist yet
-      if (!tables[tableName]) {
-        tables[tableName] = {
-          _id: `${datasourceId}${SEPARATOR}${tableName}`,
-          // TODO: this needs to accommodate composite keys
-          primary: keys,
-          name: tableName,
-          schema: {},
-        }
-      }
+//       // table key doesn't exist yet
+//       if (!tables[tableName]) {
+//         tables[tableName] = {
+//           _id: `${datasourceId}${SEPARATOR}${tableName}`,
+//           // TODO: this needs to accommodate composite keys
+//           primary: keys,
+//           name: tableName,
+//           schema: {},
+//         }
+//       }

-      tables[tableName].schema[columnName] = {
-        name: columnName,
-        type: TYPE_MAP[column.data_type] || FIELD_TYPES.STRING,
-      }
-    }
-    this.tables = tables
-  }
+//       tables[tableName].schema[columnName] = {
+//         name: columnName,
+//         type: TYPE_MAP[column.data_type] || FIELD_TYPES.STRING,
+//       }
+//     }
+//     this.tables = tables
+//   }

-  async query(json) {
-    const operation = this._operation(json).toLowerCase()
-    const sql = this._query(json)
-    const response = await this.client.query(sql.sql, sql.bindings)
-    return response.rows.length ? response.rows : [{ [operation]: true }]
-  }
-}
+//   async query(json) {
+//     const operation = this._operation(json).toLowerCase()
+//     const sql = this._query(json)
+//     const response = await this.client.query(sql.sql, sql.bindings)
+//     return response.rows.length ? response.rows : [{ [operation]: true }]
+//   }
+// }

-module.exports = {
-  schema: SCHEMA,
-  integration: PostgresPlus,
-}
+// module.exports = {
+//   schema: SCHEMA,
+//   integration: PostgresPlus,
+// }
@@ -1,9 +1,12 @@
 const { Pool } = require("pg")
 const { FIELD_TYPES } = require("./Integration")
 const Sql = require("./base/sql")
+const { FieldTypes } = require("../constants")
+const { SEPARATOR } = require("@budibase/auth/db")

 const SCHEMA = {
   docs: "https://node-postgres.com",
+  plus: true,
   friendlyName: "PostgreSQL",
   description:
     "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
@@ -55,6 +58,19 @@ const SCHEMA = {
   },
 }

+const TYPE_MAP = {
+  text: FieldTypes.LONGFORM,
+  varchar: FieldTypes.STRING,
+  integer: FieldTypes.NUMBER,
+  bigint: FieldTypes.NUMBER,
+  decimal: FieldTypes.NUMBER,
+  smallint: FieldTypes.NUMBER,
+  timestamp: FieldTypes.DATETIME,
+  time: FieldTypes.DATETIME,
+  boolean: FieldTypes.BOOLEAN,
+  json: FIELD_TYPES.JSON,
+}
+
 async function internalQuery(client, sql) {
   try {
     return await client.query(sql)
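`TYPE_MAP` intentionally covers only the common Postgres column types; anything unrecognised degrades to a plain string field via the `|| FIELD_TYPES.STRING` fallback in `buildSchema` below. A quick illustration, assuming the lowercase string values Budibase uses for field types:

```js
// Trimmed copy of the mapping to show the fallback behaviour.
const TYPE_MAP = { integer: "number", timestamp: "datetime" }
const typeFor = dataType => TYPE_MAP[dataType] || "string"

typeFor("integer") // -> "number"
typeFor("uuid") // -> "string" (unmapped, falls back)
```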
@@ -66,6 +82,19 @@ async function internalQuery(client, sql) {
 class PostgresIntegration extends Sql {
   static pool

+  COLUMNS_SQL =
+    "select * from information_schema.columns where table_schema = 'public'"
+
+  PRIMARY_KEYS_SQL = `
+  select tc.table_schema, tc.table_name, kc.column_name as primary_key
+  from information_schema.table_constraints tc
+  join
+    information_schema.key_column_usage kc on kc.table_name = tc.table_name
+    and kc.table_schema = tc.table_schema
+    and kc.constraint_name = tc.constraint_name
+  where tc.constraint_type = 'PRIMARY KEY';
+  `
+
   constructor(config) {
     super("pg")
     this.config = config
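Both queries are scoped to the `public` schema. `PRIMARY_KEYS_SQL` aliases `kc.column_name` to `primary_key` and returns one row per key column, which is why `buildSchema` reads `table.column_name || table.primary_key` and why composite keys surface as multiple rows (see the TODO below). A hypothetical result set:

```js
// Hypothetical rows returned by PRIMARY_KEYS_SQL:
const primaryKeysResponse = {
  rows: [
    { table_schema: "public", table_name: "users", primary_key: "id" },
    { table_schema: "public", table_name: "orders", primary_key: "order_id" },
  ],
}
```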
@@ -82,6 +111,48 @@ class PostgresIntegration extends Sql {
     this.client = this.pool
   }

+  /**
+   * Fetches the tables from the postgres table and assigns them to the datasource.
+   * @param {*} datasourceId - datasourceId to fetch
+   */
+  async buildSchema(datasourceId) {
+    let keys = []
+    try {
+      const primaryKeysResponse = await this.client.query(this.PRIMARY_KEYS_SQL)
+      for (let table of primaryKeysResponse.rows) {
+        keys.push(table.column_name || table.primary_key)
+      }
+    } catch (err) {
+      // TODO: this try catch method isn't right
+      keys = ["id"]
+    }
+
+    const columnsResponse = await this.client.query(this.COLUMNS_SQL)
+    const tables = {}
+
+    for (let column of columnsResponse.rows) {
+      const tableName = column.table_name
+      const columnName = column.column_name
+
+      // table key doesn't exist yet
+      if (!tables[tableName]) {
+        tables[tableName] = {
+          _id: `${datasourceId}${SEPARATOR}${tableName}`,
+          // TODO: this needs to accommodate composite keys
+          primary: keys,
+          name: tableName,
+          schema: {},
+        }
+      }
+
+      tables[tableName].schema[columnName] = {
+        name: columnName,
+        type: TYPE_MAP[column.data_type] || FIELD_TYPES.STRING,
+      }
+    }
+    this.tables = tables
+  }
+
   async create({ sql }) {
     const response = await internalQuery(this.client, sql)
     return response.rows.length ? response.rows : [{ created: true }]
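Taken together, the consolidated integration can be driven exactly the way the controller drives it. A hedged, standalone sketch with hypothetical connection values (`PostgresIntegration` is the class in this file):

```js
// Sketch of what buildSchemaFromDb does with this class.
const config = { host: "localhost", port: 5432, database: "postgres", user: "root", password: "root" } // hypothetical
const connector = new PostgresIntegration(config)
await connector.buildSchema("datasource_abc123") // hypothetical datasource _id
console.log(Object.keys(connector.tables)) // tables discovered in `public`
```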