import {
  Integration,
  DatasourceFieldTypes,
  QueryTypes,
  QueryJson,
  SqlQuery,
} from "../definitions/datasource"
import { Table } from "../definitions/common"
import { getSqlQuery } from "./utils"

module PostgresModule {
  const { Pool } = require("pg")
  const Sql = require("./base/sql")
  const { FieldTypes } = require("../constants")
  const {
    buildExternalTableId,
    convertType,
    copyExistingPropsOver,
  } = require("./utils")
  const { escapeDangerousCharacters } = require("../utilities")

  const JSON_REGEX = /'{.*}'::json/s
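  // For example (illustrative only), given a generated statement such as
  //   update "users" set "profile" = '{"name": "test"}'::json where "id" = 1
  // the regex above picks out the '{"name": "test"}'::json literal so its
  // contents can be escaped before the statement is executed.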

  interface PostgresConfig {
    host: string
    port: number
    database: string
    user: string
    password: string
    ssl?: boolean
  }
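
  // A minimal config of this shape might look like the following sketch; the
  // values are illustrative and simply mirror the defaults declared in SCHEMA:
  //
  //   const config: PostgresConfig = {
  //     host: "localhost",
  //     port: 5432,
  //     database: "postgres",
  //     user: "root",
  //     password: "root",
  //     ssl: false,
  //   }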

  const SCHEMA: Integration = {
    docs: "https://node-postgres.com",
    plus: true,
    friendlyName: "PostgreSQL",
    description:
      "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.",
    datasource: {
      host: {
        type: DatasourceFieldTypes.STRING,
        default: "localhost",
        required: true,
      },
      port: {
        type: DatasourceFieldTypes.NUMBER,
        required: true,
        default: 5432,
      },
      database: {
        type: DatasourceFieldTypes.STRING,
        default: "postgres",
        required: true,
      },
      user: {
        type: DatasourceFieldTypes.STRING,
        default: "root",
        required: true,
      },
      password: {
        type: DatasourceFieldTypes.PASSWORD,
        default: "root",
        required: true,
      },
      ssl: {
        type: DatasourceFieldTypes.BOOLEAN,
        default: false,
        required: false,
      },
    },
    query: {
      create: {
        type: QueryTypes.SQL,
      },
      read: {
        type: QueryTypes.SQL,
      },
      update: {
        type: QueryTypes.SQL,
      },
      delete: {
        type: QueryTypes.SQL,
      },
    },
  }

  const TYPE_MAP = {
    text: FieldTypes.LONGFORM,
    varchar: FieldTypes.STRING,
    integer: FieldTypes.NUMBER,
    bigint: FieldTypes.NUMBER,
    decimal: FieldTypes.NUMBER,
    smallint: FieldTypes.NUMBER,
    real: FieldTypes.NUMBER,
    "double precision": FieldTypes.NUMBER,
    timestamp: FieldTypes.DATETIME,
    time: FieldTypes.DATETIME,
    boolean: FieldTypes.BOOLEAN,
    json: FieldTypes.JSON,
    date: FieldTypes.DATETIME,
  }

  async function internalQuery(client: any, query: SqlQuery) {
    // need to handle a specific issue with json data types in postgres,
    // new lines inside the JSON data will break it
    if (query && query.sql) {
      const matches = query.sql.match(JSON_REGEX)
      if (matches && matches.length > 0) {
        for (let match of matches) {
          const escaped = escapeDangerousCharacters(match)
          query.sql = query.sql.replace(match, escaped)
        }
      }
    }
    try {
      return await client.query(query.sql, query.bindings || [])
    } catch (err) {
      // @ts-ignore
      throw new Error(err)
    }
  }
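
  // Usage sketch (illustrative): internalQuery takes a SqlQuery whose optional
  // bindings are passed straight through to pg's parameterised queries, e.g.
  //
  //   const result = await internalQuery(pool, {
  //     sql: "select * from users where id = $1",
  //     bindings: [1],
  //   })
  //   // result.rows contains the matching records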

  class PostgresIntegration extends Sql {
    static pool: any
    private readonly client: any
    private readonly config: PostgresConfig

    COLUMNS_SQL =
      "select * from information_schema.columns where not table_schema = 'information_schema' and not table_schema = 'pg_catalog'"

    PRIMARY_KEYS_SQL = `
    select tc.table_schema, tc.table_name, kc.column_name as primary_key
    from information_schema.table_constraints tc
    join
      information_schema.key_column_usage kc on kc.table_name = tc.table_name
      and kc.table_schema = tc.table_schema
      and kc.constraint_name = tc.constraint_name
    where tc.constraint_type = 'PRIMARY KEY';
    `
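
    // The primary keys query above returns one row per (table, key column) pair,
    // e.g. something like { table_schema: "public", table_name: "users", primary_key: "id" }
    // for a table with a single-column primary key.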

    constructor(config: PostgresConfig) {
      super("pg")
      this.config = config

      let newConfig = {
        ...this.config,
        ssl: this.config.ssl ? { rejectUnauthorized: true } : undefined,
      }
      // only create a connection pool if one doesn't already exist
      if (!this.pool) {
        this.pool = new Pool(newConfig)
      }

      this.client = this.pool
    }

    /**
     * Fetches the tables from the postgres database and assigns them to the datasource.
     * @param {*} datasourceId - datasourceId to fetch
     * @param entities - the tables that are to be built
     */
    async buildSchema(datasourceId: string, entities: Record<string, Table>) {
      let tableKeys: { [key: string]: string[] } = {}
      try {
        const primaryKeysResponse = await this.client.query(
          this.PRIMARY_KEYS_SQL
        )
        for (let table of primaryKeysResponse.rows) {
          const tableName = table.table_name
          if (!tableKeys[tableName]) {
            tableKeys[tableName] = []
          }
          const key = table.column_name || table.primary_key
          // only add the unique keys
          if (key && tableKeys[tableName].indexOf(key) === -1) {
            tableKeys[tableName].push(key)
          }
        }
      } catch (err) {
        tableKeys = {}
      }

      const columnsResponse = await this.client.query(this.COLUMNS_SQL)
      const tables: { [key: string]: Table } = {}

      for (let column of columnsResponse.rows) {
        const tableName: string = column.table_name
        const columnName: string = column.column_name

        // table key doesn't exist yet
        if (!tables[tableName] || !tables[tableName].schema) {
          tables[tableName] = {
            _id: buildExternalTableId(datasourceId, tableName),
            primary: tableKeys[tableName] || ["id"],
            name: tableName,
            schema: {},
          }
        }

        const type: string = convertType(column.data_type, TYPE_MAP)
        // treat identity columns, serial-style nextval defaults and generated
        // columns as auto columns
        const identity = !!(
          column.identity_generation ||
          column.identity_start ||
          column.identity_increment
        )
        const hasDefault =
          typeof column.column_default === "string" &&
          column.column_default.startsWith("nextval")
        const isGenerated =
          column.is_generated && column.is_generated !== "NEVER"
        const isAuto: boolean = hasDefault || identity || isGenerated
        tables[tableName].schema[columnName] = {
          autocolumn: isAuto,
          name: columnName,
          type,
        }
      }

      for (let tableName of Object.keys(tables)) {
        copyExistingPropsOver(tableName, tables, entities)
      }
      this.tables = tables
    }
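
    // After buildSchema runs, this.tables maps table names to Table definitions,
    // for example (illustrative):
    //
    //   {
    //     users: {
    //       _id: buildExternalTableId(datasourceId, "users"),
    //       primary: ["id"],
    //       name: "users",
    //       schema: {
    //         id: { autocolumn: true, name: "id", type: FieldTypes.NUMBER },
    //         email: { autocolumn: false, name: "email", type: FieldTypes.STRING },
    //       },
    //     },
    //   }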

    async create(query: SqlQuery | string) {
      const response = await internalQuery(this.client, getSqlQuery(query))
      return response.rows.length ? response.rows : [{ created: true }]
    }

    async read(query: SqlQuery | string) {
      const response = await internalQuery(this.client, getSqlQuery(query))
      return response.rows
    }

    async update(query: SqlQuery | string) {
      const response = await internalQuery(this.client, getSqlQuery(query))
      return response.rows.length ? response.rows : [{ updated: true }]
    }

    async delete(query: SqlQuery | string) {
      const response = await internalQuery(this.client, getSqlQuery(query))
      return response.rows.length ? response.rows : [{ deleted: true }]
    }
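
    // query() handles structured QueryJson requests, delegating SQL generation
    // to the base Sql class (_operation and _query) before executing the result.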

    async query(json: QueryJson) {
      const operation = this._operation(json).toLowerCase()
      const input = this._query(json)
      const response = await internalQuery(this.client, input)
      return response.rows.length ? response.rows : [{ [operation]: true }]
    }
  }

  module.exports = {
    schema: SCHEMA,
    integration: PostgresIntegration,
  }
}
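
// Usage sketch (illustrative, assuming the module is consumed like the other
// integrations in this directory; the require path and values are examples):
//
//   const { integration: PostgresIntegration } = require("./postgres")
//   const pg = new PostgresIntegration({
//     host: "localhost",
//     port: 5432,
//     database: "postgres",
//     user: "root",
//     password: "root",
//     ssl: false,
//   })
//   // inside an async function:
//   const rows = await pg.read("select * from users limit 10;")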