Adding a MySQL plus integration, as well as fixing some issues with running queries directly.
parent a04c930c1e
commit 7065bf1ea9
@@ -100,6 +100,7 @@
     having to write any queries at all.
   </Body>
   <div class="query-list">
+    {#if datasource.entities}
     {#each Object.keys(datasource.entities) as entity}
       <div
         class="query-list-item"

@@ -110,6 +111,7 @@
         <p>→</p>
       </div>
     {/each}
+    {/if}
   </div>
 {/if}
 <Divider />

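The {#if datasource.entities} guard added in this change avoids calling Object.keys on an undefined entities object, which throws in JavaScript, so datasources with no fetched entities no longer break the query list render. A plain-JS illustration (not part of the commit; the "Persons" key is borrowed from init.sql below):

    Object.keys(undefined)        // TypeError: Cannot convert undefined or null to object
    Object.keys({ Persons: {} })  // ["Persons"] — safe once entities actually exists
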
@@ -0,0 +1,21 @@
+# Use root/root as user/password credentials
+version: '3.1'
+
+services:
+
+  db:
+    image: mysql
+    restart: always
+    command: --init-file /data/application/init.sql --default-authentication-plugin=mysql_native_password
+    volumes:
+      - ./init.sql:/data/application/init.sql
+    environment:
+      MYSQL_ROOT_PASSWORD: root
+    ports:
+      - 3306:3306
+
+  adminer:
+    image: adminer
+    restart: always
+    ports:
+      - 8080:8080

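For reference, a minimal connectivity check against this compose stack might look like the following. This is a sketch only: it assumes docker-compose up -d has been run locally, that init.sql has created the main database, and that MySQL is reachable on 127.0.0.1; adjust the host for other setups.

    const mysql = require("mysql")

    // Connection values mirror the compose file: root/root on port 3306, database "main".
    const client = mysql.createConnection({
      host: "127.0.0.1",
      port: 3306,
      user: "root",
      password: "root",
      database: "main",
    })

    client.connect()
    client.query("SHOW TABLES;", (error, results) => {
      if (error) throw error
      console.log(results) // should include the Persons table created by init.sql
      client.end()
    })
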
@@ -0,0 +1,9 @@
+CREATE DATABASE IF NOT EXISTS main;
+USE main;
+CREATE TABLE Persons (
+    PersonID int NOT NULL PRIMARY KEY,
+    LastName varchar(255),
+    FirstName varchar(255),
+    Address varchar(255),
+    City varchar(255)
+);

@@ -51,9 +51,11 @@ const SCHEMA = {
   },
 }
 
-async function internalQuery(client, sql) {
+async function internalQuery(client, query) {
+  const sql = typeof query === "string" ? query : query.sql
+  const bindings = typeof query === "string" ? {} : query.bindings
   try {
-    return await client.query(sql.sql, sql.bindings)
+    return await client.query(sql, bindings)
   } catch (err) {
     throw new Error(err)
   }

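This is the direct-query fix from the commit message: internalQuery previously read sql.sql and sql.bindings unconditionally, so passing a raw SQL string broke. With the change it accepts either form. Illustration only, with hypothetical table and value names; placeholder syntax depends on the underlying driver.

    // Inside some async integration method:
    await internalQuery(client, "SELECT * FROM Persons;")
    await internalQuery(client, {
      sql: "SELECT * FROM Persons WHERE City = ?;",
      bindings: ["Dublin"],
    })
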
@@ -1,9 +1,35 @@
 const mysql = require("mysql")
 const { FIELD_TYPES, QUERY_TYPES } = require("./Integration")
 const Sql = require("./base/sql")
+const { buildExternalTableId, convertType } = require("./utils")
+const { FieldTypes } = require("../constants")
 
+const TYPE_MAP = {
+  text: FieldTypes.LONGFORM,
+  blob: FieldTypes.LONGFORM,
+  enum: FieldTypes.STRING,
+  varchar: FieldTypes.STRING,
+  int: FieldTypes.NUMBER,
+  numeric: FieldTypes.NUMBER,
+  bigint: FieldTypes.NUMBER,
+  mediumint: FieldTypes.NUMBER,
+  decimal: FieldTypes.NUMBER,
+  dec: FieldTypes.NUMBER,
+  double: FieldTypes.NUMBER,
+  real: FieldTypes.NUMBER,
+  fixed: FieldTypes.NUMBER,
+  smallint: FieldTypes.NUMBER,
+  timestamp: FieldTypes.DATETIME,
+  date: FieldTypes.DATETIME,
+  datetime: FieldTypes.DATETIME,
+  time: FieldTypes.DATETIME,
+  tinyint: FieldTypes.BOOLEAN,
+  json: FIELD_TYPES.JSON,
+}
+
 const SCHEMA = {
   docs: "https://github.com/mysqljs/mysql",
+  plus: true,
   friendlyName: "MySQL",
   description:
     "MySQL Database Service is a fully managed database service to deploy cloud-native applications. ",

@@ -53,15 +79,21 @@ const SCHEMA = {
   },
 }
 
-function internalQuery(client, query) {
+function internalQuery(client, query, connect = true) {
+  const sql = typeof query === "string" ? query : query.sql
+  const bindings = typeof query === "string" ? {} : query.bindings
   // Node MySQL is callback based, so we must wrap our call in a promise
   return new Promise((resolve, reject) => {
+    if (connect) {
       client.connect()
-    return client.query(query.sql, query.bindings, (error, results) => {
+    }
+    return client.query(sql, bindings, (error, results) => {
       if (error) {
         reject(error)
       } else {
         resolve(results)
       }
+      if (connect) {
         client.end()
+      }
     })

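The new connect flag (default true) keeps the old behaviour of opening and closing the connection around a single query, while letting callers that manage the connection themselves reuse one open connection across several queries, as buildSchema does in the next hunk. A rough usage sketch, with an illustrative table name:

    // One-off query: the connection is opened and ended inside internalQuery.
    await internalQuery(client, { sql: "SELECT 1;", bindings: {} })

    // Caller-managed connection: connect once, run several queries, then end.
    client.connect()
    const tables = await internalQuery(client, "SHOW TABLES;", false)
    const columns = await internalQuery(client, "DESCRIBE Persons;", false)
    client.end()
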
@@ -69,15 +101,73 @@ function internalQuery(client, query) {
 }
 
 class MySQLIntegration extends Sql {
+  GET_TABLES_SQL =
+    "select * from information_schema.columns where table_schema = 'public'"
+
+  PRIMARY_KEYS_SQL = `
+    select tc.table_schema, tc.table_name, kc.column_name as primary_key
+    from information_schema.table_constraints tc
+      join
+        information_schema.key_column_usage kc on kc.table_name = tc.table_name
+        and kc.table_schema = tc.table_schema
+        and kc.constraint_name = tc.constraint_name
+    where tc.constraint_type = 'PRIMARY KEY';
+  `
+
   constructor(config) {
     super("mysql")
     this.config = config
-    if (Object.keys(config.ssl).length === 0) {
+    if (config.ssl && Object.keys(config.ssl).length === 0) {
       delete config.ssl
     }
     this.client = mysql.createConnection(config)
   }
 
+  async buildSchema(datasourceId) {
+    const tables = {}
+    const database = this.config.database
+    this.client.connect()
+
+    // get the tables first
+    const tablesResp = await internalQuery(this.client, "SHOW TABLES;", false)
+    const tableNames = tablesResp.map(obj => obj[`Tables_in_${database}`])
+    for (let tableName of tableNames) {
+      const primaryKeys = []
+      const schema = {}
+      const descResp = await internalQuery(this.client, `DESCRIBE ${tableName};`, false)
+      for (let column of descResp) {
+        const columnName = column.Field
+        if (column.Key === "PRI") {
+          primaryKeys.push(columnName)
+        }
+        const constraints = {}
+        if (column.Null !== "YES") {
+          constraints.required = true
+        }
+        schema[columnName] = {
+          name: columnName,
+          type: convertType(column.Type, TYPE_MAP),
+          constraints,
+        }
+      }
+      // for now just default to first column
+      if (primaryKeys.length === 0) {
+        primaryKeys.push(descResp[0].Field)
+      }
+      if (!tables[tableName]) {
+        tables[tableName] = {
+          _id: buildExternalTableId(datasourceId, tableName),
+          primary: primaryKeys,
+          name: tableName,
+          schema,
+        }
+      }
+    }
+
+    this.client.end()
+    this.tables = tables
+  }
+
   async create(query) {
     const results = await internalQuery(this.client, query)
     return results.length ? results : [{ created: true }]

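For the Persons table from init.sql, buildSchema above would end up storing roughly the following in this.tables. This is an approximate sketch of the data shape, not committed code; the exact _id value comes from buildExternalTableId and is not spelled out here.

    {
      Persons: {
        _id: buildExternalTableId(datasourceId, "Persons"),
        primary: ["PersonID"],
        name: "Persons",
        schema: {
          PersonID: {
            name: "PersonID",
            type: FieldTypes.NUMBER,        // "int" matches the int entry in TYPE_MAP
            constraints: { required: true }, // PersonID is NOT NULL
          },
          LastName: {
            name: "LastName",
            type: FieldTypes.STRING,         // "varchar(255)" matches the varchar entry
            constraints: {},
          },
          // FirstName, Address and City follow the same varchar -> STRING mapping
        },
      },
    }
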
@@ -2,7 +2,7 @@ const { Pool } = require("pg")
 const { FIELD_TYPES } = require("./Integration")
 const Sql = require("./base/sql")
 const { FieldTypes } = require("../constants")
-const { buildExternalTableId } = require("./utils")
+const { buildExternalTableId, convertType } = require("./utils")
 
 const SCHEMA = {
   docs: "https://node-postgres.com",

@@ -71,9 +71,11 @@ const TYPE_MAP = {
   json: FIELD_TYPES.JSON,
 }
 
-async function internalQuery(client, sql) {
+async function internalQuery(client, query) {
+  const sql = typeof query === "string" ? query : query.sql
+  const bindings = typeof query === "string" ? {} : query.bindings
   try {
-    return await client.query(sql.sql, sql.bindings)
+    return await client.query(sql, bindings)
   } catch (err) {
     throw new Error(err)
   }

@@ -147,7 +149,7 @@ class PostgresIntegration extends Sql {
 
       tables[tableName].schema[columnName] = {
         name: columnName,
-        type: TYPE_MAP[column.data_type] || FIELD_TYPES.STRING,
+        type: convertType(column.data_type, TYPE_MAP),
       }
     }
     this.tables = tables

@@ -1,4 +1,5 @@
 const { DocumentTypes, SEPARATOR } = require("../db/utils")
+const { FieldTypes } = require("../constants")
 
 const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
 

@@ -33,3 +34,12 @@ exports.breakRowIdField = _id => {
   }
   return JSON.parse(decodeURIComponent(_id))
 }
+
+exports.convertType = (type, map) => {
+  for (let [external, internal] of Object.entries(map)) {
+    if (type.toLowerCase().includes(external)) {
+      return internal
+    }
+  }
+  return FieldTypes.STRING
+}

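convertType does a case-insensitive substring match against the map keys, in insertion order, and falls back to FieldTypes.STRING, so dialect suffixes such as display widths need no extra entries. With the MySQL TYPE_MAP above (illustrative calls):

    convertType("varchar(255)", TYPE_MAP) // FieldTypes.STRING — "varchar" is a substring match
    convertType("DATETIME", TYPE_MAP)     // FieldTypes.DATETIME — matching is case-insensitive
    convertType("geometry", TYPE_MAP)     // FieldTypes.STRING — no entry, falls back to the default

One consequence of the ordered substring match: a MySQL "tinyint(1)" type contains "int" and so hits the earlier int entry before the tinyint one, mapping to FieldTypes.NUMBER rather than BOOLEAN.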