Revert "Datasource plus - build schema"

This reverts commit 3c1d32ba24.
Mel O'Hagan 2022-06-01 09:36:42 +01:00
parent be8cd07cdc
commit b282102998
3 changed files with 9 additions and 94 deletions

View File

@@ -29,7 +29,6 @@ export const IntegrationNames = {
   ARANGODB: "ArangoDB",
   ORACLE: "Oracle",
   GOOGLE_SHEETS: "Google Sheets",
-  SNOWFLAKE: "Snowflake",
 }

 // fields on the user table that cannot be edited
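Note: the lookup below is an illustration of this hunk's effect, not code from the diff. With the SNOWFLAKE key removed, display-name lookups for Snowflake fall through (undefined in plain JS consumers, a type error where the constant is strictly typed).

// Illustrative only — not part of this diff.
IntegrationNames.GOOGLE_SHEETS // "Google Sheets"
IntegrationNames.SNOWFLAKE     // undefined after this revert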

View File

@@ -1,27 +1,12 @@
-import {
-  Integration,
-  QueryTypes,
-  SqlQuery,
-  DatasourceFieldTypes,
-} from "../definitions/datasource"
+import { Integration, QueryTypes, SqlQuery } from "../definitions/datasource"
 import {
   SnowflakeError,
   Statement,
   createConnection,
   Connection,
 } from "snowflake-sdk"
-import {
-  SqlClients,
-  finaliseExternalTables,
-  buildExternalTableId,
-  convertSqlType,
-} from "./utils"
-import { DatasourcePlus } from "./base/datasourcePlus"
-import { Table, TableSchema } from "../definitions/common"

 module SnowflakeModule {
-  const Sql = require("./base/sql")
-
   interface SnowflakeConfig {
     account: string
     username: string
@@ -32,34 +17,33 @@ module SnowflakeModule {
   }

   const SCHEMA: Integration = {
-    plus: true,
     docs: "https://developers.snowflake.com/",
     description:
       "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.",
     friendlyName: "Snowflake",
     datasource: {
       account: {
-        type: DatasourceFieldTypes.STRING,
+        type: "string",
         required: true,
       },
       username: {
-        type: DatasourceFieldTypes.STRING,
+        type: "string",
         required: true,
       },
       password: {
-        type: DatasourceFieldTypes.PASSWORD,
+        type: "password",
         required: true,
       },
       warehouse: {
-        type: DatasourceFieldTypes.STRING,
+        type: "string",
         required: true,
       },
       database: {
-        type: DatasourceFieldTypes.STRING,
+        type: "string",
         required: true,
       },
       schema: {
-        type: DatasourceFieldTypes.STRING,
+        type: "string",
         required: true,
       },
     },
@@ -79,77 +63,13 @@ module SnowflakeModule {
     },
   }

-  class SnowflakeIntegration extends Sql implements DatasourcePlus {
+  class SnowflakeIntegration {
     private client: Connection
-    private config: SnowflakeConfig
-    public tables: Record<string, Table> = {}
-    public schemaErrors: Record<string, string> = {}

     constructor(config: SnowflakeConfig) {
-      super(SqlClients.SNOWFLAKE)
-      this.config = config
       this.client = createConnection(config)
     }

-    getBindingIdentifier(): string {
-      return "?"
-    }
-
-    getStringConcat(parts: string[]): string {
-      return `concat(${parts.join(", ")})`
-    }
-
-    async buildSchema(datasourceId: string, entities: Record<string, Table>) {
-      const tables: { [key: string]: Table } = {}
-      const database = this.config.database
-      // get the tables first
-      const tablesResp = await this.internalQuery({ sql: "SHOW TABLES;" })
-      const tableNames = tablesResp.map((obj: any) => obj.name)
-      for (let tableName of tableNames) {
-        const primaryKeys = []
-        const schema: TableSchema = {}
-        const descResp = await this.internalQuery({
-          sql: `DESCRIBE TABLE ${tableName};`,
-        })
-        if (tableName === "CUSTOMER") {
-          console.log("DESC = ", descResp)
-        }
-        for (let column of descResp) {
-          const columnName = column.Field
-          if (
-            column["primary key"] === "Y" &&
-            primaryKeys.indexOf(column.Key) === -1
-          ) {
-            primaryKeys.push(columnName)
-          }
-          const constraints = {
-            presence: column["null?"] !== "Y",
-          }
-          const isAuto: boolean = column.default
-            ?.toLowerCase()
-            .includes("increment")
-          schema[columnName] = {
-            name: columnName,
-            autocolumn: isAuto,
-            constraints,
-            ...convertSqlType(column["type"]),
-          }
-        }
-        if (!tables[tableName]) {
-          tables[tableName] = {
-            _id: buildExternalTableId(datasourceId, tableName),
-            primary: primaryKeys,
-            name: tableName,
-            schema,
-          }
-        }
-      }
-      const final = finaliseExternalTables(tables, entities)
-      this.tables = final.tables
-      this.schemaErrors = final.errors
-    }
-
     async connectAsync() {
       return new Promise((resolve, reject) => {
         this.client.connect(function (err: any, conn: any) {
@@ -160,9 +80,7 @@ module SnowflakeModule {
     }

     async internalQuery(query: SqlQuery) {
-      if (!this.client.isUp()) {
-        await this.connectAsync()
-      }
+      await this.connectAsync()
       let response: any = await new Promise((resolve, reject) =>
         this.client.execute({
           sqlText: query.sql,
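
Note: a minimal usage sketch of the class as it stands after this revert, not code from the diff. It is written as if inside the module scope of this file, and all credential values are placeholders.

// Illustrative sketch only — not part of this diff.
async function example() {
  const integration = new SnowflakeIntegration({
    account: "my-account",
    username: "my-user",
    password: "my-password",
    warehouse: "COMPUTE_WH",
    database: "MY_DB",
    schema: "PUBLIC",
  })
  // With the isUp() guard removed, internalQuery() awaits connectAsync()
  // on every call before executing the statement.
  const rows = await integration.internalQuery({ sql: "SELECT 1;" })
  return rows
}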

View File

@@ -73,7 +73,6 @@ export enum SqlClients {
   POSTGRES = "pg",
   MY_SQL = "mysql2",
   ORACLE = "oracledb",
-  SNOWFLAKE = "snowflake-sdk",
 }

 export function isExternalTable(tableId: string) {
@@ -174,7 +173,6 @@ export function isSQL(datasource: Datasource): boolean {
     SourceNames.SQL_SERVER,
     SourceNames.MYSQL,
     SourceNames.ORACLE,
-    SourceNames.SNOWFLAKE,
   ]
   return SQL.indexOf(datasource.source) !== -1
 }
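
Note: the practical effect of the last hunk, sketched below rather than taken from the diff, and assuming SourceNames.SNOWFLAKE is still defined elsewhere in the datasource definitions. With it removed from the SQL list, isSQL() no longer classifies Snowflake datasources as SQL, so SQL-only code paths skip them.

// Illustrative only — not part of this diff.
const ds = { source: SourceNames.SNOWFLAKE } as Datasource
isSQL(ds) // false after this revert (previously true)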