Linting and updating SQL Server schema generation to include auto column and primary key recognition.

This commit is contained in:
mike12345567 2021-11-05 12:33:48 +00:00
parent 515ed75680
commit 9c933b629f
6 changed files with 123 additions and 31 deletions
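The diffs below teach the MSSQL integration to read primary keys and identity/computed ("auto") columns from INFORMATION_SCHEMA instead of hard-coding primary: ["id"] and autocolumn: false. As a rough illustration of that detection logic, here is a minimal sketch; detectTableMeta and runQuery are hypothetical names used only for this example (the commit itself adds getConstraintsSQL, getAutoColumnsSQL and uses internalQuery), and only the two queries mirror the ones added below.

// Hedged sketch only - not the commit's code. runQuery stands in for any helper
// that executes T-SQL and resolves to the result rows.
type Row = Record<string, any>
type RunQuery = (sql: string) => Promise<Row[]>

async function detectTableMeta(runQuery: RunQuery, tableName: string) {
  // Primary key columns: join TABLE_CONSTRAINTS to KEY_COLUMN_USAGE, keeping
  // only PRIMARY KEY constraints (mirrors getConstraintsSQL in the diff below).
  const constraints = await runQuery(
    `SELECT KU.COLUMN_NAME, TC.CONSTRAINT_TYPE
     FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
     INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
       ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
       AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
       AND KU.TABLE_NAME = '${tableName}'
     ORDER BY KU.TABLE_NAME, KU.ORDINAL_POSITION`
  )
  // Auto columns: COLUMNPROPERTY flags identity and computed columns
  // (mirrors getAutoColumnsSQL in the diff below).
  const columns = await runQuery(
    `SELECT COLUMN_NAME,
       COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA + '.' + TABLE_NAME), COLUMN_NAME, 'IsIdentity') AS IS_IDENTITY,
       COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA + '.' + TABLE_NAME), COLUMN_NAME, 'IsComputed') AS IS_COMPUTED
     FROM INFORMATION_SCHEMA.COLUMNS
     WHERE TABLE_NAME = '${tableName}'`
  )
  const primaryKeys = constraints
    .filter(row => row.CONSTRAINT_TYPE === "PRIMARY KEY")
    .map(row => row.COLUMN_NAME)
  const autoColumns = columns
    .filter(row => row.IS_IDENTITY || row.IS_COMPUTED)
    .map(row => row.COLUMN_NAME)
  return { primaryKeys, autoColumns }
}

Run against the seed script in the first file below, detectTableMeta(runQuery, "products") would report primaryKeys = ["id"] and autoColumns = ["id"], which is the shape buildSchema now stores per table.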

View File

@@ -7,15 +7,30 @@ CREATE TABLE products
(
id int IDENTITY(1,1),
name varchar (20),
description varchar(30)
description varchar(30),
CONSTRAINT pk_products PRIMARY KEY NONCLUSTERED (id)
);
IF OBJECT_ID ('dbo.tasks', 'U') IS NOT NULL
DROP TABLE tasks;
GO
CREATE TABLE tasks
(
taskid int IDENTITY(1,1),
taskname varchar (20)
taskname varchar (20),
productid int,
CONSTRAINT pk_tasks PRIMARY KEY NONCLUSTERED (taskid),
CONSTRAINT fk_products FOREIGN KEY (productid) REFERENCES products (id)
);
IF OBJECT_ID ('dbo.people', 'U') IS NOT NULL
DROP TABLE people;
GO
CREATE TABLE people
(
name varchar(30),
age varchar(20),
CONSTRAINT pk_people PRIMARY KEY NONCLUSTERED (name, age)
);
INSERT products
@@ -29,6 +44,11 @@ VALUES
('Meat', 'Animal thing');
INSERT tasks
(taskname)
(taskname, productid)
VALUES
('Processing');
('Processing', 1);
INSERT people
(name, age)
VALUES
('Bob', '30');

View File

@@ -226,7 +226,12 @@ module External {
manyRelationships: ManyRelationship[] = []
for (let [key, field] of Object.entries(table.schema)) {
// if set already, or not set just skip it
if (row[key] == null || newRow[key] || field.autocolumn || field.type === FieldTypes.FORMULA) {
if (
row[key] == null ||
newRow[key] ||
field.autocolumn ||
field.type === FieldTypes.FORMULA
) {
continue
}
// if it's an empty string then it means return the column to null (if possible)

View File

@@ -279,7 +279,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
case Operation.DELETE:
query = buildDelete(client, json, opts)
break
case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE:
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
return this._tableQuery(json)
default:
throw `Operation type is not supported by SQL query builder`

View File

@@ -6,7 +6,12 @@ import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
const { FieldTypes, RelationshipTypes } = require("../../constants")
function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record<string, Table>, oldTable: null | Table = null) {
function generateSchema(
schema: CreateTableBuilder,
table: Table,
tables: Record<string, Table>,
oldTable: null | Table = null
) {
let primaryKey = table && table.primary ? table.primary[0] : null
const columns = Object.values(table.schema)
// all columns in a junction table will be meta
@@ -19,17 +24,21 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
schema.primary(metaCols.map(col => col.name))
}
// check if any columns need to be added
const foreignKeys = Object.values(table.schema).map(col => col.foreignKey)
for (let [key, column] of Object.entries(table.schema)) {
// skip things that are already correct
const oldColumn = oldTable ? oldTable.schema[key] : null
if ((oldColumn && oldColumn.type === column.type) || (primaryKey === key && !isJunction)) {
if (
(oldColumn && oldColumn.type === column.type) ||
(primaryKey === key && !isJunction)
) {
continue
}
switch (column.type) {
case FieldTypes.STRING: case FieldTypes.OPTIONS: case FieldTypes.LONGFORM:
case FieldTypes.STRING:
case FieldTypes.OPTIONS:
case FieldTypes.LONGFORM:
schema.string(key)
break
case FieldTypes.NUMBER:
@@ -67,7 +76,9 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
throw "Referenced table doesn't exist"
}
schema.integer(column.foreignKey).unsigned()
schema.foreign(column.foreignKey).references(`${tableName}.${relatedTable.primary[0]}`)
schema
.foreign(column.foreignKey)
.references(`${tableName}.${relatedTable.primary[0]}`)
}
break
}
@@ -76,7 +87,10 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
// need to check if any columns have been deleted
if (oldTable) {
const deletedColumns = Object.entries(oldTable.schema)
.filter(([key, schema]) => schema.type !== FieldTypes.LINK && table.schema[key] == null)
.filter(
([key, schema]) =>
schema.type !== FieldTypes.LINK && table.schema[key] == null
)
.map(([key]) => key)
deletedColumns.forEach(key => {
if (oldTable.constrained && oldTable.constrained.indexOf(key) !== -1) {
@@ -92,7 +106,7 @@ function generateSchema(schema: CreateTableBuilder, table: Table, tables: Record
function buildCreateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
tables: Record<string, Table>
): SchemaBuilder {
return knex.schema.createTable(table.name, schema => {
generateSchema(schema, table, tables)
@@ -103,17 +117,14 @@ function buildUpdateTable(
knex: Knex,
table: Table,
tables: Record<string, Table>,
oldTable: Table,
oldTable: Table
): SchemaBuilder {
return knex.schema.alterTable(table.name, schema => {
generateSchema(schema, table, tables, oldTable)
})
}
function buildDeleteTable(
knex: Knex,
table: Table,
): SchemaBuilder {
function buildDeleteTable(knex: Knex, table: Table): SchemaBuilder {
return knex.schema.dropTable(table.name)
}
@@ -151,7 +162,12 @@ class SqlTableQueryBuilder {
if (!json.meta || !json.meta.table) {
throw "Must specify old table for update"
}
query = buildUpdateTable(client, json.table, json.meta.tables, json.meta.table)
query = buildUpdateTable(
client,
json.table,
json.meta.tables,
json.meta.table
)
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
@@ -164,4 +180,4 @@
}
export default SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder
module.exports = SqlTableQueryBuilder

View File

@@ -4,7 +4,10 @@ import { Datasource } from "../../definitions/common"
module DatasourceUtils {
const { integrations } = require("../index")
export async function makeExternalQuery(datasource: Datasource, json: QueryJson) {
export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
) {
const Integration = integrations[datasource.source]
// query is the opinionated function
if (Integration.prototype.query) {

View File

@@ -7,7 +7,7 @@ import {
} from "../definitions/datasource"
import { getSqlQuery } from "./utils"
import { DatasourcePlus } from "./base/datasourcePlus"
import { Table, TableSchema } from "../definitions/common";
import { Table, TableSchema } from "../definitions/common"
module MSSQLModule {
const sqlServer = require("mssql")
@@ -129,9 +129,10 @@ module MSSQLModule {
"spt_fallback_dev",
"spt_fallback_usg",
"spt_monitor",
"MSreplication_options"
"MSreplication_options",
]
TABLES_SQL = "SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
TABLES_SQL =
"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE'"
getDefinitionSQL(tableName: string) {
return `select *
@@ -139,6 +140,28 @@ module MSSQLModule {
where TABLE_NAME='${tableName}'`
}
getConstraintsSQL(tableName: string) {
return `SELECT * FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
INNER JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KU
ON TC.CONSTRAINT_TYPE = 'PRIMARY KEY'
AND TC.CONSTRAINT_NAME = KU.CONSTRAINT_NAME
AND KU.table_name='${tableName}'
ORDER BY
KU.TABLE_NAME,
KU.ORDINAL_POSITION;`
}
getAutoColumnsSQL(tableName: string) {
return `SELECT
COLUMNPROPERTY(OBJECT_ID(TABLE_SCHEMA+'.'+TABLE_NAME),COLUMN_NAME,'IsComputed')
AS IS_COMPUTED,
COLUMNPROPERTY(object_id(TABLE_SCHEMA+'.'+TABLE_NAME), COLUMN_NAME, 'IsIdentity')
AS IS_IDENTITY,
*
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME='${tableName}'`
}
constructor(config: MSSQLConfig) {
super("mssql")
this.config = config
@@ -171,16 +194,39 @@ module MSSQLModule {
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
await this.connect()
let tableNames = await internalQuery(this.client, getSqlQuery(this.TABLES_SQL))
let tableNames = await internalQuery(
this.client,
getSqlQuery(this.TABLES_SQL)
)
if (tableNames == null || !Array.isArray(tableNames.recordset)) {
throw "Unable to get list of tables in database"
}
tableNames = tableNames.recordset.map((record: any) => record.TABLE_NAME).filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
tableNames = tableNames.recordset
.map((record: any) => record.TABLE_NAME)
.filter((name: string) => this.MASTER_TABLES.indexOf(name) === -1)
const tables: Record<string, Table> = {}
for (let tableName of tableNames) {
const definition = await internalQuery(this.client, getSqlQuery(this.getDefinitionSQL(tableName)))
const definition = await internalQuery(
this.client,
getSqlQuery(this.getDefinitionSQL(tableName))
)
const constraints = await internalQuery(
this.client,
getSqlQuery(this.getConstraintsSQL(tableName))
)
const columns = await internalQuery(
this.client,
getSqlQuery(this.getAutoColumnsSQL(tableName))
)
const autoColumns = columns.recordset
.filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
.map((col: any) => col.COLUMN_NAME)
const primaryKeys = constraints.recordset
.filter(
(constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
)
.map((constraint: any) => constraint.COLUMN_NAME)
let schema: TableSchema = {}
for (let def of definition.recordset) {
const name = def.COLUMN_NAME
@@ -188,16 +234,16 @@ module MSSQLModule {
continue
}
const type: string = convertType(def.DATA_TYPE, TYPE_MAP)
const identity = false
schema[name] = {
autocolumn: identity,
autocolumn: !!autoColumns.find((col: string) => col === name),
name: name,
type,
}
}
tables[tableName] = {
_id: buildExternalTableId(datasourceId, tableName),
primary: ["id"],
primary: primaryKeys,
name: tableName,
schema,
}