Merge pull request #8915 from Budibase/fix/8882

Fix/8882
Michael Drury 2022-12-05 12:56:34 +00:00 committed by GitHub
commit 0621c7b197
9 changed files with 251 additions and 67 deletions

View File

@ -45,6 +45,23 @@
const touched = writable({})
function invalidThroughTable({ through, throughTo, throughFrom }) {
  // need to know the foreign key columns to check error
  if (!through || !throughTo || !throughFrom) {
    return false
  }
  const throughTable = plusTables.find(tbl => tbl._id === through)
  const otherColumns = Object.values(throughTable.schema).filter(
    col => col.name !== throughFrom && col.name !== throughTo
  )
  for (let col of otherColumns) {
    if (col.constraints?.presence && !col.autocolumn) {
      return true
    }
  }
  return false
}
function checkForErrors(fromRelate, toRelate) {
const isMany =
fromRelate.relationshipType === RelationshipTypes.MANY_TO_MANY
@ -59,6 +76,10 @@
if ($touched.through && isMany && !fromRelate.through) {
  errObj.through = tableNotSet
}
if ($touched.through && invalidThroughTable(fromRelate)) {
  errObj.through =
    "Ensure all columns in table are nullable or auto generated"
}
if ($touched.foreign && !isMany && !fromRelate.fieldName) {
  errObj.foreign = "Please pick the foreign key"
}
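For context on the check above: invalidThroughTable rejects a join ("through") table whose extra columns are required but not auto-generated, because Budibase cannot fill those columns when it inserts link rows. Below is a minimal standalone sketch of the same rule, using simplified column shapes that stand in for the real builder types (not part of this commit):

// Editor sketch, not part of this commit; SketchColumn/SketchJoinTable are
// simplified assumptions standing in for the real table schema types.
interface SketchColumn {
  name: string
  autocolumn?: boolean
  constraints?: { presence?: boolean }
}
interface SketchJoinTable {
  schema: Record<string, SketchColumn>
}

// True when any column other than the two foreign keys is required
// (presence constraint) and not auto-generated.
function hasBlockingColumn(
  table: SketchJoinTable,
  throughFrom: string,
  throughTo: string
): boolean {
  return Object.values(table.schema).some(
    col =>
      col.name !== throughFrom &&
      col.name !== throughTo &&
      !!col.constraints?.presence &&
      !col.autocolumn
  )
}

// A NOT NULL "completed" column on the join table would trigger the
// "Ensure all columns in table are nullable or auto generated" error.
const joinTable: SketchJoinTable = {
  schema: {
    employee_id: { name: "employee_id" },
    skills_id: { name: "skills_id" },
    completed: { name: "completed", constraints: { presence: true } },
  },
}
console.log(hasBlockingColumn(joinTable, "employee_id", "skills_id")) // true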

View File

@ -11,7 +11,7 @@ GO
CREATE TABLE products
(
id int IDENTITY(1,1),
- name varchar (20),
+ name varchar (20) NOT NULL,
description varchar(30),
CONSTRAINT pk_products PRIMARY KEY NONCLUSTERED (id)
);
@ -22,7 +22,7 @@ GO
CREATE TABLE tasks
(
taskid int IDENTITY(1,1),
- taskname varchar (20),
+ taskname varchar (20) NOT NULL,
productid int,
CONSTRAINT pk_tasks PRIMARY KEY NONCLUSTERED (taskid),
CONSTRAINT fk_products FOREIGN KEY (productid) REFERENCES products (id),
@ -33,7 +33,7 @@ IF OBJECT_ID ('dbo.people', 'U') IS NOT NULL
GO
CREATE TABLE people
(
- name varchar(30),
+ name varchar(30) NOT NULL,
age varchar(20),
CONSTRAINT pk_people PRIMARY KEY NONCLUSTERED (name, age)
);

View File

@ -0,0 +1,46 @@
SELECT 'CREATE DATABASE main'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'main')\gexec
CREATE TABLE IF NOT EXISTS public."Employee"
(
id integer NOT NULL,
name text COLLATE pg_catalog."default",
CONSTRAINT "Employee_pkey" PRIMARY KEY (id)
)
WITH (
OIDS = FALSE
);
INSERT INTO public."Employee" ("id", "name") VALUES (1, 'Alice');
INSERT INTO public."Employee" ("id", "name") VALUES (2, 'Bob');
CREATE TABLE IF NOT EXISTS public."Skills"
(
id integer NOT NULL,
name text COLLATE pg_catalog."default",
CONSTRAINT "Skills_pkey" PRIMARY KEY (id)
)
WITH (
OIDS = FALSE
);
INSERT INTO public."Skills" ("id", "name") VALUES (1, 'Docker');
INSERT INTO public."Skills" ("id", "name") VALUES (2, 'Microservices');
INSERT INTO public."Skills" ("id", "name") VALUES (3, 'Kubernetes');
INSERT INTO public."Skills" ("id", "name") VALUES (4, 'Spring');
CREATE TABLE IF NOT EXISTS public."jt_employee_skills_Skills_employee"
(
employee_id integer,
skills_id integer,
id integer NOT NULL,
CONSTRAINT "jt_employee_skills_Skills_employee_pkey" PRIMARY KEY (id)
)
WITH (
OIDS = FALSE
);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (1, 1, 1);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (2, 1, 2);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (3, 1, 3);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (4, 2, 2);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (5, 2, 3);
insert into public."jt_employee_skills_Skills_employee" ("id", "employee_id", "skills_id") VALUES (6, 2, 4);

View File

@ -588,7 +588,10 @@ export class ExternalRequest {
for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) {
const table: Table | undefined = this.getTable(tableId)
// if its not the foreign key skip it, nothing to do
- if (!table || (table.primary && table.primary.indexOf(colName) !== -1)) {
+ if (
+   !table ||
+   (!isMany && table.primary && table.primary.indexOf(colName) !== -1)
+ ) {
continue
}
for (let row of rows) {
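The condition above is the server-side half of the fix: previously a related column that matched the table's primary key was always skipped, which could skip many-to-many join-table rows that still needed processing, so the check now only applies when the relationship is not many-to-many. A small sketch of the new skip rule in isolation, with a simplified TableLike shape assumed for illustration (not part of this commit):

// Editor sketch, not part of this commit; TableLike stands in for the real Table type.
interface TableLike {
  primary?: string[]
}

// New rule from the hunk above: only skip primary-key columns when the
// relationship is not many-to-many, so link rows still get processed.
function shouldSkip(
  table: TableLike | undefined,
  colName: string,
  isMany: boolean
): boolean {
  return (
    !table || (!isMany && !!table.primary && table.primary.includes(colName))
  )
}

console.log(shouldSkip({ primary: ["id"] }, "id", false)) // true  - skipped as before
console.log(shouldSkip({ primary: ["id"] }, "id", true)) // false - now processed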

View File

@ -0,0 +1,134 @@
export interface MSSQLTablesResponse {
TABLE_CATALOG: string
TABLE_SCHEMA: string
TABLE_NAME: string
TABLE_TYPE: string
}
export interface MSSQLColumn {
IS_COMPUTED: number
IS_IDENTITY: number
TABLE_CATALOG: string
TABLE_SCHEMA: string
TABLE_NAME: string
COLUMN_NAME: string
ORDINAL_POSITION: number
COLUMN_DEFAULT: null | any
IS_NULLABLE: "NO" | "YES"
DATA_TYPE: string
CHARACTER_MAXIMUM_LENGTH: null | number
CHARACTER_OCTET_LENGTH: null | number
NUMERIC_PRECISION: null | number
NUMERIC_PRECISION_RADIX: null | number
NUMERIC_SCALE: null | number
DATETIME_PRECISION: null | string
CHARACTER_SET_CATALOG: null | string
CHARACTER_SET_SCHEMA: null | string
CHARACTER_SET_NAME: null | string
COLLATION_CATALOG: null | string
COLLATION_SCHEMA: null | string
COLLATION_NAME: null | string
DOMAIN_CATALOG: null | string
DOMAIN_SCHEMA: null | string
DOMAIN_NAME: null | string
}
export interface PostgresColumn {
table_catalog: string
table_schema: string
table_name: string
column_name: string
ordinal_position: number
column_default: null | any
is_nullable: "NO" | "YES"
data_type: string
character_maximum_length: null | number
character_octet_length: null | number
numeric_precision: null | number
numeric_precision_radix: null | number
numeric_scale: null | number
datetime_precision: null | string
interval_type: null | string
interval_precision: null | string
character_set_catalog: null | string
character_set_schema: null | string
character_set_name: null | string
collation_catalog: null | string
collation_schema: null | string
collation_name: null | string
domain_catalog: null | string
domain_schema: null | string
domain_name: null | string
udt_catalog: string
udt_schema: string
udt_name: string
scope_catalog: null | string
scope_schema: null | string
scope_name: null | string
maximum_cardinality: null | string
dtd_identifier: string
is_self_referencing: "NO" | "YES"
is_identity: "NO" | "YES"
identity_generation: null | number
identity_start: null | number
identity_increment: null | number
identity_maximum: null | number
identity_minimum: null | number
identity_cycle: "NO" | "YES"
is_generated: "NEVER"
generation_expression: null | string
is_updatable: "NO" | "YES"
}
export interface MySQLColumn {
Field: string
Type: string
Null: "NO" | "YES"
Key: "PRI" | "MUL" | ""
Default: null | any
Extra: null | string
}
/**
* Raw query response
*/
export interface OracleColumnsResponse {
TABLE_NAME: string
COLUMN_NAME: string
DATA_TYPE: string
DATA_DEFAULT: null | string
COLUMN_ID: number
CONSTRAINT_NAME: null | string
CONSTRAINT_TYPE: null | string
R_CONSTRAINT_NAME: null | string
SEARCH_CONDITION: null | string
}
/**
 * An Oracle constraint
*/
export interface OracleConstraint {
name: string
type: string
relatedConstraintName: null | string
searchCondition: null | string
}
/**
 * An Oracle column and its related constraints
*/
export interface OracleColumn {
name: string
type: string
default: null | string
id: number
constraints: { [key: string]: OracleConstraint }
}
/**
 * An Oracle table and its related columns
*/
export interface OracleTable {
name: string
columns: { [key: string]: OracleColumn }
}
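The new base/types.ts module collects the raw column shapes each driver returns, so the integrations below can drop their local copies and share one definition. As a hypothetical illustration of how the three shapes line up on their nullability fields (this helper is not in the commit):

// Hypothetical helper, not part of this commit: pulls the NOT NULL column
// names out of any of the three raw column shapes defined above.
import { MSSQLColumn, MySQLColumn, PostgresColumn } from "./base/types"

type AnyColumn = MSSQLColumn | MySQLColumn | PostgresColumn

function requiredColumnNames(columns: AnyColumn[]): string[] {
  return columns
    .filter(col => {
      if ("IS_NULLABLE" in col) return col.IS_NULLABLE === "NO" // MSSQL
      if ("is_nullable" in col) return col.is_nullable === "NO" // Postgres
      return col.Null === "NO" // MySQL
    })
    .map(col => {
      if ("COLUMN_NAME" in col) return col.COLUMN_NAME
      if ("column_name" in col) return col.column_name
      return col.Field
    })
}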

View File

@ -17,6 +17,7 @@ import {
SqlClient,
} from "./utils"
import Sql from "./base/sql"
import { MSSQLTablesResponse, MSSQLColumn } from "./base/types"
const sqlServer = require("mssql")
const DEFAULT_SCHEMA = "dbo"
@ -31,13 +32,6 @@ interface MSSQLConfig {
encrypt?: boolean
}
- interface TablesResponse {
-   TABLE_CATALOG: string
-   TABLE_SCHEMA: string
-   TABLE_NAME: string
-   TABLE_TYPE: string
- }
const SCHEMA: Integration = {
docs: "https://github.com/tediousjs/node-mssql",
plus: true,
@ -210,7 +204,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
await this.connect()
- let tableInfo: TablesResponse[] = await this.runSQL(this.TABLES_SQL)
+ let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
if (tableInfo == null || !Array.isArray(tableInfo)) {
throw "Unable to get list of tables in database"
}
@ -228,15 +222,20 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
// find primary key constraints
const constraints = await this.runSQL(this.getConstraintsSQL(tableName))
// find the computed and identity columns (auto columns)
- const columns = await this.runSQL(this.getAutoColumnsSQL(tableName))
+ const columns: MSSQLColumn[] = await this.runSQL(
+   this.getAutoColumnsSQL(tableName)
+ )
const primaryKeys = constraints
.filter(
(constraint: any) => constraint.CONSTRAINT_TYPE === "PRIMARY KEY"
)
.map((constraint: any) => constraint.COLUMN_NAME)
const autoColumns = columns
- .filter((col: any) => col.IS_COMPUTED || col.IS_IDENTITY)
- .map((col: any) => col.COLUMN_NAME)
+ .filter(col => col.IS_COMPUTED || col.IS_IDENTITY)
+ .map(col => col.COLUMN_NAME)
+ const requiredColumns = columns
+ .filter(col => col.IS_NULLABLE === "NO")
+ .map(col => col.COLUMN_NAME)
let schema: TableSchema = {}
for (let def of definition) {
@ -245,8 +244,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
continue
}
schema[name] = {
- autocolumn: !!autoColumns.find((col: string) => col === name),
+ autocolumn: !!autoColumns.find(col => col === name),
name: name,
+ constraints: {
+   presence: requiredColumns.find(col => col === name),
+ },
...convertSqlType(def.DATA_TYPE),
externalType: def.DATA_TYPE,
}
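With the change above, a NOT NULL MSSQL column such as products.name from the test SQL earlier in this commit should now come out of buildSchema with a presence constraint. Roughly, the schema entry built for it looks like this (an approximation based only on the fields visible in the diff; the spread-in type fields are summarised as comments):

// Editor sketch, not part of this commit: approximate result for products.name.
// Note: requiredColumns.find(...) yields the matching column name (a truthy
// string) rather than true, which is what marks the field as required.
const nameColumn = {
  autocolumn: false,
  name: "name",
  constraints: {
    presence: "name",
  },
  // ...convertSqlType("varchar") and externalType: "varchar" are spread in here
}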

View File

@ -18,6 +18,7 @@ import {
import dayjs from "dayjs"
const { NUMBER_REGEX } = require("../utilities")
import Sql from "./base/sql"
import { MySQLColumn } from "./base/types"
const mysql = require("mysql2/promise")
@ -203,11 +204,11 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
try {
// get the tables first
- const tablesResp = await this.internalQuery(
+ const tablesResp: Record<string, string>[] = await this.internalQuery(
{ sql: "SHOW TABLES;" },
{ connect: false }
)
- const tableNames = tablesResp.map(
+ const tableNames: string[] = tablesResp.map(
(obj: any) =>
obj[`Tables_in_${database}`] ||
obj[`Tables_in_${database.toLowerCase()}`]
@ -215,7 +216,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
for (let tableName of tableNames) {
const primaryKeys = []
const schema: TableSchema = {}
- const descResp = await this.internalQuery(
+ const descResp: MySQLColumn[] = await this.internalQuery(
{ sql: `DESCRIBE \`${tableName}\`;` },
{ connect: false }
)

View File

@ -24,6 +24,12 @@ import {
ExecuteOptions,
Result,
} from "oracledb"
import {
OracleTable,
OracleColumn,
OracleColumnsResponse,
OracleConstraint,
} from "./base/types"
let oracledb: any
try {
oracledb = require("oracledb")
@ -89,50 +95,6 @@ const SCHEMA: Integration = {
const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"]
- /**
-  * Raw query response
-  */
- interface ColumnsResponse {
-   TABLE_NAME: string
-   COLUMN_NAME: string
-   DATA_TYPE: string
-   DATA_DEFAULT: string | null
-   COLUMN_ID: number
-   CONSTRAINT_NAME: string | null
-   CONSTRAINT_TYPE: string | null
-   R_CONSTRAINT_NAME: string | null
-   SEARCH_CONDITION: string | null
- }
- /**
-  * An oracle constraint
-  */
- interface OracleConstraint {
-   name: string
-   type: string
-   relatedConstraintName: string | null
-   searchCondition: string | null
- }
- /**
-  * An oracle column and it's related constraints
-  */
- interface OracleColumn {
-   name: string
-   type: string
-   default: string | null
-   id: number
-   constraints: { [key: string]: OracleConstraint }
- }
- /**
-  * An oracle table and it's related columns
-  */
- interface OracleTable {
-   name: string
-   columns: { [key: string]: OracleColumn }
- }
const OracleContraintTypes = {
PRIMARY: "P",
NOT_NULL_OR_CHECK: "C",
@ -195,7 +157,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
/**
* Map the flat tabular columns and constraints data into a nested object
*/
- private mapColumns(result: Result<ColumnsResponse>): {
+ private mapColumns(result: Result<OracleColumnsResponse>): {
[key: string]: OracleTable
} {
const oracleTables: { [key: string]: OracleTable } = {}
@ -299,7 +261,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
* @param entities - the tables that are to be built
*/
async buildSchema(datasourceId: string, entities: Record<string, Table>) {
- const columnsResponse = await this.internalQuery<ColumnsResponse>({
+ const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
const oracleTables = this.mapColumns(columnsResponse)
@ -334,6 +296,9 @@ class OracleIntegration extends Sql implements DatasourcePlus {
fieldSchema = {
autocolumn: OracleIntegration.isAutoColumn(oracleColumn),
name: columnName,
constraints: {
presence: false,
},
...this.internalConvertType(oracleColumn),
}
table.schema[columnName] = fieldSchema
@ -343,6 +308,12 @@ class OracleIntegration extends Sql implements DatasourcePlus {
Object.values(oracleColumn.constraints).forEach(oracleConstraint => {
if (oracleConstraint.type === OracleContraintTypes.PRIMARY) {
table.primary!.push(columnName)
} else if (
oracleConstraint.type === OracleContraintTypes.NOT_NULL_OR_CHECK
) {
table.schema[columnName].constraints = {
presence: true,
}
}
})
})
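For Oracle, the hunk above derives required columns from constraint metadata: every column starts at presence: false and is flipped to true when a constraint of type NOT_NULL_OR_CHECK ("C") is attached (Oracle reports NOT NULL and CHECK constraints under the same type code). A condensed sketch of that rule against the shared OracleColumn type (the standalone predicate is illustrative, not part of the commit):

// Editor sketch, not part of this commit: the same presence rule expressed
// as a standalone predicate over the shared OracleColumn type.
import { OracleColumn } from "./base/types"

const NOT_NULL_OR_CHECK = "C" // mirrors OracleContraintTypes.NOT_NULL_OR_CHECK

function hasPresenceConstraint(column: OracleColumn): boolean {
  return Object.values(column.constraints).some(
    constraint => constraint.type === NOT_NULL_OR_CHECK
  )
}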

View File

@ -15,9 +15,10 @@ import {
SqlClient,
} from "./utils"
import Sql from "./base/sql"
+ import { PostgresColumn } from "./base/types"
+ import { escapeDangerousCharacters } from "../utilities"
  const { Client, types } = require("pg")
- const { escapeDangerousCharacters } = require("../utilities")
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
@ -237,7 +238,8 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
}
try {
- const columnsResponse = await this.client.query(this.COLUMNS_SQL)
+ const columnsResponse: { rows: PostgresColumn[] } =
+   await this.client.query(this.COLUMNS_SQL)
const tables: { [key: string]: Table } = {}
@ -260,6 +262,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
column.identity_start ||
column.identity_increment
)
const constraints = {
presence: column.is_nullable === "NO",
}
const hasDefault =
typeof column.column_default === "string" &&
column.column_default.startsWith("nextval")
@ -269,6 +274,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,
constraints,
...convertSqlType(column.data_type),
externalType: column.data_type,
}
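The PostgresColumn shape mirrors information_schema.columns, where nullability is exposed directly, so the presence constraint above is simply is_nullable === "NO". Applied to the Employee fixture added earlier in this commit, only id would be marked required; a quick sketch with hand-written sample rows (not real query output, not part of the commit):

// Editor sketch, not part of this commit: two hand-written rows shaped like
// the relevant slice of PostgresColumn, mapped to presence constraints.
const sampleRows = [
  { column_name: "id", is_nullable: "NO" },
  { column_name: "name", is_nullable: "YES" },
]

const constraintsByColumn = Object.fromEntries(
  sampleRows.map(col => [col.column_name, { presence: col.is_nullable === "NO" }])
)

console.log(constraintsByColumn)
// => { id: { presence: true }, name: { presence: false } }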