FIX: cannot delete a renamed column with a mysql datasource (#13147)
* Support MySQL column rename in Budibase
* Delete NOT ignored types
* unit tests
* update account portal
This commit is contained in:
parent
9a9b745795
commit
039df5d476
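For context: the schema builder's deleted-column detection previously filtered for columns whose type *is* ignored rather than those that are not (hence "Delete NOT ignored types"), and MySQL renames are now emitted as a raw `ALTER TABLE … CHANGE COLUMN` statement because the `renameColumn` path does not work for MySQL here (per the new code comment). A rough illustration, not code from this commit; the table and column names are made up:

```ts
// Standard SQL (Postgres, MySQL 8.0+) can rename a column in place:
//   ALTER TABLE users RENAME COLUMN name TO first_name
// MySQL's CHANGE COLUMN form, which this commit generates, must restate the
// column definition, so the column's stored externalType is required:
const mysqlRename = `alter table \`users\` change column \`name\` \`first_name\` varchar(45);`
```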
@@ -1 +1 @@
-Subproject commit de6d44c372a7f48ca0ce8c6c0c19311d4bc21646
+Subproject commit 19f7a5829f4d23cbc694136e45d94482a59a475a
@@ -400,7 +400,7 @@ class InternalBuilder {
     return query.limit(BASE_LIMIT)
   }
 
-  create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
     if (endpoint.schema) {
@@ -422,7 +422,7 @@ class InternalBuilder {
     }
   }
 
-  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+  bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder {
     const { endpoint, body } = json
     let query: KnexQuery = knex(endpoint.entityId)
     if (endpoint.schema) {
@@ -491,7 +491,7 @@ class InternalBuilder {
     return this.addFilters(query, filters, { relationship: true })
   }
 
-  update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, body, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
     if (endpoint.schema) {
@@ -507,7 +507,7 @@ class InternalBuilder {
     }
   }
 
-  delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
+  delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
     const { endpoint, filters } = json
     let query: KnexQuery = knex(endpoint.entityId)
     if (endpoint.schema) {
@@ -537,17 +537,17 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes.
    * @return the query ready to be passed to the driver.
    */
-  _query(json: QueryJson, opts: QueryOptions = {}) {
+  _query(json: QueryJson, opts: QueryOptions = {}): Knex.SqlNative | Knex.Sql {
     const sqlClient = this.getSqlClient()
     const client = knex({ client: sqlClient })
-    let query
+    let query: Knex.QueryBuilder
     const builder = new InternalBuilder(sqlClient)
     switch (this._operation(json)) {
       case Operation.CREATE:
         query = builder.create(client, json, opts)
         break
       case Operation.READ:
-        query = builder.read(client, json, this.limit)
+        query = builder.read(client, json, this.limit) as Knex.QueryBuilder
         break
       case Operation.UPDATE:
         query = builder.update(client, json, opts)
@@ -565,8 +565,6 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
       default:
         throw `Operation type is not supported by SQL query builder`
     }
-
-    // @ts-ignore
     return query.toSQL().toNative()
   }
 
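`_query` now declares that it returns knex's native SQL object rather than relying on a `@ts-ignore`. A minimal, standalone sketch of what `toSQL().toNative()` produces, independent of Budibase; the values shown in comments are illustrative:

```ts
import { knex } from "knex"

// A connection-less knex instance is enough for building queries.
const client = knex({ client: "pg" })

const native = client("test").insert({ name: "John" }).toSQL().toNative()
// native.sql      -> 'insert into "test" ("name") values ($1)'
// native.bindings -> ["John"]
```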
@@ -9,7 +9,7 @@ import {
   Table,
   FieldType,
 } from "@budibase/types"
-import { breakExternalTableId } from "../utils"
+import { breakExternalTableId, SqlClient } from "../utils"
 import SchemaBuilder = Knex.SchemaBuilder
 import CreateTableBuilder = Knex.CreateTableBuilder
 import { utils } from "@budibase/shared-core"
@@ -135,7 +135,8 @@ function generateSchema(
   // need to check if any columns have been deleted
   if (oldTable) {
     const deletedColumns = Object.entries(oldTable.schema).filter(
-      ([key, column]) => isIgnoredType(column.type) && table.schema[key] == null
+      ([key, column]) =>
+        !isIgnoredType(column.type) && table.schema[key] == null
     )
     deletedColumns.forEach(([key, column]) => {
       if (renamed?.old === key || isIgnoredType(column.type)) {
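The inverted condition was the root of the deletion bug: with `isIgnoredType(column.type)` un-negated, only virtual columns were ever treated as deleted. An illustrative, self-contained version of the corrected predicate; `isIgnoredType` and the type strings here are stand-ins for the real helper in this file:

```ts
type Column = { type: string }

// Stand-in: the real helper skips virtual column types that have no
// physical column behind them.
const isIgnoredType = (type: string) => type === "link" || type === "formula"

const oldSchema: Record<string, Column> = {
  name: { type: "string" },
  related: { type: "link" },
}
const newSchema: Record<string, Column> = {} // both columns removed in the new table

const deletedColumns = Object.entries(oldSchema).filter(
  ([key, column]) => !isIgnoredType(column.type) && newSchema[key] == null
)
// -> [["name", { type: "string" }]]; the virtual "link" column is skipped
```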
@@ -197,13 +198,14 @@ class SqlTableQueryBuilder {
     return json.endpoint.operation
   }
 
-  _tableQuery(json: QueryJson): any {
+  _tableQuery(json: QueryJson): Knex.Sql | Knex.SqlNative {
     let client = knex({ client: this.sqlClient }).schema
-    if (json?.endpoint?.schema) {
-      client = client.withSchema(json.endpoint.schema)
+    let schemaName = json?.endpoint?.schema
+    if (schemaName) {
+      client = client.withSchema(schemaName)
     }
 
-    let query
+    let query: Knex.SchemaBuilder
     if (!json.table || !json.meta || !json.meta.tables) {
       throw "Cannot execute without table being specified"
     }
@@ -215,6 +217,18 @@ class SqlTableQueryBuilder {
         if (!json.meta || !json.meta.table) {
           throw "Must specify old table for update"
         }
+        // renameColumn does not work for MySQL, so return a raw query
+        if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
+          const updatedColumn = json.meta.renamed.updated
+          const tableName = schemaName
+            ? `\`${schemaName}\`.\`${json.table.name}\``
+            : `\`${json.table.name}\``
+          const externalType = json.table.schema[updatedColumn].externalType!
+          return {
+            sql: `alter table ${tableName} change column \`${json.meta.renamed.old}\` \`${updatedColumn}\` ${externalType};`,
+            bindings: [],
+          }
+        }
         query = buildUpdateTable(
           client,
           json.table,
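For MySQL the table builder now short-circuits on a rename and returns a hand-built statement instead of going through knex's schema builder. Because `CHANGE COLUMN` restates the column definition, the column's stored `externalType` has to be included. A sketch of the returned object with made-up schema, table, and column names; the shape matches the diff above:

```ts
const renamed = { old: "descr", updated: "description" }
const externalType = "text" // the renamed column's externalType from the table schema

// Schema-qualified name, mirroring the ternary in the diff above
const tableName = "`sales`.`orders`"

const result = {
  sql: `alter table ${tableName} change column \`${renamed.old}\` \`${renamed.updated}\` ${externalType};`,
  bindings: [],
}
// result.sql === "alter table `sales`.`orders` change column `descr` `description` text;"
```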
@@ -12,7 +12,6 @@ import {
   SourceName,
   Schema,
   TableSourceType,
-  FieldType,
 } from "@budibase/types"
 import {
   getSqlQuery,
@@ -421,7 +421,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
 
   async query(json: QueryJson) {
     const operation = this._operation(json)
-    const input = this._query(json, { disableReturning: true })
+    const input = this._query(json, { disableReturning: true }) as SqlQuery
     if (Array.isArray(input)) {
       const responses = []
       for (let query of input) {
@@ -419,7 +419,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
 
   async query(json: QueryJson) {
     const operation = this._operation(json).toLowerCase()
-    const input = this._query(json)
+    const input = this._query(json) as SqlQuery
     if (Array.isArray(input)) {
       const responses = []
       for (let query of input) {
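Both integrations cast the builder's output to `SqlQuery` now that `_query` is typed as knex's `Sql | SqlNative`. A rough sketch of the shape the drivers end up consuming, including the array-or-single handling visible in both hunks; `SqlQueryLike` is an illustrative name, the real interface lives in `@budibase/types` and may differ in detail:

```ts
interface SqlQueryLike {
  sql: string
  bindings?: unknown[]
}

// Mirrors the `if (Array.isArray(input))` branch in the integrations.
function toQueryList(input: SqlQueryLike | SqlQueryLike[]): SqlQueryLike[] {
  return Array.isArray(input) ? input : [input]
}

// toQueryList({ sql: "select 1", bindings: [] }) -> [{ sql: "select 1", bindings: [] }]
```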
@@ -1,3 +1,11 @@
+import {
+  Operation,
+  QueryJson,
+  TableSourceType,
+  Table,
+  FieldType,
+} from "@budibase/types"
+
 const Sql = require("../base/sql").default
 const { SqlClient } = require("../utils")
 
@@ -17,7 +25,7 @@ function generateReadJson({
   filters,
   sort,
   paginate,
-}: any = {}) {
+}: any = {}): QueryJson {
   return {
     endpoint: endpoint(table || TABLE_NAME, "READ"),
     resource: {
@@ -28,6 +36,10 @@ function generateReadJson({
     paginate: paginate || {},
     meta: {
       table: {
+        type: "table",
+        sourceType: TableSourceType.EXTERNAL,
+        sourceId: "SOURCE_ID",
+        schema: {},
         name: table || TABLE_NAME,
         primary: ["id"],
       },
@@ -35,34 +47,40 @@ function generateReadJson({
   }
 }
 
-function generateCreateJson(table = TABLE_NAME, body = {}) {
+function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
   return {
     endpoint: endpoint(table, "CREATE"),
     body,
   }
 }
 
-function generateUpdateJson(table = TABLE_NAME, body = {}, filters = {}) {
+function generateUpdateJson({
+  table = TABLE_NAME,
+  body = {},
+  filters = {},
+  meta = {},
+}): QueryJson {
   return {
     endpoint: endpoint(table, "UPDATE"),
     filters,
     body,
+    meta,
   }
 }
 
-function generateDeleteJson(table = TABLE_NAME, filters = {}) {
+function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
   return {
     endpoint: endpoint(table, "DELETE"),
     filters,
   }
 }
 
-function generateRelationshipJson(config: { schema?: string } = {}) {
+function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
   return {
     endpoint: {
       datasourceId: "Postgres",
       entityId: "brands",
-      operation: "READ",
+      operation: Operation.READ,
       schema: config.schema,
     },
     resource: {
@@ -76,7 +94,6 @@ function generateRelationshipJson(config: { schema?: string } = {}) {
     },
     filters: {},
     sort: {},
-    paginate: {},
     relationships: [
       {
         from: "brand_id",
@@ -240,17 +257,17 @@ describe("SQL query builder", () => {
 
   it("should test an update statement", () => {
     const query = sql._query(
-      generateUpdateJson(
-        TABLE_NAME,
-        {
+      generateUpdateJson({
+        table: TABLE_NAME,
+        body: {
           name: "John",
         },
-        {
+        filters: {
           equal: {
             id: 1001,
           },
-        }
-      )
+        },
+      })
     )
     expect(query).toEqual({
       bindings: ["John", 1001],
@@ -682,4 +699,99 @@ describe("SQL query builder", () => {
       sql: `insert into \"test\" (\"name\") values ($1) returning *`,
     })
   })
+
+  it("should be able to rename column for MySQL", () => {
+    const table: Table = {
+      type: "table",
+      sourceType: TableSourceType.EXTERNAL,
+      name: TABLE_NAME,
+      schema: {
+        first_name: {
+          type: FieldType.STRING,
+          name: "first_name",
+          externalType: "varchar(45)",
+        },
+      },
+      sourceId: "SOURCE_ID",
+    }
+    const oldTable: Table = {
+      ...table,
+      schema: {
+        name: {
+          type: FieldType.STRING,
+          name: "name",
+          externalType: "varchar(45)",
+        },
+      },
+    }
+    const query = new Sql(SqlClient.MY_SQL, limit)._query({
+      table,
+      endpoint: {
+        datasourceId: "MySQL",
+        operation: Operation.UPDATE_TABLE,
+        entityId: TABLE_NAME,
+      },
+      meta: {
+        table: oldTable,
+        tables: [oldTable],
+        renamed: {
+          old: "name",
+          updated: "first_name",
+        },
+      },
+    })
+    expect(query).toEqual({
+      bindings: [],
+      sql: `alter table \`${TABLE_NAME}\` change column \`name\` \`first_name\` varchar(45);`,
+    })
+  })
+
+  it("should be able to delete a column", () => {
+    const table: Table = {
+      type: "table",
+      sourceType: TableSourceType.EXTERNAL,
+      name: TABLE_NAME,
+      schema: {
+        first_name: {
+          type: FieldType.STRING,
+          name: "first_name",
+          externalType: "varchar(45)",
+        },
+      },
+      sourceId: "SOURCE_ID",
+    }
+    const oldTable: Table = {
+      ...table,
+      schema: {
+        first_name: {
+          type: FieldType.STRING,
+          name: "first_name",
+          externalType: "varchar(45)",
+        },
+        last_name: {
+          type: FieldType.STRING,
+          name: "last_name",
+          externalType: "varchar(45)",
+        },
+      },
+    }
+    const query = sql._query({
+      table,
+      endpoint: {
+        datasourceId: "Postgres",
+        operation: Operation.UPDATE_TABLE,
+        entityId: TABLE_NAME,
+      },
+      meta: {
+        table: oldTable,
+        tables: [oldTable],
+      },
+    })
+    expect(query).toEqual([
+      {
+        bindings: [],
+        sql: `alter table "${TABLE_NAME}" drop column "last_name"`,
+      },
+    ])
+  })
 })