Convert mysql.spec.ts away from rawQuery.
parent 42437d8e71
commit 16cacb3de7
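The diff below replaces the rawQuery test helper, which handed raw SQL strings to the datasource, with a knex client obtained from the knexClient helper, so test setup and teardown go through knex's raw() and schema-builder APIs. A minimal sketch of the before/after pattern, using only helpers that appear in this commit (the "example" table is illustrative, not part of the change):

    import {
      DatabaseName,
      getDatasource,
      knexClient,
    } from "../integrations/tests/utils"

    const rawDatasource = await getDatasource(DatabaseName.MYSQL)

    // Before: SQL strings handed to the rawQuery helper.
    // await rawQuery(rawDatasource, "CREATE TABLE example (id SERIAL PRIMARY KEY);")

    // After: one knex client, reused for setup and teardown.
    const client = await knexClient(rawDatasource)
    await client.schema.createTable("example", table => {
      table.increments("id").primary()
    })
    await client.schema.dropTableIfExists("example")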
@@ -1,16 +1,12 @@
-import fetch from "node-fetch"
 import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
 import {
   DatabaseName,
   getDatasource,
-  rawQuery,
+  knexClient,
 } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
-import { tableForDatasource } from "../../src/tests/utilities/structures"
 import { Knex } from "knex"
-// @ts-ignore
-fetch.mockSearch()

 function uniqueTableName(length?: number): string {
   return generator
@@ -21,21 +17,6 @@ function uniqueTableName(length?: number): string {

 const config = setup.getConfig()!

-jest.mock("../websockets", () => ({
-  clientAppSocket: jest.fn(),
-  gridAppSocket: jest.fn(),
-  initialise: jest.fn(),
-  builderSocket: {
-    emitTableUpdate: jest.fn(),
-    emitTableDeletion: jest.fn(),
-    emitDatasourceUpdate: jest.fn(),
-    emitDatasourceDeletion: jest.fn(),
-    emitScreenUpdate: jest.fn(),
-    emitAppMetadataUpdate: jest.fn(),
-    emitAppPublish: jest.fn(),
-  },
-}))
-
 describe("mysql integrations", () => {
   let rawDatasource: Datasource
   let datasource: Datasource
@@ -43,68 +24,40 @@ describe("mysql integrations", () => {

   beforeAll(async () => {
     await config.init()

     rawDatasource = await getDatasource(DatabaseName.MYSQL)
     datasource = await config.api.datasource.create(rawDatasource)
+    client = await knexClient(rawDatasource)
   })

   afterAll(config.end)

-  it("validate table schema", async () => {
-    // Creating a table so that `entities` is populated.
-    await config.api.table.save(tableForDatasource(datasource))
-    const res = await config.api.datasource.get(datasource._id!)
-    expect(res).toEqual({
-      config: {
-        database: expect.any(String),
-        host: datasource.config!.host,
-        password: "--secret-value--",
-        port: datasource.config!.port,
-        user: "root",
-      },
-      plus: true,
-      source: "MYSQL",
-      type: "datasource_plus",
-      isSQL: true,
-      _id: expect.any(String),
-      _rev: expect.any(String),
-      createdAt: expect.any(String),
-      updatedAt: expect.any(String),
-      entities: expect.any(Object),
-    })
-  })
-
   describe("Integration compatibility with mysql search_path", () => {
-    let datasource: Datasource, rawDatasource: Datasource
+    let datasource: Datasource
+    let rawDatasource: Datasource
+    let client: Knex
     const database = generator.guid()
     const database2 = generator.guid()

     beforeAll(async () => {
       rawDatasource = await getDatasource(DatabaseName.MYSQL)
+      client = await knexClient(rawDatasource)

-      await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
-      await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
+      await client.raw(`CREATE DATABASE \`${database}\`;`)
+      await client.raw(`CREATE DATABASE \`${database2}\`;`)

-      const pathConfig: any = {
-        ...rawDatasource,
-        config: {
-          ...rawDatasource.config!,
-          database,
-        },
-      }
-      datasource = await config.api.datasource.create(pathConfig)
+      rawDatasource.config!.database = database
+      datasource = await config.api.datasource.create(rawDatasource)
     })

     afterAll(async () => {
-      await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
-      await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
+      await client.raw(`DROP DATABASE \`${database}\`;`)
+      await client.raw(`DROP DATABASE \`${database2}\`;`)
     })

     it("discovers tables from any schema in search path", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
-      )
+      await client.schema.createTable(`${database}.table1`, table => {
+        table.increments("id1").primary()
+      })
       const res = await config.api.datasource.info(datasource)
       expect(res.tableNames).toBeDefined()
       expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
@@ -112,13 +65,19 @@ describe("mysql integrations", () => {

     it("does not mix columns from different tables", async () => {
       const repeated_table_name = "table_same_name"
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
+      await client.schema.createTable(
+        `${database}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
+        }
       )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
+      await client.schema.createTable(
+        `${database2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
       )

       const res = await config.api.datasource.fetchSchema({
@@ -139,21 +98,21 @@ describe("mysql integrations", () => {
     })

     afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP TABLE IF EXISTS \`${tableName}\``)
+      await client.schema.dropTableIfExists(tableName)
     })

     it("recognises enum columns as options", async () => {
       const enumColumnName = "status"

-      const createTableQuery = `
-        CREATE TABLE \`${tableName}\` (
-          \`order_id\` INT AUTO_INCREMENT PRIMARY KEY,
-          \`customer_name\` VARCHAR(100) NOT NULL,
-          \`${enumColumnName}\` ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled')
-        );
-      `
-
-      await rawQuery(rawDatasource, createTableQuery)
+      await client.schema.createTable(tableName, table => {
+        table.increments("order_id").primary()
+        table.string("customer_name", 100).notNullable()
+        table.enum(
+          enumColumnName,
+          ["pending", "processing", "shipped", "delivered", "cancelled"],
+          { useNative: true, enumName: `${tableName}_${enumColumnName}` }
+        )
+      })

       const res = await config.api.datasource.fetchSchema({
         datasourceId: datasource._id!,
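The remaining hunks appear to touch the postgres integration spec and the shared integration-test utilities. Since the raw CREATE TABLE strings above became schema-builder calls, it can help to check what SQL knex compiles; a sketch using knex purely as a query builder (no connection is opened; the exact output text may vary by knex version, and the mysql2 dialect is assumed to be available as in this repo):

    import knex from "knex"

    // Compile-only instance: used to inspect generated SQL, not to run it.
    const builder = knex({ client: "mysql2" })

    const sql = builder.schema
      .createTable("table_same_name", table => {
        table.increments("id").primary()
        table.string("val1")
      })
      .toString()

    // Roughly: create table `table_same_name` (`id` int unsigned not null
    // auto_increment primary key, `val1` varchar(255))
    console.log(sql)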
@@ -93,7 +93,7 @@ describe("postgres integrations", () => {
   describe("Integration compatibility with postgres search_path", () => {
     let rawDatasource: Datasource,
       datasource: Datasource,
-      client: Knex<any, unknown[]>,
+      client: Knex,
       schema1: string,
       schema2: string

@@ -7,7 +7,7 @@ import knex from "knex"

 let ports: Promise<testContainerUtils.Port[]>

 export async function getDatasource(): Promise<Datasource> {
   if (!ports) {
     ports = startContainer(
       new GenericContainer("mcr.microsoft.com/mssql/server:2022-latest")
@@ -88,7 +88,7 @@ export async function knexClient(ds: Datasource) {
   }

   return knex({
-    client: "mysql",
+    client: "mysql2",
     connection: ds.config,
   })
 }
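The final hunk also swaps the knex dialect from "mysql" to "mysql2": knex's "mysql" client loads the mysql driver package, while "mysql2" loads the mysql2 package, presumably the driver available in this repo's test setup. A standalone sketch of the kind of client knexClient(ds) returns, with placeholder connection values standing in for ds.config:

    import knex from "knex"

    const client = knex({
      client: "mysql2",
      connection: {
        host: "localhost", // placeholders; the real values come from ds.config
        port: 3306,
        user: "root",
        password: "password",
        database: "mysql",
      },
    })

    await client.raw("SELECT 1")
    await client.destroy()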