Convert postgres.spec.ts to use Knex instead of rawQuery.
This commit is contained in:
parent c6a7bf6ed1
commit c2320e4f5b

postgres.spec.ts

@@ -1,9 +1,3 @@
-import fetch from "node-fetch"
-import {
-  generateMakeRequest,
-  MakeRequestResponse,
-} from "../api/routes/public/tests/utils"
-
 import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
 import _ from "lodash"
@@ -11,29 +5,22 @@ import { generator } from "@budibase/backend-core/tests"
 import {
   DatabaseName,
   getDatasource,
-  rawQuery,
+  knexClient,
 } from "../integrations/tests/utils"
-
-// @ts-ignore
-fetch.mockSearch()
+import { Knex } from "knex"
 
 const config = setup.getConfig()!
 
 jest.mock("../websockets")
 
 describe("postgres integrations", () => {
-  let makeRequest: MakeRequestResponse,
-    rawDatasource: Datasource,
-    datasource: Datasource
+  let rawDatasource: Datasource
+  let datasource: Datasource
+  let client: Knex<any, unknown[]>
 
   beforeAll(async () => {
     await config.init()
-    const apiKey = await config.generateApiKey()
-
-    makeRequest = generateMakeRequest(apiKey, true)
-
     rawDatasource = await getDatasource(DatabaseName.POSTGRES)
     datasource = await config.api.datasource.create(rawDatasource)
+    client = await knexClient(rawDatasource)
   })
 
   afterAll(config.end)
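The HTTP test plumbing (generateMakeRequest and the mocked node-fetch) drops out entirely; database state is now driven through a single Knex client created once in beforeAll. A minimal standalone sketch of that pattern, with placeholder connection values (the real suite gets its connection from getDatasource() and knexClient()):

import knex, { Knex } from "knex"

// placeholder credentials, for illustration only
const client: Knex<any, unknown[]> = knex({
  client: "pg",
  connection: {
    host: "localhost",
    port: 5432,
    user: "postgres",
    password: "password",
    database: "postgres",
  },
})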
@@ -46,11 +33,13 @@ describe("postgres integrations", () => {
     })
 
     afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
+      await client.schema.dropTableIfExists(tableName)
     })
 
     it("recognises when a table has no primary key", async () => {
-      await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
+      await client.schema.createTable(tableName, table => {
+        table.increments("id", { primaryKey: false })
+      })
 
       const response = await config.api.datasource.fetchSchema({
         datasourceId: datasource._id!,
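One detail worth calling out: Knex's increments() marks the column as a primary key by default, so reproducing the old bare id SERIAL column needs an explicit opt-out. A sketch of the difference (table names are illustrative, the generated SQL approximate):

// increments() adds a primary key unless told otherwise
await client.schema.createTable("no_pk", table => {
  table.increments("id", { primaryKey: false })
})
// roughly: create table "no_pk" ("id" serial)

await client.schema.createTable("with_pk", table => {
  table.increments("id")
})
// roughly: create table "with_pk" ("id" serial primary key)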
@@ -62,10 +51,9 @@ describe("postgres integrations", () => {
     })
 
     it("recognises when a table is using a reserved column name", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
-      )
+      await client.schema.createTable(tableName, table => {
+        table.increments("_id").primary()
+      })
 
       const response = await config.api.datasource.fetchSchema({
         datasourceId: datasource._id!,
@@ -81,20 +69,15 @@ describe("postgres integrations", () => {
         .guid()
         .replaceAll("-", "")
         .substring(0, 6)}`
-      const enumColumnName = "status"
 
-      await rawQuery(
-        rawDatasource,
-        `
-        CREATE TYPE order_status AS ENUM ('pending', 'processing', 'shipped', 'delivered', 'cancelled');
-
-        CREATE TABLE ${tableName} (
-          order_id SERIAL PRIMARY KEY,
-          customer_name VARCHAR(100) NOT NULL,
-          ${enumColumnName} order_status
-        );
-        `
-      )
+      await client.schema.createTable(tableName, table => {
+        table.increments("order_id").primary()
+        table.string("customer_name").notNullable()
+        table.enum("status", ["pending", "processing", "shipped"], {
+          useNative: true,
+          enumName: `${tableName}_status`,
+        })
+      })
 
       const response = await config.api.datasource.fetchSchema({
         datasourceId: datasource._id!,
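The useNative and enumName options tell Knex to create a real Postgres enum type, as the removed CREATE TYPE statement did, rather than its default of a text column with a check constraint. A sketch with an illustrative table name (generated SQL approximate):

await client.schema.createTable("orders", table => {
  table.increments("order_id").primary()
  // useNative emits a native enum type named by enumName
  table.enum("status", ["pending", "processing", "shipped"], {
    useNative: true,
    enumName: "orders_status",
  })
})
// roughly: create type "orders_status" as enum ('pending', 'processing', 'shipped');
//          create table "orders" (..., "status" "orders_status")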
@@ -103,13 +86,14 @@ describe("postgres integrations", () => {
       const table = response.datasource.entities?.[tableName]
 
       expect(table).toBeDefined()
-      expect(table?.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
+      expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
     })
   })
 
   describe("Integration compatibility with postgres search_path", () => {
     let rawDatasource: Datasource,
       datasource: Datasource,
+      client: Knex<any, unknown[]>,
       schema1: string,
       schema2: string
 
@@ -118,54 +102,55 @@ describe("postgres integrations", () => {
       schema2 = generator.guid().replaceAll("-", "")
 
       rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-      const dbConfig = rawDatasource.config!
+      client = await knexClient(rawDatasource)
 
-      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
-      await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
+      await client.schema.createSchema(schema1)
+      await client.schema.createSchema(schema2)
 
-      const pathConfig: any = {
-        ...rawDatasource,
-        config: {
-          ...dbConfig,
-          schema: `${schema1}, ${schema2}`,
-        },
-      }
-      datasource = await config.api.datasource.create(pathConfig)
+      rawDatasource.config!.schema = `${schema1}, ${schema2}`
+
+      client = await knexClient(rawDatasource)
+      datasource = await config.api.datasource.create(rawDatasource)
     })
 
     afterEach(async () => {
-      await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
-      await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
+      await client.schema.dropSchema(schema1, true)
+      await client.schema.dropSchema(schema2, true)
     })
 
     it("discovers tables from any schema in search path", async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
-      )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
-      )
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
-      })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames).toEqual(
+      await client.schema.createTable(`${schema1}.table1`, table => {
+        table.increments("id1").primary()
+      })
+      await client.schema.createTable(`${schema2}.table2`, table => {
+        table.increments("id2").primary()
+      })
+
+      const response = await config.api.datasource.info(datasource)
+      expect(response.tableNames).toBeDefined()
+      expect(response.tableNames).toEqual(
         expect.arrayContaining(["table1", "table2"])
       )
     })
 
     it("does not mix columns from different tables", async () => {
       const repeated_table_name = "table_same_name"
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
-      )
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
-      )
+      await client.schema.createTable(
+        `${schema1}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
+        }
+      )
+      await client.schema.createTable(
+        `${schema2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
+      )
 
       const response = await config.api.datasource.fetchSchema({
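Schema-qualified names are passed through here as plain "schema.table" strings; Knex's withSchema() is an equivalent alternative that can read better when several statements target one schema. A sketch, not part of the commit:

// equivalent to client.schema.createTable(`${schema1}.table1`, ...)
await client.schema.withSchema(schema1).createTable("table1", table => {
  table.increments("id1").primary()
})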
@@ -182,15 +167,11 @@ describe("postgres integrations", () => {
 
   describe("check custom column types", () => {
     beforeAll(async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE binaryTable (
-          id BYTEA PRIMARY KEY,
-          column1 TEXT,
-          column2 INT
-        );
-        `
-      )
+      await client.schema.createTable("binaryTable", table => {
+        table.binary("id").primary()
+        table.string("column1")
+        table.integer("column2")
+      })
     })
 
     it("should handle binary columns", async () => {
@@ -198,7 +179,7 @@ describe("postgres integrations", () => {
         datasourceId: datasource._id!,
       })
       expect(response.datasource.entities).toBeDefined()
-      const table = response.datasource.entities?.["binarytable"]
+      const table = response.datasource.entities?.["binaryTable"]
       expect(table).toBeDefined()
       expect(table?.schema.id.externalType).toBe("bytea")
       const row = await config.api.row.save(table?._id!, {
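The expected entity key changes from "binarytable" to "binaryTable" as a direct consequence of the new setup code: Postgres folds unquoted identifiers to lowercase, and the Knex schema builder quotes identifiers, so the mixed-case name now survives. The same reasoning applies to nullableTable below. Illustrated:

// raw SQL, unquoted identifier: Postgres folds the name to lowercase
await client.raw("CREATE TABLE binaryTable (id BYTEA PRIMARY KEY)")
// table is stored as "binarytable"

// knex schema builder: identifiers are quoted, so case is preserved
await client.schema.createTable("binaryTable", table => {
  table.binary("id").primary()
})
// table is stored as "binaryTable"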
@@ -214,14 +195,10 @@ describe("postgres integrations", () => {
 
   describe("check fetching null/not null table", () => {
     beforeAll(async () => {
-      await rawQuery(
-        rawDatasource,
-        `CREATE TABLE nullableTable (
-          order_id SERIAL PRIMARY KEY,
-          order_number INT NOT NULL
-        );
-        `
-      )
+      await client.schema.createTable("nullableTable", table => {
+        table.increments("order_id").primary()
+        table.integer("order_number").notNullable()
+      })
     })
 
     it("should be able to change the table to allow nullable and refetch this", async () => {
@@ -230,25 +207,24 @@ describe("postgres integrations", () => {
       })
       const entities = response.datasource.entities
       expect(entities).toBeDefined()
-      const nullableTable = entities?.["nullabletable"]
+      const nullableTable = entities?.["nullableTable"]
       expect(nullableTable).toBeDefined()
       expect(
         nullableTable?.schema["order_number"].constraints?.presence
       ).toEqual(true)
 
       // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
       // is aware of - therefore we can try to fetch and make sure BB updates correctly
-      await rawQuery(
-        rawDatasource,
-        `ALTER TABLE nullableTable
-          ALTER COLUMN order_number DROP NOT NULL;
-        `
-      )
+      await client.schema.alterTable("nullableTable", table => {
+        table.setNullable("order_number")
+      })
       const responseAfter = await config.api.datasource.fetchSchema({
         datasourceId: datasource._id!,
       })
       const entitiesAfter = responseAfter.datasource.entities
       expect(entitiesAfter).toBeDefined()
-      const nullableTableAfter = entitiesAfter?.["nullabletable"]
+      const nullableTableAfter = entitiesAfter?.["nullableTable"]
       expect(nullableTableAfter).toBeDefined()
       expect(
         nullableTableAfter?.schema["order_number"].constraints?.presence
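alterTable() plus setNullable() is the builder equivalent of the removed raw ALTER statement (generated SQL approximate):

await client.schema.alterTable("nullableTable", table => {
  // roughly: alter table "nullableTable" alter column "order_number" drop not null
  table.setNullable("order_number")
})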
integrations/tests/utils (shared index, the module postgres.spec.ts imports from)

@@ -65,6 +65,23 @@ export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
   }
 }
 
+export async function knexClient(ds: Datasource) {
+  switch (ds.source) {
+    case SourceName.POSTGRES: {
+      return postgres.knexClient(ds)
+    }
+    case SourceName.MYSQL: {
+      return mysql.knexClient(ds)
+    }
+    case SourceName.SQL_SERVER: {
+      return mssql.knexClient(ds)
+    }
+    default: {
+      throw new Error(`Unsupported source: ${ds.source}`)
+    }
+  }
+}
+
 export async function startContainer(container: GenericContainer) {
   const imageName = (container as any).imageName.string as string
   const key = imageName.replaceAll("/", "-").replaceAll(":", "-")
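With the dispatcher in place, callers stay engine-agnostic. A sketch of the intended usage from a test (getDatasource and DatabaseName come from the same utils module; the table name is illustrative):

import { DatabaseName, getDatasource, knexClient } from "../integrations/tests/utils"

it("cleans up", async () => {
  const ds = await getDatasource(DatabaseName.POSTGRES)
  const client = await knexClient(ds) // routes to postgres.knexClient(ds)
  await client.schema.dropTableIfExists("leftover_table")
})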
integrations/tests/utils (MSSQL helper)

@@ -3,6 +3,7 @@ import { GenericContainer, Wait } from "testcontainers"
 import mssql from "mssql"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
 
@@ -72,3 +73,17 @@ export async function rawQuery(ds: Datasource, sql: string) {
     await pool.close()
   }
 }
+
+export async function knexClient(ds: Datasource) {
+  if (!ds.config) {
+    throw new Error("Datasource config is missing")
+  }
+  if (ds.source !== SourceName.SQL_SERVER) {
+    throw new Error("Datasource source is not MSSQL")
+  }
+
+  return knex({
+    client: "mssql",
+    connection: ds.config,
+  })
+}
integrations/tests/utils (MySQL helper)

@@ -4,6 +4,7 @@ import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-
 import mysql from "mysql2/promise"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
 
@@ -77,3 +78,17 @@ export async function rawQuery(ds: Datasource, sql: string) {
     connection.end()
   }
 }
+
+export async function knexClient(ds: Datasource) {
+  if (!ds.config) {
+    throw new Error("Datasource config is missing")
+  }
+  if (ds.source !== SourceName.MYSQL) {
+    throw new Error("Datasource source is not MySQL")
+  }
+
+  return knex({
+    client: "mysql",
+    connection: ds.config,
+  })
+}
integrations/tests/utils (Postgres helper)

@@ -3,6 +3,7 @@ import { GenericContainer, Wait } from "testcontainers"
 import pg from "pg"
 import { generator, testContainerUtils } from "@budibase/backend-core/tests"
 import { startContainer } from "."
+import knex from "knex"
 
 let ports: Promise<testContainerUtils.Port[]>
 
@@ -66,3 +67,17 @@ export async function rawQuery(ds: Datasource, sql: string) {
     await client.end()
   }
 }
+
+export async function knexClient(ds: Datasource) {
+  if (!ds.config) {
+    throw new Error("Datasource config is missing")
+  }
+  if (ds.source !== SourceName.POSTGRES) {
+    throw new Error("Datasource source is not Postgres")
+  }
+
+  return knex({
+    client: "pg",
+    connection: ds.config,
+  })
+}
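All three helpers share one shape: validate the datasource, then hand its config straight to knex as the connection. A standalone sketch of the Postgres variant with an explicit teardown (credentials are placeholders; the helpers above reuse ds.config instead):

import knex from "knex"

async function main() {
  const client = knex({
    client: "pg",
    connection: {
      host: "localhost",
      port: 5432,
      user: "postgres",
      password: "password",
      database: "postgres",
    },
  })

  console.log(await client.schema.hasTable("my_table"))

  // knex holds a connection pool open; destroy it so the process can exit
  await client.destroy()
}

main()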