Implement a test that exports an external schema, then reimports it, asserting the tables match.

This commit is contained in:
Sam Rose 2024-12-05 10:47:03 +00:00
parent 8d897ca434
commit 48c106276d
No known key found for this signature in database
11 changed files with 214 additions and 44 deletions

View File

@ -312,9 +312,10 @@ export async function getExternalSchema(
if (!connector.getExternalSchema) {
ctx.throw(400, "Datasource does not support exporting external schema")
}
const response = await connector.getExternalSchema()
ctx.body = {
schema: response,
try {
ctx.body = { schema: await connector.getExternalSchema() }
} catch (e: any) {
ctx.throw(400, e.message)
}
}

View File

@ -20,10 +20,11 @@ import {
import {
DatabaseName,
datasourceDescribe,
knexClient,
} from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import nock from "nock"
import { Knex } from "knex"
import knex, { Knex } from "knex"
describe("/datasources", () => {
const config = setup.getConfig()
@ -588,3 +589,102 @@ if (descriptions.length) {
}
)
}
const datasources = datasourceDescribe({
  // MongoDB isn't SQL, SQS is internal, and Oracle doesn't support
  // schema export in this integration.
  exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
})

if (datasources.length) {
  describe.each(datasources)(
    "$dbName",
    ({ config, dsProvider, isPostgres, isMySQL, isMariaDB }) => {
      let datasource: Datasource
      let client: Knex

      beforeEach(async () => {
        const ds = await dsProvider()
        datasource = ds.datasource!
        client = ds.client!
      })

      describe("external export", () => {
        let table: Table

        beforeEach(async () => {
          // A minimal table (autocolumn PK + one string column) — enough to
          // verify the export/import round-trip without DB-specific features.
          table = await config.api.table.save(
            tableForDatasource(datasource, {
              name: "simple",
              primary: ["id"],
              primaryDisplay: "name",
              schema: {
                id: {
                  name: "id",
                  autocolumn: true,
                  type: FieldType.NUMBER,
                  constraints: {
                    presence: false,
                  },
                },
                name: {
                  name: "name",
                  autocolumn: false,
                  type: FieldType.STRING,
                  constraints: {
                    presence: false,
                  },
                },
              },
            })
          )
        })

        // Fix: this was `it.only`, which silently disables every other test
        // in the file when committed.
        it("should be able to export and reimport a schema", async () => {
          let { schema } = await config.api.datasource.externalSchema(
            datasource
          )

          if (isPostgres) {
            // pg_dump 17 puts this config parameter into the dump but no DB < 17
            // can load it. We're using postgres 16 in tests at the time of writing.
            schema = schema.replace("SET transaction_timeout = 0;", "")
          }

          // Drop the table through Budibase, then recreate it by replaying
          // the exported schema straight against the database.
          await config.api.table.destroy(table._id!, table._rev!)

          if (isMySQL || isMariaDB) {
            // MySQL/MariaDB clients don't let you run multiple queries in a
            // single call. They also throw an error when given an empty query.
            // The below handles both of these things.
            for (let query of schema.split(";\n")) {
              query = query.trim()
              if (!query) {
                continue
              }
              await client.raw(query)
            }
          } else {
            await client.raw(schema)
          }

          // Re-fetch so Budibase picks up the recreated table.
          await config.api.datasource.fetchSchema({
            datasourceId: datasource._id!,
          })

          const tables = await config.api.table.fetch()
          const newTable = tables.find(t => t.name === table.name)!

          // This is only set on tables created through Budibase, we don't
          // expect it to match after we import the table.
          delete table.created

          for (const field of Object.values(newTable.schema)) {
            // Will differ per-database, not useful for this test.
            delete field.externalType
          }

          expect(newTable).toEqual(table)
        })
      })
    }
  )
}

View File

@ -193,6 +193,34 @@ const SCHEMA: Integration = {
},
}
/**
 * One row of the column-metadata query in getColumnDefinitions; each field
 * is aliased directly from sys.tables / sys.columns / sys.types.
 */
interface MSSQLColumnDefinition {
  TableName: string
  ColumnName: string
  // sys.types.name for the column's type.
  DataType: string
  // sys.columns.max_length. NOTE(review): per the sys.columns docs this is
  // measured in bytes (-1 for MAX), so NCHAR/NVARCHAR columns report twice
  // their declared character length — confirm consumers account for this.
  MaxLength: number
  IsNullable: boolean
  IsIdentity: boolean
  Precision: number
  Scale: number
}
/**
 * Flags describing how a SQL type's length/precision is rendered when its
 * DDL is reconstructed (see getDataType).
 */
interface ColumnDefinitionMetadata {
  // Type takes a single length argument, e.g. NVARCHAR(50) / VARBINARY(MAX).
  usesMaxLength?: boolean
  // Type takes (precision, scale), e.g. DECIMAL(10, 2).
  usesPrecision?: boolean
}
// Per-type rendering rules for getDataType; types not listed here are
// emitted bare (INT, BIT, ...).
// NOTE(review): keys are uppercase but sys.types.name returns lowercase
// names for built-in types — confirm the lookup normalises case, otherwise
// these entries never match.
const COLUMN_DEFINITION_METADATA: Record<string, ColumnDefinitionMetadata> = {
  DATETIME2: { usesMaxLength: true },
  TIME: { usesMaxLength: true },
  DATETIMEOFFSET: { usesMaxLength: true },
  NCHAR: { usesMaxLength: true },
  NVARCHAR: { usesMaxLength: true },
  BINARY: { usesMaxLength: true },
  VARBINARY: { usesMaxLength: true },
  DECIMAL: { usesPrecision: true },
  NUMERIC: { usesPrecision: true },
}
class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
private index: number = 0
@ -527,20 +555,24 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
return this.queryWithReturning(json, queryFn, processFn)
}
async getExternalSchema() {
private async getColumnDefinitions(): Promise<MSSQLColumnDefinition[]> {
// Query to retrieve table schema
const query = `
SELECT
t.name AS TableName,
c.name AS ColumnName,
ty.name AS DataType,
ty.precision AS Precision,
ty.scale AS Scale,
c.max_length AS MaxLength,
c.is_nullable AS IsNullable,
c.is_identity AS IsIdentity
FROM
sys.tables t
INNER JOIN sys.columns c ON t.object_id = c.object_id
INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
INNER JOIN sys.types ty
ON c.system_type_id = ty.system_type_id
AND c.user_type_id = ty.user_type_id
WHERE
t.is_ms_shipped = 0
ORDER BY
@ -553,17 +585,36 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
sql: query,
})
return result.recordset as MSSQLColumnDefinition[]
}
/**
 * Render the DDL type expression for a column, e.g. `nvarchar(50)` or
 * `decimal(10, 2)`, using the per-type rules in COLUMN_DEFINITION_METADATA.
 */
private getDataType(columnDef: MSSQLColumnDefinition): string {
  const { DataType, MaxLength, Precision, Scale } = columnDef
  // Fix: sys.types.name returns lowercase names for built-in types
  // ("nvarchar"), but COLUMN_DEFINITION_METADATA is keyed by uppercase
  // names — normalise so the lookup actually matches.
  const typeKey = DataType.toUpperCase()
  const { usesMaxLength = false, usesPrecision = false } =
    COLUMN_DEFINITION_METADATA[typeKey] || {}

  let dataType = DataType
  if (usesMaxLength) {
    if (MaxLength === -1) {
      // -1 in sys.columns.max_length means (MAX).
      dataType += `(MAX)`
    } else {
      // Fix: sys.columns.max_length is in bytes; NCHAR/NVARCHAR store two
      // bytes per character, so halve it to recover the declared length.
      const isWideChar = typeKey === "NCHAR" || typeKey === "NVARCHAR"
      dataType += `(${isWideChar ? MaxLength / 2 : MaxLength})`
    }
  }
  if (usesPrecision) {
    dataType += `(${Precision}, ${Scale})`
  }
  return dataType
}
async getExternalSchema() {
const scriptParts = []
const tables: any = {}
for (const row of result.recordset) {
const {
TableName,
ColumnName,
DataType,
MaxLength,
IsNullable,
IsIdentity,
} = row
const columns = await this.getColumnDefinitions()
for (const row of columns) {
const { TableName, ColumnName, IsNullable, IsIdentity } = row
if (!tables[TableName]) {
tables[TableName] = {
@ -571,9 +622,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
}
}
const columnDefinition = `${ColumnName} ${DataType}${
MaxLength ? `(${MaxLength})` : ""
}${IsNullable ? " NULL" : " NOT NULL"}`
const nullable = IsNullable ? "NULL" : "NOT NULL"
const identity = IsIdentity ? "IDENTITY" : ""
const columnDefinition = `[${ColumnName}] ${this.getDataType(
row
)} ${nullable} ${identity}`
tables[TableName].columns.push(columnDefinition)

View File

@ -412,7 +412,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async getExternalSchema() {
try {
const [databaseResult] = await this.internalQuery({
sql: `SHOW CREATE DATABASE ${this.config.database}`,
sql: `SHOW CREATE DATABASE IF NOT EXISTS \`${this.config.database}\``,
})
let dumpContent = [databaseResult["Create Database"]]
@ -432,7 +432,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
dumpContent.push(createTableStatement)
}
return dumpContent.join("\n")
return dumpContent.join(";\n") + ";"
} finally {
this.disconnect()
}

View File

@ -476,21 +476,15 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
this.config.password
}" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`
return new Promise<string>((res, rej) => {
return new Promise<string>((resolve, reject) => {
exec(dumpCommand, (error, stdout, stderr) => {
if (error) {
console.error(`Error generating dump: ${error.message}`)
rej(error.message)
if (error || stderr) {
console.error(stderr)
reject(new Error(stderr))
return
}
if (stderr) {
console.error(`pg_dump error: ${stderr}`)
rej(stderr)
return
}
res(stdout)
resolve(stdout)
console.log("SQL dump generated successfully!")
})
})

View File

@ -149,6 +149,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
isMongodb: dbName === DatabaseName.MONGODB,
isMSSQL: dbName === DatabaseName.SQL_SERVER,
isOracle: dbName === DatabaseName.ORACLE,
isMariaDB: dbName === DatabaseName.MARIADB,
}))
}
@ -158,19 +159,19 @@ function getDatasource(
return providers[sourceName]()
}
export async function knexClient(ds: Datasource) {
export async function knexClient(ds: Datasource, opts?: Knex.Config) {
switch (ds.source) {
case SourceName.POSTGRES: {
return postgres.knexClient(ds)
return postgres.knexClient(ds, opts)
}
case SourceName.MYSQL: {
return mysql.knexClient(ds)
return mysql.knexClient(ds, opts)
}
case SourceName.SQL_SERVER: {
return mssql.knexClient(ds)
return mssql.knexClient(ds, opts)
}
case SourceName.ORACLE: {
return oracle.knexClient(ds)
return oracle.knexClient(ds, opts)
}
default: {
throw new Error(`Unsupported source: ${ds.source}`)

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
import knex, { Knex } from "knex"
import { MSSQL_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]>
@ -57,7 +57,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource
}
export async function knexClient(ds: Datasource) {
export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -68,5 +68,6 @@ export async function knexClient(ds: Datasource) {
return knex({
client: "mssql",
connection: ds.config,
...opts,
})
}

View File

@ -3,7 +3,7 @@ import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
import knex, { Knex } from "knex"
import { MYSQL_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]>
@ -63,7 +63,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource
}
export async function knexClient(ds: Datasource) {
export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -74,5 +74,6 @@ export async function knexClient(ds: Datasource) {
return knex({
client: "mysql2",
connection: ds.config,
...opts,
})
}

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
import knex, { Knex } from "knex"
let ports: Promise<testContainerUtils.Port[]>
@ -58,7 +58,7 @@ export async function getDatasource(): Promise<Datasource> {
return datasource
}
export async function knexClient(ds: Datasource) {
export async function knexClient(ds: Datasource, opts?: Knex.Config) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -76,6 +76,7 @@ export async function knexClient(ds: Datasource) {
user: ds.config.user,
password: ds.config.password,
},
...opts,
})
return c

View File

@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
import knex, { Knex } from "knex"
import { POSTGRES_IMAGE } from "./images"
let ports: Promise<testContainerUtils.Port[]>
@ -51,7 +51,10 @@ export async function getDatasource(): Promise<Datasource> {
return datasource
}
export async function knexClient(ds: Datasource) {
export async function knexClient(
ds: Datasource,
opts?: Knex.Config
): Promise<Knex> {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -62,5 +65,6 @@ export async function knexClient(ds: Datasource) {
return knex({
client: "pg",
connection: ds.config,
...opts,
})
}

View File

@ -3,6 +3,7 @@ import {
CreateDatasourceResponse,
Datasource,
FetchDatasourceInfoResponse,
FetchExternalSchemaResponse,
FieldType,
RelationshipType,
UpdateDatasourceRequest,
@ -96,6 +97,19 @@ export class DatasourceAPI extends TestAPI {
)
}
/**
 * Fetch the external (database-native) schema dump for a datasource.
 * Accepts either a datasource ID or a full datasource object.
 */
externalSchema = async (
  datasource: Datasource | string,
  expectations?: Expectations
): Promise<FetchExternalSchemaResponse> => {
  let id: string | undefined
  if (typeof datasource === "string") {
    id = datasource
  } else {
    id = datasource._id
  }
  return await this._get<FetchExternalSchemaResponse>(
    `/api/datasources/${id}/schema/external`,
    { expectations }
  )
}
addExistingRelationship = async (
{
one,