From 48c106276d9f922605fb95eb4e4f4c4eddbb4018 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 5 Dec 2024 10:47:03 +0000 Subject: [PATCH 1/5] Implement a test that exports an external schema, then reimports it, asserting the tables match. --- .../server/src/api/controllers/datasource.ts | 7 +- .../src/api/routes/tests/datasource.spec.ts | 102 +++++++++++++++++- .../src/integrations/microsoftSqlServer.ts | 81 +++++++++++--- packages/server/src/integrations/mysql.ts | 4 +- packages/server/src/integrations/postgres.ts | 16 +-- .../src/integrations/tests/utils/index.ts | 11 +- .../src/integrations/tests/utils/mssql.ts | 5 +- .../src/integrations/tests/utils/mysql.ts | 5 +- .../src/integrations/tests/utils/oracle.ts | 5 +- .../src/integrations/tests/utils/postgres.ts | 8 +- .../src/tests/utilities/api/datasource.ts | 14 +++ 11 files changed, 214 insertions(+), 44 deletions(-) diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index c4492f304c..7180f5f765 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -312,9 +312,10 @@ export async function getExternalSchema( if (!connector.getExternalSchema) { ctx.throw(400, "Datasource does not support exporting external schema") } - const response = await connector.getExternalSchema() - ctx.body = { - schema: response, + try { + ctx.body = { schema: await connector.getExternalSchema() } + } catch (e: any) { + ctx.throw(400, e.message) } } diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index ad6a0a2e18..4d46a51173 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -20,10 +20,11 @@ import { import { DatabaseName, datasourceDescribe, + knexClient, } from "../../../integrations/tests/utils" import { tableForDatasource } from "../../../tests/utilities/structures" import nock from "nock" -import { Knex } from "knex" +import knex, { Knex } from "knex" describe("/datasources", () => { const config = setup.getConfig() @@ -588,3 +589,102 @@ if (descriptions.length) { } ) } + +const datasources = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE], +}) + +if (datasources.length) { + describe.each(datasources)( + "$dbName", + ({ config, dsProvider, isPostgres, isMySQL, isMariaDB }) => { + let datasource: Datasource + let client: Knex + + beforeEach(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! + }) + + describe("external export", () => { + let table: Table + + beforeEach(async () => { + table = await config.api.table.save( + tableForDatasource(datasource, { + name: "simple", + primary: ["id"], + primaryDisplay: "name", + schema: { + id: { + name: "id", + autocolumn: true, + type: FieldType.NUMBER, + constraints: { + presence: false, + }, + }, + name: { + name: "name", + autocolumn: false, + type: FieldType.STRING, + constraints: { + presence: false, + }, + }, + }, + }) + ) + }) + + it.only("should be able to export and reimport a schema", async () => { + let { schema } = await config.api.datasource.externalSchema( + datasource + ) + + if (isPostgres) { + // pg_dump 17 puts this config parameter into the dump but no DB < 17 + // can load it. We're using postgres 16 in tests at the time of writing. 
+ schema = schema.replace("SET transaction_timeout = 0;", "") + } + + await config.api.table.destroy(table._id!, table._rev!) + + if (isMySQL || isMariaDB) { + // MySQL/MariaDB clients don't let you run multiple queries in a + // single call. They also throw an error when given an empty query. + // The below handles both of these things. + for (let query of schema.split(";\n")) { + query = query.trim() + if (!query) { + continue + } + await client.raw(query) + } + } else { + await client.raw(schema) + } + + await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + const tables = await config.api.table.fetch() + const newTable = tables.find(t => t.name === table.name)! + + // This is only set on tables created through Budibase, we don't + // expect it to match after we import the table. + delete table.created + + for (const field of Object.values(newTable.schema)) { + // Will differ per-database, not useful for this test. + delete field.externalType + } + + expect(newTable).toEqual(table) + }) + }) + } + ) +} diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index 2fac306590..7d886fc33a 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -193,6 +193,34 @@ const SCHEMA: Integration = { }, } +interface MSSQLColumnDefinition { + TableName: string + ColumnName: string + DataType: string + MaxLength: number + IsNullable: boolean + IsIdentity: boolean + Precision: number + Scale: number +} + +interface ColumnDefinitionMetadata { + usesMaxLength?: boolean + usesPrecision?: boolean +} + +const COLUMN_DEFINITION_METADATA: Record = { + DATETIME2: { usesMaxLength: true }, + TIME: { usesMaxLength: true }, + DATETIMEOFFSET: { usesMaxLength: true }, + NCHAR: { usesMaxLength: true }, + NVARCHAR: { usesMaxLength: true }, + BINARY: { usesMaxLength: true }, + VARBINARY: { usesMaxLength: true }, + DECIMAL: { usesPrecision: true }, + NUMERIC: { usesPrecision: true }, +} + class SqlServerIntegration extends Sql implements DatasourcePlus { private readonly config: MSSQLConfig private index: number = 0 @@ -527,20 +555,24 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { return this.queryWithReturning(json, queryFn, processFn) } - async getExternalSchema() { + private async getColumnDefinitions(): Promise { // Query to retrieve table schema const query = ` SELECT t.name AS TableName, c.name AS ColumnName, ty.name AS DataType, + ty.precision AS Precision, + ty.scale AS Scale, c.max_length AS MaxLength, c.is_nullable AS IsNullable, c.is_identity AS IsIdentity FROM sys.tables t INNER JOIN sys.columns c ON t.object_id = c.object_id - INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id + INNER JOIN sys.types ty + ON c.system_type_id = ty.system_type_id + AND c.user_type_id = ty.user_type_id WHERE t.is_ms_shipped = 0 ORDER BY @@ -553,17 +585,36 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { sql: query, }) + return result.recordset as MSSQLColumnDefinition[] + } + + private getDataType(columnDef: MSSQLColumnDefinition): string { + const { DataType, MaxLength, Precision, Scale } = columnDef + const { usesMaxLength = false, usesPrecision = false } = + COLUMN_DEFINITION_METADATA[DataType] || {} + + let dataType = DataType + + if (usesMaxLength) { + if (MaxLength === -1) { + dataType += `(MAX)` + } else { + dataType += `(${MaxLength})` + } + } + if (usesPrecision) { + dataType += 
`(${Precision}, ${Scale})` + } + + return dataType + } + + async getExternalSchema() { const scriptParts = [] const tables: any = {} - for (const row of result.recordset) { - const { - TableName, - ColumnName, - DataType, - MaxLength, - IsNullable, - IsIdentity, - } = row + const columns = await this.getColumnDefinitions() + for (const row of columns) { + const { TableName, ColumnName, IsNullable, IsIdentity } = row if (!tables[TableName]) { tables[TableName] = { @@ -571,9 +622,11 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { } } - const columnDefinition = `${ColumnName} ${DataType}${ - MaxLength ? `(${MaxLength})` : "" - }${IsNullable ? " NULL" : " NOT NULL"}` + const nullable = IsNullable ? "NULL" : "NOT NULL" + const identity = IsIdentity ? "IDENTITY" : "" + const columnDefinition = `[${ColumnName}] ${this.getDataType( + row + )} ${nullable} ${identity}` tables[TableName].columns.push(columnDefinition) diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 34da3dd3c8..b7833ae105 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -412,7 +412,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { async getExternalSchema() { try { const [databaseResult] = await this.internalQuery({ - sql: `SHOW CREATE DATABASE ${this.config.database}`, + sql: `SHOW CREATE DATABASE IF NOT EXISTS \`${this.config.database}\``, }) let dumpContent = [databaseResult["Create Database"]] @@ -432,7 +432,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { dumpContent.push(createTableStatement) } - return dumpContent.join("\n") + return dumpContent.join(";\n") + ";" } finally { this.disconnect() } diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 164468b5ae..b41ac28ede 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -476,21 +476,15 @@ class PostgresIntegration extends Sql implements DatasourcePlus { this.config.password }" pg_dump --schema-only "${dumpCommandParts.join(" ")}"` - return new Promise((res, rej) => { + return new Promise((resolve, reject) => { exec(dumpCommand, (error, stdout, stderr) => { - if (error) { - console.error(`Error generating dump: ${error.message}`) - rej(error.message) + if (error || stderr) { + console.error(stderr) + reject(new Error(stderr)) return } - if (stderr) { - console.error(`pg_dump error: ${stderr}`) - rej(stderr) - return - } - - res(stdout) + resolve(stdout) console.log("SQL dump generated successfully!") }) }) diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index e5d2e1f229..cdf2c4021c 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -149,6 +149,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) { isMongodb: dbName === DatabaseName.MONGODB, isMSSQL: dbName === DatabaseName.SQL_SERVER, isOracle: dbName === DatabaseName.ORACLE, + isMariaDB: dbName === DatabaseName.MARIADB, })) } @@ -158,19 +159,19 @@ function getDatasource( return providers[sourceName]() } -export async function knexClient(ds: Datasource) { +export async function knexClient(ds: Datasource, opts?: Knex.Config) { switch (ds.source) { case SourceName.POSTGRES: { - return postgres.knexClient(ds) + return postgres.knexClient(ds, opts) } case SourceName.MYSQL: { - 
return mysql.knexClient(ds) + return mysql.knexClient(ds, opts) } case SourceName.SQL_SERVER: { - return mssql.knexClient(ds) + return mssql.knexClient(ds, opts) } case SourceName.ORACLE: { - return oracle.knexClient(ds) + return oracle.knexClient(ds, opts) } default: { throw new Error(`Unsupported source: ${ds.source}`) diff --git a/packages/server/src/integrations/tests/utils/mssql.ts b/packages/server/src/integrations/tests/utils/mssql.ts index 28a7928f3b..548631a987 100644 --- a/packages/server/src/integrations/tests/utils/mssql.ts +++ b/packages/server/src/integrations/tests/utils/mssql.ts @@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." -import knex from "knex" +import knex, { Knex } from "knex" import { MSSQL_IMAGE } from "./images" let ports: Promise @@ -57,7 +57,7 @@ export async function getDatasource(): Promise { return datasource } -export async function knexClient(ds: Datasource) { +export async function knexClient(ds: Datasource, opts?: Knex.Config) { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -68,5 +68,6 @@ export async function knexClient(ds: Datasource) { return knex({ client: "mssql", connection: ds.config, + ...opts, }) } diff --git a/packages/server/src/integrations/tests/utils/mysql.ts b/packages/server/src/integrations/tests/utils/mysql.ts index 5fa4e4f46d..be3835c0e3 100644 --- a/packages/server/src/integrations/tests/utils/mysql.ts +++ b/packages/server/src/integrations/tests/utils/mysql.ts @@ -3,7 +3,7 @@ import { GenericContainer, Wait } from "testcontainers" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." -import knex from "knex" +import knex, { Knex } from "knex" import { MYSQL_IMAGE } from "./images" let ports: Promise @@ -63,7 +63,7 @@ export async function getDatasource(): Promise { return datasource } -export async function knexClient(ds: Datasource) { +export async function knexClient(ds: Datasource, opts?: Knex.Config) { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -74,5 +74,6 @@ export async function knexClient(ds: Datasource) { return knex({ client: "mysql2", connection: ds.config, + ...opts, }) } diff --git a/packages/server/src/integrations/tests/utils/oracle.ts b/packages/server/src/integrations/tests/utils/oracle.ts index 2ec8da5902..3292ed5856 100644 --- a/packages/server/src/integrations/tests/utils/oracle.ts +++ b/packages/server/src/integrations/tests/utils/oracle.ts @@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." 
-import knex from "knex" +import knex, { Knex } from "knex" let ports: Promise @@ -58,7 +58,7 @@ export async function getDatasource(): Promise { return datasource } -export async function knexClient(ds: Datasource) { +export async function knexClient(ds: Datasource, opts?: Knex.Config) { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -76,6 +76,7 @@ export async function knexClient(ds: Datasource) { user: ds.config.user, password: ds.config.password, }, + ...opts, }) return c diff --git a/packages/server/src/integrations/tests/utils/postgres.ts b/packages/server/src/integrations/tests/utils/postgres.ts index cc77226ff6..fc52a724ad 100644 --- a/packages/server/src/integrations/tests/utils/postgres.ts +++ b/packages/server/src/integrations/tests/utils/postgres.ts @@ -2,7 +2,7 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." -import knex from "knex" +import knex, { Knex } from "knex" import { POSTGRES_IMAGE } from "./images" let ports: Promise @@ -51,7 +51,10 @@ export async function getDatasource(): Promise { return datasource } -export async function knexClient(ds: Datasource) { +export async function knexClient( + ds: Datasource, + opts?: Knex.Config +): Promise { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -62,5 +65,6 @@ export async function knexClient(ds: Datasource) { return knex({ client: "pg", connection: ds.config, + ...opts, }) } diff --git a/packages/server/src/tests/utilities/api/datasource.ts b/packages/server/src/tests/utilities/api/datasource.ts index 0bc365661e..be4e6d2de2 100644 --- a/packages/server/src/tests/utilities/api/datasource.ts +++ b/packages/server/src/tests/utilities/api/datasource.ts @@ -3,6 +3,7 @@ import { CreateDatasourceResponse, Datasource, FetchDatasourceInfoResponse, + FetchExternalSchemaResponse, FieldType, RelationshipType, UpdateDatasourceRequest, @@ -96,6 +97,19 @@ export class DatasourceAPI extends TestAPI { ) } + externalSchema = async ( + datasource: Datasource | string, + expectations?: Expectations + ): Promise => { + const id = typeof datasource === "string" ? datasource : datasource._id + return await this._get( + `/api/datasources/${id}/schema/external`, + { + expectations, + } + ) + } + addExistingRelationship = async ( { one, From 2750b806c2b34ac9dfbc5b2b330b1c43a5b1c96f Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 5 Dec 2024 10:51:38 +0000 Subject: [PATCH 2/5] Remove focus test. --- packages/server/src/api/routes/tests/datasource.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 4d46a51173..2b8ef6610c 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -638,7 +638,7 @@ if (datasources.length) { ) }) - it.only("should be able to export and reimport a schema", async () => { + it("should be able to export and reimport a schema", async () => { let { schema } = await config.api.datasource.externalSchema( datasource ) From b0fd28a944bd868b6c2a6ba19ee54675b27ff63f Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 5 Dec 2024 10:58:35 +0000 Subject: [PATCH 3/5] Install Postgres 16 when testing postgres. 
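
The schema export test shells out to whatever pg_dump is on the runner's PATH. A newer client (pg_dump 17+) emits settings such as "SET transaction_timeout = 0;" that the Postgres 16 server used in tests cannot load, so this step pins the client tools to 16 via the PGDG apt repository. As an illustrative sanity check on the runner (a hypothetical snippet, not part of the workflow itself):

    # confirm the pinned client version matches the test server
    pg_dump --version   # expect: pg_dump (PostgreSQL) 16.x
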
--- .github/workflows/budibase_ci.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 90b747f07e..cb20bb8c37 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -200,6 +200,14 @@ jobs: - run: yarn --frozen-lockfile + - name: Set up PostgreSQL 16 + if: matrix.datasource == 'postgres' + run: | + sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - + sudo apt-get update + sudo apt-get install -y postgresql-16 + - name: Test server env: DATASOURCE: ${{ matrix.datasource }} From 2d776e1db6b416dd9690c698ea24d6e049a8cf71 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 5 Dec 2024 11:00:12 +0000 Subject: [PATCH 4/5] Fix lint. --- packages/server/src/api/routes/tests/datasource.spec.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 2b8ef6610c..514ed02c86 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -20,11 +20,10 @@ import { import { DatabaseName, datasourceDescribe, - knexClient, } from "../../../integrations/tests/utils" import { tableForDatasource } from "../../../tests/utilities/structures" import nock from "nock" -import knex, { Knex } from "knex" +import { Knex } from "knex" describe("/datasources", () => { const config = setup.getConfig() From 7cbb4e361cb113f14d1353e9e5d23e21cb4a4c3c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Thu, 5 Dec 2024 11:09:12 +0000 Subject: [PATCH 5/5] Uninstall old postgres before installing new one. --- .github/workflows/budibase_ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index cb20bb8c37..2151e1e342 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -203,6 +203,12 @@ jobs: - name: Set up PostgreSQL 16 if: matrix.datasource == 'postgres' run: | + sudo systemctl stop postgresql + sudo apt-get remove --purge -y postgresql* libpq-dev + sudo rm -rf /etc/postgresql /var/lib/postgresql + sudo apt-get autoremove -y + sudo apt-get autoclean + sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - sudo apt-get update