Merge pull request #10953 from Budibase/budi-7115-sql-schema-export

BUDI-7115 - SQL schema export

Commit cdd15fd75f
@@ -16,7 +16,7 @@
   let password = null

   const validation = createValidationStore()
-  validation.addValidatorType("password", "password", true)
+  validation.addValidatorType("password", "password", true, { minLength: 8 })
   $: validation.observe("password", password)

   const Step = { CONFIG: "config", SET_PASSWORD: "set_password" }
@@ -21,7 +21,7 @@ export const createValidationStore = () => {
     validator[propertyName] = propertyValidator
   }

-  const addValidatorType = (propertyName, type, required) => {
+  const addValidatorType = (propertyName, type, required, options) => {
    if (!type || !propertyName) {
      return
    }
@@ -45,11 +45,8 @@ export const createValidationStore = () => {
      propertyValidator = propertyValidator.required()
    }

-    // We want to do this after the possible required validation, to prioritise the required error
-    switch (type) {
-      case "password":
-        propertyValidator = propertyValidator.min(8)
-        break
+    if (options?.minLength) {
+      propertyValidator = propertyValidator.min(options.minLength)
    }

    validator[propertyName] = propertyValidator
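A quick illustration of the new options argument (not part of the diff): the store no longer hard-codes an eight-character minimum for "password" validators, so each caller opts in per field. The sketch below lives in a hypothetical Svelte component script; the field name and the value 12 are arbitrary examples.

  // Hypothetical caller: a confirm-password field with a stricter minimum length.
  let confirmPassword = null
  const validation = createValidationStore()
  validation.addValidatorType("confirmPassword", "password", true, {
    minLength: 12, // maps to propertyValidator.min(12) inside the store
  })
  $: validation.observe("confirmPassword", confirmPassword)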
@@ -441,3 +441,18 @@ export async function query(ctx: UserCtx) {
     ctx.throw(400, err)
   }
 }
+
+export async function getExternalSchema(ctx: UserCtx) {
+  const { datasource } = ctx.request.body
+  const enrichedDatasource = await getAndMergeDatasource(datasource)
+  const connector = await getConnector(enrichedDatasource)
+
+  if (!connector.getExternalSchema) {
+    ctx.throw(400, "Datasource does not support exporting external schema")
+  }
+  const response = await connector.getExternalSchema()
+
+  ctx.body = {
+    schema: response,
+  }
+}
@@ -66,5 +66,10 @@ router
     authorized(permissions.BUILDER),
     datasourceController.destroy
   )
+  .get(
+    "/api/datasources/:datasourceId/schema/external",
+    authorized(permissions.BUILDER),
+    datasourceController.getExternalSchema
+  )

 export default router
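For orientation, a rough sketch of exercising the new route, assuming a supertest-style harness; the base URL, cookie, datasource ID, and body shape are placeholders, not values from this diff. Note that the controller reads the datasource from the request body even though the route is a GET.

import request from "supertest"

// Hypothetical helper: fetch the exported schema for a datasource.
async function fetchExternalSchema(baseUrl: string, datasourceId: string, cookie: string) {
  const res = await request(baseUrl)
    .get(`/api/datasources/${datasourceId}/schema/external`)
    .set("Cookie", cookie) // the route requires authorized(permissions.BUILDER)
    .send({ datasource: { _id: datasourceId } }) // body shape assumed; the controller destructures { datasource }
    .expect(200)
  return res.body.schema as string
}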
@@ -43,6 +43,7 @@ const SCHEMA: Integration = {
   features: {
     [DatasourceFeature.CONNECTION_CHECKING]: true,
     [DatasourceFeature.FETCH_TABLE_NAMES]: true,
+    [DatasourceFeature.EXPORT_SCHEMA]: true,
   },
   datasource: {
     user: {
@@ -336,6 +337,81 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
       result.recordset ? result.recordset : [{ [operation]: true }]
     return this.queryWithReturning(json, queryFn, processFn)
   }
+
+  async getExternalSchema() {
+    // Query to retrieve table schema
+    const query = `
+      SELECT
+        t.name AS TableName,
+        c.name AS ColumnName,
+        ty.name AS DataType,
+        c.max_length AS MaxLength,
+        c.is_nullable AS IsNullable,
+        c.is_identity AS IsIdentity
+      FROM
+        sys.tables t
+        INNER JOIN sys.columns c ON t.object_id = c.object_id
+        INNER JOIN sys.types ty ON c.system_type_id = ty.system_type_id
+      WHERE
+        t.is_ms_shipped = 0
+      ORDER BY
+        t.name, c.column_id
+    `
+
+    await this.connect()
+
+    const result = await this.internalQuery({
+      sql: query,
+    })
+
+    const scriptParts = []
+    const tables: any = {}
+    for (const row of result.recordset) {
+      const {
+        TableName,
+        ColumnName,
+        DataType,
+        MaxLength,
+        IsNullable,
+        IsIdentity,
+      } = row
+
+      if (!tables[TableName]) {
+        tables[TableName] = {
+          columns: [],
+        }
+      }
+
+      const columnDefinition = `${ColumnName} ${DataType}${
+        MaxLength ? `(${MaxLength})` : ""
+      }${IsNullable ? " NULL" : " NOT NULL"}`
+
+      tables[TableName].columns.push(columnDefinition)
+
+      if (IsIdentity) {
+        tables[TableName].identityColumn = ColumnName
+      }
+    }
+
+    // Generate SQL statements for table creation
+    for (const tableName in tables) {
+      const { columns, identityColumn } = tables[tableName]
+
+      let createTableStatement = `CREATE TABLE [${tableName}] (\n`
+      createTableStatement += columns.join(",\n")
+
+      if (identityColumn) {
+        createTableStatement += `,\n CONSTRAINT [PK_${tableName}] PRIMARY KEY (${identityColumn})`
+      }
+
+      createTableStatement += "\n);"
+
+      scriptParts.push(createTableStatement)
+    }
+
+    const schema = scriptParts.join("\n")
+    return schema
+  }
 }

 export default {
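A minimal sketch of calling the new MSSQL method directly and persisting the result; the import path and config values are placeholders, and the `integration` member of the default export mirrors how the tests at the end of this diff construct it. The resolved value is a single string of CREATE TABLE statements.

import fs from "fs"
import mssql from "./microsoftSqlServer" // path assumed

async function exportMssqlSchema() {
  // Hypothetical connection config for illustration only.
  const config = {
    user: "sa",
    password: "<password>",
    server: "localhost",
    port: 1433,
    database: "master",
    schema: "dbo",
  }
  const integration = new mssql.integration(config)
  // Resolves to something like "CREATE TABLE [users] (\nid int(4) NOT NULL,\n..." per the tests below.
  const schema = await integration.getExternalSchema()
  fs.writeFileSync("schema.sql", schema)
}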
@@ -39,6 +39,7 @@ const SCHEMA: Integration = {
   features: {
     [DatasourceFeature.CONNECTION_CHECKING]: true,
     [DatasourceFeature.FETCH_TABLE_NAMES]: true,
+    [DatasourceFeature.EXPORT_SCHEMA]: true,
   },
   datasource: {
     host: {
@@ -324,6 +325,36 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
       await this.disconnect()
     }
   }
+
+  async getExternalSchema() {
+    try {
+      const [databaseResult] = await this.internalQuery({
+        sql: `SHOW CREATE DATABASE ${this.config.database}`,
+      })
+      let dumpContent = [databaseResult["Create Database"]]
+
+      const tablesResult = await this.internalQuery({
+        sql: `SHOW TABLES`,
+      })
+
+      for (const row of tablesResult) {
+        const tableName = row[`Tables_in_${this.config.database}`]
+
+        const createTableResults = await this.internalQuery({
+          sql: `SHOW CREATE TABLE \`${tableName}\``,
+        })
+
+        const createTableStatement = createTableResults[0]["Create Table"]
+
+        dumpContent.push(createTableStatement)
+      }
+
+      const schema = dumpContent.join("\n")
+      return schema
+    } finally {
+      this.disconnect()
+    }
+  }
 }

 export default {
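Unlike the MSSQL exporter above, which builds CREATE TABLE statements by hand, the MySQL exporter concatenates the server's own DDL. For readers unfamiliar with the SHOW statements involved, a short sketch of the row shapes the loop relies on (column names are as MySQL returns them; values abbreviated):

// Row shapes consumed by getExternalSchema above, for a database named "db":
// SHOW CREATE DATABASE db   -> [{ Database: "db", "Create Database": "CREATE DATABASE `db` ..." }]
// SHOW TABLES               -> [{ Tables_in_db: "users" }, { Tables_in_db: "products" }]
// SHOW CREATE TABLE `users` -> [{ Table: "users", "Create Table": "CREATE TABLE `users` ( ... )" }]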
@@ -1,3 +1,4 @@
+import fs from "fs"
 import {
   Integration,
   DatasourceFieldType,
@@ -21,6 +22,8 @@ import { PostgresColumn } from "./base/types"
 import { escapeDangerousCharacters } from "../utilities"

 import { Client, ClientConfig, types } from "pg"
+import { exec } from "child_process"
+import { storeTempFile } from "../utilities/fileSystem"

 // Return "date" and "timestamp" types as plain strings.
 // This lets us reference the original stored timezone.
@@ -57,6 +60,7 @@ const SCHEMA: Integration = {
   features: {
     [DatasourceFeature.CONNECTION_CHECKING]: true,
     [DatasourceFeature.FETCH_TABLE_NAMES]: true,
+    [DatasourceFeature.EXPORT_SCHEMA]: true,
   },
   datasource: {
     host: {
@@ -178,6 +182,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
     const response: ConnectionInfo = {
       connected: false,
     }
+
     try {
       await this.openConnection()
       response.connected = true
@@ -381,6 +386,59 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
       return response.rows.length ? response.rows : [{ [operation]: true }]
     }
   }
+
+  async getExternalSchema() {
+    const dumpCommandParts = [
+      `user=${this.config.user}`,
+      `host=${this.config.host}`,
+      `port=${this.config.port}`,
+      `dbname=${this.config.database}`,
+    ]
+
+    if (this.config.ssl) {
+      dumpCommandParts.push("sslmode=verify-ca")
+      if (this.config.ca) {
+        const caFilePath = storeTempFile(this.config.ca)
+        fs.chmodSync(caFilePath, "0600")
+        dumpCommandParts.push(`sslrootcert=${caFilePath}`)
+      }
+
+      if (this.config.clientCert) {
+        const clientCertFilePath = storeTempFile(this.config.clientCert)
+        fs.chmodSync(clientCertFilePath, "0600")
+        dumpCommandParts.push(`sslcert=${clientCertFilePath}`)
+      }
+
+      if (this.config.clientKey) {
+        const clientKeyFilePath = storeTempFile(this.config.clientKey)
+        fs.chmodSync(clientKeyFilePath, "0600")
+        dumpCommandParts.push(`sslkey=${clientKeyFilePath}`)
+      }
+    }
+
+    const dumpCommand = `PGPASSWORD="${
+      this.config.password
+    }" pg_dump --schema-only "${dumpCommandParts.join(" ")}"`
+
+    return new Promise<string>((res, rej) => {
+      exec(dumpCommand, (error, stdout, stderr) => {
+        if (error) {
+          console.error(`Error generating dump: ${error.message}`)
+          rej(error.message)
+          return
+        }
+
+        if (stderr) {
+          console.error(`pg_dump error: ${stderr}`)
+          rej(stderr)
+          return
+        }
+
+        res(stdout)
+        console.log("SQL dump generated successfully!")
+      })
+    })
+  }
 }

 export default {
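The Postgres exporter shells out rather than querying the catalog itself, so it relies on the pg_dump binary being available on the host running the server. A worked example of the command string assembled above, with placeholder values that are not from this diff:

// Hypothetical config { user: "postgres", host: "localhost", port: 5432, database: "app" }, SSL disabled:
//   PGPASSWORD="<password>" pg_dump --schema-only "user=postgres host=localhost port=5432 dbname=app"
// With ssl enabled, "sslmode=verify-ca" is appended, and any ca/clientCert/clientKey values are written to
// temp files (chmod 0600) and referenced via sslrootcert/sslcert/sslkey in the same conninfo string.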
@@ -81,7 +81,9 @@ export const streamFile = (path: string) => {
  * @param {string} fileContents contents which will be written to a temp file.
  * @return {string} the path to the temp file.
  */
-export const storeTempFile = (fileContents: any) => {
+export const storeTempFile = (
+  fileContents: string | NodeJS.ArrayBufferView
+) => {
   const path = join(budibaseTempDir(), uuid())
   fs.writeFileSync(path, fileContents)
   return path
@@ -76,6 +76,7 @@ export enum FilterType {
 export enum DatasourceFeature {
   CONNECTION_CHECKING = "connection",
   FETCH_TABLE_NAMES = "fetch_table_names",
+  EXPORT_SCHEMA = "export_schema",
 }

 export interface StepDefinition {
@@ -140,6 +141,7 @@ export interface IntegrationBase {
   update?(query: any): Promise<any[] | any>
   delete?(query: any): Promise<any[] | any>
   testConnection?(): Promise<ConnectionInfo>
+  getExternalSchema?(): Promise<string>
 }

 export interface DatasourcePlus extends IntegrationBase {
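Because getExternalSchema is optional on IntegrationBase, any integration can opt in by implementing it and advertising DatasourceFeature.EXPORT_SCHEMA, as the three SQL integrations above do. A minimal sketch of a hypothetical integration opting in (the class name and DDL string are illustrative, not part of this diff; other CRUD members, all optional on the interface as shown, are elided):

// Hypothetical integration opting in to schema export.
class ExampleIntegration implements IntegrationBase {
  // Resolve with a single DDL string; the controller above returns it to the client as { schema }.
  async getExternalSchema(): Promise<string> {
    return 'CREATE TABLE "example" (id integer NOT NULL);'
  }
}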
@@ -0,0 +1,112 @@
import { GenericContainer, Wait } from "testcontainers"
import { Duration, TemporalUnit } from "node-duration"
import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer"

jest.unmock("mssql")

describe("getExternalSchema", () => {
  describe("postgres", () => {
    let config: any

    beforeAll(async () => {
      const password = "Str0Ng_p@ssW0rd!"
      const container = await new GenericContainer(
        "mcr.microsoft.com/mssql/server"
      )
        .withExposedPorts(1433)
        .withEnv("ACCEPT_EULA", "Y")
        .withEnv("MSSQL_SA_PASSWORD", password)
        .withEnv("MSSQL_PID", "Developer")
        .withWaitStrategy(Wait.forHealthCheck())
        .withHealthCheck({
          test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`,
          interval: new Duration(1000, TemporalUnit.MILLISECONDS),
          timeout: new Duration(3, TemporalUnit.SECONDS),
          retries: 20,
          startPeriod: new Duration(100, TemporalUnit.MILLISECONDS),
        })
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(1433)
      config = {
        user: "sa",
        password,
        server: host,
        port: port,
        database: "master",
        schema: "dbo",
      }
    })

    it("can export an empty database", async () => {
      const integration = new mssql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`""`)
    })

    it("can export a database with tables", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL
          );

          CREATE TABLE products (
            id INT IDENTITY(1,1) PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            price DECIMAL(10, 2) NOT NULL
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
        id int(4) NOT NULL,
        name varchar(100) NOT NULL,
        price decimal(9) NOT NULL,
         CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
        id int(4) NOT NULL,
        name varchar(100) NOT NULL,
        role varchar(15) NOT NULL,
         CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })

    it("does not export a data", async () => {
      const integration = new mssql.integration(config)

      await integration.connect()
      await integration.internalQuery({
        sql: `INSERT INTO [users] ([name], [role]) VALUES ('John Doe', 'Administrator');
          INSERT INTO [products] ([name], [price]) VALUES ('Book', 7.68);
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE TABLE [products] (
        id int(4) NOT NULL,
        name varchar(100) NOT NULL,
        price decimal(9) NOT NULL,
         CONSTRAINT [PK_products] PRIMARY KEY (id)
        );
        CREATE TABLE [users] (
        id int(4) NOT NULL,
        name varchar(100) NOT NULL,
        role varchar(15) NOT NULL,
         CONSTRAINT [PK_users] PRIMARY KEY (id)
        );"
      `)
    })
  })
})
@@ -0,0 +1,108 @@
import { GenericContainer } from "testcontainers"
import mysql from "../../../../packages/server/src/integrations/mysql"

jest.unmock("mysql2/promise")

describe("datasource validators", () => {
  describe("mysql", () => {
    let config: any

    beforeAll(async () => {
      const container = await new GenericContainer("mysql")
        .withExposedPorts(3306)
        .withEnv("MYSQL_ROOT_PASSWORD", "admin")
        .withEnv("MYSQL_DATABASE", "db")
        .withEnv("MYSQL_USER", "user")
        .withEnv("MYSQL_PASSWORD", "password")
        .start()

      const host = container.getContainerIpAddress()
      const port = container.getMappedPort(3306)
      config = {
        host,
        port,
        user: "user",
        database: "db",
        password: "password",
        rejectUnauthorized: true,
      }
    })

    it("can export an empty database", async () => {
      const integration = new mysql.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(
        `"CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */"`
      )
    })

    it("can export a database with tables", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `
          CREATE TABLE users (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            role VARCHAR(15) NOT NULL,
            PRIMARY KEY (id)
          );

          CREATE TABLE products (
            id INT AUTO_INCREMENT,
            name VARCHAR(100) NOT NULL,
            price DECIMAL,
            PRIMARY KEY (id)
          );
        `,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })

    it("does not export a data", async () => {
      const integration = new mysql.integration(config)

      await integration.internalQuery({
        sql: `INSERT INTO users (name, role) VALUES ('John Doe', 'Administrator');`,
      })

      await integration.internalQuery({
        sql: `INSERT INTO products (name, price) VALUES ('Book', 7.68);`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "CREATE DATABASE \`db\` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci */ /*!80016 DEFAULT ENCRYPTION='N' */
        CREATE TABLE \`products\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`price\` decimal(10,0) DEFAULT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci
        CREATE TABLE \`users\` (
          \`id\` int NOT NULL AUTO_INCREMENT,
          \`name\` varchar(100) NOT NULL,
          \`role\` varchar(15) NOT NULL,
          PRIMARY KEY (\`id\`)
        ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci"
      `)
    })
  })
})
@@ -0,0 +1,354 @@
import { GenericContainer } from "testcontainers"
import postgres from "../../../../packages/server/src/integrations/postgres"

jest.unmock("pg")

describe("getExternalSchema", () => {
  describe("postgres", () => {
    let host: string
    let port: number
    let config: any

    beforeAll(async () => {
      const container = await new GenericContainer("postgres")
        .withExposedPorts(5432)
        .withEnv("POSTGRES_PASSWORD", "password")
        .start()

      host = container.getContainerIpAddress()
      port = container.getMappedPort(5432)

      config = {
        host,
        port,
        database: "postgres",
        user: "postgres",
        password: "password",
        schema: "public",
        ssl: false,
        rejectUnauthorized: false,
      }
    })

    it("can export an empty database", async () => {
      const integration = new postgres.integration(config)
      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        -- Dumped from database version 15.3 (Debian 15.3-1.pgdg120+1)
        -- Dumped by pg_dump version 15.3

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("can export a database with tables", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery({
        sql: `
          CREATE TABLE "users" (
            "id" SERIAL,
            "name" VARCHAR(100) NOT NULL,
            "role" VARCHAR(15) NOT NULL,
            PRIMARY KEY ("id")
          );
          CREATE TABLE "products" (
            "id" SERIAL,
            "name" VARCHAR(100) NOT NULL,
            "price" DECIMAL NOT NULL,
            PRIMARY KEY ("id")
          );`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        -- Dumped from database version 15.3 (Debian 15.3-1.pgdg120+1)
        -- Dumped by pg_dump version 15.3

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })

    it("does not export a data", async () => {
      const integration = new postgres.integration(config)

      await integration.internalQuery({
        sql: `INSERT INTO "users" ("name", "role") VALUES ('John Doe', 'Administrator');
          INSERT INTO "products" ("name", "price") VALUES ('Book', 7.68);`,
      })

      const result = await integration.getExternalSchema()
      expect(result).toMatchInlineSnapshot(`
        "--
        -- PostgreSQL database dump
        --

        -- Dumped from database version 15.3 (Debian 15.3-1.pgdg120+1)
        -- Dumped by pg_dump version 15.3

        SET statement_timeout = 0;
        SET lock_timeout = 0;
        SET idle_in_transaction_session_timeout = 0;
        SET client_encoding = 'UTF8';
        SET standard_conforming_strings = on;
        SELECT pg_catalog.set_config('search_path', '', false);
        SET check_function_bodies = false;
        SET xmloption = content;
        SET client_min_messages = warning;
        SET row_security = off;

        SET default_tablespace = '';

        SET default_table_access_method = heap;

        --
        -- Name: products; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.products (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            price numeric NOT NULL
        );


        ALTER TABLE public.products OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.products_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.products_id_seq OWNER TO postgres;

        --
        -- Name: products_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.products_id_seq OWNED BY public.products.id;


        --
        -- Name: users; Type: TABLE; Schema: public; Owner: postgres
        --

        CREATE TABLE public.users (
            id integer NOT NULL,
            name character varying(100) NOT NULL,
            role character varying(15) NOT NULL
        );


        ALTER TABLE public.users OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
        --

        CREATE SEQUENCE public.users_id_seq
            AS integer
            START WITH 1
            INCREMENT BY 1
            NO MINVALUE
            NO MAXVALUE
            CACHE 1;


        ALTER TABLE public.users_id_seq OWNER TO postgres;

        --
        -- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres
        --

        ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id;


        --
        -- Name: products id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products ALTER COLUMN id SET DEFAULT nextval('public.products_id_seq'::regclass);


        --
        -- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass);


        --
        -- Name: products products_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.products
            ADD CONSTRAINT products_pkey PRIMARY KEY (id);


        --
        -- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres
        --

        ALTER TABLE ONLY public.users
            ADD CONSTRAINT users_pkey PRIMARY KEY (id);


        --
        -- PostgreSQL database dump complete
        --

        "
      `)
    })
  })
})