Converting tests over to using datasourceDescribe.
This commit is contained in: parent b2d80d7dcc, commit 6b9b0801a8
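Every hunk below replaces a hand-rolled `describe.each` setup with a shared `datasourceDescribe` helper. The helper's implementation is not part of this diff, so the sketch below is inferred purely from the call sites that follow; interface names, optionality, and anything not visible in the hunks are assumptions.

    // Inferred shape of the helper, reconstructed from its call sites below.
    // Only the fields that actually appear in the hunks (name, only, exclude,
    // config, dsProvider, isInternal, isOracle, isMSSQL, isPostgres) are
    // evidenced by the diff; everything else here is an assumption.
    import { Datasource } from "@budibase/types"
    import { Knex } from "knex"
    import { DatabaseName } from "../integrations/tests/utils" // path as used below

    interface DatasourceDescribeOpts {
      name: string // describe title; "%s" is filled with the database name
      only?: DatabaseName[] // run only against these databases
      exclude?: DatabaseName[] // run against all databases except these
    }

    interface DatasourceDescribeContext {
      config: any // the test configuration, already initialised by the helper
      dsProvider: Promise<{
        datasource?: Datasource // created through the datasource API
        rawDatasource?: Datasource // raw config, including the password
        client?: Knex // knex client connected to the underlying database
      }>
      isInternal: boolean
      isOracle: boolean
      isMSSQL: boolean
      isPostgres: boolean
    }

    declare function datasourceDescribe(
      opts: DatasourceDescribeOpts,
      cb: (ctx: DatasourceDescribeContext) => void
    ): void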
@@ -3,32 +3,19 @@ import {
   Operation,
   Query,
   QueryPreview,
-  SourceName,
   TableSourceType,
 } from "@budibase/types"
-import * as setup from "../utilities"
 import {
   DatabaseName,
-  getDatasource,
-  knexClient,
+  datasourceDescribe,
 } from "../../../../integrations/tests/utils"
 import { Expectations } from "src/tests/utilities/api/base"
 import { events } from "@budibase/backend-core"
 import { Knex } from "knex"

-describe.each(
-  [
-    DatabaseName.POSTGRES,
-    DatabaseName.MYSQL,
-    DatabaseName.SQL_SERVER,
-    DatabaseName.MARIADB,
-    DatabaseName.ORACLE,
-  ].map(name => [name, getDatasource(name)])
-)("queries (%s)", (dbName, dsProvider) => {
-  const config = setup.getConfig()
-  const isOracle = dbName === DatabaseName.ORACLE
-  const isMsSQL = dbName === DatabaseName.SQL_SERVER
-  const isPostgres = dbName === DatabaseName.POSTGRES
+datasourceDescribe(
+  { name: "queries (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] },
+  ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => {
   const mainTableName = "test_table"

   let rawDatasource: Datasource
@@ -59,19 +46,17 @@ describe.each(
   }

   beforeAll(async () => {
-    await config.init()
+    const ds = await dsProvider
+    rawDatasource = ds.rawDatasource!
+    datasource = ds.datasource!
+    client = ds.client!
   })

   beforeEach(async () => {
-    rawDatasource = await dsProvider
-    datasource = await config.api.datasource.create(rawDatasource)
-
     // The Datasource API does not return the password, but we need it later to
     // connect to the underlying database, so we fill it back in here.
     datasource.config!.password = rawDatasource.config!.password

-    client = await knexClient(rawDatasource)
-
     await client.schema.dropTableIfExists(mainTableName)
     await client.schema.createTable(mainTableName, table => {
       table.increments("id").primary()
@@ -91,15 +76,6 @@ describe.each(
     jest.clearAllMocks()
   })

-  afterEach(async () => {
-    const ds = await config.api.datasource.get(datasource._id!)
-    await config.api.datasource.delete(ds)
-  })
-
-  afterAll(async () => {
-    setup.afterAll()
-  })
-
   describe("query admin", () => {
     describe("create", () => {
       it("should be able to create a query", async () => {
@@ -349,10 +325,10 @@ describe.each(
       })

       it("should work with static variables", async () => {
-        await config.api.datasource.update({
-          ...datasource,
+        const datasource = await config.api.datasource.create({
+          ...rawDatasource,
           config: {
-            ...datasource.config,
+            ...rawDatasource.config,
             staticVariables: {
               foo: "bar",
             },
@@ -390,9 +366,15 @@ describe.each(
       })

       it("should work with dynamic variables", async () => {
+        const datasource = await config.api.datasource.create(rawDatasource)
+
         const basedOnQuery = await createQuery({
+          datasourceId: datasource._id!,
           fields: {
-            sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
+            sql: client(mainTableName)
+              .select("name")
+              .where({ id: 1 })
+              .toString(),
           },
         })

@@ -439,9 +421,15 @@ describe.each(
       })

       it("should handle the dynamic base query being deleted", async () => {
+        const datasource = await config.api.datasource.create(rawDatasource)
+
         const basedOnQuery = await createQuery({
+          datasourceId: datasource._id!,
           fields: {
-            sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
+            sql: client(mainTableName)
+              .select("name")
+              .where({ id: 1 })
+              .toString(),
           },
         })

@@ -482,11 +470,7 @@ describe.each(
           },
         })

-        expect(preview.rows).toEqual([
-          {
-            [key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
-          },
-        ])
+        expect(preview.rows).toEqual([{ [key]: isMSSQL ? "" : null }])
       })
     })

@@ -495,7 +479,9 @@ describe.each(
       it("should be able to insert with bindings", async () => {
         const query = await createQuery({
           fields: {
-            sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
+            sql: client(mainTableName)
+              .insert({ name: "{{ foo }}" })
+              .toString(),
           },
           parameters: [
             {
@@ -518,7 +504,9 @@ describe.each(
           },
         ])

-        const rows = await client(mainTableName).where({ name: "baz" }).select()
+        const rows = await client(mainTableName)
+          .where({ name: "baz" })
+          .select()
         expect(rows).toHaveLength(1)
         for (const row of rows) {
           expect(row).toMatchObject({ name: "baz" })
@@ -528,7 +516,9 @@ describe.each(
       it("should not allow handlebars as parameters", async () => {
         const query = await createQuery({
           fields: {
-            sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
+            sql: client(mainTableName)
+              .insert({ name: "{{ foo }}" })
+              .toString(),
           },
           parameters: [
             {
@@ -678,7 +668,10 @@ describe.each(
       it("should be able to transform a query", async () => {
         const query = await createQuery({
           fields: {
-            sql: client(mainTableName).where({ id: 1 }).select("*").toString(),
+            sql: client(mainTableName)
+              .where({ id: 1 })
+              .select("*")
+              .toString(),
           },
           transformer: `
             data[0].id = data[0].id + 1;
@@ -832,6 +825,8 @@ describe.each(

     describe("query through datasource", () => {
       it("should be able to query the datasource", async () => {
+        const datasource = await config.api.datasource.create(rawDatasource)
+
         const entityId = mainTableName
         await config.api.datasource.update({
           ...datasource,
@@ -846,6 +841,7 @@ describe.each(
             },
           },
         })
+
         const res = await config.api.datasource.query({
           endpoint: {
             datasourceId: datasource._id!,
@@ -911,7 +907,7 @@ describe.each(
         } catch (err: any) {
           error = err.message
         }
-        if (isMsSQL || isOracle) {
+        if (isMSSQL || isOracle) {
           expect(error).toBeUndefined()
         } else {
           expect(error).toBeDefined()
@@ -960,4 +956,5 @@ describe.each(
         expect(results.data.length).toEqual(1)
       })
     })
-})
+  }
+)
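That closes the first converted file. The same mechanical pattern repeats in every file below: the suite awaits `dsProvider` once in `beforeAll`/`beforeEach` instead of calling `getDatasource` and `knexClient` by hand. A minimal sketch of that pattern (suite and table names here are illustrative, not from the diff):

    // Sketch of the recurring setup after conversion: the provider hands back
    // the datasource, its raw config, and a knex client in one resolved object.
    datasourceDescribe(
      { name: "example (%s)", exclude: [DatabaseName.MONGODB] },
      ({ config, dsProvider }) => {
        let datasource: Datasource
        let client: Knex

        beforeAll(async () => {
          const ds = await dsProvider
          // Non-null assertions mirror the converted suites: these fields are
          // only populated when running against an external database.
          datasource = ds.datasource!
          client = ds.client!
        })

        it("can query the underlying database", async () => {
          const rows = await client("example_table").select("*") // illustrative table
          expect(rows).toBeDefined()
        })
      }
    )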
@@ -1,10 +1,5 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
-import {
-  DatabaseName,
-  getDatasource,
-  knexClient,
-} from "../integrations/tests/utils"
+import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
 import { Knex } from "knex"

@@ -15,31 +10,24 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }

-const config = setup.getConfig()!
-
-describe("mysql integrations", () => {
-  let datasource: Datasource
-  let client: Knex
-
-  beforeAll(async () => {
-    await config.init()
-    const rawDatasource = await getDatasource(DatabaseName.MYSQL)
-    datasource = await config.api.datasource.create(rawDatasource)
-    client = await knexClient(rawDatasource)
-  })
-
-  afterAll(config.end)
-
-  describe("Integration compatibility with mysql search_path", () => {
-    let datasource: Datasource
+datasourceDescribe(
+  {
+    name: "Integration compatibility with mysql search_path",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
     let rawDatasource: Datasource
+    let datasource: Datasource
     let client: Knex

     const database = generator.guid()
     const database2 = generator.guid()

     beforeAll(async () => {
-      rawDatasource = await getDatasource(DatabaseName.MYSQL)
-      client = await knexClient(rawDatasource)
+      const ds = await dsProvider
+      rawDatasource = ds.rawDatasource!
+      datasource = ds.datasource!
+      client = ds.client!

       await client.raw(`CREATE DATABASE \`${database}\`;`)
       await client.raw(`CREATE DATABASE \`${database2}\`;`)
@@ -87,11 +75,25 @@ describe("mysql integrations", () => {
       const schema = res.datasource.entities![repeated_table_name].schema
       expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
     })
+  }
+)

+datasourceDescribe(
+  {
+    name: "POST /api/datasources/:datasourceId/schema",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+
+    beforeAll(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource!
+      client = ds.client!
     })

-  describe("POST /api/datasources/:datasourceId/schema", () => {
     let tableName: string

     beforeEach(async () => {
       tableName = uniqueTableName()
     })
@@ -122,5 +124,5 @@ describe("mysql integrations", () => {
       expect(table).toBeDefined()
       expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
     })
-  })
-})
+  }
+)
@@ -1,25 +1,23 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType, Table } from "@budibase/types"
 import _ from "lodash"
 import { generator } from "@budibase/backend-core/tests"
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
   knexClient,
 } from "../integrations/tests/utils"
 import { Knex } from "knex"

-const config = setup.getConfig()!
-
-describe("postgres integrations", () => {
+datasourceDescribe(
+  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
+  ({ config, dsProvider }) => {
   let datasource: Datasource
   let client: Knex

   beforeAll(async () => {
-    await config.init()
-    const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-    datasource = await config.api.datasource.create(rawDatasource)
-    client = await knexClient(rawDatasource)
+    const ds = await dsProvider
+    datasource = ds.datasource!
+    client = ds.client!
   })

   afterAll(config.end)
@@ -89,80 +87,6 @@ describe("postgres integrations", () => {
     })
   })

-  describe("Integration compatibility with postgres search_path", () => {
-    let datasource: Datasource
-    let client: Knex
-    let schema1: string
-    let schema2: string
-
-    beforeEach(async () => {
-      schema1 = generator.guid().replaceAll("-", "")
-      schema2 = generator.guid().replaceAll("-", "")
-
-      const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-      client = await knexClient(rawDatasource)
-
-      await client.schema.createSchema(schema1)
-      await client.schema.createSchema(schema2)
-
-      rawDatasource.config!.schema = `${schema1}, ${schema2}`
-
-      client = await knexClient(rawDatasource)
-      datasource = await config.api.datasource.create(rawDatasource)
-    })
-
-    afterEach(async () => {
-      await client.schema.dropSchema(schema1, true)
-      await client.schema.dropSchema(schema2, true)
-    })
-
-    it("discovers tables from any schema in search path", async () => {
-      await client.schema.createTable(`${schema1}.table1`, table => {
-        table.increments("id1").primary()
-      })
-
-      await client.schema.createTable(`${schema2}.table2`, table => {
-        table.increments("id2").primary()
-      })
-
-      const response = await config.api.datasource.info(datasource)
-      expect(response.tableNames).toBeDefined()
-      expect(response.tableNames).toEqual(
-        expect.arrayContaining(["table1", "table2"])
-      )
-    })
-
-    it("does not mix columns from different tables", async () => {
-      const repeated_table_name = "table_same_name"
-
-      await client.schema.createTable(
-        `${schema1}.${repeated_table_name}`,
-        table => {
-          table.increments("id").primary()
-          table.string("val1")
-        }
-      )
-
-      await client.schema.createTable(
-        `${schema2}.${repeated_table_name}`,
-        table => {
-          table.increments("id2").primary()
-          table.string("val2")
-        }
-      )
-
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-        tablesFilter: [repeated_table_name],
-      })
-      expect(
-        response.datasource.entities?.[repeated_table_name].schema
-      ).toBeDefined()
-      const schema = response.datasource.entities?.[repeated_table_name].schema
-      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
-    })
-  })
-
   describe("check custom column types", () => {
     beforeAll(async () => {
       await client.schema.createTable("binaryTable", table => {
@@ -279,4 +203,88 @@ describe("postgres integrations", () => {
       expect(row.price).toBe("400.00")
     })
   })
+  }
+)
+
+datasourceDescribe(
+  {
+    name: "Integration compatibility with postgres search_path",
+    only: [DatabaseName.POSTGRES],
+  },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+    let schema1: string
+    let schema2: string
+
+    beforeEach(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource!
+      const rawDatasource = ds.rawDatasource!
+
+      schema1 = generator.guid().replaceAll("-", "")
+      schema2 = generator.guid().replaceAll("-", "")
+
+      client = await knexClient(rawDatasource)
+
+      await client.schema.createSchema(schema1)
+      await client.schema.createSchema(schema2)
+
+      rawDatasource.config!.schema = `${schema1}, ${schema2}`
+
+      client = await knexClient(rawDatasource)
+      datasource = await config.api.datasource.create(rawDatasource)
     })
+
+    afterEach(async () => {
+      await client.schema.dropSchema(schema1, true)
+      await client.schema.dropSchema(schema2, true)
+    })
+
+    it("discovers tables from any schema in search path", async () => {
+      await client.schema.createTable(`${schema1}.table1`, table => {
+        table.increments("id1").primary()
+      })
+
+      await client.schema.createTable(`${schema2}.table2`, table => {
+        table.increments("id2").primary()
+      })
+
+      const response = await config.api.datasource.info(datasource)
+      expect(response.tableNames).toBeDefined()
+      expect(response.tableNames).toEqual(
+        expect.arrayContaining(["table1", "table2"])
+      )
+    })
+
+    it("does not mix columns from different tables", async () => {
+      const repeated_table_name = "table_same_name"
+
+      await client.schema.createTable(
+        `${schema1}.${repeated_table_name}`,
+        table => {
+          table.increments("id").primary()
+          table.string("val1")
+        }
+      )
+
+      await client.schema.createTable(
+        `${schema2}.${repeated_table_name}`,
+        table => {
+          table.increments("id2").primary()
+          table.string("val2")
+        }
+      )
+
+      const response = await config.api.datasource.fetchSchema({
+        datasourceId: datasource._id!,
+        tablesFilter: [repeated_table_name],
+      })
+      expect(
+        response.datasource.entities?.[repeated_table_name].schema
+      ).toBeDefined()
+      const schema = response.datasource.entities?.[repeated_table_name].schema
+      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
+    })
+  }
+)
@@ -7,40 +7,27 @@ import {
   Table,
 } from "@budibase/types"

-import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"

 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
 } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"

 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-describe.each([
-  ["internal", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("search sdk (%s)", (name, dsProvider) => {
-  const isInternal = name === "internal"
-  const config = new TestConfiguration()
-
+datasourceDescribe(
+  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, dsProvider, isInternal }) => {
   let datasource: Datasource | undefined
   let table: Table

   beforeAll(async () => {
-    await config.init()
-    if (dsProvider) {
-      datasource = await config.createDatasource({
-        datasource: await dsProvider,
-      })
-    }
+    const ds = await dsProvider
+    datasource = ds.datasource
   })

   beforeEach(async () => {
@@ -229,4 +216,5 @@ describe.each([
       })
     }
   )
-})
+  }
+)
@@ -0,0 +1,9 @@
+import { DATASOURCE_TEST_FILES } from "."
+
+export default (paths: string[]) => {
+  return {
+    filtered: paths
+      .filter(path => DATASOURCE_TEST_FILES.includes(path))
+      .map(path => ({ test: path })),
+  }
+}
@@ -0,0 +1,13 @@
+export const DATASOURCE_TEST_FILES = [
+  "src/integration-test/mysql.spec.ts",
+  "src/integration-test/postgres.spec.ts",
+  "src/api/routes/tests/queries/generic-sql.spec.ts",
+  "src/sdk/app/rows/search/tests/search.spec.ts",
+  "src/api/routes/tests/queries/mongodb.spec.ts",
+  "src/api/routes/tests/search.spec.ts",
+  "src/api/routes/tests/datasource.spec.ts",
+  "src/api/routes/tests/viewV2.spec.ts",
+  "src/api/routes/tests/row.spec.ts",
+  "src/api/routes/tests/rowAction.spec.ts",
+  "src/api/routes/tests/table.spec.ts",
+]
@@ -0,0 +1,9 @@
+import { DATASOURCE_TEST_FILES } from "."
+
+export default (paths: string[]) => {
+  return {
+    filtered: paths
+      .filter(path => !DATASOURCE_TEST_FILES.includes(path))
+      .map(path => ({ test: path })),
+  }
+}
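The three new files look like Jest `--filter` modules: Jest hands such a module the full list of collected test paths and runs only the entries returned under `filtered`, so one module keeps just the `DATASOURCE_TEST_FILES` and the other keeps everything else, letting datasource-backed specs run as a separate job. The diff does not show where these files live, so the module and spec paths in this sketch are hypothetical:

    // Sketch of how Jest consumes such a module, assuming it is wired up via
    // `jest --filter=<path-to-module>`; the import path is an assumed name.
    import datasourceFilter from "./datasourceFilter"

    const collectedPaths = [
      "src/integration-test/mysql.spec.ts", // listed in DATASOURCE_TEST_FILES
      "src/some/other.spec.ts", // hypothetical spec outside the list
    ]

    // Jest calls the default export with all collected paths and runs only
    // what comes back under `filtered`.
    const { filtered } = datasourceFilter(collectedPaths)
    // => [{ test: "src/integration-test/mysql.spec.ts" }]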