Converting tests over to using datasourceDescribe.

Sam Rose 2024-11-05 16:24:00 +00:00
parent b2d80d7dcc
commit 6b9b0801a8
7 changed files with 1293 additions and 1267 deletions
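
The pattern being adopted, as visible in the diffs below: `datasourceDescribe` replaces the old `describe` + `getDatasource`/`knexClient` boilerplate. It takes a config object (a `name` plus `only`/`exclude` lists of `DatabaseName`s) and a callback that receives the test `config`, an awaitable `dsProvider` yielding the prepared `datasource`, `rawDatasource`, and Knex `client`, plus flags such as `isInternal`. A minimal sketch of a converted suite, inferred from these diffs (the suite name and the assertion are illustrative, not part of this commit):

import { Datasource } from "@budibase/types"
import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
import { Knex } from "knex"

datasourceDescribe(
  // run this suite against MySQL only; `exclude` works the opposite way
  { name: "example suite", only: [DatabaseName.MYSQL] },
  ({ config, dsProvider }) => {
    let datasource: Datasource
    let client: Knex

    beforeAll(async () => {
      // dsProvider resolves once the datasource for this database is set up
      const ds = await dsProvider
      datasource = ds.datasource!
      client = ds.client!
    })

    it("can fetch the schema", async () => {
      const res = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
      })
      expect(res.errors).toEqual({})
    })
  }
)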

View File

@ -1,10 +1,5 @@
import { Datasource, FieldType } from "@budibase/types"
import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
import { generator } from "@budibase/backend-core/tests"
import { Knex } from "knex"
@ -15,31 +10,24 @@ function uniqueTableName(length?: number): string {
    .substring(0, length || 10)
}

datasourceDescribe(
  {
    name: "Integration compatibility with mysql search_path",
    only: [DatabaseName.MYSQL],
  },
  ({ config, dsProvider }) => {
    let rawDatasource: Datasource
    let datasource: Datasource
    let client: Knex

    const database = generator.guid()
    const database2 = generator.guid()

    beforeAll(async () => {
      const ds = await dsProvider
      rawDatasource = ds.rawDatasource!
      datasource = ds.datasource!
      client = ds.client!

      await client.raw(`CREATE DATABASE \`${database}\`;`)
      await client.raw(`CREATE DATABASE \`${database2}\`;`)
@ -87,11 +75,25 @@ describe("mysql integrations", () => {
      const schema = res.datasource.entities![repeated_table_name].schema
      expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
    })
  }
)

datasourceDescribe(
  {
    name: "POST /api/datasources/:datasourceId/schema",
    only: [DatabaseName.MYSQL],
  },
  ({ config, dsProvider }) => {
    let datasource: Datasource
    let client: Knex

    beforeAll(async () => {
      const ds = await dsProvider
      datasource = ds.datasource!
      client = ds.client!
    })

    let tableName: string

    beforeEach(async () => {
      tableName = uniqueTableName()
    })
@ -122,5 +124,5 @@ describe("mysql integrations", () => {
      expect(table).toBeDefined()
      expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
    })
  }
)

View File

@ -1,105 +1,230 @@
import { Datasource, FieldType, Table } from "@budibase/types"
import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import {
  DatabaseName,
  datasourceDescribe,
  knexClient,
} from "../integrations/tests/utils"
import { Knex } from "knex"

datasourceDescribe(
  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
  ({ config, dsProvider }) => {
    let datasource: Datasource
    let client: Knex

    beforeAll(async () => {
      const ds = await dsProvider
      datasource = ds.datasource!
      client = ds.client!
    })

    afterAll(config.end)

    describe("POST /api/datasources/:datasourceId/schema", () => {
      let tableName: string

      beforeEach(async () => {
        tableName = generator.guid().replaceAll("-", "").substring(0, 10)
      })

      afterEach(async () => {
        await client.schema.dropTableIfExists(tableName)
      })

      it("recognises when a table has no primary key", async () => {
        await client.schema.createTable(tableName, table => {
          table.increments("id", { primaryKey: false })
        })

        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })

        expect(response.errors).toEqual({
          [tableName]: "Table must have a primary key.",
        })
      })

      it("recognises when a table is using a reserved column name", async () => {
        await client.schema.createTable(tableName, table => {
          table.increments("_id").primary()
        })

        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })

        expect(response.errors).toEqual({
          [tableName]: "Table contains invalid columns.",
        })
      })

      it("recognises enum columns as options", async () => {
        const tableName = `orders_${generator
          .guid()
          .replaceAll("-", "")
          .substring(0, 6)}`

        await client.schema.createTable(tableName, table => {
          table.increments("order_id").primary()
          table.string("customer_name").notNullable()
          table.enum("status", ["pending", "processing", "shipped"], {
            useNative: true,
            enumName: `${tableName}_status`,
          })
        })

        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })

        const table = response.datasource.entities?.[tableName]

        expect(table).toBeDefined()
        expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
      })
    })

    describe("check custom column types", () => {
      beforeAll(async () => {
        await client.schema.createTable("binaryTable", table => {
          table.binary("id").primary()
          table.string("column1")
          table.integer("column2")
        })
      })

      it("should handle binary columns", async () => {
        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })
        expect(response.datasource.entities).toBeDefined()
        const table = response.datasource.entities?.["binaryTable"]
        expect(table).toBeDefined()
        expect(table?.schema.id.externalType).toBe("bytea")
        const row = await config.api.row.save(table?._id!, {
          id: "1111",
          column1: "hello",
          column2: 222,
        })
        expect(row._id).toBeDefined()
        const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
        expect(JSON.parse(decoded)[0]).toBe("1111")
      })
    })

    describe("check fetching null/not null table", () => {
      beforeAll(async () => {
        await client.schema.createTable("nullableTable", table => {
          table.increments("order_id").primary()
          table.integer("order_number").notNullable()
        })
      })

      it("should be able to change the table to allow nullable and refetch this", async () => {
        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })
        const entities = response.datasource.entities
        expect(entities).toBeDefined()
        const nullableTable = entities?.["nullableTable"]
        expect(nullableTable).toBeDefined()
        expect(
          nullableTable?.schema["order_number"].constraints?.presence
        ).toEqual(true)

        // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
        // is aware of - therefore we can try to fetch and make sure BB updates correctly
        await client.schema.alterTable("nullableTable", table => {
          table.setNullable("order_number")
        })

        const responseAfter = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })
        const entitiesAfter = responseAfter.datasource.entities
        expect(entitiesAfter).toBeDefined()
        const nullableTableAfter = entitiesAfter?.["nullableTable"]
        expect(nullableTableAfter).toBeDefined()
        expect(
          nullableTableAfter?.schema["order_number"].constraints?.presence
        ).toBeUndefined()
      })
    })

    describe("money field 💰", () => {
      const tableName = "moneytable"
      let table: Table

      beforeAll(async () => {
        await client.raw(`
          CREATE TABLE ${tableName} (
            id serial PRIMARY KEY,
            price money
          )
        `)
        const response = await config.api.datasource.fetchSchema({
          datasourceId: datasource._id!,
        })
        table = response.datasource.entities![tableName]
      })

      it("should be able to import a money field", async () => {
        expect(table).toBeDefined()
        expect(table?.schema.price.type).toBe(FieldType.NUMBER)
      })

      it("should be able to search a money field", async () => {
        await config.api.row.bulkImport(table._id!, {
          rows: [{ price: 200 }, { price: 300 }],
        })

        const { rows } = await config.api.row.search(table._id!, {
          query: {
            equal: {
              price: 200,
            },
          },
        })
        expect(rows).toHaveLength(1)
        expect(rows[0].price).toBe("200.00")
      })

      it("should be able to update a money field", async () => {
        let row = await config.api.row.save(table._id!, { price: 200 })
        expect(row.price).toBe("200.00")

        row = await config.api.row.save(table._id!, { ...row, price: 300 })
        expect(row.price).toBe("300.00")

        row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
        expect(row.price).toBe("400.00")
      })
    })
  }
)

datasourceDescribe(
  {
    name: "Integration compatibility with postgres search_path",
    only: [DatabaseName.POSTGRES],
  },
  ({ config, dsProvider }) => {
    let datasource: Datasource
    let client: Knex
    let schema1: string
    let schema2: string

    beforeEach(async () => {
      const ds = await dsProvider
      datasource = ds.datasource!
      const rawDatasource = ds.rawDatasource!

      schema1 = generator.guid().replaceAll("-", "")
      schema2 = generator.guid().replaceAll("-", "")

      client = await knexClient(rawDatasource)

      await client.schema.createSchema(schema1)
@ -161,122 +286,5 @@ describe("postgres integrations", () => {
      const schema = response.datasource.entities?.[repeated_table_name].schema
      expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
    })
  }
)

View File

@ -7,226 +7,214 @@ import {
  Table,
} from "@budibase/types"
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import {
  DatabaseName,
  datasourceDescribe,
} from "../../../../../integrations/tests/utils"
import { tableForDatasource } from "../../../../../tests/utilities/structures"

// These test cases are only for things that cannot be tested through the API
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
datasourceDescribe(
  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
  ({ config, dsProvider, isInternal }) => {
    let datasource: Datasource | undefined
    let table: Table

    beforeAll(async () => {
      const ds = await dsProvider
      datasource = ds.datasource
    })

    beforeEach(async () => {
      const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
        isInternal
          ? {
              name: "id",
              type: FieldType.AUTO,
              subtype: AutoFieldSubType.AUTO_ID,
              autocolumn: true,
            }
          : {
              name: "id",
              type: FieldType.NUMBER,
              autocolumn: true,
            }

      table = await config.api.table.save(
        tableForDatasource(datasource, {
          primary: ["id"],
          schema: {
            id: idFieldSchema,
            name: {
              name: "name",
              type: FieldType.STRING,
            },
            surname: {
              name: "surname",
              type: FieldType.STRING,
            },
            age: {
              name: "age",
              type: FieldType.NUMBER,
            },
            address: {
              name: "address",
              type: FieldType.STRING,
            },
          },
        })
      )

      for (let i = 0; i < 10; i++) {
        await config.api.row.save(table._id!, {
          name: generator.first(),
          surname: generator.last(),
          age: generator.age(),
          address: generator.address(),
        })
      }
    })

    afterAll(async () => {
      config.end()
    })

    it("querying by fields will always return data attribute columns", async () => {
      await config.doInContext(config.appId, async () => {
        const { rows } = await search({
          tableId: table._id!,
          query: {},
          fields: ["name", "age"],
        })

        expect(rows).toHaveLength(10)
        for (const row of rows) {
          const keys = Object.keys(row)
          expect(keys).toContain("name")
          expect(keys).toContain("age")
          expect(keys).not.toContain("surname")
          expect(keys).not.toContain("address")
        }
      })
    })

    !isInternal &&
      it("will decode _id in oneOf query", async () => {
        await config.doInContext(config.appId, async () => {
          const result = await search({
            tableId: table._id!,
            query: {
              oneOf: {
                _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
              },
            },
          })

          expect(result.rows).toHaveLength(3)
          expect(result.rows.map(row => row.id)).toEqual(
            expect.arrayContaining([1, 4, 8])
          )
        })
      })

    it("does not allow accessing hidden fields", async () => {
      await config.doInContext(config.appId, async () => {
        await config.api.table.save({
          ...table,
          schema: {
            ...table.schema,
            name: {
              ...table.schema.name,
              visible: true,
            },
            age: {
              ...table.schema.age,
              visible: false,
            },
          },
        })
        const result = await search({
          tableId: table._id!,
          query: {},
        })
        expect(result.rows).toHaveLength(10)
        for (const row of result.rows) {
          const keys = Object.keys(row)
          expect(keys).toContain("name")
          expect(keys).toContain("surname")
          expect(keys).toContain("address")
          expect(keys).not.toContain("age")
        }
      })
    })

    it("does not allow accessing hidden fields even if requested", async () => {
      await config.doInContext(config.appId, async () => {
        await config.api.table.save({
          ...table,
          schema: {
            ...table.schema,
            name: {
              ...table.schema.name,
              visible: true,
            },
            age: {
              ...table.schema.age,
              visible: false,
            },
          },
        })
        const result = await search({
          tableId: table._id!,
          query: {},
          fields: ["name", "age"],
        })
        expect(result.rows).toHaveLength(10)
        for (const row of result.rows) {
          const keys = Object.keys(row)
          expect(keys).toContain("name")
          expect(keys).not.toContain("age")
          expect(keys).not.toContain("surname")
          expect(keys).not.toContain("address")
        }
      })
    })

    it.each([
      [["id", "name", "age"], 3],
      [["name", "age"], 10],
    ])(
      "cannot query by non search fields (fields: %s)",
      async (queryFields, expectedRows) => {
        await config.doInContext(config.appId, async () => {
          const { rows } = await search({
            tableId: table._id!,
            query: {
              $or: {
                conditions: [
                  {
                    $and: {
                      conditions: [
                        { range: { id: { low: 2, high: 4 } } },
                        { range: { id: { low: 3, high: 5 } } },
                      ],
                    },
                  },
                  { equal: { id: 7 } },
                ],
              },
            },
            fields: queryFields,
          })

          expect(rows).toHaveLength(expectedRows)
        })
      }
    )
  }
)

View File

@ -0,0 +1,9 @@
import { DATASOURCE_TEST_FILES } from "."
export default (paths: string[]) => {
return {
filtered: paths
.filter(path => DATASOURCE_TEST_FILES.includes(path))
.map(path => ({ test: path })),
}
}
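
For context: this matches the contract of Jest's `filter` option, which is handed every collected test path and must return `{ filtered: [{ test }] }`. This module keeps only the datasource-backed suites listed in `DATASOURCE_TEST_FILES` (defined in the next file).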

View File

@ -0,0 +1,13 @@
export const DATASOURCE_TEST_FILES = [
"src/integration-test/mysql.spec.ts",
"src/integration-test/postgres.spec.ts",
"src/api/routes/tests/queries/generic-sql.spec.ts",
"src/sdk/app/rows/search/tests/search.spec.ts",
"src/api/routes/tests/queries/mongodb.spec.ts",
"src/api/routes/tests/search.spec.ts",
"src/api/routes/tests/datasource.spec.ts",
"src/api/routes/tests/viewV2.spec.ts",
"src/api/routes/tests/row.spec.ts",
"src/api/routes/tests/rowAction.spec.ts",
"src/api/routes/tests/table.spec.ts",
]

View File

@ -0,0 +1,9 @@
import { DATASOURCE_TEST_FILES } from "."
export default (paths: string[]) => {
return {
filtered: paths
.filter(path => !DATASOURCE_TEST_FILES.includes(path))
.map(path => ({ test: path })),
}
}
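
The two filter modules are mirror images: the first keeps only the files in `DATASOURCE_TEST_FILES`, the second keeps everything else, so CI can split datasource-backed suites from the rest by passing one of these files to Jest's `--filter` flag. How they are wired up is not part of this diff, but their behaviour is fully determined by the code above; a quick illustration (the import path is hypothetical):

import datasourceFilter from "./filter-datasource" // hypothetical file name

// Jest hands the filter module every collected test path...
const { filtered } = datasourceFilter([
  "src/integration-test/mysql.spec.ts",
  "src/utilities/tests/some-unit.spec.ts",
])

// ...and runs only what comes back:
// filtered === [{ test: "src/integration-test/mysql.spec.ts" }]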