Converting tests over to using datasourceDescribe.
commit 6b9b0801a8 (parent b2d80d7dcc)
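The whole commit applies one mechanical pattern, so it is worth sketching once up front. Previously each spec built its own test config, called `getDatasource(...)` and wired up a Knex client by hand; after this change a suite is wrapped in `datasourceDescribe`, which owns that lifecycle and hands the pieces back through a provider. A minimal sketch of the converted shape, using only names that appear in the diffs below (the suite name and test body are illustrative):

```ts
import { Datasource } from "@budibase/types"
import { Knex } from "knex"
import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"

datasourceDescribe(
  // `only` pins a suite to specific databases; `exclude` runs it on all but those.
  { name: "example integration suite", only: [DatabaseName.POSTGRES] },
  ({ config, dsProvider }) => {
    let datasource: Datasource
    let client: Knex

    beforeAll(async () => {
      // dsProvider resolves once the datasource has been provisioned, yielding
      // the saved datasource, its raw config, and a ready-made Knex client.
      const ds = await dsProvider
      datasource = ds.datasource!
      client = ds.client!
    })

    it("can fetch the schema", async () => {
      const response = await config.api.datasource.fetchSchema({
        datasourceId: datasource._id!,
      })
      expect(response.datasource).toBeDefined()
    })
  }
)
```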
(One file's diff was suppressed because it is too large.)
src/integration-test/mysql.spec.ts

@@ -1,10 +1,5 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType } from "@budibase/types"
-import {
-  DatabaseName,
-  getDatasource,
-  knexClient,
-} from "../integrations/tests/utils"
+import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
 import { Knex } from "knex"

@@ -15,31 +10,24 @@ function uniqueTableName(length?: number): string {
     .substring(0, length || 10)
 }
 
-const config = setup.getConfig()!
-
-describe("mysql integrations", () => {
-  let datasource: Datasource
-  let client: Knex
-
-  beforeAll(async () => {
-    await config.init()
-    const rawDatasource = await getDatasource(DatabaseName.MYSQL)
-    datasource = await config.api.datasource.create(rawDatasource)
-    client = await knexClient(rawDatasource)
-  })
-
-  afterAll(config.end)
-
-  describe("Integration compatibility with mysql search_path", () => {
-    let datasource: Datasource
+datasourceDescribe(
+  {
+    name: "Integration compatibility with mysql search_path",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
     let rawDatasource: Datasource
+    let datasource: Datasource
     let client: Knex
 
     const database = generator.guid()
     const database2 = generator.guid()
 
     beforeAll(async () => {
-      rawDatasource = await getDatasource(DatabaseName.MYSQL)
-      client = await knexClient(rawDatasource)
+      const ds = await dsProvider
+      rawDatasource = ds.rawDatasource!
+      datasource = ds.datasource!
+      client = ds.client!
 
       await client.raw(`CREATE DATABASE \`${database}\`;`)
       await client.raw(`CREATE DATABASE \`${database2}\`;`)

@@ -87,11 +75,25 @@ describe("mysql integrations", () => {
       const schema = res.datasource.entities![repeated_table_name].schema
       expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
     })
   })
+  }
+)
 
+datasourceDescribe(
+  {
+    name: "POST /api/datasources/:datasourceId/schema",
+    only: [DatabaseName.MYSQL],
+  },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+
+    beforeAll(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource!
+      client = ds.client!
+    })
+
    describe("POST /api/datasources/:datasourceId/schema", () => {
      let tableName: string
 
      beforeEach(async () => {
        tableName = uniqueTableName()
      })

@@ -122,5 +124,5 @@ describe("mysql integrations", () => {
       expect(table).toBeDefined()
       expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
     })
-  })
-})
+  }
+)
src/integration-test/postgres.spec.ts

@@ -1,105 +1,230 @@
-import * as setup from "../api/routes/tests/utilities"
 import { Datasource, FieldType, Table } from "@budibase/types"
 import _ from "lodash"
 import { generator } from "@budibase/backend-core/tests"
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
   knexClient,
 } from "../integrations/tests/utils"
 import { Knex } from "knex"
 
-const config = setup.getConfig()!
-
-describe("postgres integrations", () => {
-  let datasource: Datasource
-  let client: Knex
-
-  beforeAll(async () => {
-    await config.init()
-    const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
-    datasource = await config.api.datasource.create(rawDatasource)
-    client = await knexClient(rawDatasource)
-  })
-
-  afterAll(config.end)
-
-  describe("POST /api/datasources/:datasourceId/schema", () => {
-    let tableName: string
-
-    beforeEach(async () => {
-      tableName = generator.guid().replaceAll("-", "").substring(0, 10)
-    })
-
-    afterEach(async () => {
-      await client.schema.dropTableIfExists(tableName)
-    })
-
-    it("recognises when a table has no primary key", async () => {
-      await client.schema.createTable(tableName, table => {
-        table.increments("id", { primaryKey: false })
-      })
-
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-
-      expect(response.errors).toEqual({
-        [tableName]: "Table must have a primary key.",
-      })
-    })
-
-    it("recognises when a table is using a reserved column name", async () => {
-      await client.schema.createTable(tableName, table => {
-        table.increments("_id").primary()
-      })
-
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-
-      expect(response.errors).toEqual({
-        [tableName]: "Table contains invalid columns.",
-      })
-    })
-
-    it("recognises enum columns as options", async () => {
-      const tableName = `orders_${generator
-        .guid()
-        .replaceAll("-", "")
-        .substring(0, 6)}`
-
-      await client.schema.createTable(tableName, table => {
-        table.increments("order_id").primary()
-        table.string("customer_name").notNullable()
-        table.enum("status", ["pending", "processing", "shipped"], {
-          useNative: true,
-          enumName: `${tableName}_status`,
-        })
-      })
-
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-
-      const table = response.datasource.entities?.[tableName]
-
-      expect(table).toBeDefined()
-      expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
-    })
-  })
-
-  describe("Integration compatibility with postgres search_path", () => {
+datasourceDescribe(
+  { name: "postgres integrations", only: [DatabaseName.POSTGRES] },
+  ({ config, dsProvider }) => {
+    let datasource: Datasource
+    let client: Knex
+
+    beforeAll(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource!
+      client = ds.client!
+    })
+
+    afterAll(config.end)
+
+    describe("POST /api/datasources/:datasourceId/schema", () => {
+      let tableName: string
+
+      beforeEach(async () => {
+        tableName = generator.guid().replaceAll("-", "").substring(0, 10)
+      })
+
+      afterEach(async () => {
+        await client.schema.dropTableIfExists(tableName)
+      })
+
+      it("recognises when a table has no primary key", async () => {
+        await client.schema.createTable(tableName, table => {
+          table.increments("id", { primaryKey: false })
+        })
+
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+
+        expect(response.errors).toEqual({
+          [tableName]: "Table must have a primary key.",
+        })
+      })
+
+      it("recognises when a table is using a reserved column name", async () => {
+        await client.schema.createTable(tableName, table => {
+          table.increments("_id").primary()
+        })
+
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+
+        expect(response.errors).toEqual({
+          [tableName]: "Table contains invalid columns.",
+        })
+      })
+
+      it("recognises enum columns as options", async () => {
+        const tableName = `orders_${generator
+          .guid()
+          .replaceAll("-", "")
+          .substring(0, 6)}`
+
+        await client.schema.createTable(tableName, table => {
+          table.increments("order_id").primary()
+          table.string("customer_name").notNullable()
+          table.enum("status", ["pending", "processing", "shipped"], {
+            useNative: true,
+            enumName: `${tableName}_status`,
+          })
+        })
+
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+
+        const table = response.datasource.entities?.[tableName]
+
+        expect(table).toBeDefined()
+        expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
+      })
+    })
+
+    describe("check custom column types", () => {
+      beforeAll(async () => {
+        await client.schema.createTable("binaryTable", table => {
+          table.binary("id").primary()
+          table.string("column1")
+          table.integer("column2")
+        })
+      })
+
+      it("should handle binary columns", async () => {
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+        expect(response.datasource.entities).toBeDefined()
+        const table = response.datasource.entities?.["binaryTable"]
+        expect(table).toBeDefined()
+        expect(table?.schema.id.externalType).toBe("bytea")
+        const row = await config.api.row.save(table?._id!, {
+          id: "1111",
+          column1: "hello",
+          column2: 222,
+        })
+        expect(row._id).toBeDefined()
+        const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
+        expect(JSON.parse(decoded)[0]).toBe("1111")
+      })
+    })
+
+    describe("check fetching null/not null table", () => {
+      beforeAll(async () => {
+        await client.schema.createTable("nullableTable", table => {
+          table.increments("order_id").primary()
+          table.integer("order_number").notNullable()
+        })
+      })
+
+      it("should be able to change the table to allow nullable and refetch this", async () => {
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+        const entities = response.datasource.entities
+        expect(entities).toBeDefined()
+        const nullableTable = entities?.["nullableTable"]
+        expect(nullableTable).toBeDefined()
+        expect(
+          nullableTable?.schema["order_number"].constraints?.presence
+        ).toEqual(true)
+
+        // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
+        // is aware of - therefore we can try to fetch and make sure BB updates correctly
+        await client.schema.alterTable("nullableTable", table => {
+          table.setNullable("order_number")
+        })
+
+        const responseAfter = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+        const entitiesAfter = responseAfter.datasource.entities
+        expect(entitiesAfter).toBeDefined()
+        const nullableTableAfter = entitiesAfter?.["nullableTable"]
+        expect(nullableTableAfter).toBeDefined()
+        expect(
+          nullableTableAfter?.schema["order_number"].constraints?.presence
+        ).toBeUndefined()
+      })
+    })
+
+    describe("money field 💰", () => {
+      const tableName = "moneytable"
+      let table: Table
+
+      beforeAll(async () => {
+        await client.raw(`
+          CREATE TABLE ${tableName} (
+            id serial PRIMARY KEY,
+            price money
+          )
+        `)
+        const response = await config.api.datasource.fetchSchema({
+          datasourceId: datasource._id!,
+        })
+        table = response.datasource.entities![tableName]
+      })
+
+      it("should be able to import a money field", async () => {
+        expect(table).toBeDefined()
+        expect(table?.schema.price.type).toBe(FieldType.NUMBER)
+      })
+
+      it("should be able to search a money field", async () => {
+        await config.api.row.bulkImport(table._id!, {
+          rows: [{ price: 200 }, { price: 300 }],
+        })
+
+        const { rows } = await config.api.row.search(table._id!, {
+          query: {
+            equal: {
+              price: 200,
+            },
+          },
+        })
+        expect(rows).toHaveLength(1)
+        expect(rows[0].price).toBe("200.00")
+      })
+
+      it("should be able to update a money field", async () => {
+        let row = await config.api.row.save(table._id!, { price: 200 })
+        expect(row.price).toBe("200.00")
+
+        row = await config.api.row.save(table._id!, { ...row, price: 300 })
+        expect(row.price).toBe("300.00")
+
+        row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
+        expect(row.price).toBe("400.00")
+      })
+    })
+  }
+)
+
+datasourceDescribe(
+  {
+    name: "Integration compatibility with postgres search_path",
+    only: [DatabaseName.POSTGRES],
+  },
+  ({ config, dsProvider }) => {
     let datasource: Datasource
     let client: Knex
     let schema1: string
     let schema2: string
 
     beforeEach(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource!
+      const rawDatasource = ds.rawDatasource!
+
       schema1 = generator.guid().replaceAll("-", "")
       schema2 = generator.guid().replaceAll("-", "")
 
-      const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
       client = await knexClient(rawDatasource)
 
       await client.schema.createSchema(schema1)

@@ -161,122 +286,5 @@ describe("postgres integrations", () => {
       const schema = response.datasource.entities?.[repeated_table_name].schema
       expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"])
     })
-  })
-
-  describe("check custom column types", () => {
-    beforeAll(async () => {
-      await client.schema.createTable("binaryTable", table => {
-        table.binary("id").primary()
-        table.string("column1")
-        table.integer("column2")
-      })
-    })
-
-    it("should handle binary columns", async () => {
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-      expect(response.datasource.entities).toBeDefined()
-      const table = response.datasource.entities?.["binaryTable"]
-      expect(table).toBeDefined()
-      expect(table?.schema.id.externalType).toBe("bytea")
-      const row = await config.api.row.save(table?._id!, {
-        id: "1111",
-        column1: "hello",
-        column2: 222,
-      })
-      expect(row._id).toBeDefined()
-      const decoded = decodeURIComponent(row._id!).replace(/'/g, '"')
-      expect(JSON.parse(decoded)[0]).toBe("1111")
-    })
-  })
-
-  describe("check fetching null/not null table", () => {
-    beforeAll(async () => {
-      await client.schema.createTable("nullableTable", table => {
-        table.increments("order_id").primary()
-        table.integer("order_number").notNullable()
-      })
-    })
-
-    it("should be able to change the table to allow nullable and refetch this", async () => {
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-      const entities = response.datasource.entities
-      expect(entities).toBeDefined()
-      const nullableTable = entities?.["nullableTable"]
-      expect(nullableTable).toBeDefined()
-      expect(
-        nullableTable?.schema["order_number"].constraints?.presence
-      ).toEqual(true)
-
-      // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase
-      // is aware of - therefore we can try to fetch and make sure BB updates correctly
-      await client.schema.alterTable("nullableTable", table => {
-        table.setNullable("order_number")
-      })
-
-      const responseAfter = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-      const entitiesAfter = responseAfter.datasource.entities
-      expect(entitiesAfter).toBeDefined()
-      const nullableTableAfter = entitiesAfter?.["nullableTable"]
-      expect(nullableTableAfter).toBeDefined()
-      expect(
-        nullableTableAfter?.schema["order_number"].constraints?.presence
-      ).toBeUndefined()
-    })
-  })
-
-  describe("money field 💰", () => {
-    const tableName = "moneytable"
-    let table: Table
-
-    beforeAll(async () => {
-      await client.raw(`
-        CREATE TABLE ${tableName} (
-          id serial PRIMARY KEY,
-          price money
-        )
-      `)
-      const response = await config.api.datasource.fetchSchema({
-        datasourceId: datasource._id!,
-      })
-      table = response.datasource.entities![tableName]
-    })
-
-    it("should be able to import a money field", async () => {
-      expect(table).toBeDefined()
-      expect(table?.schema.price.type).toBe(FieldType.NUMBER)
-    })
-
-    it("should be able to search a money field", async () => {
-      await config.api.row.bulkImport(table._id!, {
-        rows: [{ price: 200 }, { price: 300 }],
-      })
-
-      const { rows } = await config.api.row.search(table._id!, {
-        query: {
-          equal: {
-            price: 200,
-          },
-        },
-      })
-      expect(rows).toHaveLength(1)
-      expect(rows[0].price).toBe("200.00")
-    })
-
-    it("should be able to update a money field", async () => {
-      let row = await config.api.row.save(table._id!, { price: 200 })
-      expect(row.price).toBe("200.00")
-
-      row = await config.api.row.save(table._id!, { ...row, price: 300 })
-      expect(row.price).toBe("300.00")
-
-      row = await config.api.row.save(table._id!, { ...row, price: "400.00" })
-      expect(row.price).toBe("400.00")
-    })
-  })
-})
+  }
+)
src/sdk/app/rows/search/tests/search.spec.ts

@@ -7,226 +7,214 @@ import {
   Table,
 } from "@budibase/types"
 
-import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"
 
 import {
   DatabaseName,
-  getDatasource,
+  datasourceDescribe,
 } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"
 
 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-describe.each([
-  ["internal", undefined],
-  [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
-  [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
-  [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
-  [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("search sdk (%s)", (name, dsProvider) => {
-  const isInternal = name === "internal"
-  const config = new TestConfiguration()
-
-  let datasource: Datasource | undefined
-  let table: Table
-
-  beforeAll(async () => {
-    await config.init()
-
-    if (dsProvider) {
-      datasource = await config.createDatasource({
-        datasource: await dsProvider,
-      })
-    }
-  })
-
-  beforeEach(async () => {
-    const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
-      isInternal
-        ? {
-            name: "id",
-            type: FieldType.AUTO,
-            subtype: AutoFieldSubType.AUTO_ID,
-            autocolumn: true,
-          }
-        : {
-            name: "id",
-            type: FieldType.NUMBER,
-            autocolumn: true,
-          }
-
-    table = await config.api.table.save(
-      tableForDatasource(datasource, {
-        primary: ["id"],
-        schema: {
-          id: idFieldSchema,
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-          },
-          surname: {
-            name: "surname",
-            type: FieldType.STRING,
-          },
-          age: {
-            name: "age",
-            type: FieldType.NUMBER,
-          },
-          address: {
-            name: "address",
-            type: FieldType.STRING,
-          },
-        },
-      })
-    )
-
-    for (let i = 0; i < 10; i++) {
-      await config.api.row.save(table._id!, {
-        name: generator.first(),
-        surname: generator.last(),
-        age: generator.age(),
-        address: generator.address(),
-      })
-    }
-  })
-
-  afterAll(async () => {
-    config.end()
-  })
-
-  it("querying by fields will always return data attribute columns", async () => {
-    await config.doInContext(config.appId, async () => {
-      const { rows } = await search({
-        tableId: table._id!,
-        query: {},
-        fields: ["name", "age"],
-      })
-
-      expect(rows).toHaveLength(10)
-      for (const row of rows) {
-        const keys = Object.keys(row)
-        expect(keys).toContain("name")
-        expect(keys).toContain("age")
-        expect(keys).not.toContain("surname")
-        expect(keys).not.toContain("address")
-      }
-    })
-  })
-
-  !isInternal &&
-    it("will decode _id in oneOf query", async () => {
-      await config.doInContext(config.appId, async () => {
-        const result = await search({
-          tableId: table._id!,
-          query: {
-            oneOf: {
-              _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
-            },
-          },
-        })
-
-        expect(result.rows).toHaveLength(3)
-        expect(result.rows.map(row => row.id)).toEqual(
-          expect.arrayContaining([1, 4, 8])
-        )
-      })
-    })
-
-  it("does not allow accessing hidden fields", async () => {
-    await config.doInContext(config.appId, async () => {
-      await config.api.table.save({
-        ...table,
-        schema: {
-          ...table.schema,
-          name: {
-            ...table.schema.name,
-            visible: true,
-          },
-          age: {
-            ...table.schema.age,
-            visible: false,
-          },
-        },
-      })
-      const result = await search({
-        tableId: table._id!,
-        query: {},
-      })
-      expect(result.rows).toHaveLength(10)
-      for (const row of result.rows) {
-        const keys = Object.keys(row)
-        expect(keys).toContain("name")
-        expect(keys).toContain("surname")
-        expect(keys).toContain("address")
-        expect(keys).not.toContain("age")
-      }
-    })
-  })
-
-  it("does not allow accessing hidden fields even if requested", async () => {
-    await config.doInContext(config.appId, async () => {
-      await config.api.table.save({
-        ...table,
-        schema: {
-          ...table.schema,
-          name: {
-            ...table.schema.name,
-            visible: true,
-          },
-          age: {
-            ...table.schema.age,
-            visible: false,
-          },
-        },
-      })
-      const result = await search({
-        tableId: table._id!,
-        query: {},
-        fields: ["name", "age"],
-      })
-      expect(result.rows).toHaveLength(10)
-      for (const row of result.rows) {
-        const keys = Object.keys(row)
-        expect(keys).toContain("name")
-        expect(keys).not.toContain("age")
-        expect(keys).not.toContain("surname")
-        expect(keys).not.toContain("address")
-      }
-    })
-  })
-
-  it.each([
-    [["id", "name", "age"], 3],
-    [["name", "age"], 10],
-  ])(
-    "cannot query by non search fields (fields: %s)",
-    async (queryFields, expectedRows) => {
-      await config.doInContext(config.appId, async () => {
-        const { rows } = await search({
-          tableId: table._id!,
-          query: {
-            $or: {
-              conditions: [
-                {
-                  $and: {
-                    conditions: [
-                      { range: { id: { low: 2, high: 4 } } },
-                      { range: { id: { low: 3, high: 5 } } },
-                    ],
-                  },
-                },
-                { equal: { id: 7 } },
-              ],
-            },
-          },
-          fields: queryFields,
-        })
-
-        expect(rows).toHaveLength(expectedRows)
-      })
-    }
-  )
-})
+datasourceDescribe(
+  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
+  ({ config, dsProvider, isInternal }) => {
+    let datasource: Datasource | undefined
+    let table: Table
+
+    beforeAll(async () => {
+      const ds = await dsProvider
+      datasource = ds.datasource
+    })
+
+    beforeEach(async () => {
+      const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata =
+        isInternal
+          ? {
+              name: "id",
+              type: FieldType.AUTO,
+              subtype: AutoFieldSubType.AUTO_ID,
+              autocolumn: true,
+            }
+          : {
+              name: "id",
+              type: FieldType.NUMBER,
+              autocolumn: true,
+            }
+
+      table = await config.api.table.save(
+        tableForDatasource(datasource, {
+          primary: ["id"],
+          schema: {
+            id: idFieldSchema,
+            name: {
+              name: "name",
+              type: FieldType.STRING,
+            },
+            surname: {
+              name: "surname",
+              type: FieldType.STRING,
+            },
+            age: {
+              name: "age",
+              type: FieldType.NUMBER,
+            },
+            address: {
+              name: "address",
+              type: FieldType.STRING,
+            },
+          },
+        })
+      )
+
+      for (let i = 0; i < 10; i++) {
+        await config.api.row.save(table._id!, {
+          name: generator.first(),
+          surname: generator.last(),
+          age: generator.age(),
+          address: generator.address(),
+        })
+      }
+    })
+
+    afterAll(async () => {
+      config.end()
+    })
+
+    it("querying by fields will always return data attribute columns", async () => {
+      await config.doInContext(config.appId, async () => {
+        const { rows } = await search({
+          tableId: table._id!,
+          query: {},
+          fields: ["name", "age"],
+        })
+
+        expect(rows).toHaveLength(10)
+        for (const row of rows) {
+          const keys = Object.keys(row)
+          expect(keys).toContain("name")
+          expect(keys).toContain("age")
+          expect(keys).not.toContain("surname")
+          expect(keys).not.toContain("address")
+        }
+      })
+    })
+
+    !isInternal &&
+      it("will decode _id in oneOf query", async () => {
+        await config.doInContext(config.appId, async () => {
+          const result = await search({
+            tableId: table._id!,
+            query: {
+              oneOf: {
+                _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
+              },
+            },
+          })
+
+          expect(result.rows).toHaveLength(3)
+          expect(result.rows.map(row => row.id)).toEqual(
+            expect.arrayContaining([1, 4, 8])
+          )
+        })
+      })
+
+    it("does not allow accessing hidden fields", async () => {
+      await config.doInContext(config.appId, async () => {
+        await config.api.table.save({
+          ...table,
+          schema: {
+            ...table.schema,
+            name: {
+              ...table.schema.name,
+              visible: true,
+            },
+            age: {
+              ...table.schema.age,
+              visible: false,
+            },
+          },
+        })
+        const result = await search({
+          tableId: table._id!,
+          query: {},
+        })
+        expect(result.rows).toHaveLength(10)
+        for (const row of result.rows) {
+          const keys = Object.keys(row)
+          expect(keys).toContain("name")
+          expect(keys).toContain("surname")
+          expect(keys).toContain("address")
+          expect(keys).not.toContain("age")
+        }
+      })
+    })
+
+    it("does not allow accessing hidden fields even if requested", async () => {
+      await config.doInContext(config.appId, async () => {
+        await config.api.table.save({
+          ...table,
+          schema: {
+            ...table.schema,
+            name: {
+              ...table.schema.name,
+              visible: true,
+            },
+            age: {
+              ...table.schema.age,
+              visible: false,
+            },
+          },
+        })
+        const result = await search({
+          tableId: table._id!,
+          query: {},
+          fields: ["name", "age"],
+        })
+        expect(result.rows).toHaveLength(10)
+        for (const row of result.rows) {
+          const keys = Object.keys(row)
+          expect(keys).toContain("name")
+          expect(keys).not.toContain("age")
+          expect(keys).not.toContain("surname")
+          expect(keys).not.toContain("address")
+        }
+      })
+    })
+
+    it.each([
+      [["id", "name", "age"], 3],
+      [["name", "age"], 10],
+    ])(
+      "cannot query by non search fields (fields: %s)",
+      async (queryFields, expectedRows) => {
+        await config.doInContext(config.appId, async () => {
+          const { rows } = await search({
+            tableId: table._id!,
+            query: {
+              $or: {
+                conditions: [
+                  {
+                    $and: {
+                      conditions: [
+                        { range: { id: { low: 2, high: 4 } } },
+                        { range: { id: { low: 3, high: 5 } } },
+                      ],
+                    },
+                  },
+                  { equal: { id: 7 } },
+                ],
+              },
+            },
+            fields: queryFields,
+          })
+
+          expect(rows).toHaveLength(expectedRows)
+        })
+      }
+    )
+  }
+)
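One detail the search sdk conversion above shows that the mysql and postgres files do not: `datasourceDescribe` can also fan a suite out across datasources. The old hand-rolled `describe.each` table (internal, Postgres, MySQL, SQL Server, MariaDB) becomes an `exclude` option, and the callback's `isInternal` flag replaces the old `name === "internal"` check. A condensed sketch of that gating, again using only constructs visible in the diff:

```ts
datasourceDescribe(
  // Runs against every supported datasource except MongoDB, plus the
  // internal store (which is why `isInternal` exists on the callback args).
  { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] },
  ({ dsProvider, isInternal }) => {
    // External-only assertion, skipped entirely on the internal run.
    !isInternal &&
      it("resolves an external datasource", async () => {
        const ds = await dsProvider
        expect(ds.datasource).toBeDefined()
      })
  }
)
```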
new file

@@ -0,0 +1,9 @@
+import { DATASOURCE_TEST_FILES } from "."
+
+export default (paths: string[]) => {
+  return {
+    filtered: paths
+      .filter(path => DATASOURCE_TEST_FILES.includes(path))
+      .map(path => ({ test: path })),
+  }
+}
new file

@@ -0,0 +1,13 @@
+export const DATASOURCE_TEST_FILES = [
+  "src/integration-test/mysql.spec.ts",
+  "src/integration-test/postgres.spec.ts",
+  "src/api/routes/tests/queries/generic-sql.spec.ts",
+  "src/sdk/app/rows/search/tests/search.spec.ts",
+  "src/api/routes/tests/queries/mongodb.spec.ts",
+  "src/api/routes/tests/search.spec.ts",
+  "src/api/routes/tests/datasource.spec.ts",
+  "src/api/routes/tests/viewV2.spec.ts",
+  "src/api/routes/tests/row.spec.ts",
+  "src/api/routes/tests/rowAction.spec.ts",
+  "src/api/routes/tests/table.spec.ts",
+]
new file

@@ -0,0 +1,9 @@
+import { DATASOURCE_TEST_FILES } from "."
+
+export default (paths: string[]) => {
+  return {
+    filtered: paths
+      .filter(path => !DATASOURCE_TEST_FILES.includes(path))
+      .map(path => ({ test: path })),
+  }
+}
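The three new modules above (their paths are not preserved in this extract) split the suite in two: a shared list of datasource-heavy spec files, a filter that keeps only those files, and a filter that keeps everything else. The default export matches the contract of Jest's `--filter` option, which passes the module every collected test path and runs only what comes back under `filtered`. A sketch of that contract and the presumed usage (the invocation file names are illustrative, not from this diff):

```ts
// The shape Jest's --filter option expects the module to export: it receives
// every collected test path and returns the subset that should actually run.
type JestFilterResult = { filtered: { test: string }[] }
type JestFilter = (testPaths: string[]) => JestFilterResult

// Presumed usage, e.g. as two CI jobs:
//   jest --filter=./filters/datasource-tests.js      # datasource suites only
//   jest --filter=./filters/non-datasource-tests.js  # everything else
```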