Merge pull request #13734 from Budibase/delete-sql-spec-ts

Making progress towards deleting sql.spec.ts.
Sam Rose 2024-05-21 15:51:03 +01:00 committed by GitHub
commit 4826138e6b
8 changed files with 246 additions and 915 deletions


@@ -16,6 +16,7 @@ import {
Table,
TableSchema,
User,
Row,
} from "@budibase/types"
import _ from "lodash"
import tk from "timekeeper"
@@ -629,6 +630,19 @@ describe.each([
it("fails to find nonexistent row", () =>
expectQuery({ equal: { name: "none" } }).toFindNothing())
it("works as an or condition", () =>
expectQuery({
allOr: true,
equal: { name: "foo" },
oneOf: { name: ["bar"] },
}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
it("can have multiple values for same column", () =>
expectQuery({
allOr: true,
equal: { "1:name": "foo", "2:name": "bar" },
}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
})
describe("notEqual", () => {
@@ -663,6 +677,21 @@ describe.each([
expectQuery({ fuzzy: { name: "none" } }).toFindNothing())
})
describe("string", () => {
it("successfully finds a row", () =>
expectQuery({ string: { name: "fo" } }).toContainExactly([
{ name: "foo" },
]))
it("fails to find nonexistent row", () =>
expectQuery({ string: { name: "none" } }).toFindNothing())
it("is case-insensitive", () =>
expectQuery({ string: { name: "FO" } }).toContainExactly([
{ name: "foo" },
]))
})
describe("range", () => {
it("successfully finds multiple rows", () =>
expectQuery({
@@ -1267,5 +1296,57 @@ describe.each([
{ auto: 1 },
]))
})
// TODO(samwho): fix for SQS
!isSqs &&
describe("pagination", () => {
it("should paginate through all rows", async () => {
// @ts-ignore
let bookmark: string | number = undefined
let rows: Row[] = []
// eslint-disable-next-line no-constant-condition
while (true) {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
limit: 3,
query: {},
bookmark,
paginate: true,
})
rows.push(...response.rows)
if (!response.bookmark || !response.hasNextPage) {
break
}
bookmark = response.bookmark
}
expect(rows).toHaveLength(10)
expect(rows.map(row => row.auto)).toEqual(
expect.arrayContaining([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
)
})
})
})
describe("field name 1:name", () => {
beforeAll(async () => {
await createTable({
"1:name": { name: "1:name", type: FieldType.STRING },
})
await createRows([{ "1:name": "bar" }, { "1:name": "foo" }])
})
describe("equal", () => {
it("successfully finds a row", () =>
expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([
{ "1:name": "bar" },
]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing())
})
})
})
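(Not part of the diff: a short note on the "N:" key prefix exercised by the tests above. In this search query format, a numeric prefix lets the same column appear more than once in a single filter and is stripped before matching, which is why a column literally named "1:name" has to be addressed as "1:1:name".)
// Two conditions on the same "name" column, combined with allOr:
const multiValueQuery = { allOr: true, equal: { "1:name": "foo", "2:name": "bar" } }
// Querying a column whose actual name is "1:name": the leading "1:" is the prefix,
// the remainder is the column name.
const prefixedColumnQuery = { equal: { "1:1:name": "bar" } }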


@@ -52,14 +52,24 @@ describe.each([
jest.clearAllMocks()
})
it("creates a table successfully", async () => {
const name = generator.guid()
it.each([
"alphanum",
"with spaces",
"with-dashes",
"with_underscores",
'with "double quotes"',
"with 'single quotes'",
"with `backticks`",
])("creates a table with name: %s", async name => {
const table = await config.api.table.save(
tableForDatasource(datasource, { name })
)
expect(table.name).toEqual(name)
expect(events.table.created).toHaveBeenCalledTimes(1)
expect(events.table.created).toHaveBeenCalledWith(table)
const res = await config.api.table.get(table._id!)
expect(res.name).toEqual(name)
})
it("creates a table via data import", async () => {


@@ -66,38 +66,6 @@ function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
}
}
function generateUpdateJson({
table = TABLE_NAME,
body = {},
filters = {},
meta = {},
}: {
table: string
body?: any
filters?: any
meta?: any
}): QueryJson {
if (!meta.table) {
meta.table = TABLE
}
return {
endpoint: endpoint(table, "UPDATE"),
filters,
body,
meta,
}
}
function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
return {
endpoint: endpoint(table, "DELETE"),
meta: {
table: TABLE,
},
filters,
}
}
function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
return {
endpoint: {
@@ -178,81 +146,6 @@ describe("SQL query builder", () => {
sql = new Sql(client, limit)
})
it("should test a basic read", () => {
const query = sql._query(generateReadJson())
expect(query).toEqual({
bindings: [limit],
sql: `select * from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`,
})
})
it("should test a read with specific columns", () => {
const nameProp = `${TABLE_NAME}.name`,
ageProp = `${TABLE_NAME}.age`
const query = sql._query(
generateReadJson({
fields: [nameProp, ageProp],
})
)
expect(query).toEqual({
bindings: [limit],
sql: `select "${TABLE_NAME}"."name" as "${nameProp}", "${TABLE_NAME}"."age" as "${ageProp}" from (select * from "${TABLE_NAME}" limit $1) as "${TABLE_NAME}"`,
})
})
it("should test a where string starts with read", () => {
const query = sql._query(
generateReadJson({
filters: {
string: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: ["John%", limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."name" ilike $1 limit $2) as "${TABLE_NAME}"`,
})
})
it("should test a where range read", () => {
const query = sql._query(
generateReadJson({
filters: {
range: {
age: {
low: 2,
high: 10,
},
},
},
})
)
expect(query).toEqual({
bindings: [2, 10, limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age" between $1 and $2 limit $3) as "${TABLE_NAME}"`,
})
})
it("should test for multiple IDs with OR", () => {
const query = sql._query(
generateReadJson({
filters: {
equal: {
age: 10,
name: "John",
},
allOr: true,
},
})
)
expect(query).toEqual({
bindings: [10, "John", limit],
sql: `select * from (select * from "${TABLE_NAME}" where ("${TABLE_NAME}"."age" = $1) or ("${TABLE_NAME}"."name" = $2) limit $3) as "${TABLE_NAME}"`,
})
})
it("should allow filtering on a related field", () => {
const query = sql._query(
generateReadJson({
@@ -271,260 +164,6 @@ describe("SQL query builder", () => {
})
})
it("should test an create statement", () => {
const query = sql._query(
generateCreateJson(TABLE_NAME, {
name: "Michael",
age: 45,
})
)
expect(query).toEqual({
bindings: [45, "Michael"],
sql: `insert into "${TABLE_NAME}" ("age", "name") values ($1, $2) returning *`,
})
})
it("should test an update statement", () => {
const query = sql._query(
generateUpdateJson({
table: TABLE_NAME,
body: {
name: "John",
},
filters: {
equal: {
id: 1001,
},
},
})
)
expect(query).toEqual({
bindings: ["John", 1001],
sql: `update "${TABLE_NAME}" set "name" = $1 where "${TABLE_NAME}"."id" = $2 returning *`,
})
})
it("should test a delete statement", () => {
const query = sql._query(
generateDeleteJson(TABLE_NAME, {
equal: {
id: 1001,
},
})
)
expect(query).toEqual({
bindings: [1001],
sql: `delete from "${TABLE_NAME}" where "${TABLE_NAME}"."id" = $1 returning *`,
})
})
it("should work with MS-SQL", () => {
const query = new Sql(SqlClient.MS_SQL, 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from (select top (@p0) * from [${TABLE_NAME}]) as [${TABLE_NAME}]`,
})
})
it("should work with MySQL", () => {
const query = new Sql(SqlClient.MY_SQL, 10)._query(generateReadJson())
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from \`${TABLE_NAME}\` limit ?) as \`${TABLE_NAME}\``,
})
})
it("should use greater than when only low range specified", () => {
const date = new Date()
const query = sql._query(
generateReadJson({
filters: {
range: {
property: {
low: date,
},
},
},
})
)
expect(query).toEqual({
bindings: [date, limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`,
})
})
it("should use less than when only high range specified", () => {
const date = new Date()
const query = sql._query(
generateReadJson({
filters: {
range: {
property: {
high: date,
},
},
},
})
)
expect(query).toEqual({
bindings: [date, limit],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`,
})
})
it("should use AND like expression for MS-SQL when filter is contains", () => {
const query = new Sql(SqlClient.MS_SQL, 10)._query(
generateReadJson({
filters: {
contains: {
age: [20, 25],
name: ["John", "Mary"],
},
},
})
)
expect(query).toEqual({
bindings: [10, "%20%", "%25%", `%"john"%`, `%"mary"%`],
sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER([${TABLE_NAME}].[age]) LIKE @p1 AND LOWER([${TABLE_NAME}].[age]) LIKE @p2) and (LOWER([${TABLE_NAME}].[name]) LIKE @p3 AND LOWER([${TABLE_NAME}].[name]) LIKE @p4)) as [${TABLE_NAME}]`,
})
})
it("should use JSON_CONTAINS expression for MySQL when filter is contains", () => {
const query = new Sql(SqlClient.MY_SQL, 10)._query(
generateReadJson({
filters: {
contains: {
age: [20],
name: ["John"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from \`${TABLE_NAME}\` where JSON_CONTAINS(${TABLE_NAME}.age, '[20]') and JSON_CONTAINS(${TABLE_NAME}.name, '["John"]') limit ?) as \`${TABLE_NAME}\``,
})
})
it("should use jsonb operator expression for PostgreSQL when filter is contains", () => {
const query = new Sql(SqlClient.POSTGRES, 10)._query(
generateReadJson({
filters: {
contains: {
age: [20],
name: ["John"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age"::jsonb @> '[20]' and "${TABLE_NAME}"."name"::jsonb @> '["John"]' limit $1) as "${TABLE_NAME}"`,
})
})
it("should use NOT like expression for MS-SQL when filter is notContains", () => {
const query = new Sql(SqlClient.MS_SQL, 10)._query(
generateReadJson({
filters: {
notContains: {
age: [20],
name: ["John"],
},
},
})
)
expect(query).toEqual({
bindings: [10, "%20%", `%"john"%`],
sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where NOT (LOWER([${TABLE_NAME}].[age]) LIKE @p1) and NOT (LOWER([${TABLE_NAME}].[name]) LIKE @p2)) as [${TABLE_NAME}]`,
})
})
it("should use NOT JSON_CONTAINS expression for MySQL when filter is notContains", () => {
const query = new Sql(SqlClient.MY_SQL, 10)._query(
generateReadJson({
filters: {
notContains: {
age: [20],
name: ["John"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from \`${TABLE_NAME}\` where NOT JSON_CONTAINS(${TABLE_NAME}.age, '[20]') and NOT JSON_CONTAINS(${TABLE_NAME}.name, '["John"]') limit ?) as \`${TABLE_NAME}\``,
})
})
it("should use jsonb operator NOT expression for PostgreSQL when filter is notContains", () => {
const query = new Sql(SqlClient.POSTGRES, 10)._query(
generateReadJson({
filters: {
notContains: {
age: [20],
name: ["John"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from "${TABLE_NAME}" where NOT "${TABLE_NAME}"."age"::jsonb @> '[20]' and NOT "${TABLE_NAME}"."name"::jsonb @> '["John"]' limit $1) as "${TABLE_NAME}"`,
})
})
it("should use OR like expression for MS-SQL when filter is containsAny", () => {
const query = new Sql(SqlClient.MS_SQL, 10)._query(
generateReadJson({
filters: {
containsAny: {
age: [20, 25],
name: ["John", "Mary"],
},
},
})
)
expect(query).toEqual({
bindings: [10, "%20%", "%25%", `%"john"%`, `%"mary"%`],
sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER([${TABLE_NAME}].[age]) LIKE @p1 OR LOWER([${TABLE_NAME}].[age]) LIKE @p2) and (LOWER([${TABLE_NAME}].[name]) LIKE @p3 OR LOWER([${TABLE_NAME}].[name]) LIKE @p4)) as [${TABLE_NAME}]`,
})
})
it("should use JSON_OVERLAPS expression for MySQL when filter is containsAny", () => {
const query = new Sql(SqlClient.MY_SQL, 10)._query(
generateReadJson({
filters: {
containsAny: {
age: [20, 25],
name: ["John", "Mary"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from \`${TABLE_NAME}\` where JSON_OVERLAPS(${TABLE_NAME}.age, '[20,25]') and JSON_OVERLAPS(${TABLE_NAME}.name, '["John","Mary"]') limit ?) as \`${TABLE_NAME}\``,
})
})
it("should use ?| operator expression for PostgreSQL when filter is containsAny", () => {
const query = new Sql(SqlClient.POSTGRES, 10)._query(
generateReadJson({
filters: {
containsAny: {
age: [20, 25],
name: ["John", "Mary"],
},
},
})
)
expect(query).toEqual({
bindings: [10],
sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."age"::jsonb ?| array [20,25] and "${TABLE_NAME}"."name"::jsonb ?| array ['John','Mary'] limit $1) as "${TABLE_NAME}"`,
})
})
it("should add the schema to the LEFT JOIN", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({
@@ -551,42 +190,6 @@ describe("SQL query builder", () => {
})
})
it("should handle table names with dashes when performing a LIKE in MySQL", () => {
const tableName = "Table-Name-With-Dashes"
const query = new Sql(SqlClient.MY_SQL, limit)._query(
generateReadJson({
table: tableName,
filters: {
string: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: ["john%", limit],
sql: `select * from (select * from \`${tableName}\` where LOWER(\`${tableName}\`.\`name\`) LIKE ? limit ?) as \`${tableName}\``,
})
})
it("should handle table names with dashes when performing a LIKE in SQL Server", () => {
const tableName = "Table-Name-With-Dashes"
const query = new Sql(SqlClient.MS_SQL, limit)._query(
generateReadJson({
table: tableName,
filters: {
string: {
name: "John",
},
},
})
)
expect(query).toEqual({
bindings: [limit, "john%"],
sql: `select * from (select top (@p0) * from [${tableName}] where LOWER([${tableName}].[name]) LIKE @p1) as [${tableName}]`,
})
})
it("should ignore high range value if it is an empty object", () => {
const query = sql._query(
generateReadJson({
@@ -709,99 +312,4 @@ describe("SQL query builder", () => {
sql: `insert into "test" ("name") values ($1) returning *`,
})
})
it("should be able to rename column for MySQL", () => {
const table: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: TABLE_NAME,
schema: {
first_name: {
type: FieldType.STRING,
name: "first_name",
externalType: "varchar(45)",
},
},
sourceId: "SOURCE_ID",
}
const oldTable: Table = {
...table,
schema: {
name: {
type: FieldType.STRING,
name: "name",
externalType: "varchar(45)",
},
},
}
const query = new Sql(SqlClient.MY_SQL, limit)._query({
table,
endpoint: {
datasourceId: "MySQL",
operation: Operation.UPDATE_TABLE,
entityId: TABLE_NAME,
},
meta: {
table: oldTable,
tables: { [oldTable.name]: oldTable },
renamed: {
old: "name",
updated: "first_name",
},
},
})
expect(query).toEqual({
bindings: [],
sql: `alter table \`${TABLE_NAME}\` rename column \`name\` to \`first_name\`;`,
})
})
it("should be able to delete a column", () => {
const table: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
name: TABLE_NAME,
schema: {
first_name: {
type: FieldType.STRING,
name: "first_name",
externalType: "varchar(45)",
},
},
sourceId: "SOURCE_ID",
}
const oldTable: Table = {
...table,
schema: {
first_name: {
type: FieldType.STRING,
name: "first_name",
externalType: "varchar(45)",
},
last_name: {
type: FieldType.STRING,
name: "last_name",
externalType: "varchar(45)",
},
},
}
const query = sql._query({
table,
endpoint: {
datasourceId: "Postgres",
operation: Operation.UPDATE_TABLE,
entityId: TABLE_NAME,
},
meta: {
table: oldTable,
tables: [oldTable],
},
})
expect(query).toEqual([
{
bindings: [],
sql: `alter table "${TABLE_NAME}" drop column "last_name"`,
},
])
})
})


@@ -26,6 +26,7 @@ import {
} from "../../../../db/utils"
import AliasTables from "../sqlAlias"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import pick from "lodash/pick"
function buildInternalFieldList(
table: Table,
@@ -186,13 +187,19 @@ export async function search(
}
)
return {
// final row processing for response
const output = {
rows: await outputProcessing<Row[]>(table, processed, {
preserveLinks: true,
squash: true,
}),
}
if (options.fields) {
const fields = [...options.fields, ...CONSTANT_INTERNAL_ROW_COLS]
output.rows = output.rows.map((r: any) => pick(r, fields))
}
return output
} catch (err: any) {
const msg = typeof err === "string" ? err : err.message
if (err.status === 404 && err.message?.includes(SQLITE_DESIGN_DOC_ID)) {
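(A minimal standalone sketch of the field-filtering step added above, not part of the diff. The literal value of CONSTANT_INTERNAL_ROW_COLS below is an assumption for illustration only; the real constant and the lodash pick import come from the Budibase codebase.)
import pick from "lodash/pick"
// Assumed internal columns, for illustration only.
const CONSTANT_INTERNAL_ROW_COLS = ["_id", "_rev", "type", "createdAt", "updatedAt", "tableId"]
const row = { _id: "ro_1", _rev: "1-abc", type: "row", name: "foo", age: 30, surname: "bar" }
const requestedFields = ["name", "age"]
// The requested fields are extended with the internal columns before picking, so a row keeps
// its metadata (_id, _rev, ...) even when the caller only asked for a subset of columns.
const filtered = pick(row, [...requestedFields, ...CONSTANT_INTERNAL_ROW_COLS])
// => { _id: "ro_1", _rev: "1-abc", type: "row", name: "foo", age: 30 }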


@@ -1,166 +0,0 @@
import { GenericContainer } from "testcontainers"
import {
Datasource,
FieldType,
Row,
SourceName,
Table,
RowSearchParams,
TableSourceType,
} from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../external"
import {
expectAnyExternalColsAttributes,
generator,
} from "@budibase/backend-core/tests"
describe("external search", () => {
const config = new TestConfiguration()
let externalDatasource: Datasource, tableData: Table
const rows: Row[] = []
beforeAll(async () => {
const container = await new GenericContainer("mysql:8.3")
.withExposedPorts(3306)
.withEnvironment({
MYSQL_ROOT_PASSWORD: "admin",
MYSQL_DATABASE: "db",
MYSQL_USER: "user",
MYSQL_PASSWORD: "password",
})
.start()
const host = container.getHost()
const port = container.getMappedPort(3306)
await config.init()
externalDatasource = await config.createDatasource({
datasource: {
type: "datasource",
name: "Test",
source: SourceName.MYSQL,
plus: true,
config: {
host,
port,
user: "user",
database: "db",
password: "password",
rejectUnauthorized: true,
},
},
})
tableData = {
name: generator.word(),
type: "table",
primary: ["id"],
sourceId: externalDatasource._id!,
sourceType: TableSourceType.EXTERNAL,
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
}
const table = await config.createExternalTable({
...tableData,
sourceId: externalDatasource._id,
})
for (let i = 0; i < 10; i++) {
rows.push(
await config.createRow({
tableId: table._id,
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
})
)
}
})
it("default search returns all the data", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!
const searchParams: RowSearchParams = {
tableId,
query: {},
}
const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
expect.arrayContaining(rows.map(r => expect.objectContaining(r)))
)
})
})
it("querying by fields will always return data attribute columns", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!
const searchParams: RowSearchParams = {
tableId,
query: {},
fields: ["name", "age"],
}
const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
expect.arrayContaining(
rows.map(r => ({
...expectAnyExternalColsAttributes,
name: r.name,
age: r.age,
}))
)
)
})
})
it("will decode _id in oneOf query", async () => {
await config.doInContext(config.appId, async () => {
const tableId = config.table!._id!
const searchParams: RowSearchParams = {
tableId,
query: {
oneOf: {
_id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
},
},
}
const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(3)
expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
})
})
})


@@ -1,249 +0,0 @@
const nodeFetch = require("node-fetch")
nodeFetch.mockSearch()
import * as search from "../utils"
import { RowSearchParams, SortOrder, SortType } from "@budibase/types"
// this will be mocked out for _search endpoint
const PARAMS: RowSearchParams = {
query: {},
tableId: "ta_12345679abcdef",
version: "1",
bookmark: undefined,
sort: undefined,
sortOrder: SortOrder.ASCENDING,
sortType: SortType.STRING,
}
function checkLucene(resp: any, expected: any, params = PARAMS) {
const query = resp.rows[0].query
const json = JSON.parse(query)
if (PARAMS.sort) {
expect(json.sort).toBe(`${PARAMS.sort}<${PARAMS.sortType}>`)
}
if (PARAMS.bookmark) {
expect(json.bookmark).toBe(PARAMS.bookmark)
}
expect(json.include_docs).toBe(true)
expect(json.q).toBe(`${expected} AND tableId:"${params.tableId}"`)
expect(json.limit).toBe(params.limit || 50)
}
describe("internal search", () => {
it("default query", async () => {
const response = await search.paginatedSearch({}, PARAMS)
checkLucene(response, `*:*`)
})
it("test equal query", async () => {
const response = await search.paginatedSearch(
{
equal: {
column: "1",
},
},
PARAMS
)
checkLucene(response, `*:* AND column:"1"`)
})
it("test notEqual query", async () => {
const response = await search.paginatedSearch(
{
notEqual: {
column: "1",
},
},
PARAMS
)
checkLucene(response, `*:* AND !column:"1"`)
})
it("test OR query", async () => {
const response = await search.paginatedSearch(
{
allOr: true,
equal: {
column: "2",
},
notEqual: {
column: "1",
},
},
PARAMS
)
checkLucene(response, `(column:"2" OR !column:"1")`)
})
it("test AND query", async () => {
const response = await search.paginatedSearch(
{
equal: {
column: "2",
},
notEqual: {
column: "1",
},
},
PARAMS
)
checkLucene(response, `(*:* AND column:"2" AND !column:"1")`)
})
it("test pagination query", async () => {
const updatedParams = {
...PARAMS,
limit: 100,
bookmark: "awd",
sort: "column",
}
const response = await search.paginatedSearch(
{
string: {
column: "2",
},
},
updatedParams
)
checkLucene(response, `*:* AND column:2*`, updatedParams)
})
it("test range query", async () => {
const response = await search.paginatedSearch(
{
range: {
column: { low: 1, high: 2 },
},
},
PARAMS
)
checkLucene(response, `*:* AND column:[1 TO 2]`, PARAMS)
})
it("test empty query", async () => {
const response = await search.paginatedSearch(
{
empty: {
column: "",
},
},
PARAMS
)
checkLucene(response, `*:* AND (*:* -column:["" TO *])`, PARAMS)
})
it("test notEmpty query", async () => {
const response = await search.paginatedSearch(
{
notEmpty: {
column: "",
},
},
PARAMS
)
checkLucene(response, `*:* AND column:["" TO *]`, PARAMS)
})
it("test oneOf query", async () => {
const response = await search.paginatedSearch(
{
oneOf: {
column: ["a", "b"],
},
},
PARAMS
)
checkLucene(response, `*:* AND column:("a" OR "b")`, PARAMS)
})
it("test contains query", async () => {
const response = await search.paginatedSearch(
{
contains: {
column: ["a"],
colArr: [1, 2, 3],
},
},
PARAMS
)
checkLucene(
response,
`(*:* AND column:(a) AND colArr:(1 AND 2 AND 3))`,
PARAMS
)
})
it("test multiple of same column", async () => {
const response = await search.paginatedSearch(
{
allOr: true,
equal: {
"1:column": "a",
"2:column": "b",
"3:column": "c",
},
},
PARAMS
)
checkLucene(response, `(column:"a" OR column:"b" OR column:"c")`, PARAMS)
})
it("check a weird case for lucene building", async () => {
const response = await search.paginatedSearch(
{
equal: {
"1:1:column": "a",
},
},
PARAMS
)
checkLucene(response, `*:* AND 1\\:column:"a"`, PARAMS)
})
it("test containsAny query", async () => {
const response = await search.paginatedSearch(
{
containsAny: {
column: ["a", "b", "c"],
},
},
PARAMS
)
checkLucene(response, `*:* AND column:(a OR b OR c)`, PARAMS)
})
it("test notContains query", async () => {
const response = await search.paginatedSearch(
{
notContains: {
column: ["a", "b", "c"],
},
},
PARAMS
)
checkLucene(response, `*:* AND NOT column:(a AND b AND c)`, PARAMS)
})
it("test equal without version query", async () => {
PARAMS.version = undefined
const response = await search.paginatedSearch(
{
equal: {
column: "1",
},
},
PARAMS
)
const query = response.rows[0].query
const json = JSON.parse(query)
if (PARAMS.sort) {
expect(json.sort).toBe(`${PARAMS.sort}<${PARAMS.sortType}>`)
}
if (PARAMS.bookmark) {
expect(json.bookmark).toBe(PARAMS.bookmark)
}
expect(json.include_docs).toBe(true)
expect(json.q).toBe(`*:* AND column:"1" AND tableId:${PARAMS.tableId}`)
})
})


@@ -0,0 +1,131 @@
import { Datasource, FieldType, Row, Table } from "@budibase/types"
import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import {
DatabaseName,
getDatasource,
} from "../../../../../integrations/tests/utils"
import { tableForDatasource } from "../../../../../tests/utilities/structures"
// These test cases are only for things that cannot be tested through the API
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
describe.each([
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("search sdk (%s)", (name, dsProvider) => {
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isLucene || isSqs
const config = new TestConfiguration()
let envCleanup: (() => void) | undefined
let datasource: Datasource | undefined
let table: Table
let rows: Row[]
beforeAll(async () => {
if (isSqs) {
envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
}
await config.init()
if (dsProvider) {
datasource = await config.createDatasource({
datasource: await dsProvider,
})
}
table = await config.api.table.save(
tableForDatasource(datasource, {
primary: ["id"],
schema: {
id: {
name: "id",
type: FieldType.AUTO,
autocolumn: true,
},
name: {
name: "name",
type: FieldType.STRING,
},
surname: {
name: "surname",
type: FieldType.STRING,
},
age: {
name: "age",
type: FieldType.NUMBER,
},
address: {
name: "address",
type: FieldType.STRING,
},
},
})
)
rows = []
for (let i = 0; i < 10; i++) {
rows.push(
await config.api.row.save(table._id!, {
name: generator.first(),
surname: generator.last(),
age: generator.age(),
address: generator.address(),
})
)
}
})
afterAll(async () => {
config.end()
if (envCleanup) {
envCleanup()
}
})
it("querying by fields will always return data attribute columns", async () => {
await config.doInContext(config.appId, async () => {
const { rows } = await search({
tableId: table._id!,
query: {},
fields: ["name", "age"],
})
expect(rows).toHaveLength(10)
for (const row of rows) {
const keys = Object.keys(row)
expect(keys).toContain("name")
expect(keys).toContain("age")
expect(keys).not.toContain("surname")
expect(keys).not.toContain("address")
}
})
})
!isInternal &&
it("will decode _id in oneOf query", async () => {
await config.doInContext(config.appId, async () => {
const result = await search({
tableId: table._id!,
query: {
oneOf: {
_id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"],
},
},
})
expect(result.rows).toHaveLength(3)
expect(result.rows.map(row => row.id)).toEqual(
expect.arrayContaining([1, 4, 8])
)
})
})
})


@@ -10,6 +10,7 @@ import {
RowSearchParams,
DeleteRows,
DeleteRow,
PaginatedSearchRowResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@@ -133,12 +134,20 @@ export class RowAPI extends TestAPI {
)
}
search = async (
search = async <T extends RowSearchParams>(
sourceId: string,
params?: RowSearchParams,
params?: T,
expectations?: Expectations
): Promise<SearchRowResponse> => {
return await this._post<SearchRowResponse>(`/api/${sourceId}/search`, {
): Promise<
T extends { paginate: true }
? PaginatedSearchRowResponse
: SearchRowResponse
> => {
return await this._post<
T extends { paginate: true }
? PaginatedSearchRowResponse
: SearchRowResponse
>(`/api/${sourceId}/search`, {
body: params,
expectations,
})
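(A rough usage sketch, not part of the diff, mirroring the pagination test earlier in this commit: with the conditional return type above, callers that pass paginate: true get a PaginatedSearchRowResponse and can read bookmark/hasNextPage without casts, while plain searches keep the SearchRowResponse type. The config and table values are assumed from the surrounding tests.)
const paged = await config.api.row.search(table._id!, {
  tableId: table._id!,
  query: {},
  paginate: true,
  limit: 3,
})
// Typed as PaginatedSearchRowResponse: pagination fields are available on the type.
const bookmark = paged.bookmark
const hasNextPage = paged.hasNextPage
const plain = await config.api.row.search(table._id!, {
  tableId: table._id!,
  query: {},
})
// Typed as SearchRowResponse: just rows, no pagination fields.
const firstRow = plain.rows[0]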