Merge pull request #13941 from Budibase/budi-8349-in-memory-search-parity-testing
Test in-memory search alongside all of our other search types, to ensure parity.
Commit c6a7bf6ed1
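The core idea of the PR is that every search backend, including the new in-memory path, runs one shared suite of assertions. Below is a condensed sketch of that pattern; the Budibase test harness objects (`config`, `table`) are declared rather than set up here, since the real helpers only appear later in the diff.

```ts
import _ from "lodash"
import { dataFilters } from "@budibase/shared-core"
import { Row, RowSearchParams } from "@budibase/types"

// Provided by the Budibase test harness in the real spec; declared here so the
// sketch stands alone.
declare const config: {
  api: {
    row: {
      search: (
        tableId: string,
        params: RowSearchParams
      ) => Promise<{ rows: Row[] }>
    }
  }
}
declare const table: { _id?: string }

describe.each(["in-memory", "lucene", "sqs"])("search (%s)", name => {
  const isInMemory = name === "in-memory"
  let rows: Row[] = []

  async function performSearch(query: RowSearchParams): Promise<Row[]> {
    if (isInMemory) {
      // Filter a defensive copy locally instead of calling the search API.
      return dataFilters.search(_.cloneDeep(rows), query)
    }
    const response = await config.api.row.search(table._id!, {
      ...query,
      tableId: table._id!,
    })
    return response.rows
  }

  it("returns the same shape regardless of backend", async () => {
    const found = await performSearch({ tableId: table._id!, query: {} })
    expect(Array.isArray(found)).toBe(true)
  })
})
```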
@@ -18,7 +18,6 @@ import {
   SqlQuery,
   RelationshipsJson,
   SearchFilters,
-  SortDirection,
   SqlQueryBinding,
   Table,
   TableSourceType,
@@ -27,6 +26,7 @@ import {
   QueryOptions,
   JsonTypes,
   prefixed,
+  SortOrder,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -420,11 +420,11 @@ class InternalBuilder {
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
-          value.direction === SortDirection.ASCENDING ? "asc" : "desc"
+          value.direction === SortOrder.ASCENDING ? "asc" : "desc"
         let nulls
         if (this.client === SqlClient.POSTGRES) {
           // All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues
-          nulls = value.direction === SortDirection.ASCENDING ? "first" : "last"
+          nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
         }

         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
@@ -1 +1 @@
-Subproject commit 85b4fc9ea01472bf69840d046733ad596ef893e2
+Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6
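For context on the hunk above: the builder keeps passing a `nulls` position only for Postgres, because (as the inline comment notes) the other clients already order NULLs acceptably. A standalone sketch of the same knex call, assuming the three-argument `orderBy` that the builder itself uses:

```ts
import { knex } from "knex"

// Build (not execute) a query the way InternalBuilder does for Postgres,
// where an ascending sort should list NULL values first.
const pg = knex({ client: "pg" })
const query = pg("persons")
  .select("*")
  .orderBy("persons.firstname", "asc", "first") // third argument = NULLS position

// Roughly: select * from "persons" order by "persons"."firstname" asc nulls first
console.log(query.toString())
```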
@ -22,30 +22,27 @@ import {
|
|||
import _ from "lodash"
|
||||
import tk from "timekeeper"
|
||||
import { encodeJSBinding } from "@budibase/string-templates"
|
||||
import { dataFilters } from "@budibase/shared-core"
|
||||
|
||||
describe.each([
|
||||
["in-memory", undefined],
|
||||
["lucene", undefined],
|
||||
["sqs", undefined],
|
||||
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
|
||||
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
|
||||
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
|
||||
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
|
||||
])("/api/:sourceId/search (%s)", (name, dsProvider) => {
|
||||
])("search (%s)", (name, dsProvider) => {
|
||||
const isSqs = name === "sqs"
|
||||
const isLucene = name === "lucene"
|
||||
const isInMemory = name === "in-memory"
|
||||
const isInternal = isSqs || isLucene
|
||||
const config = setup.getConfig()
|
||||
|
||||
let envCleanup: (() => void) | undefined
|
||||
let datasource: Datasource | undefined
|
||||
let table: Table
|
||||
|
||||
const snippets = [
|
||||
{
|
||||
name: "WeeksAgo",
|
||||
code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`,
|
||||
},
|
||||
]
|
||||
let rows: Row[]
|
||||
|
||||
beforeAll(async () => {
|
||||
if (isSqs) {
|
||||
|
@ -55,7 +52,12 @@ describe.each([
|
|||
|
||||
if (config.app?.appId) {
|
||||
config.app = await config.api.application.update(config.app?.appId, {
|
||||
snippets,
|
||||
snippets: [
|
||||
{
|
||||
name: "WeeksAgo",
|
||||
code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`,
|
||||
},
|
||||
],
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -79,14 +81,30 @@ describe.each([
|
|||
)
|
||||
}
|
||||
|
||||
async function createRows(rows: Record<string, any>[]) {
|
||||
async function createRows(arr: Record<string, any>[]) {
|
||||
// Shuffling to avoid false positives given a fixed order
|
||||
await config.api.row.bulkImport(table._id!, { rows: _.shuffle(rows) })
|
||||
await config.api.row.bulkImport(table._id!, {
|
||||
rows: _.shuffle(arr),
|
||||
})
|
||||
rows = await config.api.row.fetch(table._id!)
|
||||
}
|
||||
|
||||
class SearchAssertion {
|
||||
constructor(private readonly query: RowSearchParams) {}
|
||||
|
||||
private async performSearch(): Promise<Row[]> {
|
||||
if (isInMemory) {
|
||||
return dataFilters.search(_.cloneDeep(rows), this.query)
|
||||
} else {
|
||||
return (
|
||||
await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
).rows
|
||||
}
|
||||
}
|
||||
|
||||
// We originally used _.isMatch to compare rows, but found that when
|
||||
// comparing arrays it would return true if the source array was a subset of
|
||||
// the target array. This would sometimes create false matches. This
|
||||
|
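The row-comparison helper was rewritten to check fields one by one because of the behaviour described in the comment above. A quick illustration of that lodash subset behaviour, for reference:

```ts
import _ from "lodash"

const row = {
  name: "multi user",
  multi_user: [{ _id: "us_a" }, { _id: "us_b" }],
}

// Partial matching treats the source array as a subset, so this "matches" even
// though an element is missing - the false positive described above.
console.log(_.isMatch(row, { multi_user: [{ _id: "us_a" }] })) // true
// A strict comparison of the arrays does not.
console.log(_.isEqual(row.multi_user, [{ _id: "us_a" }])) // false
```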
@ -157,10 +175,7 @@ describe.each([
|
|||
// different to the one passed in will cause the assertion to fail. Extra
|
||||
// rows returned by the query will also cause the assertion to fail.
|
||||
async toMatchExactly(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
const foundRows = await this.performSearch()
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(expectedRows.length)
|
||||
|
@ -176,10 +191,7 @@ describe.each([
|
|||
// passed in. The order of the rows is not important, but extra rows will
|
||||
// cause the assertion to fail.
|
||||
async toContainExactly(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
const foundRows = await this.performSearch()
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(expectedRows.length)
|
||||
|
@ -197,10 +209,7 @@ describe.each([
|
|||
// The order of the rows is not important. Extra rows will not cause the
|
||||
// assertion to fail.
|
||||
async toContain(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
const foundRows = await this.performSearch()
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect([...foundRows]).toEqual(
|
||||
|
@ -217,10 +226,7 @@ describe.each([
|
|||
}
|
||||
|
||||
async toHaveLength(length: number) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
const foundRows = await this.performSearch()
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(length)
|
||||
|
@ -296,351 +302,359 @@ describe.each([
|
|||
})
|
||||
})
|
||||
|
||||
// Ensure all bindings resolve and perform as expected
|
||||
describe("bindings", () => {
|
||||
let globalUsers: any = []
|
||||
// We've decided not to try and support binding for in-memory search just now.
|
||||
!isInMemory &&
|
||||
describe("bindings", () => {
|
||||
let globalUsers: any = []
|
||||
|
||||
const serverTime = new Date()
|
||||
const serverTime = new Date()
|
||||
|
||||
// In MariaDB and MySQL we only store dates to second precision, so we need
|
||||
// to remove milliseconds from the server time to ensure searches work as
|
||||
// expected.
|
||||
serverTime.setMilliseconds(0)
|
||||
// In MariaDB and MySQL we only store dates to second precision, so we need
|
||||
// to remove milliseconds from the server time to ensure searches work as
|
||||
// expected.
|
||||
serverTime.setMilliseconds(0)
|
||||
|
||||
const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
|
||||
const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
|
||||
|
||||
const rows = (currentUser: User) => {
|
||||
return [
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
{ name: currentUser.firstName, appointment: future.toISOString() },
|
||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||
{
|
||||
name: "single user, session user",
|
||||
single_user: JSON.stringify(currentUser),
|
||||
},
|
||||
{
|
||||
name: "single user",
|
||||
single_user: JSON.stringify(globalUsers[0]),
|
||||
},
|
||||
{
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: JSON.stringify([currentUser]),
|
||||
},
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: JSON.stringify([globalUsers[0]]),
|
||||
},
|
||||
{
|
||||
name: "multi user",
|
||||
multi_user: JSON.stringify(globalUsers),
|
||||
},
|
||||
{
|
||||
name: "multi user with session user",
|
||||
multi_user: JSON.stringify([...globalUsers, currentUser]),
|
||||
},
|
||||
{
|
||||
name: "deprecated multi user",
|
||||
deprecated_multi_user: JSON.stringify(globalUsers),
|
||||
},
|
||||
{
|
||||
name: "deprecated multi user with session user",
|
||||
deprecated_multi_user: JSON.stringify([...globalUsers, currentUser]),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
beforeAll(async () => {
|
||||
// Set up some global users
|
||||
globalUsers = await Promise.all(
|
||||
Array(2)
|
||||
.fill(0)
|
||||
.map(async () => {
|
||||
const globalUser = await config.globalUser()
|
||||
const userMedataId = globalUser._id
|
||||
? dbCore.generateUserMetadataID(globalUser._id)
|
||||
: null
|
||||
return {
|
||||
_id: globalUser._id,
|
||||
_meta: userMedataId,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
table = await createTable({
|
||||
name: { name: "name", type: FieldType.STRING },
|
||||
appointment: { name: "appointment", type: FieldType.DATETIME },
|
||||
single_user: {
|
||||
name: "single_user",
|
||||
type: FieldType.BB_REFERENCE_SINGLE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
},
|
||||
deprecated_single_user: {
|
||||
name: "deprecated_single_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
},
|
||||
multi_user: {
|
||||
name: "multi_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
constraints: {
|
||||
type: "array",
|
||||
const rows = (currentUser: User) => {
|
||||
return [
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
{ name: currentUser.firstName, appointment: future.toISOString() },
|
||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||
{
|
||||
name: "single user, session user",
|
||||
single_user: JSON.stringify(currentUser),
|
||||
},
|
||||
},
|
||||
deprecated_multi_user: {
|
||||
name: "deprecated_multi_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USERS,
|
||||
constraints: {
|
||||
type: "array",
|
||||
{
|
||||
name: "single user",
|
||||
single_user: JSON.stringify(globalUsers[0]),
|
||||
},
|
||||
},
|
||||
})
|
||||
await createRows(rows(config.getUser()))
|
||||
})
|
||||
{
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: JSON.stringify([currentUser]),
|
||||
},
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: JSON.stringify([globalUsers[0]]),
|
||||
},
|
||||
{
|
||||
name: "multi user",
|
||||
multi_user: JSON.stringify(globalUsers),
|
||||
},
|
||||
{
|
||||
name: "multi user with session user",
|
||||
multi_user: JSON.stringify([...globalUsers, currentUser]),
|
||||
},
|
||||
{
|
||||
name: "deprecated multi user",
|
||||
deprecated_multi_user: JSON.stringify(globalUsers),
|
||||
},
|
||||
{
|
||||
name: "deprecated multi user with session user",
|
||||
deprecated_multi_user: JSON.stringify([
|
||||
...globalUsers,
|
||||
currentUser,
|
||||
]),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
// !! Current User is auto generated per run
|
||||
it("should return all rows matching the session user firstname", async () => {
|
||||
await expectQuery({
|
||||
equal: { name: "{{ [user].firstName }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: config.getUser().firstName,
|
||||
appointment: future.toISOString(),
|
||||
},
|
||||
])
|
||||
})
|
||||
beforeAll(async () => {
|
||||
// Set up some global users
|
||||
globalUsers = await Promise.all(
|
||||
Array(2)
|
||||
.fill(0)
|
||||
.map(async () => {
|
||||
const globalUser = await config.globalUser()
|
||||
const userMedataId = globalUser._id
|
||||
? dbCore.generateUserMetadataID(globalUser._id)
|
||||
: null
|
||||
return {
|
||||
_id: globalUser._id,
|
||||
_meta: userMedataId,
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
it("should parse the date binding and return all rows after the resolved value", async () => {
|
||||
await tk.withFreeze(serverTime, async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "{{ [now] }}",
|
||||
high: "9999-00-00T00:00:00.000Z",
|
||||
table = await createTable({
|
||||
name: { name: "name", type: FieldType.STRING },
|
||||
appointment: { name: "appointment", type: FieldType.DATETIME },
|
||||
single_user: {
|
||||
name: "single_user",
|
||||
type: FieldType.BB_REFERENCE_SINGLE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
},
|
||||
deprecated_single_user: {
|
||||
name: "deprecated_single_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
},
|
||||
multi_user: {
|
||||
name: "multi_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USER,
|
||||
constraints: {
|
||||
type: "array",
|
||||
},
|
||||
},
|
||||
deprecated_multi_user: {
|
||||
name: "deprecated_multi_user",
|
||||
type: FieldType.BB_REFERENCE,
|
||||
subtype: BBReferenceFieldSubType.USERS,
|
||||
constraints: {
|
||||
type: "array",
|
||||
},
|
||||
},
|
||||
})
|
||||
await createRows(rows(config.getUser()))
|
||||
})
|
||||
|
||||
// !! Current User is auto generated per run
|
||||
it("should return all rows matching the session user firstname", async () => {
|
||||
await expectQuery({
|
||||
equal: { name: "{{ [user].firstName }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: config.getUser().firstName,
|
||||
appointment: future.toISOString(),
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should parse the date binding and return all rows after the resolved value", async () => {
|
||||
await tk.withFreeze(serverTime, async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "{{ [now] }}",
|
||||
high: "9999-00-00T00:00:00.000Z",
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: config.getUser().firstName,
|
||||
appointment: future.toISOString(),
|
||||
},
|
||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
it("should parse the date binding and return all rows before the resolved value", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: "{{ [now] }}",
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
it("should parse the date binding and return all rows before the resolved value", async () => {
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: "{{ [now] }}",
|
||||
it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => {
|
||||
const jsBinding = "return snippets.WeeksAgo();"
|
||||
const encodedBinding = encodeJSBinding(jsBinding)
|
||||
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: encodedBinding,
|
||||
},
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||
])
|
||||
})
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => {
|
||||
const jsBinding = "return snippets.WeeksAgo();"
|
||||
const encodedBinding = encodeJSBinding(jsBinding)
|
||||
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
|
||||
const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();`
|
||||
const encodedBinding = encodeJSBinding(jsBinding)
|
||||
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: encodedBinding,
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: encodedBinding,
|
||||
},
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
])
|
||||
})
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
|
||||
const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();`
|
||||
const encodedBinding = encodeJSBinding(jsBinding)
|
||||
|
||||
await expectQuery({
|
||||
range: {
|
||||
appointment: {
|
||||
low: "0000-00-00T00:00:00.000Z",
|
||||
high: encodedBinding,
|
||||
it("should match a single user row by the session user id", async () => {
|
||||
await expectQuery({
|
||||
equal: { single_user: "{{ [user]._id }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "single user, session user",
|
||||
single_user: { _id: config.getUser()._id },
|
||||
},
|
||||
},
|
||||
}).toContainExactly([
|
||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||
])
|
||||
})
|
||||
])
|
||||
})
|
||||
|
||||
it("should match a single user row by the session user id", async () => {
|
||||
await expectQuery({
|
||||
equal: { single_user: "{{ [user]._id }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "single user, session user",
|
||||
single_user: { _id: config.getUser()._id },
|
||||
},
|
||||
])
|
||||
})
|
||||
it("should match a deprecated single user row by the session user id", async () => {
|
||||
await expectQuery({
|
||||
equal: { deprecated_single_user: "{{ [user]._id }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should match a deprecated single user row by the session user id", async () => {
|
||||
await expectQuery({
|
||||
equal: { deprecated_single_user: "{{ [user]._id }}" },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should match the session user id in a multi user field", async () => {
|
||||
const allUsers = [...globalUsers, config.getUser()].map(
|
||||
(user: any) => {
|
||||
return { _id: user._id }
|
||||
}
|
||||
)
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should match the session user id in a multi user field", async () => {
|
||||
const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
|
||||
return { _id: user._id }
|
||||
await expectQuery({
|
||||
contains: { multi_user: ["{{ [user]._id }}"] },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "multi user with session user",
|
||||
multi_user: allUsers,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
await expectQuery({
|
||||
contains: { multi_user: ["{{ [user]._id }}"] },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "multi user with session user",
|
||||
multi_user: allUsers,
|
||||
},
|
||||
])
|
||||
})
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should match the session user id in a deprecated multi user field", async () => {
|
||||
const allUsers = [...globalUsers, config.getUser()].map(
|
||||
(user: any) => {
|
||||
return { _id: user._id }
|
||||
}
|
||||
)
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should match the session user id in a deprecated multi user field", async () => {
|
||||
const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
|
||||
return { _id: user._id }
|
||||
await expectQuery({
|
||||
contains: { deprecated_multi_user: ["{{ [user]._id }}"] },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated multi user with session user",
|
||||
deprecated_multi_user: allUsers,
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should not match the session user id in a multi user field", async () => {
|
||||
await expectQuery({
|
||||
notContains: { multi_user: ["{{ [user]._id }}"] },
|
||||
notEmpty: { multi_user: true },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "multi user",
|
||||
multi_user: globalUsers.map((user: any) => {
|
||||
return { _id: user._id }
|
||||
}),
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should not match the session user id in a deprecated multi user field", async () => {
|
||||
await expectQuery({
|
||||
notContains: { deprecated_multi_user: ["{{ [user]._id }}"] },
|
||||
notEmpty: { deprecated_multi_user: true },
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated multi user",
|
||||
deprecated_multi_user: globalUsers.map((user: any) => {
|
||||
return { _id: user._id }
|
||||
}),
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => {
|
||||
await expectQuery({
|
||||
contains: { deprecated_multi_user: ["{{ [user]._id }}"] },
|
||||
oneOf: {
|
||||
single_user: [
|
||||
"{{ default [user]._id '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated multi user with session user",
|
||||
deprecated_multi_user: allUsers,
|
||||
name: "single user, session user",
|
||||
single_user: { _id: config.getUser()._id },
|
||||
},
|
||||
{
|
||||
name: "single user",
|
||||
single_user: { _id: globalUsers[0]._id },
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should not match the session user id in a multi user field", async () => {
|
||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id. (deprecated single user)", async () => {
|
||||
await expectQuery({
|
||||
notContains: { multi_user: ["{{ [user]._id }}"] },
|
||||
notEmpty: { multi_user: true },
|
||||
oneOf: {
|
||||
deprecated_single_user: [
|
||||
"{{ default [user]._id '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "multi user",
|
||||
multi_user: globalUsers.map((user: any) => {
|
||||
return { _id: user._id }
|
||||
}),
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||
},
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
// TODO(samwho): fix for SQS
|
||||
!isSqs &&
|
||||
it("should not match the session user id in a deprecated multi user field", async () => {
|
||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => {
|
||||
await expectQuery({
|
||||
notContains: { deprecated_multi_user: ["{{ [user]._id }}"] },
|
||||
notEmpty: { deprecated_multi_user: true },
|
||||
oneOf: {
|
||||
single_user: [
|
||||
"{{ default [user]._idx '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated multi user",
|
||||
deprecated_multi_user: globalUsers.map((user: any) => {
|
||||
return { _id: user._id }
|
||||
}),
|
||||
name: "single user",
|
||||
single_user: { _id: globalUsers[0]._id },
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
single_user: [
|
||||
"{{ default [user]._id '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "single user, session user",
|
||||
single_user: { _id: config.getUser()._id },
|
||||
},
|
||||
{
|
||||
name: "single user",
|
||||
single_user: { _id: globalUsers[0]._id },
|
||||
},
|
||||
])
|
||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
deprecated_single_user: [
|
||||
"{{ default [user]._idx '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id. (deprecated single user)", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
deprecated_single_user: [
|
||||
"{{ default [user]._id '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated single user, session user",
|
||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||
},
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
single_user: [
|
||||
"{{ default [user]._idx '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "single user",
|
||||
single_user: { _id: globalUsers[0]._id },
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => {
|
||||
await expectQuery({
|
||||
oneOf: {
|
||||
deprecated_single_user: [
|
||||
"{{ default [user]._idx '_empty_' }}",
|
||||
globalUsers[0]._id,
|
||||
],
|
||||
},
|
||||
}).toContainExactly([
|
||||
{
|
||||
name: "deprecated single user",
|
||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
|
||||
beforeAll(async () => {
|
||||
table = await createTable({
|
||||
|
@ -1062,13 +1076,13 @@ describe.each([
|
|||
|
||||
!isInternal &&
|
||||
describe("datetime - time only", () => {
|
||||
const T_1000 = "10:00"
|
||||
const T_1045 = "10:45"
|
||||
const T_1200 = "12:00"
|
||||
const T_1530 = "15:30"
|
||||
const T_0000 = "00:00"
|
||||
const T_1000 = "10:00:00"
|
||||
const T_1045 = "10:45:00"
|
||||
const T_1200 = "12:00:00"
|
||||
const T_1530 = "15:30:00"
|
||||
const T_0000 = "00:00:00"
|
||||
|
||||
const UNEXISTING_TIME = "10:01"
|
||||
const UNEXISTING_TIME = "10:01:00"
|
||||
|
||||
const NULL_TIME__ID = `null_time__id`
|
||||
|
||||
|
@ -1262,6 +1276,8 @@ describe.each([
|
|||
{ numbers: ["three"] },
|
||||
]))
|
||||
|
||||
// Not sure if this is correct behaviour but changing it would be a
|
||||
// breaking change.
|
||||
it("finds all with empty list", () =>
|
||||
expectQuery({ notContains: { numbers: [] } }).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
|
@ -1746,9 +1762,12 @@ describe.each([
|
|||
|
||||
// This will never work for Lucene.
|
||||
!isLucene &&
|
||||
// It also can't work for in-memory searching because the related table name
|
||||
// isn't available.
|
||||
!isInMemory &&
|
||||
describe("relations", () => {
|
||||
let otherTable: Table
|
||||
let rows: Row[]
|
||||
let otherRows: Row[]
|
||||
|
||||
beforeAll(async () => {
|
||||
otherTable = await createTable({
|
||||
|
@ -1768,7 +1787,7 @@ describe.each([
|
|||
},
|
||||
})
|
||||
|
||||
rows = await Promise.all([
|
||||
otherRows = await Promise.all([
|
||||
config.api.row.save(otherTable._id!, { one: "foo" }),
|
||||
config.api.row.save(otherTable._id!, { one: "bar" }),
|
||||
])
|
||||
|
@ -1776,18 +1795,22 @@ describe.each([
|
|||
await Promise.all([
|
||||
config.api.row.save(table._id!, {
|
||||
two: "foo",
|
||||
other: [rows[0]._id],
|
||||
other: [otherRows[0]._id],
|
||||
}),
|
||||
config.api.row.save(table._id!, {
|
||||
two: "bar",
|
||||
other: [rows[1]._id],
|
||||
other: [otherRows[1]._id],
|
||||
}),
|
||||
])
|
||||
|
||||
rows = await config.api.row.fetch(table._id!)
|
||||
})
|
||||
|
||||
it("can search through relations", () =>
|
||||
expectQuery({
|
||||
equal: { [`${otherTable.name}.one`]: "foo" },
|
||||
}).toContainExactly([{ two: "foo", other: [{ _id: rows[0]._id }] }]))
|
||||
}).toContainExactly([
|
||||
{ two: "foo", other: [{ _id: otherRows[0]._id }] },
|
||||
]))
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -14,14 +14,10 @@ import {
   EmptyFilterOption,
   SearchFilters,
   Table,
+  SortOrder,
 } from "@budibase/types"
 import { db as dbCore } from "@budibase/backend-core"
 
-enum SortOrder {
-  ASCENDING = "ascending",
-  DESCENDING = "descending",
-}
-
 const SortOrderPretty = {
   [SortOrder.ASCENDING]: "Ascending",
   [SortOrder.DESCENDING]: "Descending",

@@ -70,11 +70,6 @@ export enum DatasourceAuthTypes {
   GOOGLE = "google",
 }
 
-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export const USERS_TABLE_SCHEMA: Table = {
   _id: "ta_users",
   type: "table",
@@ -566,7 +566,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
         query.filters.equal[`_${GOOGLE_SHEETS_PRIMARY_KEY}`] = id
       }
     }
-    let filtered = dataFilters.runQuery(rows, query.filters)
+    let filtered = dataFilters.runQuery(rows, query.filters || {})
     if (hasFilters && query.paginate) {
       filtered = filtered.slice(offset, offset + limit)
     }
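The `query.filters || {}` default above only protects the Google Sheets read path when no filters are supplied: with an empty filter object the shared `runQuery` returns the rows untouched. A minimal sketch of that behaviour:

```ts
import { dataFilters } from "@budibase/shared-core"

const rows = [
  { name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
  { name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
]

// No filters supplied: every row comes back, so unfiltered reads keep working.
console.log(dataFilters.runQuery(rows, {}).length) // 2
```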
@ -29,7 +29,7 @@
|
|||
"filters": {},
|
||||
"sort": {
|
||||
"firstname": {
|
||||
"direction": "ASCENDING"
|
||||
"direction": "ascending"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
|
@ -65,9 +65,7 @@
|
|||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
|
||||
"primary": [
|
||||
"personid"
|
||||
],
|
||||
"primary": ["personid"],
|
||||
"name": "persons",
|
||||
"schema": {
|
||||
"year": {
|
||||
|
@ -122,12 +120,7 @@
|
|||
"name": "type",
|
||||
"constraints": {
|
||||
"presence": false,
|
||||
"inclusion": [
|
||||
"support",
|
||||
"designer",
|
||||
"programmer",
|
||||
"qa"
|
||||
]
|
||||
"inclusion": ["support", "designer", "programmer", "qa"]
|
||||
}
|
||||
},
|
||||
"city": {
|
||||
|
@ -180,4 +173,4 @@
|
|||
"persons": "a",
|
||||
"tasks": "b"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,7 +30,7 @@
|
|||
},
|
||||
"sort": {
|
||||
"productname": {
|
||||
"direction": "ASCENDING"
|
||||
"direction": "ascending"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
|
@ -60,9 +60,7 @@
|
|||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
|
||||
"primary": [
|
||||
"productid"
|
||||
],
|
||||
"primary": ["productid"],
|
||||
"name": "products",
|
||||
"schema": {
|
||||
"productname": {
|
||||
|
@ -106,4 +104,4 @@
|
|||
"tasks": "b",
|
||||
"products_tasks": "c"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
},
|
||||
"sort": {
|
||||
"productname": {
|
||||
"direction": "ASCENDING"
|
||||
"direction": "ascending"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
|
@ -50,9 +50,7 @@
|
|||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products",
|
||||
"primary": [
|
||||
"productid"
|
||||
],
|
||||
"primary": ["productid"],
|
||||
"name": "products",
|
||||
"schema": {
|
||||
"productname": {
|
||||
|
@ -91,4 +89,4 @@
|
|||
"primaryDisplay": "productname"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -56,7 +56,7 @@
|
|||
},
|
||||
"sort": {
|
||||
"taskname": {
|
||||
"direction": "ASCENDING"
|
||||
"direction": "ascending"
|
||||
}
|
||||
},
|
||||
"paginate": {
|
||||
|
@ -106,9 +106,7 @@
|
|||
"table": {
|
||||
"type": "table",
|
||||
"_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
|
||||
"primary": [
|
||||
"taskid"
|
||||
],
|
||||
"primary": ["taskid"],
|
||||
"name": "tasks",
|
||||
"schema": {
|
||||
"executorid": {
|
||||
|
@ -199,4 +197,4 @@
|
|||
"persons": "c",
|
||||
"products_tasks": "d"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -4,6 +4,7 @@ import {
   RowSearchParams,
   SearchFilters,
   SearchResponse,
+  SortOrder,
 } from "@budibase/types"
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
@@ -78,6 +79,10 @@ export async function search(
     }
   }
 
+  if (options.sortOrder) {
+    options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
+  }
+
   const table = await sdk.tables.getTable(options.tableId)
   options = searchInputMapping(table, options)
 
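The new guard above only normalises casing: older callers (and the query JSON fixtures updated later in this diff) send "ASCENDING"/"DESCENDING", while the `SortOrder` enum uses lower-case values. A small, purely illustrative helper showing the same idea:

```ts
enum SortOrder {
  ASCENDING = "ascending",
  DESCENDING = "descending",
}

// Hypothetical helper mirroring the guard in the hunk above.
function normaliseSortOrder(input?: string): SortOrder | undefined {
  return input ? (input.toLowerCase() as SortOrder) : undefined
}

console.log(normaliseSortOrder("DESCENDING") === SortOrder.DESCENDING) // true
```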
@@ -1,6 +1,5 @@
 import {
   SortJson,
-  SortDirection,
   Operation,
   PaginationJson,
   IncludeRelationship,
@@ -9,6 +8,7 @@ import {
   RowSearchParams,
   SearchResponse,
   Table,
+  SortOrder,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -52,8 +52,8 @@ export async function search(
   if (params.sort) {
     const direction =
       params.sortOrder === "descending"
-        ? SortDirection.DESCENDING
-        : SortDirection.ASCENDING
+        ? SortOrder.DESCENDING
+        : SortOrder.ASCENDING
     sort = {
       [params.sort]: { direction },
     }
@@ -8,7 +8,6 @@ import {
   RowSearchParams,
   SearchFilters,
   SearchResponse,
-  SortDirection,
   SortOrder,
   SortType,
   SqlClient,
@@ -170,8 +169,8 @@ export async function search(
     sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
   const sortDirection =
     params.sortOrder === SortOrder.ASCENDING
-      ? SortDirection.ASCENDING
-      : SortDirection.DESCENDING
+      ? SortOrder.ASCENDING
+      : SortOrder.DESCENDING
   request.sort = {
     [sortField.name]: {
       direction: sortDirection,
@@ -7,13 +7,16 @@ import {
   SearchFilters,
   SearchQueryFields,
   SearchFilterOperator,
-  SortDirection,
   SortType,
   FieldConstraints,
+  SortOrder,
+  RowSearchParams,
+  EmptyFilterOption,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet, schema } from "./helpers"
+import _ from "lodash"
 
 const HBS_REGEX = /{{([^{].*?)}}/g
 
@@ -259,12 +262,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }
 
+export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
+  let result = runQuery(docs, query.query)
+  if (query.sort) {
+    result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+  }
+  if (query.limit) {
+    result = limit(result, query.limit.toString())
+  }
+  return result
+}
+
 /**
  * Performs a client-side search on an array of data
  * @param docs the data
  * @param query the JSON query
  */
-export const runQuery = (docs: any[], query?: SearchFilters) => {
+export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }
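This new `search` export is what the in-memory branch of the spec calls instead of the row search API. A minimal usage sketch (the table id and field names are illustrative only):

```ts
import { dataFilters } from "@budibase/shared-core"
import { SortOrder } from "@budibase/types"

const rows = [
  { name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
  { name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
]

// Filter, sort and limit entirely client-side, mirroring what the API would do.
const result = dataFilters.search(rows, {
  tableId: "ta_example", // illustrative id
  query: { string: { name: "f" } },
  sort: "name",
  sortOrder: SortOrder.ASCENDING,
  limit: 10,
})

console.log(result.map(r => r.name)) // ["foo"]
```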
@ -272,105 +286,170 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
|
|||
return docs
|
||||
}
|
||||
|
||||
// Make query consistent first
|
||||
query = cleanupQuery(query)
|
||||
|
||||
// Iterates over a set of filters and evaluates a fail function against a doc
|
||||
if (
|
||||
!hasFilters(query) &&
|
||||
query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
|
||||
) {
|
||||
return []
|
||||
}
|
||||
|
||||
const match =
|
||||
(
|
||||
type: SearchFilterOperator,
|
||||
failFn: (docValue: any, testValue: any) => boolean
|
||||
test: (docValue: any, testValue: any) => boolean
|
||||
) =>
|
||||
(doc: any) => {
|
||||
const filters = Object.entries(query![type] || {})
|
||||
for (let i = 0; i < filters.length; i++) {
|
||||
const [key, testValue] = filters[i]
|
||||
const docValue = deepGet(doc, removeKeyNumbering(key))
|
||||
if (failFn(docValue, testValue)) {
|
||||
(doc: Record<string, any>) => {
|
||||
for (const [key, testValue] of Object.entries(query[type] || {})) {
|
||||
const result = test(deepGet(doc, removeKeyNumbering(key)), testValue)
|
||||
if (query.allOr && result) {
|
||||
return true
|
||||
} else if (!query.allOr && !result) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Process a string match (fails if the value does not start with the string)
|
||||
const stringMatch = match(
|
||||
SearchFilterOperator.STRING,
|
||||
(docValue: string, testValue: string) => {
|
||||
return (
|
||||
!docValue ||
|
||||
!docValue?.toLowerCase().startsWith(testValue?.toLowerCase())
|
||||
)
|
||||
(docValue: any, testValue: any) => {
|
||||
if (!(typeof docValue === "string")) {
|
||||
return false
|
||||
}
|
||||
if (!(typeof testValue === "string")) {
|
||||
return false
|
||||
}
|
||||
return docValue.toLowerCase().startsWith(testValue.toLowerCase())
|
||||
}
|
||||
)
|
||||
|
||||
// Process a fuzzy match (treat the same as starts with when running locally)
|
||||
const fuzzyMatch = match(
|
||||
SearchFilterOperator.FUZZY,
|
||||
(docValue: string, testValue: string) => {
|
||||
return (
|
||||
!docValue ||
|
||||
!docValue?.toLowerCase().startsWith(testValue?.toLowerCase())
|
||||
)
|
||||
(docValue: any, testValue: any) => {
|
||||
if (!(typeof docValue === "string")) {
|
||||
return false
|
||||
}
|
||||
if (!(typeof testValue === "string")) {
|
||||
return false
|
||||
}
|
||||
return docValue.toLowerCase().includes(testValue.toLowerCase())
|
||||
}
|
||||
)
|
||||
|
||||
// Process a range match
|
||||
const rangeMatch = match(
|
||||
SearchFilterOperator.RANGE,
|
||||
(
|
||||
docValue: string | number | null,
|
||||
testValue: { low: number; high: number }
|
||||
) => {
|
||||
(docValue: any, testValue: any) => {
|
||||
if (docValue == null || docValue === "") {
|
||||
return true
|
||||
return false
|
||||
}
|
||||
if (!isNaN(+docValue)) {
|
||||
return +docValue < testValue.low || +docValue > testValue.high
|
||||
|
||||
if (_.isObject(testValue.low) && _.isEmpty(testValue.low)) {
|
||||
testValue.low = undefined
|
||||
}
|
||||
if (dayjs(docValue).isValid()) {
|
||||
return (
|
||||
new Date(docValue).getTime() < new Date(testValue.low).getTime() ||
|
||||
new Date(docValue).getTime() > new Date(testValue.high).getTime()
|
||||
)
|
||||
|
||||
if (_.isObject(testValue.high) && _.isEmpty(testValue.high)) {
|
||||
testValue.high = undefined
|
||||
}
|
||||
|
||||
if (testValue.low == null && testValue.high == null) {
|
||||
return false
|
||||
}
|
||||
|
||||
const docNum = +docValue
|
||||
if (!isNaN(docNum)) {
|
||||
const lowNum = +testValue.low
|
||||
const highNum = +testValue.high
|
||||
if (!isNaN(lowNum) && !isNaN(highNum)) {
|
||||
return docNum >= lowNum && docNum <= highNum
|
||||
} else if (!isNaN(lowNum)) {
|
||||
return docNum >= lowNum
|
||||
} else if (!isNaN(highNum)) {
|
||||
return docNum <= highNum
|
||||
}
|
||||
}
|
||||
|
||||
const docDate = dayjs(docValue)
|
||||
if (docDate.isValid()) {
|
||||
const lowDate = dayjs(testValue.low || "0000-00-00T00:00:00.000Z")
|
||||
const highDate = dayjs(testValue.high || "9999-00-00T00:00:00.000Z")
|
||||
if (lowDate.isValid() && highDate.isValid()) {
|
||||
return (
|
||||
(docDate.isAfter(lowDate) && docDate.isBefore(highDate)) ||
|
||||
docDate.isSame(lowDate) ||
|
||||
docDate.isSame(highDate)
|
||||
)
|
||||
} else if (lowDate.isValid()) {
|
||||
return docDate.isAfter(lowDate) || docDate.isSame(lowDate)
|
||||
} else if (highDate.isValid()) {
|
||||
return docDate.isBefore(highDate) || docDate.isSame(highDate)
|
||||
}
|
||||
}
|
||||
|
||||
if (testValue.low != null && testValue.high != null) {
|
||||
return docValue >= testValue.low && docValue <= testValue.high
|
||||
} else if (testValue.low != null) {
|
||||
return docValue >= testValue.low
|
||||
} else if (testValue.high != null) {
|
||||
return docValue <= testValue.high
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
)
|
||||
|
||||
// Process an equal match (fails if the value is different)
|
||||
const equalMatch = match(
|
||||
SearchFilterOperator.EQUAL,
|
||||
(docValue: any, testValue: string | null) => {
|
||||
return testValue != null && testValue !== "" && docValue !== testValue
|
||||
// This function exists to check that either the docValue is equal to the
|
||||
// testValue, or if the docValue is an object or array of objects, that the
|
||||
// _id of the docValue is equal to the testValue.
|
||||
const _valueMatches = (docValue: any, testValue: any) => {
|
||||
if (Array.isArray(docValue)) {
|
||||
for (const item of docValue) {
|
||||
if (_valueMatches(item, testValue)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
)
|
||||
|
||||
// Process a not-equal match (fails if the value is the same)
|
||||
if (
|
||||
docValue &&
|
||||
typeof docValue === "object" &&
|
||||
typeof testValue === "string"
|
||||
) {
|
||||
return docValue._id === testValue
|
||||
}
|
||||
|
||||
return docValue === testValue
|
||||
}
|
||||
|
||||
const not =
|
||||
<T extends any[]>(f: (...args: T) => boolean) =>
|
||||
(...args: T): boolean =>
|
||||
!f(...args)
|
||||
|
||||
const equalMatch = match(SearchFilterOperator.EQUAL, _valueMatches)
|
||||
const notEqualMatch = match(
|
||||
SearchFilterOperator.NOT_EQUAL,
|
||||
(docValue: any, testValue: string | null) => {
|
||||
return testValue != null && testValue !== "" && docValue === testValue
|
||||
}
|
||||
not(_valueMatches)
|
||||
)
|
||||
|
||||
// Process an empty match (fails if the value is not empty)
|
||||
const emptyMatch = match(
|
||||
SearchFilterOperator.EMPTY,
|
||||
(docValue: string | null) => {
|
||||
return docValue != null && docValue !== ""
|
||||
const _empty = (docValue: any) => {
|
||||
if (typeof docValue === "string") {
|
||||
return docValue === ""
|
||||
}
|
||||
)
|
||||
|
||||
// Process a not-empty match (fails if the value is empty)
|
||||
const notEmptyMatch = match(
|
||||
SearchFilterOperator.NOT_EMPTY,
|
||||
(docValue: string | null) => {
|
||||
return docValue == null || docValue === ""
|
||||
if (Array.isArray(docValue)) {
|
||||
return docValue.length === 0
|
||||
}
|
||||
)
|
||||
if (typeof docValue === "object") {
|
||||
return Object.keys(docValue).length === 0
|
||||
}
|
||||
return docValue == null
|
||||
}
|
||||
|
||||
const emptyMatch = match(SearchFilterOperator.EMPTY, _empty)
|
||||
const notEmptyMatch = match(SearchFilterOperator.NOT_EMPTY, not(_empty))
|
||||
|
||||
// Process an includes match (fails if the value is not included)
|
||||
const oneOf = match(
|
||||
SearchFilterOperator.ONE_OF,
|
||||
(docValue: any, testValue: any) => {
|
||||
|
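The rewritten matchers in the hunk above route through a shared `_valueMatches` helper, which is what lets `equal`, `oneOf` and the contains operators accept a plain id for user-reference columns. A small sketch of the resulting behaviour (row data is illustrative):

```ts
import { dataFilters } from "@budibase/shared-core"

const rows = [
  { name: "single user, session user", single_user: { _id: "us_session" } },
  { name: "single user", single_user: { _id: "us_other" } },
  { name: "multi user", multi_user: [{ _id: "us_a" }, { _id: "us_b" }] },
]

// `equal` can be given a plain id and still match a user object by its _id.
console.log(
  dataFilters
    .runQuery(rows, { equal: { single_user: "us_session" } })
    .map(r => r.name)
) // ["single user, session user"]

// `contains` walks arrays of user objects the same way.
console.log(
  dataFilters
    .runQuery(rows, { contains: { multi_user: ["us_b"] } })
    .map(r => r.name)
) // ["multi user"]
```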
@ -380,61 +459,92 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
|
|||
testValue = testValue.map((item: string) => parseFloat(item))
|
||||
}
|
||||
}
|
||||
return !testValue?.includes(docValue)
|
||||
|
||||
if (!Array.isArray(testValue)) {
|
||||
return false
|
||||
}
|
||||
|
||||
return testValue.some(item => _valueMatches(docValue, item))
|
||||
}
|
||||
)
|
||||
|
||||
const containsAny = match(
|
||||
SearchFilterOperator.CONTAINS_ANY,
|
||||
(docValue: any, testValue: any) => {
|
||||
return !docValue?.includes(...testValue)
|
||||
const _contains =
|
||||
(f: "some" | "every") => (docValue: any, testValue: any) => {
|
||||
if (!Array.isArray(docValue)) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (typeof testValue === "string") {
|
||||
testValue = testValue.split(",")
|
||||
if (typeof docValue[0] === "number") {
|
||||
testValue = testValue.map((item: string) => parseFloat(item))
|
||||
}
|
||||
}
|
||||
|
||||
if (!Array.isArray(testValue)) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (testValue.length === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
return testValue[f](item => _valueMatches(docValue, item))
|
||||
}
|
||||
)
|
||||
|
||||
const contains = match(
|
||||
SearchFilterOperator.CONTAINS,
|
||||
(docValue: string | any[], testValue: any[]) => {
|
||||
return !testValue?.every((item: any) => docValue?.includes(item))
|
||||
(docValue: any, testValue: any) => {
|
||||
if (Array.isArray(testValue) && testValue.length === 0) {
|
||||
return true
|
||||
}
|
||||
return _contains("every")(docValue, testValue)
|
||||
}
|
||||
)
|
||||
|
||||
const notContains = match(
|
||||
SearchFilterOperator.NOT_CONTAINS,
|
||||
(docValue: string | any[], testValue: any[]) => {
|
||||
return testValue?.every((item: any) => docValue?.includes(item))
|
||||
(docValue: any, testValue: any) => {
|
||||
// Not sure if this is logically correct, but at the time this code was
|
||||
// written the search endpoint behaved this way and we wanted to make this
|
||||
// local search match its behaviour, so we had to do this.
|
||||
if (Array.isArray(testValue) && testValue.length === 0) {
|
||||
return true
|
||||
}
|
||||
return not(_contains("every"))(docValue, testValue)
|
||||
}
|
||||
)
|
||||
const containsAny = match(
|
||||
SearchFilterOperator.CONTAINS_ANY,
|
||||
_contains("some")
|
||||
)
|
||||
|
||||
const docMatch = (doc: any) => {
|
||||
const filterFunctions: Record<SearchFilterOperator, (doc: any) => boolean> =
|
||||
{
|
||||
string: stringMatch,
|
||||
fuzzy: fuzzyMatch,
|
||||
range: rangeMatch,
|
||||
equal: equalMatch,
|
||||
notEqual: notEqualMatch,
|
||||
empty: emptyMatch,
|
||||
notEmpty: notEmptyMatch,
|
||||
oneOf: oneOf,
|
||||
contains: contains,
|
||||
containsAny: containsAny,
|
||||
notContains: notContains,
|
||||
}
|
||||
const docMatch = (doc: Record<string, any>) => {
|
||||
const filterFunctions = {
|
||||
string: stringMatch,
|
||||
fuzzy: fuzzyMatch,
|
||||
range: rangeMatch,
|
||||
equal: equalMatch,
|
||||
notEqual: notEqualMatch,
|
||||
empty: emptyMatch,
|
||||
notEmpty: notEmptyMatch,
|
||||
oneOf: oneOf,
|
||||
contains: contains,
|
||||
containsAny: containsAny,
|
||||
notContains: notContains,
|
||||
}
|
||||
|
||||
const activeFilterKeys: SearchFilterOperator[] = Object.entries(query || {})
|
||||
const results = Object.entries(query || {})
|
||||
.filter(
|
||||
([key, value]: [string, any]) =>
|
||||
([key, value]) =>
|
||||
!["allOr", "onEmptyFilter"].includes(key) &&
|
||||
value &&
|
||||
Object.keys(value as Record<string, any>).length > 0
|
||||
Object.keys(value).length > 0
|
||||
)
|
||||
.map(([key]) => key as any)
|
||||
.map(([key]) => {
|
||||
return filterFunctions[key as SearchFilterOperator]?.(doc) ?? false
|
||||
})
|
||||
|
||||
const results: boolean[] = activeFilterKeys.map(filterKey => {
|
||||
return filterFunctions[filterKey]?.(doc) ?? false
|
||||
})
|
||||
|
||||
if (query!.allOr) {
|
||||
if (query.allOr) {
|
||||
return results.some(result => result === true)
|
||||
} else {
|
||||
return results.every(result => result === true)
|
||||
|
@@ -454,24 +564,35 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
 export const sort = (
   docs: any[],
   sort: string,
-  sortOrder: SortDirection,
+  sortOrder: SortOrder,
   sortType = SortType.STRING
 ) => {
   if (!sort || !sortOrder || !sortType) {
     return docs
   }
-  const parse =
-    sortType === "string" ? (x: any) => `${x}` : (x: string) => parseFloat(x)
+
+  const parse = (x: any) => {
+    if (x == null) {
+      return x
+    }
+    if (sortType === "string") {
+      return `${x}`
+    }
+    return parseFloat(x)
+  }
 
   return docs
     .slice()
     .sort((a: { [x: string]: any }, b: { [x: string]: any }) => {
       const colA = parse(a[sort])
       const colB = parse(b[sort])
 
+      const result = colB == null || colA > colB ? 1 : -1
       if (sortOrder.toLowerCase() === "descending") {
-        return colA > colB ? -1 : 1
-      } else {
-        return colA > colB ? 1 : -1
+        return result * -1
       }
 
+      return result
     })
 }
@ -1,415 +0,0 @@
|
|||
import {
|
||||
SearchFilters,
|
||||
SearchFilterOperator,
|
||||
FieldType,
|
||||
SearchFilter,
|
||||
} from "@budibase/types"
|
||||
import { buildQuery, runQuery } from "../filters"
|
||||
|
||||
describe("runQuery", () => {
|
||||
const docs = [
|
||||
{
|
||||
order_id: 1,
|
||||
customer_id: 259,
|
||||
order_status: 4,
|
||||
order_date: "2016-01-01T00:00:00.000Z",
|
||||
required_date: "2016-01-03T00:00:00.000Z",
|
||||
shipped_date: "2016-01-03T00:00:00.000Z",
|
||||
store_id: 1,
|
||||
staff_id: 2,
|
||||
description: "Large box",
|
||||
label: undefined,
|
||||
},
|
||||
{
|
||||
order_id: 2,
|
||||
customer_id: 1212,
|
||||
order_status: 4,
|
||||
order_date: "2016-01-05T00:00:00.000Z",
|
||||
required_date: "2016-01-04T00:00:00.000Z",
|
||||
shipped_date: "2016-01-03T00:00:00.000Z",
|
||||
store_id: 2,
|
||||
staff_id: 6,
|
||||
description: "Small box",
|
||||
label: "FRAGILE",
|
||||
},
|
||||
{
|
||||
order_id: 3,
|
||||
customer_id: 523,
|
||||
order_status: 5,
|
||||
order_date: "2016-01-12T00:00:00.000Z",
|
||||
required_date: "2016-01-05T00:00:00.000Z",
|
||||
shipped_date: "2016-01-03T00:00:00.000Z",
|
||||
store_id: 2,
|
||||
staff_id: 7,
|
||||
description: "Heavy box",
|
||||
label: "HEAVY",
|
||||
},
|
||||
]
|
||||
|
||||
function buildQuery(filters: { [filterKey: string]: any }): SearchFilters {
|
||||
const query: SearchFilters = {
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
oneOf: {},
|
||||
containsAny: {},
|
||||
allOr: false,
|
||||
}
|
||||
|
||||
for (const filterKey in filters) {
|
||||
query[filterKey as SearchFilterOperator] = filters[filterKey]
|
||||
}
|
||||
|
||||
return query
|
||||
}
|
||||
|
||||
it("should return input docs if no search query is provided", () => {
|
||||
expect(runQuery(docs)).toBe(docs)
|
||||
})
|
||||
|
||||
it("should return matching rows for equal filter", () => {
|
||||
const query = buildQuery({
|
||||
equal: { order_status: 4 },
|
||||
})
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
|
||||
})
|
||||
|
||||
it("should return matching row for notEqual filter", () => {
|
||||
const query = buildQuery({
|
||||
notEqual: { order_status: 4 },
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||
})
|
||||
|
||||
it("should return starts with matching rows for fuzzy and string filters", () => {
|
||||
expect(
|
||||
runQuery(
|
||||
docs,
|
||||
buildQuery({
|
||||
fuzzy: { description: "sm" },
|
||||
})
|
||||
).map(row => row.description)
|
||||
).toEqual(["Small box"])
|
||||
expect(
|
||||
runQuery(
|
||||
docs,
|
||||
buildQuery({
|
||||
string: { description: "SM" },
|
||||
})
|
||||
).map(row => row.description)
|
||||
).toEqual(["Small box"])
|
||||
})
|
||||
|
||||
it("should return rows within a range filter", () => {
|
||||
const query = buildQuery({
|
||||
range: {
|
||||
customer_id: {
|
||||
low: 500,
|
||||
high: 1000,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||
})
|
||||
|
||||
it("should return rows with numeric strings within a range filter", () => {
|
||||
const query = buildQuery({
|
||||
range: {
|
||||
customer_id: {
|
||||
low: "500",
|
||||
high: "1000",
|
||||
},
|
||||
},
|
||||
})
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||
})
|
||||
|
||||
it("should return rows with ISO date strings within a range filter", () => {
|
||||
const query = buildQuery({
|
||||
range: {
|
||||
order_date: {
|
||||
low: "2016-01-04T00:00:00.000Z",
|
||||
high: "2016-01-11T00:00:00.000Z",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
|
||||
})
|
||||
|
||||
it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
|
||||
const docs = [
|
||||
{
|
||||
order_id: 4,
|
||||
customer_id: 1758,
|
||||
order_status: 5,
|
||||
order_date: "{{ Binding.INVALID }}",
|
||||
required_date: "2017-03-05T00:00:00.000Z",
|
||||
shipped_date: "2017-03-03T00:00:00.000Z",
|
||||
store_id: 2,
|
||||
staff_id: 7,
|
||||
description: undefined,
|
||||
label: "",
|
||||
},
|
||||
]
|
||||
|
||||
const query = buildQuery({
|
||||
range: {
|
||||
order_date: {
|
||||
low: "2016-01-04T00:00:00.000Z",
|
||||
high: "2016-01-11T00:00:00.000Z",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query)).toEqual(docs)
|
||||
})
|
||||
|
||||
it("should return rows with matches on empty filter", () => {
|
||||
const query = buildQuery({
|
||||
empty: {
|
||||
label: null,
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1])
|
||||
})
|
||||
|
||||
it("should return rows with matches on notEmpty filter", () => {
|
||||
const query = buildQuery({
|
||||
notEmpty: {
|
||||
label: null,
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
|
||||
})
|
||||
|
||||
it.each([[523, 259], "523,259"])(
|
||||
"should return rows with matches on numeric oneOf filter",
|
||||
input => {
|
||||
const query = buildQuery({
|
||||
oneOf: {
|
||||
customer_id: input,
|
||||
},
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.customer_id)).toEqual([
|
||||
259, 523,
|
||||
])
|
||||
}
|
||||
)
|
||||
|
||||
it.each([
|
||||
[false, []],
|
||||
[true, [1, 2, 3]],
|
||||
])("should return %s if allOr is %s ", (allOr, expectedResult) => {
|
||||
const query = buildQuery({
|
||||
allOr,
|
||||
oneOf: { staff_id: [10] },
|
||||
contains: { description: ["box"] },
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual(
|
||||
expectedResult
|
||||
)
|
||||
})
|
||||
|
||||
it("should return matching results if allOr is true and only one filter matches with different operands", () => {
|
||||
const query = buildQuery({
|
||||
allOr: true,
|
||||
equal: { order_status: 4 },
|
||||
oneOf: { label: ["FRAGILE"] },
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
|
||||
})
|
||||
|
||||
it("should handle when a value is null or undefined", () => {
|
||||
const query = buildQuery({
|
||||
allOr: true,
|
||||
equal: { order_status: null },
|
||||
oneOf: { label: ["FRAGILE"] },
|
||||
})
|
||||
|
||||
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
|
||||
})
|
||||
})
|
||||
|
||||
describe("buildQuery", () => {
|
||||
it("should return a basic search query template if the input is not an array", () => {
|
||||
const filter: any = "NOT_AN_ARRAY"
|
||||
expect(buildQuery(filter)).toEqual({
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
oneOf: {},
|
||||
containsAny: {},
|
||||
})
|
||||
})
|
||||
|
||||
it("should parseFloat if the type is a number, but the value is a numeric string", () => {
|
||||
const filter: SearchFilter[] = [
|
||||
{
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "customer_id",
|
||||
type: FieldType.NUMBER,
|
||||
value: "1212",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.ONE_OF,
|
||||
field: "customer_id",
|
||||
type: FieldType.NUMBER,
|
||||
value: "1000,1212,3400",
|
||||
},
|
||||
]
|
||||
expect(buildQuery(filter)).toEqual({
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {
|
||||
customer_id: 1212,
|
||||
},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
oneOf: {
|
||||
customer_id: [1000, 1212, 3400],
|
||||
},
|
||||
containsAny: {},
|
||||
})
|
||||
})
|
||||
|
||||
it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => {
|
||||
const filter: SearchFilter[] = [
|
||||
{
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "customer_id",
|
||||
type: FieldType.NUMBER,
|
||||
value: "{{ customer_id }}",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.ONE_OF,
|
||||
field: "customer_id",
|
||||
type: FieldType.NUMBER,
|
||||
value: "{{ list_of_customer_ids }}",
|
||||
},
|
||||
]
|
||||
expect(buildQuery(filter)).toEqual({
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {
|
||||
customer_id: "{{ customer_id }}",
|
||||
},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
oneOf: {
|
||||
customer_id: "{{ list_of_customer_ids }}",
|
||||
},
|
||||
containsAny: {},
|
||||
})
|
||||
})
|
||||
|
||||
it("should cast string to boolean if the type is boolean", () => {
|
||||
const filter: SearchFilter[] = [
|
||||
{
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "a",
|
||||
type: FieldType.BOOLEAN,
|
||||
value: "not_true",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.NOT_EQUAL,
|
||||
field: "b",
|
||||
type: FieldType.BOOLEAN,
|
||||
value: "not_true",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "c",
|
||||
type: FieldType.BOOLEAN,
|
||||
value: "true",
|
||||
},
|
||||
]
|
||||
expect(buildQuery(filter)).toEqual({
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {
|
||||
b: true,
|
||||
c: true,
|
||||
},
|
||||
notEqual: {
|
||||
a: true,
|
||||
},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {},
|
||||
notContains: {},
|
||||
oneOf: {},
|
||||
containsAny: {},
|
||||
})
|
||||
})
|
||||
|
||||
it("should split the string for contains operators", () => {
|
||||
const filter: SearchFilter[] = [
|
||||
{
|
||||
operator: SearchFilterOperator.CONTAINS,
|
||||
field: "description",
|
||||
type: FieldType.ARRAY,
|
||||
value: "Large box,Heavy box,Small box",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.NOT_CONTAINS,
|
||||
field: "description",
|
||||
type: FieldType.ARRAY,
|
||||
value: "Large box,Heavy box,Small box",
|
||||
},
|
||||
{
|
||||
operator: SearchFilterOperator.CONTAINS_ANY,
|
||||
field: "description",
|
||||
type: FieldType.ARRAY,
|
||||
value: "Large box,Heavy box,Small box",
|
||||
},
|
||||
]
|
||||
expect(buildQuery(filter)).toEqual({
|
||||
string: {},
|
||||
fuzzy: {},
|
||||
range: {},
|
||||
equal: {},
|
||||
notEqual: {},
|
||||
empty: {},
|
||||
notEmpty: {},
|
||||
contains: {
|
||||
description: ["Large box", "Heavy box", "Small box"],
|
||||
},
|
||||
notContains: {
|
||||
description: ["Large box", "Heavy box", "Small box"],
|
||||
},
|
||||
oneOf: {},
|
||||
containsAny: {
|
||||
description: ["Large box", "Heavy box", "Small box"],
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -22,11 +22,6 @@ export const RowOperations = [
   Operation.BULK_CREATE,
 ]
 
-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export enum QueryType {
   SQL = "sql",
   JSON = "json",

@@ -1,6 +1,6 @@
-import { Operation, SortDirection } from "./datasources"
+import { Operation } from "./datasources"
 import { Row, Table, DocumentType } from "../documents"
-import { SortType } from "../api"
+import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"
 
 export enum SearchFilterOperator {
@@ -77,7 +77,7 @@ export type SearchQueryFields = Omit<SearchFilters, "allOr" | "onEmptyFilter">
 
 export interface SortJson {
   [key: string]: {
-    direction: SortDirection
+    direction: SortOrder
     type?: SortType
   }
 }