Merge branch 'uncomment-search-test-todos' of github.com:Budibase/budibase into feature/count-rows-to-return
commit e402f9c571

@@ -333,11 +333,11 @@ brace-expansion@^1.1.7:
     concat-map "0.0.1"
 
 braces@^3.0.1, braces@~3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
-  integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
+  integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
   dependencies:
-    fill-range "^7.0.1"
+    fill-range "^7.1.1"
 
 bulma@^0.9.3:
   version "0.9.3"
@@ -781,10 +781,10 @@ file-entry-cache@^6.0.1:
   dependencies:
     flat-cache "^3.0.4"
 
-fill-range@^7.0.1:
-  version "7.0.1"
-  resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
-  integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
+fill-range@^7.1.1:
+  version "7.1.1"
+  resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
+  integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
   dependencies:
     to-regex-range "^5.0.1"
 
@@ -1709,10 +1709,10 @@ type-fest@^0.20.2:
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
   integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
 
-typescript@4.6.2:
-  version "4.6.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.2.tgz#fe12d2727b708f4eef40f51598b3398baa9611d4"
-  integrity sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg==
+typescript@5.2.2:
+  version "5.2.2"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
+  integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
 
 unbox-primitive@^1.0.1:
   version "1.0.1"

@@ -12,21 +12,21 @@ import {
   BBReferenceFieldMetadata,
   FieldSchema,
   FieldType,
-  INTERNAL_TABLE_SOURCE_ID,
   JsonFieldMetadata,
-  JsonTypes,
   Operation,
-  prefixed,
   QueryJson,
-  QueryOptions,
   RelationshipsJson,
   SearchFilters,
-  SortDirection,
   SqlClient,
   SqlQuery,
   SqlQueryBinding,
   Table,
   TableSourceType,
+  INTERNAL_TABLE_SOURCE_ID,
+  QueryOptions,
+  JsonTypes,
+  prefixed,
+  SortOrder,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -420,11 +420,11 @@ class InternalBuilder {
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
-          value.direction === SortDirection.ASCENDING ? "asc" : "desc"
+          value.direction === SortOrder.ASCENDING ? "asc" : "desc"
         let nulls
         if (this.client === SqlClient.POSTGRES) {
           // All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues
-          nulls = value.direction === SortDirection.ASCENDING ? "first" : "last"
+          nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
         }
 
         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
@@ -594,10 +594,10 @@ class InternalBuilder {
     if (!counting) {
       query = query.limit(BASE_LIMIT)
     }
-
+    // add filters to the query (where)
     query = this.addFilters(query, filters, json.meta.table, {
      aliases: tableAliases,
     })
 
     // add sorting to pre-query
     query = this.addSorting(query, json)
     const alias = tableAliases?.[tableName] || tableName
@@ -621,22 +621,22 @@ class InternalBuilder {
       endpoint.schema,
       tableAliases
     )
 
-    let foundLimit = limit || BASE_LIMIT
     // handle pagination
     let foundOffset: number | null = null
+    let foundLimit = limit || BASE_LIMIT
     if (paginate && paginate.page && paginate.limit) {
-      let page =
-        typeof paginate.page === "string"
-          ? parseInt(paginate.page)
-          : paginate.page
-      page = page <= 1 ? 0 : page - 1
+      // @ts-ignore
+      const page = paginate.page <= 1 ? 0 : paginate.page - 1
       const offset = page * paginate.limit
       foundLimit = paginate.limit
       foundOffset = offset
+    } else if (paginate && paginate.offset && paginate.limit) {
+      foundLimit = paginate.limit
+      foundOffset = paginate.offset
     } else if (paginate && paginate.limit) {
       foundLimit = paginate.limit
     }
+    // always add the found limit, unless counting
     if (!counting) {
       query = query.limit(foundLimit)
     }
 
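Note: the rewritten block above now accepts either page-based or offset-based pagination input. A minimal sketch of the same limit/offset resolution in isolation follows; the helper name and the Paginate shape here are illustrative only, not part of the Budibase API.

    // Sketch: mirrors the branch order above (page+limit, then offset+limit, then limit alone).
    interface Paginate {
      page?: number
      offset?: number
      limit?: number
    }

    function resolvePagination(paginate: Paginate | undefined, baseLimit: number) {
      let limit = baseLimit
      let offset: number | null = null
      if (paginate?.page && paginate.limit) {
        // pages arrive 1-based; convert to a 0-based multiplier
        const page = paginate.page <= 1 ? 0 : paginate.page - 1
        limit = paginate.limit
        offset = page * paginate.limit
      } else if (paginate?.offset && paginate.limit) {
        limit = paginate.limit
        offset = paginate.offset
      } else if (paginate?.limit) {
        limit = paginate.limit
      }
      return { limit, offset }
    }
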
@@ -70,7 +70,7 @@
   <input
     class="input"
     value={title}
-    {title}
+    title={componentName}
     placeholder={componentName}
     on:keypress={e => {
       if (e.key.toLowerCase() === "enter") {
@@ -158,7 +158,32 @@
     overflow: hidden;
     text-overflow: ellipsis;
     white-space: nowrap;
+    position: relative;
+    padding: 5px;
+    right: 6px;
+    border: 1px solid transparent;
+    border-radius: 3px;
+    transition: 150ms background-color, 150ms border-color, 150ms color;
   }
 
+  .input:hover,
+  .input:focus {
+    cursor: text;
+    background-color: var(
+      --spectrum-textfield-m-background-color,
+      var(--spectrum-global-color-gray-50)
+    );
+    border: 1px solid white;
+    border-color: var(
+      --spectrum-textfield-m-border-color,
+      var(--spectrum-alias-border-color)
+    );
+    color: var(
+      --spectrum-textfield-m-text-color,
+      var(--spectrum-alias-text-color)
+    );
+  }
+
   .panel-title-content {
     display: contents;
   }

@@ -33,7 +33,8 @@
     </Body>
   </Layout>
   <Button
-    on:click={() => (window.location = "https://docs.budibase.com")}
+    on:click={() =>
+      (window.location = "https://docs.budibase.com/docs/migrations")}
     >Migration guide</Button
   >
 {/if}

@@ -1,18 +1,22 @@
 <script>
   export let isMigrationDone
   export let onMigrationDone
-  export let timeoutSeconds = 10 // 3 minutes
+  export let timeoutSeconds = 60 // 1 minute
+  export let minTimeSeconds = 3
 
   const loadTime = Date.now()
+  const intervalMs = 1000
   let timedOut = false
+  let secondsWaited = 0
 
   async function checkMigrationsFinished() {
     setTimeout(async () => {
       const isMigrated = await isMigrationDone()
 
       const timeoutMs = timeoutSeconds * 1000
-      if (!isMigrated) {
+      if (!isMigrated || secondsWaited <= minTimeSeconds) {
         if (loadTime + timeoutMs > Date.now()) {
+          secondsWaited += 1
           return checkMigrationsFinished()
         }
 
@@ -20,7 +24,7 @@
       }
 
       onMigrationDone()
-    }, 1000)
+    }, intervalMs)
   }
 
   checkMigrationsFinished()
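Note: the component above re-checks isMigrationDone() every intervalMs, keeps polling for at least minTimeSeconds even after a positive result, and gives up once timeoutSeconds have elapsed. A standalone sketch of that pattern (a hypothetical helper, not the Svelte component itself):

    // Poll a check function until it reports done (after a minimum wait) or the timeout is hit.
    async function pollUntilDone(
      isDone: () => Promise<boolean>,
      { intervalMs = 1000, timeoutSeconds = 60, minTimeSeconds = 3 } = {}
    ): Promise<boolean> {
      const start = Date.now()
      let secondsWaited = 0
      while (Date.now() - start < timeoutSeconds * 1000) {
        const done = await isDone()
        if (done && secondsWaited > minTimeSeconds) {
          return true
        }
        secondsWaited += 1
        await new Promise(resolve => setTimeout(resolve, intervalMs))
      }
      return false
    }
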
@@ -41,6 +45,11 @@
   <span class="subtext">
     {#if !timedOut}
       Please wait and we will be back in a second!
+      <br />
+      Checkout the
+      <a href="https://docs.budibase.com/docs/app-migrations" target="_blank"
+        >documentation</a
+      > on app migrations.
     {:else}
       An error occurred, please try again later.
       <br />

@@ -1 +1 @@
-Subproject commit 85b4fc9ea01472bf69840d046733ad596ef893e2
+Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6

@@ -22,30 +22,27 @@ import {
 import _ from "lodash"
 import tk from "timekeeper"
 import { encodeJSBinding } from "@budibase/string-templates"
+import { dataFilters } from "@budibase/shared-core"
 
 describe.each([
+  ["in-memory", undefined],
   ["lucene", undefined],
   ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/api/:sourceId/search (%s)", (name, dsProvider) => {
+])("search (%s)", (name, dsProvider) => {
   const isSqs = name === "sqs"
   const isLucene = name === "lucene"
+  const isInMemory = name === "in-memory"
   const isInternal = isSqs || isLucene
   const config = setup.getConfig()
 
   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
   let table: Table
-  const snippets = [
-    {
-      name: "WeeksAgo",
-      code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`,
-    },
-  ]
+  let rows: Row[]
 
   beforeAll(async () => {
     if (isSqs) {
@@ -55,7 +52,12 @@ describe.each([
 
     if (config.app?.appId) {
       config.app = await config.api.application.update(config.app?.appId, {
-        snippets,
+        snippets: [
+          {
+            name: "WeeksAgo",
+            code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`,
+          },
+        ],
       })
     }
 
@@ -79,14 +81,30 @@ describe.each([
     )
   }
 
-  async function createRows(rows: Record<string, any>[]) {
+  async function createRows(arr: Record<string, any>[]) {
     // Shuffling to avoid false positives given a fixed order
-    await config.api.row.bulkImport(table._id!, { rows: _.shuffle(rows) })
+    await config.api.row.bulkImport(table._id!, {
+      rows: _.shuffle(arr),
+    })
+    rows = await config.api.row.fetch(table._id!)
   }
 
   class SearchAssertion {
     constructor(private readonly query: RowSearchParams) {}
 
+    private async performSearch(): Promise<Row[]> {
+      if (isInMemory) {
+        return dataFilters.search(_.cloneDeep(rows), this.query)
+      } else {
+        return (
+          await config.api.row.search(table._id!, {
+            ...this.query,
+            tableId: table._id!,
+          })
+        ).rows
+      }
+    }
+
     // We originally used _.isMatch to compare rows, but found that when
     // comparing arrays it would return true if the source array was a subset of
     // the target array. This would sometimes create false matches. This
@@ -157,10 +175,7 @@ describe.each([
     // different to the one passed in will cause the assertion to fail. Extra
     // rows returned by the query will also cause the assertion to fail.
     async toMatchExactly(expectedRows: any[]) {
-      const { rows: foundRows } = await config.api.row.search(table._id!, {
-        ...this.query,
-        tableId: table._id!,
-      })
+      const foundRows = await this.performSearch()
 
       // eslint-disable-next-line jest/no-standalone-expect
       expect(foundRows).toHaveLength(expectedRows.length)
@@ -176,10 +191,7 @@ describe.each([
     // passed in. The order of the rows is not important, but extra rows will
     // cause the assertion to fail.
    async toContainExactly(expectedRows: any[]) {
-      const { rows: foundRows } = await config.api.row.search(table._id!, {
-        ...this.query,
-        tableId: table._id!,
-      })
+      const foundRows = await this.performSearch()
 
       // eslint-disable-next-line jest/no-standalone-expect
       expect(foundRows).toHaveLength(expectedRows.length)
@@ -197,10 +209,7 @@ describe.each([
     // The order of the rows is not important. Extra rows will not cause the
     // assertion to fail.
     async toContain(expectedRows: any[]) {
-      const { rows: foundRows } = await config.api.row.search(table._id!, {
-        ...this.query,
-        tableId: table._id!,
-      })
+      const foundRows = await this.performSearch()
 
       // eslint-disable-next-line jest/no-standalone-expect
       expect([...foundRows]).toEqual(
@@ -217,10 +226,7 @@ describe.each([
     }
 
     async toHaveLength(length: number) {
-      const { rows: foundRows } = await config.api.row.search(table._id!, {
-        ...this.query,
-        tableId: table._id!,
-      })
+      const foundRows = await this.performSearch()
 
       // eslint-disable-next-line jest/no-standalone-expect
       expect(foundRows).toHaveLength(length)
@ -296,214 +302,216 @@ describe.each([
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
// Ensure all bindings resolve and perform as expected
|
// We've decided not to try and support binding for in-memory search just now.
|
||||||
describe("bindings", () => {
|
!isInMemory &&
|
||||||
let globalUsers: any = []
|
describe("bindings", () => {
|
||||||
|
let globalUsers: any = []
|
||||||
|
|
||||||
const serverTime = new Date()
|
const serverTime = new Date()
|
||||||
|
|
||||||
// In MariaDB and MySQL we only store dates to second precision, so we need
|
// In MariaDB and MySQL we only store dates to second precision, so we need
|
||||||
// to remove milliseconds from the server time to ensure searches work as
|
// to remove milliseconds from the server time to ensure searches work as
|
||||||
// expected.
|
// expected.
|
||||||
serverTime.setMilliseconds(0)
|
serverTime.setMilliseconds(0)
|
||||||
|
|
||||||
const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
|
const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30)
|
||||||
|
|
||||||
const rows = (currentUser: User) => {
|
const rows = (currentUser: User) => {
|
||||||
return [
|
return [
|
||||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
{ name: currentUser.firstName, appointment: future.toISOString() },
|
{ name: currentUser.firstName, appointment: future.toISOString() },
|
||||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
{
|
{
|
||||||
name: "single user, session user",
|
name: "single user, session user",
|
||||||
single_user: JSON.stringify(currentUser),
|
single_user: JSON.stringify(currentUser),
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "single user",
|
|
||||||
single_user: JSON.stringify(globalUsers[0]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deprecated single user, session user",
|
|
||||||
deprecated_single_user: JSON.stringify([currentUser]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deprecated single user",
|
|
||||||
deprecated_single_user: JSON.stringify([globalUsers[0]]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "multi user",
|
|
||||||
multi_user: JSON.stringify(globalUsers),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "multi user with session user",
|
|
||||||
multi_user: JSON.stringify([...globalUsers, currentUser]),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deprecated multi user",
|
|
||||||
deprecated_multi_user: JSON.stringify(globalUsers),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deprecated multi user with session user",
|
|
||||||
deprecated_multi_user: JSON.stringify([...globalUsers, currentUser]),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
beforeAll(async () => {
|
|
||||||
// Set up some global users
|
|
||||||
globalUsers = await Promise.all(
|
|
||||||
Array(2)
|
|
||||||
.fill(0)
|
|
||||||
.map(async () => {
|
|
||||||
const globalUser = await config.globalUser()
|
|
||||||
const userMedataId = globalUser._id
|
|
||||||
? dbCore.generateUserMetadataID(globalUser._id)
|
|
||||||
: null
|
|
||||||
return {
|
|
||||||
_id: globalUser._id,
|
|
||||||
_meta: userMedataId,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
table = await createTable({
|
|
||||||
name: { name: "name", type: FieldType.STRING },
|
|
||||||
appointment: { name: "appointment", type: FieldType.DATETIME },
|
|
||||||
single_user: {
|
|
||||||
name: "single_user",
|
|
||||||
type: FieldType.BB_REFERENCE_SINGLE,
|
|
||||||
subtype: BBReferenceFieldSubType.USER,
|
|
||||||
},
|
|
||||||
deprecated_single_user: {
|
|
||||||
name: "deprecated_single_user",
|
|
||||||
type: FieldType.BB_REFERENCE,
|
|
||||||
subtype: BBReferenceFieldSubType.USER,
|
|
||||||
},
|
|
||||||
multi_user: {
|
|
||||||
name: "multi_user",
|
|
||||||
type: FieldType.BB_REFERENCE,
|
|
||||||
subtype: BBReferenceFieldSubType.USER,
|
|
||||||
constraints: {
|
|
||||||
type: "array",
|
|
||||||
},
|
},
|
||||||
},
|
{
|
||||||
deprecated_multi_user: {
|
name: "single user",
|
||||||
name: "deprecated_multi_user",
|
single_user: JSON.stringify(globalUsers[0]),
|
||||||
type: FieldType.BB_REFERENCE,
|
|
||||||
subtype: BBReferenceFieldSubType.USERS,
|
|
||||||
constraints: {
|
|
||||||
type: "array",
|
|
||||||
},
|
},
|
||||||
},
|
{
|
||||||
})
|
name: "deprecated single user, session user",
|
||||||
await createRows(rows(config.getUser()))
|
deprecated_single_user: JSON.stringify([currentUser]),
|
||||||
})
|
},
|
||||||
|
{
|
||||||
|
name: "deprecated single user",
|
||||||
|
deprecated_single_user: JSON.stringify([globalUsers[0]]),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multi user",
|
||||||
|
multi_user: JSON.stringify(globalUsers),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "multi user with session user",
|
||||||
|
multi_user: JSON.stringify([...globalUsers, currentUser]),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "deprecated multi user",
|
||||||
|
deprecated_multi_user: JSON.stringify(globalUsers),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "deprecated multi user with session user",
|
||||||
|
deprecated_multi_user: JSON.stringify([
|
||||||
|
...globalUsers,
|
||||||
|
currentUser,
|
||||||
|
]),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
// !! Current User is auto generated per run
|
beforeAll(async () => {
|
||||||
it("should return all rows matching the session user firstname", async () => {
|
// Set up some global users
|
||||||
await expectQuery({
|
globalUsers = await Promise.all(
|
||||||
equal: { name: "{{ [user].firstName }}" },
|
Array(2)
|
||||||
}).toContainExactly([
|
.fill(0)
|
||||||
{
|
.map(async () => {
|
||||||
name: config.getUser().firstName,
|
const globalUser = await config.globalUser()
|
||||||
appointment: future.toISOString(),
|
const userMedataId = globalUser._id
|
||||||
},
|
? dbCore.generateUserMetadataID(globalUser._id)
|
||||||
])
|
: null
|
||||||
})
|
return {
|
||||||
|
_id: globalUser._id,
|
||||||
|
_meta: userMedataId,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
it("should parse the date binding and return all rows after the resolved value", async () => {
|
table = await createTable({
|
||||||
await tk.withFreeze(serverTime, async () => {
|
name: { name: "name", type: FieldType.STRING },
|
||||||
await expectQuery({
|
appointment: { name: "appointment", type: FieldType.DATETIME },
|
||||||
range: {
|
single_user: {
|
||||||
appointment: {
|
name: "single_user",
|
||||||
low: "{{ [now] }}",
|
type: FieldType.BB_REFERENCE_SINGLE,
|
||||||
high: "9999-00-00T00:00:00.000Z",
|
subtype: BBReferenceFieldSubType.USER,
|
||||||
|
},
|
||||||
|
deprecated_single_user: {
|
||||||
|
name: "deprecated_single_user",
|
||||||
|
type: FieldType.BB_REFERENCE,
|
||||||
|
subtype: BBReferenceFieldSubType.USER,
|
||||||
|
},
|
||||||
|
multi_user: {
|
||||||
|
name: "multi_user",
|
||||||
|
type: FieldType.BB_REFERENCE,
|
||||||
|
subtype: BBReferenceFieldSubType.USER,
|
||||||
|
constraints: {
|
||||||
|
type: "array",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
deprecated_multi_user: {
|
||||||
|
name: "deprecated_multi_user",
|
||||||
|
type: FieldType.BB_REFERENCE,
|
||||||
|
subtype: BBReferenceFieldSubType.USERS,
|
||||||
|
constraints: {
|
||||||
|
type: "array",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
await createRows(rows(config.getUser()))
|
||||||
|
})
|
||||||
|
|
||||||
|
// !! Current User is auto generated per run
|
||||||
|
it("should return all rows matching the session user firstname", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
equal: { name: "{{ [user].firstName }}" },
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: config.getUser().firstName,
|
name: config.getUser().firstName,
|
||||||
appointment: future.toISOString(),
|
appointment: future.toISOString(),
|
||||||
},
|
},
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the date binding and return all rows after the resolved value", async () => {
|
||||||
|
await tk.withFreeze(serverTime, async () => {
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "{{ [now] }}",
|
||||||
|
high: "9999-00-00T00:00:00.000Z",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{
|
||||||
|
name: config.getUser().firstName,
|
||||||
|
appointment: future.toISOString(),
|
||||||
|
},
|
||||||
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse the date binding and return all rows before the resolved value", async () => {
|
||||||
|
await expectQuery({
|
||||||
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
|
high: "{{ [now] }}",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}).toContainExactly([
|
||||||
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
{ name: "serverDate", appointment: serverTime.toISOString() },
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
})
|
|
||||||
|
|
||||||
it("should parse the date binding and return all rows before the resolved value", async () => {
|
it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => {
|
||||||
await expectQuery({
|
const jsBinding = "return snippets.WeeksAgo();"
|
||||||
range: {
|
const encodedBinding = encodeJSBinding(jsBinding)
|
||||||
appointment: {
|
|
||||||
low: "0000-00-00T00:00:00.000Z",
|
await expectQuery({
|
||||||
high: "{{ [now] }}",
|
range: {
|
||||||
|
appointment: {
|
||||||
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
|
high: encodedBinding,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
}).toContainExactly([
|
||||||
}).toContainExactly([
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
])
|
||||||
{ name: "serverDate", appointment: serverTime.toISOString() },
|
})
|
||||||
])
|
|
||||||
})
|
|
||||||
|
|
||||||
it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => {
|
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
|
||||||
const jsBinding = "return snippets.WeeksAgo();"
|
const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();`
|
||||||
const encodedBinding = encodeJSBinding(jsBinding)
|
const encodedBinding = encodeJSBinding(jsBinding)
|
||||||
|
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
range: {
|
range: {
|
||||||
appointment: {
|
appointment: {
|
||||||
low: "0000-00-00T00:00:00.000Z",
|
low: "0000-00-00T00:00:00.000Z",
|
||||||
high: encodedBinding,
|
high: encodedBinding,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
}).toContainExactly([
|
||||||
}).toContainExactly([
|
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
||||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
||||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
])
|
||||||
])
|
})
|
||||||
})
|
|
||||||
|
|
||||||
it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => {
|
it("should match a single user row by the session user id", async () => {
|
||||||
const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();`
|
await expectQuery({
|
||||||
const encodedBinding = encodeJSBinding(jsBinding)
|
equal: { single_user: "{{ [user]._id }}" },
|
||||||
|
}).toContainExactly([
|
||||||
await expectQuery({
|
{
|
||||||
range: {
|
name: "single user, session user",
|
||||||
appointment: {
|
single_user: { _id: config.getUser()._id },
|
||||||
low: "0000-00-00T00:00:00.000Z",
|
|
||||||
high: encodedBinding,
|
|
||||||
},
|
},
|
||||||
},
|
])
|
||||||
}).toContainExactly([
|
})
|
||||||
{ name: "foo", appointment: "1982-01-05T00:00:00.000Z" },
|
|
||||||
{ name: "bar", appointment: "1995-05-06T00:00:00.000Z" },
|
|
||||||
])
|
|
||||||
})
|
|
||||||
|
|
||||||
it("should match a single user row by the session user id", async () => {
|
it("should match a deprecated single user row by the session user id", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
equal: { single_user: "{{ [user]._id }}" },
|
equal: { deprecated_single_user: "{{ [user]._id }}" },
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: "single user, session user",
|
name: "deprecated single user, session user",
|
||||||
single_user: { _id: config.getUser()._id },
|
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should match a deprecated single user row by the session user id", async () => {
|
|
||||||
await expectQuery({
|
|
||||||
equal: { deprecated_single_user: "{{ [user]._id }}" },
|
|
||||||
}).toContainExactly([
|
|
||||||
{
|
|
||||||
name: "deprecated single user, session user",
|
|
||||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
|
||||||
},
|
|
||||||
])
|
|
||||||
})
|
|
||||||
|
|
||||||
// TODO(samwho): fix for SQS
|
|
||||||
!isSqs &&
|
|
||||||
it("should match the session user id in a multi user field", async () => {
|
it("should match the session user id in a multi user field", async () => {
|
||||||
const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
|
const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
|
||||||
return { _id: user._id }
|
return { _id: user._id }
|
||||||
|
@@ -519,8 +527,6 @@ describe.each([
         ])
       })
 
-      // TODO(samwho): fix for SQS
-      !isSqs &&
       it("should match the session user id in a deprecated multi user field", async () => {
         const allUsers = [...globalUsers, config.getUser()].map((user: any) => {
           return { _id: user._id }
@@ -536,8 +542,6 @@ describe.each([
         ])
       })
 
-      // TODO(samwho): fix for SQS
-      !isSqs &&
       it("should not match the session user id in a multi user field", async () => {
         await expectQuery({
           notContains: { multi_user: ["{{ [user]._id }}"] },
@@ -552,8 +556,6 @@ describe.each([
         ])
       })
 
-      // TODO(samwho): fix for SQS
-      !isSqs &&
      it("should not match the session user id in a deprecated multi user field", async () => {
        await expectQuery({
          notContains: { deprecated_multi_user: ["{{ [user]._id }}"] },
@ -568,78 +570,78 @@ describe.each([
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => {
|
it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
oneOf: {
|
oneOf: {
|
||||||
single_user: [
|
single_user: [
|
||||||
"{{ default [user]._id '_empty_' }}",
|
"{{ default [user]._id '_empty_' }}",
|
||||||
globalUsers[0]._id,
|
globalUsers[0]._id,
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: "single user, session user",
|
name: "single user, session user",
|
||||||
single_user: { _id: config.getUser()._id },
|
single_user: { _id: config.getUser()._id },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "single user",
|
name: "single user",
|
||||||
single_user: { _id: globalUsers[0]._id },
|
single_user: { _id: globalUsers[0]._id },
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should match the session user id and a user table row id using helpers, user binding and a static user id. (deprecated single user)", async () => {
|
it("should match the session user id and a user table row id using helpers, user binding and a static user id. (deprecated single user)", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
oneOf: {
|
oneOf: {
|
||||||
deprecated_single_user: [
|
deprecated_single_user: [
|
||||||
"{{ default [user]._id '_empty_' }}",
|
"{{ default [user]._id '_empty_' }}",
|
||||||
globalUsers[0]._id,
|
globalUsers[0]._id,
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: "deprecated single user, session user",
|
name: "deprecated single user, session user",
|
||||||
deprecated_single_user: [{ _id: config.getUser()._id }],
|
deprecated_single_user: [{ _id: config.getUser()._id }],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "deprecated single user",
|
name: "deprecated single user",
|
||||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => {
|
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
oneOf: {
|
oneOf: {
|
||||||
single_user: [
|
single_user: [
|
||||||
"{{ default [user]._idx '_empty_' }}",
|
"{{ default [user]._idx '_empty_' }}",
|
||||||
globalUsers[0]._id,
|
globalUsers[0]._id,
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: "single user",
|
name: "single user",
|
||||||
single_user: { _id: globalUsers[0]._id },
|
single_user: { _id: globalUsers[0]._id },
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => {
|
it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => {
|
||||||
await expectQuery({
|
await expectQuery({
|
||||||
oneOf: {
|
oneOf: {
|
||||||
deprecated_single_user: [
|
deprecated_single_user: [
|
||||||
"{{ default [user]._idx '_empty_' }}",
|
"{{ default [user]._idx '_empty_' }}",
|
||||||
globalUsers[0]._id,
|
globalUsers[0]._id,
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}).toContainExactly([
|
}).toContainExactly([
|
||||||
{
|
{
|
||||||
name: "deprecated single user",
|
name: "deprecated single user",
|
||||||
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
deprecated_single_user: [{ _id: globalUsers[0]._id }],
|
||||||
},
|
},
|
||||||
])
|
])
|
||||||
|
})
|
||||||
})
|
})
|
||||||
})
|
|
||||||
|
|
||||||
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
|
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
|
@ -1062,13 +1064,13 @@ describe.each([
|
||||||
|
|
||||||
!isInternal &&
|
!isInternal &&
|
||||||
describe("datetime - time only", () => {
|
describe("datetime - time only", () => {
|
||||||
const T_1000 = "10:00"
|
const T_1000 = "10:00:00"
|
||||||
const T_1045 = "10:45"
|
const T_1045 = "10:45:00"
|
||||||
const T_1200 = "12:00"
|
const T_1200 = "12:00:00"
|
||||||
const T_1530 = "15:30"
|
const T_1530 = "15:30:00"
|
||||||
const T_0000 = "00:00"
|
const T_0000 = "00:00:00"
|
||||||
|
|
||||||
const UNEXISTING_TIME = "10:01"
|
const UNEXISTING_TIME = "10:01:00"
|
||||||
|
|
||||||
const NULL_TIME__ID = `null_time__id`
|
const NULL_TIME__ID = `null_time__id`
|
||||||
|
|
||||||
|
@@ -1262,6 +1264,8 @@ describe.each([
         { numbers: ["three"] },
       ]))
 
+    // Not sure if this is correct behaviour but changing it would be a
+    // breaking change.
     it("finds all with empty list", () =>
       expectQuery({ notContains: { numbers: [] } }).toContainExactly([
         { numbers: ["one", "two"] },
@@ -1536,38 +1540,34 @@ describe.each([
       })
     })
 
-  // TODO(samwho): fix for SQS
-  !isSqs &&
-    describe("pagination", () => {
-      it("should paginate through all rows", async () => {
+  describe("pagination", () => {
+    it("should paginate through all rows", async () => {
       // @ts-ignore
       let bookmark: string | number = undefined
       let rows: Row[] = []
 
       // eslint-disable-next-line no-constant-condition
       while (true) {
        const response = await config.api.row.search(table._id!, {
          tableId: table._id!,
          limit: 3,
          query: {},
          bookmark,
          paginate: true,
        })
 
        rows.push(...response.rows)
 
        if (!response.bookmark || !response.hasNextPage) {
          break
        }
-        bookmark = response.bookmark
-      }
 
-        expect(rows).toHaveLength(10)
-        expect(rows.map(row => row.auto)).toEqual(
-          expect.arrayContaining([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
-        )
-      })
-    })
+        bookmark = response.bookmark
+      }
+
+      const autoValues = rows.map(row => row.auto).sort((a, b) => a - b)
+      expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
+    })
+  })
 
   describe("field name 1:name", () => {
@@ -1746,9 +1746,12 @@ describe.each([
 
   // This will never work for Lucene.
   !isLucene &&
+    // It also can't work for in-memory searching because the related table name
+    // isn't available.
+    !isInMemory &&
     describe("relations", () => {
       let otherTable: Table
-      let rows: Row[]
+      let otherRows: Row[]
 
       beforeAll(async () => {
         otherTable = await createTable({
@@ -1768,7 +1771,7 @@ describe.each([
           },
         })
 
-        rows = await Promise.all([
+        otherRows = await Promise.all([
          config.api.row.save(otherTable._id!, { one: "foo" }),
          config.api.row.save(otherTable._id!, { one: "bar" }),
        ])
@@ -1776,18 +1779,22 @@ describe.each([
         await Promise.all([
           config.api.row.save(table._id!, {
             two: "foo",
-            other: [rows[0]._id],
+            other: [otherRows[0]._id],
           }),
           config.api.row.save(table._id!, {
             two: "bar",
-            other: [rows[1]._id],
+            other: [otherRows[1]._id],
           }),
         ])
 
+        rows = await config.api.row.fetch(table._id!)
       })
 
       it("can search through relations", () =>
         expectQuery({
           equal: { [`${otherTable.name}.one`]: "foo" },
-        }).toContainExactly([{ two: "foo", other: [{ _id: rows[0]._id }] }]))
+        }).toContainExactly([
+          { two: "foo", other: [{ _id: otherRows[0]._id }] },
+        ]))
     })
 })

@@ -23,16 +23,15 @@ const getCacheKey = (appId: string) => `appmigrations_${env.VERSION}_${appId}`
 export async function getAppMigrationVersion(appId: string): Promise<string> {
   const cacheKey = getCacheKey(appId)
 
-  let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)
+  let version: string | undefined = await cache.get(cacheKey)
 
   // returned cached version if we found one
-  if (metadata?.version) {
-    return metadata.version
+  if (version) {
+    return version
   }
 
-  let version
   try {
-    metadata = await getFromDB(appId)
+    const metadata = await getFromDB(appId)
     version = metadata.version || ""
   } catch (err: any) {
     if (err.status !== 404) {

@@ -14,14 +14,10 @@ import {
   EmptyFilterOption,
   SearchFilters,
   Table,
+  SortOrder,
 } from "@budibase/types"
 import { db as dbCore } from "@budibase/backend-core"
 
-enum SortOrder {
-  ASCENDING = "ascending",
-  DESCENDING = "descending",
-}
-
 const SortOrderPretty = {
   [SortOrder.ASCENDING]: "Ascending",
   [SortOrder.DESCENDING]: "Descending",

@@ -70,11 +70,6 @@ export enum DatasourceAuthTypes {
   GOOGLE = "google",
 }
 
-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export const USERS_TABLE_SCHEMA: Table = {
   _id: "ta_users",
   type: "table",

@@ -566,7 +566,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
         query.filters.equal[`_${GOOGLE_SHEETS_PRIMARY_KEY}`] = id
       }
     }
-    let filtered = dataFilters.runQuery(rows, query.filters)
+    let filtered = dataFilters.runQuery(rows, query.filters || {})
     if (hasFilters && query.paginate) {
       filtered = filtered.slice(offset, offset + limit)
     }

@@ -29,7 +29,7 @@
     "filters": {},
     "sort": {
       "firstname": {
-        "direction": "ASCENDING"
+        "direction": "ascending"
       }
     },
     "paginate": {
@@ -65,9 +65,7 @@
   "table": {
     "type": "table",
     "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons",
-    "primary": [
-      "personid"
-    ],
+    "primary": ["personid"],
     "name": "persons",
     "schema": {
       "year": {
@@ -122,12 +120,7 @@
         "name": "type",
         "constraints": {
           "presence": false,
-          "inclusion": [
-            "support",
-            "designer",
-            "programmer",
-            "qa"
-          ]
+          "inclusion": ["support", "designer", "programmer", "qa"]
         }
       },
       "city": {
@@ -180,4 +173,4 @@
     "persons": "a",
     "tasks": "b"
   }
 }

@@ -30,7 +30,7 @@
     },
     "sort": {
       "productname": {
-        "direction": "ASCENDING"
+        "direction": "ascending"
       }
     },
     "paginate": {
@@ -60,9 +60,7 @@
   "table": {
     "type": "table",
     "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products",
-    "primary": [
-      "productid"
-    ],
+    "primary": ["productid"],
     "name": "products",
     "schema": {
       "productname": {
@@ -106,4 +104,4 @@
     "tasks": "b",
     "products_tasks": "c"
   }
 }

@@ -23,7 +23,7 @@
     },
     "sort": {
       "productname": {
-        "direction": "ASCENDING"
+        "direction": "ascending"
      }
    },
    "paginate": {
@@ -50,9 +50,7 @@
   "table": {
     "type": "table",
     "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products",
-    "primary": [
-      "productid"
-    ],
+    "primary": ["productid"],
     "name": "products",
     "schema": {
       "productname": {
@@ -91,4 +89,4 @@
     "primaryDisplay": "productname"
   }
  }
 }

@@ -56,7 +56,7 @@
     },
     "sort": {
       "taskname": {
-        "direction": "ASCENDING"
+        "direction": "ascending"
       }
     },
     "paginate": {
@@ -106,9 +106,7 @@
   "table": {
     "type": "table",
     "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks",
-    "primary": [
-      "taskid"
-    ],
+    "primary": ["taskid"],
     "name": "tasks",
     "schema": {
       "executorid": {
@@ -199,4 +197,4 @@
     "persons": "c",
     "products_tasks": "d"
   }
 }

@@ -4,6 +4,7 @@ import {
   RowSearchParams,
   SearchFilters,
   SearchResponse,
+  SortOrder,
 } from "@budibase/types"
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
@@ -78,6 +79,10 @@ export async function search(
     }
   }
 
+  if (options.sortOrder) {
+    options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
+  }
+
   const table = await sdk.tables.getTable(options.tableId)
   options = searchInputMapping(table, options)
 

@@ -1,6 +1,5 @@
 import {
   SortJson,
-  SortDirection,
   Operation,
   PaginationJson,
   IncludeRelationship,
@@ -9,6 +8,7 @@ import {
   RowSearchParams,
   SearchResponse,
   Table,
+  SortOrder,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -56,8 +56,8 @@ export async function search(
   if (params.sort) {
     const direction =
       params.sortOrder === "descending"
-        ? SortDirection.DESCENDING
-        : SortDirection.ASCENDING
+        ? SortOrder.DESCENDING
+        : SortOrder.ASCENDING
     sort = {
       [params.sort]: { direction },
     }

@@ -8,7 +8,6 @@ import {
   RowSearchParams,
   SearchFilters,
   SearchResponse,
-  SortDirection,
   SortOrder,
   SortType,
   SqlClient,
@@ -186,13 +185,9 @@ export async function search(
     const sortField = table.schema[params.sort]
     const sortType =
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
-    const sortDirection =
-      params.sortOrder === SortOrder.ASCENDING
-        ? SortDirection.ASCENDING
-        : SortDirection.DESCENDING
     request.sort = {
       [sortField.name]: {
-        direction: sortDirection,
+        direction: params.sortOrder || SortOrder.DESCENDING,
         type: sortType as SortType,
       },
     }
@@ -201,14 +196,15 @@ export async function search(
   if (params.bookmark && typeof params.bookmark !== "number") {
     throw new Error("Unable to paginate with string based bookmarks")
   }
-  const bookmark: number = (params.bookmark as number) || 1
-  const limit = params.limit
+
+  const bookmark: number = (params.bookmark as number) || 0
   if (paginate && params.limit) {
     request.paginate = {
       limit: params.limit + 1,
-      page: bookmark,
+      offset: bookmark * params.limit,
     }
   }
 
   try {
     const rows = await runSqlQuery(request, allTables)
 
@@ -247,13 +243,12 @@ export async function search(
   }
 
   // check for pagination
-  if (paginate && limit) {
+  if (paginate) {
     const response: SearchResponse<Row> = {
       rows: finalRows,
     }
-    const hasNextPage = !!nextRow
-    response.hasNextPage = hasNextPage
-    if (hasNextPage) {
+    if (nextRow) {
+      response.hasNextPage = true
       response.bookmark = bookmark + 1
     }
     if (rowCount != null) {

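Note: with the change above, the numeric bookmark now acts as a zero-based page index that is converted into a row offset, and the query always asks for one extra row (limit + 1) so that the presence of the extra row can mark hasNextPage. A small sketch of the arithmetic, under the same assumption of a numeric bookmark (the helper name is illustrative only):

    // bookmark 0 -> offset 0, bookmark 1 -> offset = limit, and so on.
    function toPaginateOptions(bookmark: number, limit: number) {
      return {
        limit: limit + 1, // request one extra row to detect a following page
        offset: bookmark * limit,
      }
    }
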
@@ -7,13 +7,16 @@ import {
   SearchFilters,
   SearchQueryFields,
   SearchFilterOperator,
-  SortDirection,
   SortType,
   FieldConstraints,
+  SortOrder,
+  RowSearchParams,
+  EmptyFilterOption,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet, schema } from "./helpers"
+import _ from "lodash"
 
 const HBS_REGEX = /{{([^{].*?)}}/g
 
@@ -259,12 +262,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

+export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
+  let result = runQuery(docs, query.query)
+  if (query.sort) {
+    result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+  }
+  if (query.limit) {
+    result = limit(result, query.limit.toString())
+  }
+  return result
+}
+
 /**
  * Performs a client-side search on an array of data
  * @param docs the data
  * @param query the JSON query
  */
-export const runQuery = (docs: any[], query?: SearchFilters) => {
+export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }

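For orientation, a rough usage sketch of the in-memory search helper added above, assuming the helper is in scope; the rows and query values are invented, and fields of RowSearchParams that are irrelevant here are omitted:

import { SortOrder } from "@budibase/types"

const rows = [
  { name: "apple", qty: 3 },
  { name: "apricot", qty: 1 },
  { name: "banana", qty: 5 },
]

// Shaped like RowSearchParams: filter to names starting with "ap",
// sort by name ascending, cap the result at 10 rows.
const params: any = {
  query: { string: { name: "ap" } },
  sort: "name",
  sortOrder: SortOrder.ASCENDING,
  limit: 10,
}

const results = search(rows, params) // -> the "apple" and "apricot" rows
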
@@ -272,105 +286,170 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
     return docs
   }

-  // Make query consistent first
   query = cleanupQuery(query)

-  // Iterates over a set of filters and evaluates a fail function against a doc
+  if (
+    !hasFilters(query) &&
+    query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
+  ) {
+    return []
+  }
+
   const match =
     (
       type: SearchFilterOperator,
-      failFn: (docValue: any, testValue: any) => boolean
+      test: (docValue: any, testValue: any) => boolean
     ) =>
-    (doc: any) => {
-      const filters = Object.entries(query![type] || {})
-      for (let i = 0; i < filters.length; i++) {
-        const [key, testValue] = filters[i]
-        const docValue = deepGet(doc, removeKeyNumbering(key))
-        if (failFn(docValue, testValue)) {
+    (doc: Record<string, any>) => {
+      for (const [key, testValue] of Object.entries(query[type] || {})) {
+        const result = test(deepGet(doc, removeKeyNumbering(key)), testValue)
+        if (query.allOr && result) {
+          return true
+        } else if (!query.allOr && !result) {
           return false
         }
       }
       return true
     }

-  // Process a string match (fails if the value does not start with the string)
   const stringMatch = match(
     SearchFilterOperator.STRING,
-    (docValue: string, testValue: string) => {
-      return (
-        !docValue ||
-        !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())
-      )
+    (docValue: any, testValue: any) => {
+      if (!(typeof docValue === "string")) {
+        return false
+      }
+      if (!(typeof testValue === "string")) {
+        return false
+      }
+      return docValue.toLowerCase().startsWith(testValue.toLowerCase())
     }
   )

-  // Process a fuzzy match (treat the same as starts with when running locally)
   const fuzzyMatch = match(
     SearchFilterOperator.FUZZY,
-    (docValue: string, testValue: string) => {
-      return (
-        !docValue ||
-        !docValue?.toLowerCase().startsWith(testValue?.toLowerCase())
-      )
+    (docValue: any, testValue: any) => {
+      if (!(typeof docValue === "string")) {
+        return false
+      }
+      if (!(typeof testValue === "string")) {
+        return false
+      }
+      return docValue.toLowerCase().includes(testValue.toLowerCase())
     }
   )

-  // Process a range match
   const rangeMatch = match(
     SearchFilterOperator.RANGE,
-    (
-      docValue: string | number | null,
-      testValue: { low: number; high: number }
-    ) => {
+    (docValue: any, testValue: any) => {
       if (docValue == null || docValue === "") {
-        return true
+        return false
       }
-      if (!isNaN(+docValue)) {
-        return +docValue < testValue.low || +docValue > testValue.high
+      if (_.isObject(testValue.low) && _.isEmpty(testValue.low)) {
+        testValue.low = undefined
       }
-      if (dayjs(docValue).isValid()) {
-        return (
-          new Date(docValue).getTime() < new Date(testValue.low).getTime() ||
-          new Date(docValue).getTime() > new Date(testValue.high).getTime()
-        )
+      if (_.isObject(testValue.high) && _.isEmpty(testValue.high)) {
+        testValue.high = undefined
       }

+      if (testValue.low == null && testValue.high == null) {
+        return false
+      }
+
+      const docNum = +docValue
+      if (!isNaN(docNum)) {
+        const lowNum = +testValue.low
+        const highNum = +testValue.high
+        if (!isNaN(lowNum) && !isNaN(highNum)) {
+          return docNum >= lowNum && docNum <= highNum
+        } else if (!isNaN(lowNum)) {
+          return docNum >= lowNum
+        } else if (!isNaN(highNum)) {
+          return docNum <= highNum
+        }
+      }
+
+      const docDate = dayjs(docValue)
+      if (docDate.isValid()) {
+        const lowDate = dayjs(testValue.low || "0000-00-00T00:00:00.000Z")
+        const highDate = dayjs(testValue.high || "9999-00-00T00:00:00.000Z")
+        if (lowDate.isValid() && highDate.isValid()) {
+          return (
+            (docDate.isAfter(lowDate) && docDate.isBefore(highDate)) ||
+            docDate.isSame(lowDate) ||
+            docDate.isSame(highDate)
+          )
+        } else if (lowDate.isValid()) {
+          return docDate.isAfter(lowDate) || docDate.isSame(lowDate)
+        } else if (highDate.isValid()) {
+          return docDate.isBefore(highDate) || docDate.isSame(highDate)
+        }
+      }
+
+      if (testValue.low != null && testValue.high != null) {
+        return docValue >= testValue.low && docValue <= testValue.high
+      } else if (testValue.low != null) {
+        return docValue >= testValue.low
+      } else if (testValue.high != null) {
+        return docValue <= testValue.high
+      }
+
       return false
     }
   )

-  // Process an equal match (fails if the value is different)
-  const equalMatch = match(
-    SearchFilterOperator.EQUAL,
-    (docValue: any, testValue: string | null) => {
-      return testValue != null && testValue !== "" && docValue !== testValue
-    }
-  )
-
-  // Process a not-equal match (fails if the value is the same)
+  // This function exists to check that either the docValue is equal to the
+  // testValue, or if the docValue is an object or array of objects, that the
+  // _id of the docValue is equal to the testValue.
+  const _valueMatches = (docValue: any, testValue: any) => {
+    if (Array.isArray(docValue)) {
+      for (const item of docValue) {
+        if (_valueMatches(item, testValue)) {
+          return true
+        }
+      }
+      return false
+    }
+
+    if (
+      docValue &&
+      typeof docValue === "object" &&
+      typeof testValue === "string"
+    ) {
+      return docValue._id === testValue
+    }
+
+    return docValue === testValue
+  }
+
+  const not =
+    <T extends any[]>(f: (...args: T) => boolean) =>
+    (...args: T): boolean =>
+      !f(...args)
+
+  const equalMatch = match(SearchFilterOperator.EQUAL, _valueMatches)
   const notEqualMatch = match(
     SearchFilterOperator.NOT_EQUAL,
-    (docValue: any, testValue: string | null) => {
-      return testValue != null && testValue !== "" && docValue === testValue
-    }
+    not(_valueMatches)
   )

-  // Process an empty match (fails if the value is not empty)
-  const emptyMatch = match(
-    SearchFilterOperator.EMPTY,
-    (docValue: string | null) => {
-      return docValue != null && docValue !== ""
-    }
-  )
-
-  // Process a not-empty match (fails is the value is empty)
-  const notEmptyMatch = match(
-    SearchFilterOperator.NOT_EMPTY,
-    (docValue: string | null) => {
-      return docValue == null || docValue === ""
-    }
-  )
+  const _empty = (docValue: any) => {
+    if (typeof docValue === "string") {
+      return docValue === ""
+    }
+    if (Array.isArray(docValue)) {
+      return docValue.length === 0
+    }
+    if (typeof docValue === "object") {
+      return Object.keys(docValue).length === 0
+    }
+    return docValue == null
+  }
+
+  const emptyMatch = match(SearchFilterOperator.EMPTY, _empty)
+  const notEmptyMatch = match(SearchFilterOperator.NOT_EMPTY, not(_empty))

-  // Process an includes match (fails if the value is not included)
   const oneOf = match(
     SearchFilterOperator.ONE_OF,
     (docValue: any, testValue: any) => {

@@ -380,61 +459,92 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
           testValue = testValue.map((item: string) => parseFloat(item))
         }
       }
-      return !testValue?.includes(docValue)
+      if (!Array.isArray(testValue)) {
+        return false
+      }
+
+      return testValue.some(item => _valueMatches(docValue, item))
     }
   )

-  const containsAny = match(
-    SearchFilterOperator.CONTAINS_ANY,
-    (docValue: any, testValue: any) => {
-      return !docValue?.includes(...testValue)
+  const _contains =
+    (f: "some" | "every") => (docValue: any, testValue: any) => {
+      if (!Array.isArray(docValue)) {
+        return false
+      }
+
+      if (typeof testValue === "string") {
+        testValue = testValue.split(",")
+        if (typeof docValue[0] === "number") {
+          testValue = testValue.map((item: string) => parseFloat(item))
+        }
+      }
+
+      if (!Array.isArray(testValue)) {
+        return false
+      }
+
+      if (testValue.length === 0) {
+        return true
+      }
+
+      return testValue[f](item => _valueMatches(docValue, item))
     }
-  )

   const contains = match(
     SearchFilterOperator.CONTAINS,
-    (docValue: string | any[], testValue: any[]) => {
-      return !testValue?.every((item: any) => docValue?.includes(item))
+    (docValue: any, testValue: any) => {
+      if (Array.isArray(testValue) && testValue.length === 0) {
+        return true
+      }
+      return _contains("every")(docValue, testValue)
     }
   )

   const notContains = match(
     SearchFilterOperator.NOT_CONTAINS,
-    (docValue: string | any[], testValue: any[]) => {
-      return testValue?.every((item: any) => docValue?.includes(item))
+    (docValue: any, testValue: any) => {
+      // Not sure if this is logically correct, but at the time this code was
+      // written the search endpoint behaved this way and we wanted to make this
+      // local search match its behaviour, so we had to do this.
+      if (Array.isArray(testValue) && testValue.length === 0) {
+        return true
+      }
+      return not(_contains("every"))(docValue, testValue)
     }
   )
+  const containsAny = match(
+    SearchFilterOperator.CONTAINS_ANY,
+    _contains("some")
+  )

-  const docMatch = (doc: any) => {
-    const filterFunctions: Record<SearchFilterOperator, (doc: any) => boolean> =
-      {
-        string: stringMatch,
-        fuzzy: fuzzyMatch,
-        range: rangeMatch,
-        equal: equalMatch,
-        notEqual: notEqualMatch,
-        empty: emptyMatch,
-        notEmpty: notEmptyMatch,
-        oneOf: oneOf,
-        contains: contains,
-        containsAny: containsAny,
-        notContains: notContains,
-      }
+  const docMatch = (doc: Record<string, any>) => {
+    const filterFunctions = {
+      string: stringMatch,
+      fuzzy: fuzzyMatch,
+      range: rangeMatch,
+      equal: equalMatch,
+      notEqual: notEqualMatch,
+      empty: emptyMatch,
+      notEmpty: notEmptyMatch,
+      oneOf: oneOf,
+      contains: contains,
+      containsAny: containsAny,
+      notContains: notContains,
+    }

-    const activeFilterKeys: SearchFilterOperator[] = Object.entries(query || {})
+    const results = Object.entries(query || {})
       .filter(
-        ([key, value]: [string, any]) =>
+        ([key, value]) =>
           !["allOr", "onEmptyFilter"].includes(key) &&
           value &&
-          Object.keys(value as Record<string, any>).length > 0
+          Object.keys(value).length > 0
       )
-      .map(([key]) => key as any)
-
-    const results: boolean[] = activeFilterKeys.map(filterKey => {
-      return filterFunctions[filterKey]?.(doc) ?? false
-    })
+      .map(([key]) => {
+        return filterFunctions[key as SearchFilterOperator]?.(doc) ?? false
+      })

-    if (query!.allOr) {
+    if (query.allOr) {
       return results.some(result => result === true)
     } else {
       return results.every(result => result === true)

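The refactor above folds most operators into one _valueMatches predicate and composes the rest from it: not() inverts any predicate, and _contains parameterises over "some" versus "every" semantics. A standalone sketch of the same pattern (names mirror the diff, the data is invented):

// Minimal version of the predicate-combinator pattern used in the hunk above.
const valueMatches = (docValue: any, testValue: any): boolean =>
  Array.isArray(docValue)
    ? docValue.some(item => valueMatches(item, testValue))
    : docValue === testValue

const not =
  <T extends any[]>(f: (...args: T) => boolean) =>
  (...args: T): boolean =>
    !f(...args)

const containsWith =
  (mode: "some" | "every") =>
  (docValue: any[], testValue: any[]): boolean => {
    const check = (item: any) => valueMatches(docValue, item)
    return mode === "some" ? testValue.some(check) : testValue.every(check)
  }

const equal = valueMatches                  // equal
const notEqual = not(valueMatches)          // notEqual
const containsAll = containsWith("every")   // contains
const anyOf = containsWith("some")          // containsAny / oneOf

console.log(equal(2, 2), notEqual(2, 3))                        // true true
console.log(containsAll([1, 2, 3], [1, 2]), anyOf([1], [4, 1])) // true true
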
@@ -454,24 +564,35 @@ export const runQuery = (docs: any[], query?: SearchFilters) => {
 export const sort = (
   docs: any[],
   sort: string,
-  sortOrder: SortDirection,
+  sortOrder: SortOrder,
   sortType = SortType.STRING
 ) => {
   if (!sort || !sortOrder || !sortType) {
     return docs
   }
-  const parse =
-    sortType === "string" ? (x: any) => `${x}` : (x: string) => parseFloat(x)
+  const parse = (x: any) => {
+    if (x == null) {
+      return x
+    }
+    if (sortType === "string") {
+      return `${x}`
+    }
+    return parseFloat(x)
+  }

   return docs
     .slice()
     .sort((a: { [x: string]: any }, b: { [x: string]: any }) => {
       const colA = parse(a[sort])
       const colB = parse(b[sort])

+      const result = colB == null || colA > colB ? 1 : -1
       if (sortOrder.toLowerCase() === "descending") {
-        return colA > colB ? -1 : 1
-      } else {
-        return colA > colB ? 1 : -1
+        return result * -1
       }
+
+      return result
     })
 }

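The comparator change above computes a single ascending result (treating a null second operand as smaller) and simply negates it for descending order. A small sketch of that trick, outside the diff:

// Sketch: one ascending comparator, negated for descending order.
const compare = (a: any, b: any, descending = false) => {
  const result = b == null || a > b ? 1 : -1
  return descending ? result * -1 : result
}

console.log([3, 1, 2].slice().sort((a, b) => compare(a, b)))       // [ 1, 2, 3 ]
console.log([3, 1, 2].slice().sort((a, b) => compare(a, b, true))) // [ 3, 2, 1 ]
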
@@ -1,415 +0,0 @@
-import {
-  SearchFilters,
-  SearchFilterOperator,
-  FieldType,
-  SearchFilter,
-} from "@budibase/types"
-import { buildQuery, runQuery } from "../filters"
-
-describe("runQuery", () => {
-  const docs = [
-    {
-      order_id: 1,
-      customer_id: 259,
-      order_status: 4,
-      order_date: "2016-01-01T00:00:00.000Z",
-      required_date: "2016-01-03T00:00:00.000Z",
-      shipped_date: "2016-01-03T00:00:00.000Z",
-      store_id: 1,
-      staff_id: 2,
-      description: "Large box",
-      label: undefined,
-    },
-    {
-      order_id: 2,
-      customer_id: 1212,
-      order_status: 4,
-      order_date: "2016-01-05T00:00:00.000Z",
-      required_date: "2016-01-04T00:00:00.000Z",
-      shipped_date: "2016-01-03T00:00:00.000Z",
-      store_id: 2,
-      staff_id: 6,
-      description: "Small box",
-      label: "FRAGILE",
-    },
-    {
-      order_id: 3,
-      customer_id: 523,
-      order_status: 5,
-      order_date: "2016-01-12T00:00:00.000Z",
-      required_date: "2016-01-05T00:00:00.000Z",
-      shipped_date: "2016-01-03T00:00:00.000Z",
-      store_id: 2,
-      staff_id: 7,
-      description: "Heavy box",
-      label: "HEAVY",
-    },
-  ]
-
-  function buildQuery(filters: { [filterKey: string]: any }): SearchFilters {
-    const query: SearchFilters = {
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {},
-      notEqual: {},
-      empty: {},
-      notEmpty: {},
-      contains: {},
-      notContains: {},
-      oneOf: {},
-      containsAny: {},
-      allOr: false,
-    }
-
-    for (const filterKey in filters) {
-      query[filterKey as SearchFilterOperator] = filters[filterKey]
-    }
-
-    return query
-  }
-
-  it("should return input docs if no search query is provided", () => {
-    expect(runQuery(docs)).toBe(docs)
-  })
-
-  it("should return matching rows for equal filter", () => {
-    const query = buildQuery({
-      equal: { order_status: 4 },
-    })
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
-  })
-
-  it("should return matching row for notEqual filter", () => {
-    const query = buildQuery({
-      notEqual: { order_status: 4 },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
-  })
-
-  it("should return starts with matching rows for fuzzy and string filters", () => {
-    expect(
-      runQuery(
-        docs,
-        buildQuery({
-          fuzzy: { description: "sm" },
-        })
-      ).map(row => row.description)
-    ).toEqual(["Small box"])
-    expect(
-      runQuery(
-        docs,
-        buildQuery({
-          string: { description: "SM" },
-        })
-      ).map(row => row.description)
-    ).toEqual(["Small box"])
-  })
-
-  it("should return rows within a range filter", () => {
-    const query = buildQuery({
-      range: {
-        customer_id: {
-          low: 500,
-          high: 1000,
-        },
-      },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
-  })
-
-  it("should return rows with numeric strings within a range filter", () => {
-    const query = buildQuery({
-      range: {
-        customer_id: {
-          low: "500",
-          high: "1000",
-        },
-      },
-    })
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
-  })
-
-  it("should return rows with ISO date strings within a range filter", () => {
-    const query = buildQuery({
-      range: {
-        order_date: {
-          low: "2016-01-04T00:00:00.000Z",
-          high: "2016-01-11T00:00:00.000Z",
-        },
-      },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
-  })
-
-  it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
-    const docs = [
-      {
-        order_id: 4,
-        customer_id: 1758,
-        order_status: 5,
-        order_date: "{{ Binding.INVALID }}",
-        required_date: "2017-03-05T00:00:00.000Z",
-        shipped_date: "2017-03-03T00:00:00.000Z",
-        store_id: 2,
-        staff_id: 7,
-        description: undefined,
-        label: "",
-      },
-    ]
-
-    const query = buildQuery({
-      range: {
-        order_date: {
-          low: "2016-01-04T00:00:00.000Z",
-          high: "2016-01-11T00:00:00.000Z",
-        },
-      },
-    })
-
-    expect(runQuery(docs, query)).toEqual(docs)
-  })
-
-  it("should return rows with matches on empty filter", () => {
-    const query = buildQuery({
-      empty: {
-        label: null,
-      },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1])
-  })
-
-  it("should return rows with matches on notEmpty filter", () => {
-    const query = buildQuery({
-      notEmpty: {
-        label: null,
-      },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
-  })
-
-  it.each([[523, 259], "523,259"])(
-    "should return rows with matches on numeric oneOf filter",
-    input => {
-      const query = buildQuery({
-        oneOf: {
-          customer_id: input,
-        },
-      })
-
-      expect(runQuery(docs, query).map(row => row.customer_id)).toEqual([
-        259, 523,
-      ])
-    }
-  )
-
-  it.each([
-    [false, []],
-    [true, [1, 2, 3]],
-  ])("should return %s if allOr is %s ", (allOr, expectedResult) => {
-    const query = buildQuery({
-      allOr,
-      oneOf: { staff_id: [10] },
-      contains: { description: ["box"] },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual(
-      expectedResult
-    )
-  })
-
-  it("should return matching results if allOr is true and only one filter matches with different operands", () => {
-    const query = buildQuery({
-      allOr: true,
-      equal: { order_status: 4 },
-      oneOf: { label: ["FRAGILE"] },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
-  })
-
-  it("should handle when a value is null or undefined", () => {
-    const query = buildQuery({
-      allOr: true,
-      equal: { order_status: null },
-      oneOf: { label: ["FRAGILE"] },
-    })
-
-    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
-  })
-})
-
-describe("buildQuery", () => {
-  it("should return a basic search query template if the input is not an array", () => {
-    const filter: any = "NOT_AN_ARRAY"
-    expect(buildQuery(filter)).toEqual({
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {},
-      notEqual: {},
-      empty: {},
-      notEmpty: {},
-      contains: {},
-      notContains: {},
-      oneOf: {},
-      containsAny: {},
-    })
-  })
-
-  it("should parseFloat if the type is a number, but the value is a numeric string", () => {
-    const filter: SearchFilter[] = [
-      {
-        operator: SearchFilterOperator.EQUAL,
-        field: "customer_id",
-        type: FieldType.NUMBER,
-        value: "1212",
-      },
-      {
-        operator: SearchFilterOperator.ONE_OF,
-        field: "customer_id",
-        type: FieldType.NUMBER,
-        value: "1000,1212,3400",
-      },
-    ]
-    expect(buildQuery(filter)).toEqual({
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {
-        customer_id: 1212,
-      },
-      notEqual: {},
-      empty: {},
-      notEmpty: {},
-      contains: {},
-      notContains: {},
-      oneOf: {
-        customer_id: [1000, 1212, 3400],
-      },
-      containsAny: {},
-    })
-  })
-
-  it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => {
-    const filter: SearchFilter[] = [
-      {
-        operator: SearchFilterOperator.EQUAL,
-        field: "customer_id",
-        type: FieldType.NUMBER,
-        value: "{{ customer_id }}",
-      },
-      {
-        operator: SearchFilterOperator.ONE_OF,
-        field: "customer_id",
-        type: FieldType.NUMBER,
-        value: "{{ list_of_customer_ids }}",
-      },
-    ]
-    expect(buildQuery(filter)).toEqual({
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {
-        customer_id: "{{ customer_id }}",
-      },
-      notEqual: {},
-      empty: {},
-      notEmpty: {},
-      contains: {},
-      notContains: {},
-      oneOf: {
-        customer_id: "{{ list_of_customer_ids }}",
-      },
-      containsAny: {},
-    })
-  })
-
-  it("should cast string to boolean if the type is boolean", () => {
-    const filter: SearchFilter[] = [
-      {
-        operator: SearchFilterOperator.EQUAL,
-        field: "a",
-        type: FieldType.BOOLEAN,
-        value: "not_true",
-      },
-      {
-        operator: SearchFilterOperator.NOT_EQUAL,
-        field: "b",
-        type: FieldType.BOOLEAN,
-        value: "not_true",
-      },
-      {
-        operator: SearchFilterOperator.EQUAL,
-        field: "c",
-        type: FieldType.BOOLEAN,
-        value: "true",
-      },
-    ]
-    expect(buildQuery(filter)).toEqual({
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {
-        b: true,
-        c: true,
-      },
-      notEqual: {
-        a: true,
-      },
-      empty: {},
-      notEmpty: {},
-      contains: {},
-      notContains: {},
-      oneOf: {},
-      containsAny: {},
-    })
-  })
-
-  it("should split the string for contains operators", () => {
-    const filter: SearchFilter[] = [
-      {
-        operator: SearchFilterOperator.CONTAINS,
-        field: "description",
-        type: FieldType.ARRAY,
-        value: "Large box,Heavy box,Small box",
-      },
-      {
-        operator: SearchFilterOperator.NOT_CONTAINS,
-        field: "description",
-        type: FieldType.ARRAY,
-        value: "Large box,Heavy box,Small box",
-      },
-      {
-        operator: SearchFilterOperator.CONTAINS_ANY,
-        field: "description",
-        type: FieldType.ARRAY,
-        value: "Large box,Heavy box,Small box",
-      },
-    ]
-    expect(buildQuery(filter)).toEqual({
-      string: {},
-      fuzzy: {},
-      range: {},
-      equal: {},
-      notEqual: {},
-      empty: {},
-      notEmpty: {},
-      contains: {
-        description: ["Large box", "Heavy box", "Small box"],
-      },
-      notContains: {
-        description: ["Large box", "Heavy box", "Small box"],
-      },
-      oneOf: {},
-      containsAny: {
-        description: ["Large box", "Heavy box", "Small box"],
-      },
-    })
-  })
-})

@@ -23,11 +23,6 @@ export const RowOperations = [
   Operation.BULK_CREATE,
 ]

-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export enum QueryType {
   SQL = "sql",
   JSON = "json",

@@ -1,6 +1,6 @@
-import { Operation, SortDirection } from "./datasources"
+import { Operation } from "./datasources"
 import { Row, Table, DocumentType } from "../documents"
-import { SortType } from "../api"
+import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"

 export enum SearchFilterOperator {

@@ -77,7 +77,7 @@ export type SearchQueryFields = Omit<SearchFilters, "allOr" | "onEmptyFilter">

 export interface SortJson {
   [key: string]: {
-    direction: SortDirection
+    direction: SortOrder
     type?: SortType
   }
 }

@@ -85,6 +85,7 @@ export interface SortJson {
 export interface PaginationJson {
   limit: number
   page?: string | number
+  offset?: number
 }

 export interface RenameColumn {