Merge branch 'master' into fix/correcting-multi-relationships
commit 55f6610f0d
@@ -108,7 +108,7 @@ jobs:
       - name: Pull testcontainers images
         run: |
           docker pull testcontainers/ryuk:0.5.1 &
-          docker pull budibase/couchdb:v3.2.1-sql &
+          docker pull budibase/couchdb:v3.2.1-sqs &
           docker pull redis &

           wait $(jobs -p)

@@ -206,3 +206,21 @@ export function pagination<T>(
     nextPage,
   }
 }
+
+export function isSqsEnabledForTenant(): boolean {
+  const tenantId = getTenantId()
+  if (!env.SQS_SEARCH_ENABLE) {
+    return false
+  }
+
+  // This is to guard against the situation in tests where tests pass because
+  // we're not actually using SQS, we're using Lucene and the tests pass due to
+  // parity.
+  if (env.isTest() && env.SQS_SEARCH_ENABLE_TENANTS.length === 0) {
+    throw new Error(
+      "to enable SQS you must specify a list of tenants in the SQS_SEARCH_ENABLE_TENANTS env var"
+    )
+  }
+
+  return env.SQS_SEARCH_ENABLE_TENANTS.includes(tenantId)
+}
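The new `isSqsEnabledForTenant` helper combines the global `SQS_SEARCH_ENABLE` flag with the per-tenant allow-list added to the environment below. A minimal self-contained sketch of the gating behaviour (the real implementation lives in `@budibase/backend-core` and pulls the tenant from context; the `SqsEnv` shape and parameters here are illustrative only):

```typescript
// Illustrative sketch: mirrors the logic added in the hunk above.
type SqsEnv = {
  SQS_SEARCH_ENABLE?: string
  SQS_SEARCH_ENABLE_TENANTS: string[] // parsed from a comma-separated env var
  isTest(): boolean
}

function isSqsEnabledForTenant(env: SqsEnv, tenantId: string): boolean {
  if (!env.SQS_SEARCH_ENABLE) {
    return false
  }
  // tests must opt tenants in explicitly, so Lucene/SQS parity can't hide a
  // test that never actually exercised SQS
  if (env.isTest() && env.SQS_SEARCH_ENABLE_TENANTS.length === 0) {
    throw new Error("tests must list tenants in SQS_SEARCH_ENABLE_TENANTS")
  }
  return env.SQS_SEARCH_ENABLE_TENANTS.includes(tenantId)
}

// With SQS_SEARCH_ENABLE="1" and SQS_SEARCH_ENABLE_TENANTS="tenantA,tenantB":
//   isSqsEnabledForTenant(env, "tenantA") === true
//   isSqsEnabledForTenant(env, "tenantC") === false
```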
@@ -116,6 +116,9 @@ const environment = {
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
   COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
   SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
+  SQS_SEARCH_ENABLE_TENANTS:
+    process.env.SQS_SEARCH_ENABLE_TENANTS?.split(",") || [],
+  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -18,9 +18,10 @@ import {
   CouchFindOptions,
   DatabaseQueryOpts,
   SearchFilters,
-  SearchFilterOperator,
   SearchUsersRequest,
   User,
+  BasicOperator,
+  ArrayOperator,
 } from "@budibase/types"
 import * as context from "../context"
 import { getGlobalDB } from "../context"
@@ -46,9 +47,9 @@ function removeUserPassword(users: User | User[]) {

 export function isSupportedUserSearch(query: SearchFilters) {
   const allowed = [
-    { op: SearchFilterOperator.STRING, key: "email" },
-    { op: SearchFilterOperator.EQUAL, key: "_id" },
-    { op: SearchFilterOperator.ONE_OF, key: "_id" },
+    { op: BasicOperator.STRING, key: "email" },
+    { op: BasicOperator.EQUAL, key: "_id" },
+    { op: ArrayOperator.ONE_OF, key: "_id" },
   ]
   for (let [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {

@@ -11,7 +11,7 @@
     Label,
     Multiselect,
   } from "@budibase/bbui"
-  import { FieldType, SearchFilterOperator } from "@budibase/types"
+  import { ArrayOperator, FieldType } from "@budibase/types"
   import { generate } from "shortid"
   import { QueryUtils, Constants } from "@budibase/frontend-core"
   import { getContext } from "svelte"
@@ -268,7 +268,7 @@
   <slot name="binding" {filter} />
 {:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
   <Input disabled={filter.noValue} bind:value={filter.value} />
-{:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
+{:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === ArrayOperator.ONE_OF)}
   <Multiselect
     disabled={filter.noValue}
     options={getFieldOptions(filter.field)}

@@ -15,7 +15,7 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { events, context } from "@budibase/backend-core"
+import { events, context, db as dbCore } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   ContextUser,
@@ -324,7 +324,7 @@ class TableSaveFunctions {
       importRows: this.importRows,
       user: this.user,
     })
-    if (env.SQS_SEARCH_ENABLE) {
+    if (dbCore.isSqsEnabledForTenant()) {
       await sdk.tables.sqs.addTable(table)
     }
     return table

@@ -518,7 +518,7 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
   if (rows) {
     await AttachmentCleanup.tableDelete(table, rows)
   }
-  if (env.SQS_SEARCH_ENABLE) {
+  if (dbCore.isSqsEnabledForTenant()) {
     await sdk.tables.sqs.removeTable(table)
   }
 }
@@ -54,10 +54,13 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
+    await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (config.app?.appId) {
       config.app = await config.api.application.update(config.app?.appId, {
@@ -780,6 +783,32 @@ describe.each([
     it("fails to find nonexistent row", async () => {
       await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()
     })
+
+    it("can have multiple values for same column", async () => {
+      await expectQuery({
+        oneOf: {
+          name: ["foo", "bar"],
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
+
+    it("splits comma separated strings", async () => {
+      await expectQuery({
+        oneOf: {
+          // @ts-ignore
+          name: "foo,bar",
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
+
+    it("trims whitespace", async () => {
+      await expectQuery({
+        oneOf: {
+          // @ts-ignore
+          name: "foo, bar",
+        },
+      }).toContainExactly([{ name: "foo" }, { name: "bar" }])
+    })
   })

   describe("fuzzy", () => {
@@ -1002,6 +1031,32 @@ describe.each([
     it("fails to find nonexistent row", async () => {
       await expectQuery({ oneOf: { age: [2] } }).toFindNothing()
     })
+
+    // I couldn't find a way to make this work in Lucene and given that
+    // we're getting rid of Lucene soon I wasn't inclined to spend time on
+    // it.
+    !isLucene &&
+      it("can convert from a string", async () => {
+        await expectQuery({
+          oneOf: {
+            // @ts-ignore
+            age: "1",
+          },
+        }).toContainExactly([{ age: 1 }])
+      })
+
+    // I couldn't find a way to make this work in Lucene and given that
+    // we're getting rid of Lucene soon I wasn't inclined to spend time on
+    // it.
+    !isLucene &&
+      it("can find multiple values for same column", async () => {
+        await expectQuery({
+          oneOf: {
+            // @ts-ignore
+            age: "1,10",
+          },
+        }).toContainExactly([{ age: 1 }, { age: 10 }])
+      })
   })

   describe("range", () => {
@@ -86,9 +86,10 @@ describe("/templates", () => {
       async source => {
         const env = {
           SQS_SEARCH_ENABLE: source === "sqs" ? "true" : "false",
+          SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
         }

-        await config.withEnv(env, async () => {
+        await config.withCoreEnv(env, async () => {
           const name = generator.guid().replaceAll("-", "")
           const url = `/${name}`
@@ -9,7 +9,6 @@ import {
   QuotaUsageType,
   Row,
   SaveTableRequest,
-  SearchFilterOperator,
   SortOrder,
   SortType,
   StaticQuotaName,

@@ -19,6 +18,7 @@ import {
   ViewUIFieldMetadata,
   ViewV2,
   SearchResponse,
+  BasicOperator,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@@ -88,10 +88,16 @@ describe.each([
   }

   beforeAll(async () => {
+    await config.withCoreEnv(
+      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
+      () => config.init()
+    )
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (dsProvider) {
       datasource = await config.createDatasource({
@@ -149,7 +155,7 @@ describe.each([
         primaryDisplay: "id",
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "field",
             value: "value",
           },

@@ -561,7 +567,7 @@ describe.each([
         ...view,
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "newField",
             value: "thatValue",
           },

@@ -589,7 +595,7 @@ describe.each([
         primaryDisplay: "Price",
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: generator.word(),
             value: generator.word(),
           },

@@ -673,7 +679,7 @@ describe.each([
         tableId: generator.guid(),
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "newField",
             value: "thatValue",
           },

@@ -1194,7 +1200,7 @@ describe.each([
         name: generator.guid(),
         query: [
           {
-            operator: SearchFilterOperator.EQUAL,
+            operator: BasicOperator.EQUAL,
             field: "two",
             value: "bar2",
           },
@@ -1,6 +1,6 @@
 // This file should never be manually modified, use `yarn add-app-migration` in order to add a new one

-import env from "../environment"
+import { env } from "@budibase/backend-core"
 import { AppMigration } from "."

 import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"

@@ -1,8 +1,7 @@
-import { context } from "@budibase/backend-core"
+import { context, env } from "@budibase/backend-core"
 import { allLinkDocs } from "../../db/utils"
 import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
 import sdk from "../../sdk"
-import env from "../../environment"

 const migration = async () => {
   const linkDocs = await allLinkDocs()
@@ -69,11 +69,14 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
 type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

 async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withEnv({ [envVar]: "" }, cb)
+  await config.withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
 }

 async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withEnv({ [envVar]: "1" }, cb)
+  await config.withCoreEnv(
+    { [envVar]: "1", SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()] },
+    cb
+  )
 }

 describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
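These helpers wrap a test body so it runs with the given SQS flag toggled in the core environment, now including the tenant allow-list. A hedged sketch of how a test in this file might call them (the test body is invented for illustration; only the `sqsEnabled`/`sqsDisabled` signatures come from the hunk above):

```typescript
// Hypothetical usage of the helpers above.
it("only migrates link docs once SQS is enabled", async () => {
  await sqsDisabled("SQS_MIGRATION_ENABLE", async () => {
    // seed data while the flag is off
  })
  await sqsEnabled("SQS_MIGRATION_ENABLE", async () => {
    // run the migration and assert against the SQS tables
  })
})
```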
@@ -87,8 +87,6 @@ const environment = {
   SQL_MAX_ROWS: process.env.SQL_MAX_ROWS,
   SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE,
   SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE,
-  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
-  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   // flags
   ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
   DISABLE_THREADING: process.env.DISABLE_THREADING,

@@ -2,7 +2,6 @@ import {
   EmptyFilterOption,
   Row,
   RowSearchParams,
-  SearchFilterOperator,
   SearchFilters,
   SearchResponse,
   SortOrder,
@@ -12,11 +11,11 @@ import * as internal from "./search/internal"
 import * as external from "./search/external"
 import { NoEmptyFilterStrings } from "../../../constants"
 import * as sqs from "./search/sqs"
-import env from "../../../environment"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
 import { searchInputMapping } from "./search/utils"
+import { db as dbCore } from "@budibase/backend-core"

 export { isValidFilter } from "../../../integrations/utils"

@@ -66,37 +65,12 @@ export function removeEmptyFilters(filters: SearchFilters) {
   return filters
 }

-// The frontend can send single values for array fields sometimes, so to handle
-// this we convert them to arrays at the controller level so that nothing below
-// this has to worry about the non-array values.
-function fixupFilterArrays(filters: SearchFilters) {
-  const arrayFields = [
-    SearchFilterOperator.ONE_OF,
-    SearchFilterOperator.CONTAINS,
-    SearchFilterOperator.NOT_CONTAINS,
-    SearchFilterOperator.CONTAINS_ANY,
-  ]
-  for (const searchField of arrayFields) {
-    const field = filters[searchField]
-    if (field == null) {
-      continue
-    }
-
-    for (const key of Object.keys(field)) {
-      if (!Array.isArray(field[key])) {
-        field[key] = [field[key]]
-      }
-    }
-  }
-  return filters
-}
-
 export async function search(
   options: RowSearchParams
 ): Promise<SearchResponse<Row>> {
   const isExternalTable = isExternalTableID(options.tableId)
   options.query = removeEmptyFilters(options.query || {})
-  options.query = fixupFilterArrays(options.query)
+  options.query = dataFilters.fixupFilterArrays(options.query)
   if (
     !dataFilters.hasFilters(options.query) &&
     options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE

@@ -115,7 +89,7 @@ export async function search(

   if (isExternalTable) {
     return external.search(options, table)
-  } else if (env.SQS_SEARCH_ENABLE) {
+  } else if (dbCore.isSqsEnabledForTenant()) {
     return sqs.search(options, table)
   } else {
     return internal.search(options, table)
@@ -31,10 +31,17 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
+    await config.withCoreEnv(
+      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
+      () => config.init()
+    )
+
     if (isSqs) {
-      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+      envCleanup = config.setCoreEnv({
+        SQS_SEARCH_ENABLE: "true",
+        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      })
     }
-    await config.init()

     if (dsProvider) {
       datasource = await config.createDatasource({
@@ -1,4 +1,4 @@
-import { context } from "@budibase/backend-core"
+import { context, db as dbCore, env } from "@budibase/backend-core"
 import { getTableParams } from "../../../db/utils"
 import {
   breakExternalTableId,

@@ -15,7 +15,6 @@ import {
 } from "@budibase/types"
 import datasources from "../datasources"
 import sdk from "../../../sdk"
-import env from "../../../environment"

 export function processTable(table: Table): Table {
   if (!table) {

@@ -34,7 +33,7 @@ export function processTable(table: Table): Table {
     sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
     sourceType: TableSourceType.INTERNAL,
   }
-  if (env.SQS_SEARCH_ENABLE) {
+  if (dbCore.isSqsEnabledForTenant()) {
     processed.sql = !!env.SQS_SEARCH_ENABLE
   }
   return processed
@@ -39,7 +39,9 @@ describe("should be able to re-write attachment URLs", () => {
     }

     const db = dbCore.getDB(config.getAppId())
-    await sdk.backups.updateAttachmentColumns(db.name, db)
+    await config.doInContext(config.getAppId(), () =>
+      sdk.backups.updateAttachmentColumns(db.name, db)
+    )

     return {
       db,
@@ -245,10 +245,10 @@ export default class TestConfiguration {
     }
   }

-  async withEnv(newEnvVars: Partial<typeof env>, f: () => Promise<void>) {
+  async withEnv<T>(newEnvVars: Partial<typeof env>, f: () => Promise<T>) {
     let cleanup = this.setEnv(newEnvVars)
     try {
-      await f()
+      return await f()
     } finally {
       cleanup()
     }

@@ -273,13 +273,13 @@ export default class TestConfiguration {
     }
   }

-  async withCoreEnv(
+  async withCoreEnv<T>(
     newEnvVars: Partial<typeof coreEnv>,
-    f: () => Promise<void>
+    f: () => Promise<T>
   ) {
     let cleanup = this.setCoreEnv(newEnvVars)
     try {
-      await f()
+      return await f()
     } finally {
       cleanup()
     }
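`withEnv` and `withCoreEnv` become generic so the callback's result is returned through the wrapper rather than discarded, which is what allows tests above to write `config.withCoreEnv({ ... }, () => config.init())`. A standalone sketch of the pattern under assumed names (the real methods sit on `TestConfiguration` and use its own env handling):

```typescript
// Minimal sketch, not the real TestConfiguration implementation.
async function withEnv<T>(
  newVars: Record<string, string>,
  f: () => Promise<T>
): Promise<T> {
  const previous: Record<string, string | undefined> = {}
  for (const [key, value] of Object.entries(newVars)) {
    previous[key] = process.env[key]
    process.env[key] = value
  }
  try {
    // the awaited result flows back to the caller
    return await f()
  } finally {
    // restore the original values even if f() throws
    for (const [key, value] of Object.entries(previous)) {
      if (value === undefined) {
        delete process.env[key]
      } else {
        process.env[key] = value
      }
    }
  }
}

// e.g. const app = await withEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
```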
@@ -6,6 +6,7 @@ import {
   SearchFilter,
   SearchFilters,
   SearchQueryFields,
+  ArrayOperator,
   SearchFilterOperator,
   SortType,
   FieldConstraints,

@@ -14,11 +15,13 @@ import {
   EmptyFilterOption,
   SearchResponse,
   Table,
+  BasicOperator,
+  RangeOperator,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet, schema } from "./helpers"
-import _ from "lodash"
+import { isPlainObject, isEmpty } from "lodash"

 const HBS_REGEX = /{{([^{].*?)}}/g

@@ -323,6 +326,32 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

+// The frontend can send single values for array fields sometimes, so to handle
+// this we convert them to arrays at the controller level so that nothing below
+// this has to worry about the non-array values.
+export function fixupFilterArrays(filters: SearchFilters) {
+  for (const searchField of Object.values(ArrayOperator)) {
+    const field = filters[searchField]
+    if (field == null || !isPlainObject(field)) {
+      continue
+    }
+
+    for (const key of Object.keys(field)) {
+      if (Array.isArray(field[key])) {
+        continue
+      }
+
+      const value = field[key] as any
+      if (typeof value === "string") {
+        field[key] = value.split(",").map((x: string) => x.trim())
+      } else {
+        field[key] = [value]
+      }
+    }
+  }
+  return filters
+}
+
 export const search = (
   docs: Record<string, any>[],
   query: RowSearchParams
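`fixupFilterArrays` moves into `@budibase/shared-core` and, compared to the server-side version it replaces, also splits comma-separated strings and trims whitespace, matching the new `oneOf` tests earlier in this commit. A small illustration of the normalisation, using a made-up filter object and the `dataFilters` namespace that the server search SDK imports:

```typescript
import { dataFilters } from "@budibase/shared-core"

// Made-up filter object for illustration
const query: any = {
  oneOf: {
    name: "foo, bar", // single comma-separated string from the frontend
    age: 1, // single non-array value
  },
}

dataFilters.fixupFilterArrays(query)
// query is mutated in place to:
//   { oneOf: { name: ["foo", "bar"], age: [1] } }
```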
@@ -356,6 +385,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   }

   query = cleanupQuery(query)
+  query = fixupFilterArrays(query)

   if (
     !hasFilters(query) &&

@@ -382,7 +412,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   }

   const stringMatch = match(
-    SearchFilterOperator.STRING,
+    BasicOperator.STRING,
     (docValue: any, testValue: any) => {
       if (!(typeof docValue === "string")) {
         return false

@@ -395,7 +425,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   )

   const fuzzyMatch = match(
-    SearchFilterOperator.FUZZY,
+    BasicOperator.FUZZY,
     (docValue: any, testValue: any) => {
       if (!(typeof docValue === "string")) {
         return false
@@ -408,17 +438,17 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
   )

   const rangeMatch = match(
-    SearchFilterOperator.RANGE,
+    RangeOperator.RANGE,
     (docValue: any, testValue: any) => {
       if (docValue == null || docValue === "") {
         return false
       }

-      if (_.isObject(testValue.low) && _.isEmpty(testValue.low)) {
+      if (isPlainObject(testValue.low) && isEmpty(testValue.low)) {
         testValue.low = undefined
       }

-      if (_.isObject(testValue.high) && _.isEmpty(testValue.high)) {
+      if (isPlainObject(testValue.high) && isEmpty(testValue.high)) {
         testValue.high = undefined
       }

@@ -497,11 +527,8 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     (...args: T): boolean =>
       !f(...args)

-  const equalMatch = match(SearchFilterOperator.EQUAL, _valueMatches)
-  const notEqualMatch = match(
-    SearchFilterOperator.NOT_EQUAL,
-    not(_valueMatches)
-  )
+  const equalMatch = match(BasicOperator.EQUAL, _valueMatches)
+  const notEqualMatch = match(BasicOperator.NOT_EQUAL, not(_valueMatches))

   const _empty = (docValue: any) => {
     if (typeof docValue === "string") {

@@ -516,26 +543,24 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     return docValue == null
   }

-  const emptyMatch = match(SearchFilterOperator.EMPTY, _empty)
-  const notEmptyMatch = match(SearchFilterOperator.NOT_EMPTY, not(_empty))
+  const emptyMatch = match(BasicOperator.EMPTY, _empty)
+  const notEmptyMatch = match(BasicOperator.NOT_EMPTY, not(_empty))

-  const oneOf = match(
-    SearchFilterOperator.ONE_OF,
-    (docValue: any, testValue: any) => {
-      if (typeof testValue === "string") {
-        testValue = testValue.split(",")
-        if (typeof docValue === "number") {
-          testValue = testValue.map((item: string) => parseFloat(item))
-        }
-      }
-
-      if (!Array.isArray(testValue)) {
-        return false
-      }
-
-      return testValue.some(item => _valueMatches(docValue, item))
-    }
-  )
+  const oneOf = match(ArrayOperator.ONE_OF, (docValue: any, testValue: any) => {
+    if (typeof testValue === "string") {
+      testValue = testValue.split(",")
+      if (typeof docValue === "number") {
+        testValue = testValue.map((item: string) => parseFloat(item))
+      }
+    }
+
+    if (!Array.isArray(testValue)) {
+      return false
+    }
+
+    return testValue.some(item => _valueMatches(docValue, item))
+  })

   const _contains =
     (f: "some" | "every") => (docValue: any, testValue: any) => {

@@ -562,7 +587,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     }

   const contains = match(
-    SearchFilterOperator.CONTAINS,
+    ArrayOperator.CONTAINS,
     (docValue: any, testValue: any) => {
       if (Array.isArray(testValue) && testValue.length === 0) {
         return true

@@ -571,7 +596,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
     }
   )
   const notContains = match(
-    SearchFilterOperator.NOT_CONTAINS,
+    ArrayOperator.NOT_CONTAINS,
     (docValue: any, testValue: any) => {
       // Not sure if this is logically correct, but at the time this code was
       // written the search endpoint behaved this way and we wanted to make this

@@ -582,10 +607,7 @@ export const runQuery = (docs: Record<string, any>[], query: SearchFilters) => {
       return not(_contains("every"))(docValue, testValue)
     }
   )
-  const containsAny = match(
-    SearchFilterOperator.CONTAINS_ANY,
-    _contains("some")
-  )
+  const containsAny = match(ArrayOperator.CONTAINS_ANY, _contains("some"))

   const docMatch = (doc: Record<string, any>) => {
     const filterFunctions = {
@@ -3,20 +3,28 @@ import { Row, Table, DocumentType } from "../documents"
 import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"

-export enum SearchFilterOperator {
-  STRING = "string",
-  FUZZY = "fuzzy",
-  RANGE = "range",
+export enum BasicOperator {
   EQUAL = "equal",
   NOT_EQUAL = "notEqual",
   EMPTY = "empty",
   NOT_EMPTY = "notEmpty",
-  ONE_OF = "oneOf",
+  FUZZY = "fuzzy",
+  STRING = "string",
+}
+
+export enum ArrayOperator {
   CONTAINS = "contains",
   NOT_CONTAINS = "notContains",
   CONTAINS_ANY = "containsAny",
+  ONE_OF = "oneOf",
 }

+export enum RangeOperator {
+  RANGE = "range",
+}
+
+export type SearchFilterOperator = BasicOperator | ArrayOperator | RangeOperator
+
 export enum InternalSearchFilterOperator {
   COMPLEX_ID_OPERATOR = "_complexIdOperator",
 }

@@ -52,17 +60,17 @@ export interface SearchFilters {
   // allows just fuzzy to be or - all the fuzzy/like parameters
   fuzzyOr?: boolean
   onEmptyFilter?: EmptyFilterOption
-  [SearchFilterOperator.STRING]?: BasicFilter<string>
-  [SearchFilterOperator.FUZZY]?: BasicFilter<string>
-  [SearchFilterOperator.RANGE]?: RangeFilter
-  [SearchFilterOperator.EQUAL]?: BasicFilter
-  [SearchFilterOperator.NOT_EQUAL]?: BasicFilter
-  [SearchFilterOperator.EMPTY]?: BasicFilter
-  [SearchFilterOperator.NOT_EMPTY]?: BasicFilter
-  [SearchFilterOperator.ONE_OF]?: ArrayFilter
-  [SearchFilterOperator.CONTAINS]?: ArrayFilter
-  [SearchFilterOperator.NOT_CONTAINS]?: ArrayFilter
-  [SearchFilterOperator.CONTAINS_ANY]?: ArrayFilter
+  [BasicOperator.STRING]?: BasicFilter<string>
+  [BasicOperator.FUZZY]?: BasicFilter<string>
+  [RangeOperator.RANGE]?: RangeFilter
+  [BasicOperator.EQUAL]?: BasicFilter
+  [BasicOperator.NOT_EQUAL]?: BasicFilter
+  [BasicOperator.EMPTY]?: BasicFilter
+  [BasicOperator.NOT_EMPTY]?: BasicFilter
+  [ArrayOperator.ONE_OF]?: ArrayFilter
+  [ArrayOperator.CONTAINS]?: ArrayFilter
+  [ArrayOperator.NOT_CONTAINS]?: ArrayFilter
+  [ArrayOperator.CONTAINS_ANY]?: ArrayFilter
   // specific to SQS/SQLite search on internal tables this can be used
   // to make sure the documents returned are always filtered down to a
   // specific document type (such as just rows)
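The single `SearchFilterOperator` enum is split into `BasicOperator`, `ArrayOperator`, and `RangeOperator`, with `SearchFilterOperator` kept as a union type so existing annotations still compile. A short sketch of how the pieces compose; the `isArrayOperator` guard is illustrative and not part of the diff:

```typescript
import {
  ArrayOperator,
  BasicOperator,
  RangeOperator,
  SearchFilterOperator,
} from "@budibase/types"

// Hypothetical type guard showing why the split is useful: callers can now
// narrow to the operators that take array values.
function isArrayOperator(op: SearchFilterOperator): op is ArrayOperator {
  return Object.values(ArrayOperator).includes(op as ArrayOperator)
}

const a: SearchFilterOperator = BasicOperator.EQUAL // still valid via the union
const b: SearchFilterOperator = ArrayOperator.ONE_OF
const c: SearchFilterOperator = RangeOperator.RANGE

console.log([a, b, c].filter(isArrayOperator)) // ["oneOf"]
```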
@@ -24,7 +24,7 @@ async function isSqsAvailable() {
 }

 async function isSqsMissing() {
-  return env.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
+  return coreEnv.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
 }

 export const fetch = async (ctx: Ctx) => {
@@ -5,8 +5,7 @@ const compress = require("koa-compress")
 import zlib from "zlib"
 import { routes } from "./routes"
 import { middleware as pro, sdk } from "@budibase/pro"
-import { auth, middleware } from "@budibase/backend-core"
-import env from "../environment"
+import { auth, middleware, env } from "@budibase/backend-core"

 if (env.SQS_SEARCH_ENABLE) {
   sdk.auditLogs.useSQLSearch()
@@ -46,7 +46,6 @@ const environment = {
   DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
   SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
   DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
-  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
   BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   // smtp
   SMTP_USER: process.env.SMTP_USER,