Merge branch 'master' into feat/pick-relationship-fields
Commit: 64c2a18934

@@ -212,10 +212,6 @@ spec:
           - name: APP_FEATURES
             value: "api"
           {{- end }}
-          {{- if .Values.globals.sqs.enabled }}
-          - name: SQS_SEARCH_ENABLE
-            value: "true"
-          {{- end }}
           {{- range .Values.services.apps.extraEnv }}
           - name: {{ .name }}
             value: {{ .value | quote }}

@@ -198,10 +198,6 @@ spec:
           - name: NODE_TLS_REJECT_UNAUTHORIZED
             value: {{ .Values.services.tlsRejectUnauthorized }}
           {{ end }}
-          {{- if .Values.globals.sqs.enabled }}
-          - name: SQS_SEARCH_ENABLE
-            value: "true"
-          {{- end }}
           {{- range .Values.services.worker.extraEnv }}
           - name: {{ .name }}
             value: {{ .value | quote }}

@@ -29,7 +29,7 @@ services:
       BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
       BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
       PLUGINS_DIR: ${PLUGINS_DIR}
-      SQS_SEARCH_ENABLE: 1
+      TENANT_FEATURE_FLAGS: "*:SQS"
     depends_on:
       - worker-service
       - redis-service

@@ -57,7 +57,7 @@ services:
       INTERNAL_API_KEY: ${INTERNAL_API_KEY}
       REDIS_URL: redis-service:6379
       REDIS_PASSWORD: ${REDIS_PASSWORD}
-      SQS_SEARCH_ENABLE: 1
+      TENANT_FEATURE_FLAGS: "*:SQS"
     depends_on:
       - redis-service
      - minio-service

@@ -1 +1 @@
-Subproject commit c403315c5fa09a05dfd8fa4cd1890acfd8de0430
+Subproject commit c24374879d2b61516fabc24d7404e7da235be05e

@@ -25,8 +25,8 @@ import { newid } from "../../docIds/newid"
 import { SQLITE_DESIGN_DOC_ID } from "../../constants"
 import { DDInstrumentedDatabase } from "../instrumentation"
 import { checkSlashesInUrl } from "../../helpers"
-import env from "../../environment"
 import { sqlLog } from "../../sql/utils"
+import { flags } from "../../features"
 
 const DATABASE_NOT_FOUND = "Database does not exist."
 

@@ -401,7 +401,10 @@ export class DatabaseImpl implements Database {
   }
 
   async destroy() {
-    if (env.SQS_SEARCH_ENABLE && (await this.exists(SQLITE_DESIGN_DOC_ID))) {
+    if (
+      (await flags.isEnabled("SQS")) &&
+      (await this.exists(SQLITE_DESIGN_DOC_ID))
+    ) {
       // delete the design document, then run the cleanup operation
       const definition = await this.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
       // remove all tables - save the definition then trigger a cleanup

@@ -1,24 +0,0 @@
-require("../../../tests")
-const { structures } = require("../../../tests")
-const { getDB } = require("../db")
-
-describe("db", () => {
-  describe("getDB", () => {
-    it("returns a db", async () => {
-      const dbName = structures.db.id()
-      const db = getDB(dbName)
-      expect(db).toBeDefined()
-      expect(db.name).toBe(dbName)
-    })
-
-    it("uses the custom put function", async () => {
-      const db = getDB(structures.db.id())
-      let doc = { _id: "test" }
-      await db.put(doc)
-      doc = await db.get(doc._id)
-      expect(doc.createdAt).toBe(new Date().toISOString())
-      expect(doc.updatedAt).toBe(new Date().toISOString())
-      await db.destroy()
-    })
-  })
-})

@@ -0,0 +1,32 @@
+import { doInTenant } from "../../context"
+import { structures } from "../../../tests"
+import { getDB } from "../db"
+
+interface Doc {
+  _id: string
+  createdAt?: string
+  updatedAt?: string
+}
+
+describe("db", () => {
+  describe("getDB", () => {
+    it("returns a db", async () => {
+      const dbName = structures.db.id()
+      const db = getDB(dbName)
+      expect(db).toBeDefined()
+      expect(db.name).toBe(dbName)
+    })
+
+    it("uses the custom put function", async () => {
+      await doInTenant("foo", async () => {
+        const db = getDB(structures.db.id())
+        let doc: Doc = { _id: "test" }
+        await db.put(doc)
+        doc = await db.get(doc._id)
+        expect(doc.createdAt).toBe(new Date().toISOString())
+        expect(doc.updatedAt).toBe(new Date().toISOString())
+        await db.destroy()
+      })
+    })
+  })
+})

@@ -1,6 +1,6 @@
 import env from "../environment"
 import { DEFAULT_TENANT_ID, SEPARATOR, DocumentType } from "../constants"
-import { getTenantId, getGlobalDBName, isMultiTenant } from "../context"
+import { getTenantId, getGlobalDBName } from "../context"
 import { doWithDB, directCouchAllDbs } from "./db"
 import { AppState, DeletedApp, getAppMetadata } from "../cache/appMetadata"
 import { isDevApp, isDevAppID, getProdAppID } from "../docIds/conversions"

|
@ -206,34 +206,3 @@ export function pagination<T>(
|
||||||
nextPage,
|
nextPage,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isSqsEnabledForTenant(): boolean {
|
|
||||||
const tenantId = getTenantId()
|
|
||||||
if (!env.SQS_SEARCH_ENABLE) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// single tenant (self host and dev) always enabled if flag set
|
|
||||||
if (!isMultiTenant()) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// This is to guard against the situation in tests where tests pass because
|
|
||||||
// we're not actually using SQS, we're using Lucene and the tests pass due to
|
|
||||||
// parity.
|
|
||||||
if (env.isTest() && env.SQS_SEARCH_ENABLE_TENANTS.length === 0) {
|
|
||||||
throw new Error(
|
|
||||||
"to enable SQS you must specify a list of tenants in the SQS_SEARCH_ENABLE_TENANTS env var"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special case to enable all tenants, for testing in QA.
|
|
||||||
if (
|
|
||||||
env.SQS_SEARCH_ENABLE_TENANTS.length === 1 &&
|
|
||||||
env.SQS_SEARCH_ENABLE_TENANTS[0] === "*"
|
|
||||||
) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
return env.SQS_SEARCH_ENABLE_TENANTS.includes(tenantId)
|
|
||||||
}
|
|
||||||
|
|
|
@@ -116,10 +116,6 @@ const environment = {
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
   COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL,
-  SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
-  SQS_SEARCH_ENABLE_TENANTS:
-    process.env.SQS_SEARCH_ENABLE_TENANTS?.split(",") || [],
-  SQS_MIGRATION_ENABLE: process.env.SQS_MIGRATION_ENABLE,
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -6,7 +6,7 @@ import tracer from "dd-trace"
 
 let posthog: PostHog | undefined
 export function init(opts?: PostHogOptions) {
-  if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST) {
+  if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST && !env.SELF_HOSTED) {
     console.log("initializing posthog client...")
     posthog = new PostHog(env.POSTHOG_TOKEN, {
       host: env.POSTHOG_API_HOST,

|
@ -267,5 +267,6 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
|
||||||
// default values set correctly and their types flow through the system.
|
// default values set correctly and their types flow through the system.
|
||||||
export const flags = new FlagSet({
|
export const flags = new FlagSet({
|
||||||
DEFAULT_VALUES: Flag.boolean(false),
|
DEFAULT_VALUES: Flag.boolean(false),
|
||||||
|
SQS: Flag.boolean(false),
|
||||||
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(false),
|
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(false),
|
||||||
})
|
})
|
||||||
|
|
|
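Note: the hunk above registers an SQS flag on the FlagSet, and later hunks replace checks of the SQS_SEARCH_ENABLE env var with `await features.flags.isEnabled("SQS")`. As a rough, self-contained sketch (not the actual @budibase/backend-core FlagSet implementation), this is the kind of per-tenant override a TENANT_FEATURE_FLAGS string such as "*:SQS" or "*:!SQS" expresses; the parsing rules here are assumptions for illustration only:

    // Simplified illustration; format assumed from the diff: comma-separated
    // "TENANT:FLAG" pairs, "*" matches every tenant, a leading "!" disables the flag.
    type FlagOverrides = Map<string, Map<string, boolean>>

    function parseTenantFeatureFlags(raw: string): FlagOverrides {
      const overrides: FlagOverrides = new Map()
      for (const entry of raw.split(",").filter(Boolean)) {
        const [tenant, flag] = entry.split(":")
        if (!tenant || !flag) {
          continue
        }
        const enabled = !flag.startsWith("!")
        const name = enabled ? flag : flag.slice(1)
        if (!overrides.has(tenant)) {
          overrides.set(tenant, new Map())
        }
        overrides.get(tenant)!.set(name, enabled)
      }
      return overrides
    }

    function isEnabled(flag: string, tenantId: string, raw: string, defaultValue = false): boolean {
      const overrides = parseTenantFeatureFlags(raw)
      const tenantOverride = overrides.get(tenantId)?.get(flag)
      const wildcardOverride = overrides.get("*")?.get(flag)
      return tenantOverride ?? wildcardOverride ?? defaultValue
    }

    // The docker-compose and test configs in this diff set TENANT_FEATURE_FLAGS="*:SQS".
    console.log(isEnabled("SQS", "default", "*:SQS"))  // true
    console.log(isEnabled("SQS", "default", "*:!SQS")) // false
    console.log(isEnabled("SQS", "default", ""))       // false (flag defaults to off)

In the diff itself the real check is simply `await features.flags.isEnabled("SQS")` executed inside a tenant context.
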
@@ -147,13 +147,13 @@ describe("feature flags", () => {
   }) => {
     const env: Partial<typeof environment> = {
       TENANT_FEATURE_FLAGS: environmentFlags,
+      SELF_HOSTED: false,
     }
 
     if (posthogFlags) {
       mockPosthogFlags(posthogFlags)
       env.POSTHOG_TOKEN = "test"
       env.POSTHOG_API_HOST = "https://us.i.posthog.com"
-      env.POSTHOG_PERSONAL_TOKEN = "test"
     }
 
     const ctx = { user: { license: { features: licenseFlags || [] } } }

@@ -10,7 +10,7 @@
   export let inline = false
   export let disableCancel = false
   export let autoFocus = true
-  export let zIndex = 99999
+  export let zIndex = 1001
 
   const dispatch = createEventDispatcher()
   let visible = fixed || inline

@@ -7,7 +7,15 @@
 
   export let user
 
-  const password = Math.random().toString(36).slice(2, 20)
+  const generatePassword = length => {
+    const array = new Uint8Array(length)
+    crypto.getRandomValues(array)
+    return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
+      .join("")
+      .slice(0, length)
+  }
+
+  const password = generatePassword(12)
 
   async function resetPassword() {
     try {

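The hunk above swaps a Math.random()-based temporary password for one built from Web Crypto, which draws cryptographically strong randomness. A standalone sketch of the same pattern (assumes a runtime where a global `crypto` with `getRandomValues` exists, i.e. browsers or recent Node):

    // Each random byte maps to a base-36 pair; the result is trimmed to the requested length.
    const generatePassword = (length: number): string => {
      const array = new Uint8Array(length)
      crypto.getRandomValues(array)
      return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
        .join("")
        .slice(0, length)
    }

    // e.g. a 12-character temporary password for a reset flow
    const password = generatePassword(12)
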
@@ -1 +1 @@
-Subproject commit f2862855dcfeb4f4a28d6902daf411ec8f4a28e8
+Subproject commit d926494cc54e31441637180a745d0df7156a5120

@@ -47,7 +47,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
-    SQS_SEARCH_ENABLE: "1",
+    TENANT_FEATURE_FLAGS: "*:SQS",
   }
 
   config = { ...config, ...existingConfig }

@@ -27,6 +27,7 @@ import {
 import sdk from "../../sdk"
 import { builderSocket } from "../../websockets"
 import { isEqual } from "lodash"
+import { processTable } from "../../sdk/app/tables/getters"
 
 export async function fetch(ctx: UserCtx) {
   ctx.body = await sdk.datasources.fetch()

@@ -188,6 +189,7 @@ export async function update(
   for (let table of Object.values(datasource.entities)) {
     const oldTable = baseDatasource.entities?.[table.name]
     if (!oldTable || !isEqual(oldTable, table)) {
+      table = await processTable(table)
       builderSocket?.emitTableUpdate(ctx, table, { includeOriginator: true })
     }
   }

@@ -10,7 +10,7 @@ import {
 } from "@budibase/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../../sdk"
-import { db, context } from "@budibase/backend-core"
+import { db, context, features } from "@budibase/backend-core"
 import { enrichSearchContext } from "./utils"
 import { isExternalTableID } from "../../../integrations/utils"
 

|
@ -40,7 +40,10 @@ export async function searchView(
|
||||||
// Delete extraneous search params that cannot be overridden
|
// Delete extraneous search params that cannot be overridden
|
||||||
delete body.query.onEmptyFilter
|
delete body.query.onEmptyFilter
|
||||||
|
|
||||||
if (!isExternalTableID(view.tableId) && !db.isSqsEnabledForTenant()) {
|
if (
|
||||||
|
!isExternalTableID(view.tableId) &&
|
||||||
|
!(await features.flags.isEnabled("SQS"))
|
||||||
|
) {
|
||||||
// Extract existing fields
|
// Extract existing fields
|
||||||
const existingFields =
|
const existingFields =
|
||||||
view.query
|
view.query
|
||||||
|
@ -56,12 +59,13 @@ export async function searchView(
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
} else
|
} else {
|
||||||
query = {
|
query = {
|
||||||
$and: {
|
$and: {
|
||||||
conditions: [query, body.query],
|
conditions: [query, body.query],
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await context.ensureSnippetContext(true)
|
await context.ensureSnippetContext(true)
|
||||||
|
|
|
@@ -39,6 +39,7 @@ import {
   PROTECTED_EXTERNAL_COLUMNS,
   PROTECTED_INTERNAL_COLUMNS,
 } from "@budibase/shared-core"
+import { processTable } from "../../../sdk/app/tables/getters"
 
 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && isExternalTable(table)) {

|
@ -123,6 +124,8 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
|
||||||
ctx.eventEmitter &&
|
ctx.eventEmitter &&
|
||||||
ctx.eventEmitter.emitTable(`table:save`, appId, { ...savedTable })
|
ctx.eventEmitter.emitTable(`table:save`, appId, { ...savedTable })
|
||||||
ctx.body = savedTable
|
ctx.body = savedTable
|
||||||
|
|
||||||
|
savedTable = await processTable(savedTable)
|
||||||
builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable))
|
builderSocket?.emitTableUpdate(ctx, cloneDeep(savedTable))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -15,7 +15,7 @@ import { getViews, saveView } from "../view/utils"
 import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
-import { events, context, db as dbCore } from "@budibase/backend-core"
+import { events, context, features } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
   ContextUser,

|
@ -332,7 +332,7 @@ class TableSaveFunctions {
|
||||||
importRows: this.importRows,
|
importRows: this.importRows,
|
||||||
user: this.user,
|
user: this.user,
|
||||||
})
|
})
|
||||||
if (dbCore.isSqsEnabledForTenant()) {
|
if (await features.flags.isEnabled("SQS")) {
|
||||||
await sdk.tables.sqs.addTable(table)
|
await sdk.tables.sqs.addTable(table)
|
||||||
}
|
}
|
||||||
return table
|
return table
|
||||||
|
@@ -526,7 +526,7 @@ export async function internalTableCleanup(table: Table, rows?: Row[]) {
   if (rows) {
     await AttachmentCleanup.tableDelete(table, rows)
   }
-  if (dbCore.isSqsEnabledForTenant()) {
+  if (await features.flags.isEnabled("SQS")) {
     await sdk.tables.sqs.removeTable(table)
   }
 }

@@ -352,13 +352,13 @@ describe("/applications", () => {
     expect(events.app.unpublished).toHaveBeenCalledTimes(1)
   })
 
-  it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
+  it("should be able to delete an app after SQS has been set but app hasn't been migrated", async () => {
     const prodAppId = app.appId.replace("_dev", "")
     nock("http://localhost:10000")
       .delete(`/api/global/roles/${prodAppId}`)
       .reply(200, {})
 
-    await withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
+    await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, async () => {
       await config.api.application.delete(app.appId)
     })
   })

@@ -97,12 +97,9 @@ describe.each([
   let envCleanup: (() => void) | undefined
 
   beforeAll(async () => {
-    await withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
+    await withCoreEnv({ TENANT_FEATURE_FLAGS: "*SQS" }, () => config.init())
     if (isSqs) {
-      envCleanup = setCoreEnv({
-        SQS_SEARCH_ENABLE: "true",
-        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
-      })
+      envCleanup = setCoreEnv({ TENANT_FEATURE_FLAGS: "*SQS" })
     }
 
     if (dsProvider) {

@@ -67,11 +67,10 @@ describe.each([
   let rows: Row[]
 
   beforeAll(async () => {
-    await withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
+    await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, () => config.init())
     if (isSqs) {
       envCleanup = setCoreEnv({
-        SQS_SEARCH_ENABLE: "true",
-        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+        TENANT_FEATURE_FLAGS: "*:SQS",
       })
     }
 

@@ -2,7 +2,7 @@ import * as setup from "./utilities"
 import path from "path"
 import nock from "nock"
 import { generator } from "@budibase/backend-core/tests"
-import { withEnv as withCoreEnv } from "@budibase/backend-core"
+import { withEnv as withCoreEnv, env as coreEnv } from "@budibase/backend-core"
 
 interface App {
   background: string

@@ -85,9 +85,8 @@ describe("/templates", () => {
   it.each(["sqs", "lucene"])(
     `should be able to create an app from a template (%s)`,
     async source => {
-      const env = {
-        SQS_SEARCH_ENABLE: source === "sqs" ? "true" : "false",
-        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+      const env: Partial<typeof coreEnv> = {
+        TENANT_FEATURE_FLAGS: source === "sqs" ? "*:SQS" : "",
       }
 
       await withCoreEnv(env, async () => {

@@ -94,13 +94,12 @@ describe.each([
   }
 
   beforeAll(async () => {
-    await withCoreEnv({ SQS_SEARCH_ENABLE: isSqs ? "true" : "false" }, () =>
+    await withCoreEnv({ TENANT_FEATURE_FLAGS: isSqs ? "*:SQS" : "" }, () =>
       config.init()
     )
     if (isSqs) {
       envCleanup = setCoreEnv({
-        SQS_SEARCH_ENABLE: "true",
-        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+        TENANT_FEATURE_FLAGS: "*:SQS",
       })
     }
 

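The test hunks above repeatedly replace SQS_SEARCH_ENABLE/SQS_SEARCH_ENABLE_TENANTS with a single TENANT_FEATURE_FLAGS value. A self-contained sketch of the withEnv-style helper these tests lean on (the real helpers are withCoreEnv/setCoreEnv from @budibase/backend-core; this version only illustrates the temporary-override idea and is not the actual implementation):

    // Temporarily override process.env, run the callback, then restore the old values.
    async function withEnv<T>(vars: Record<string, string>, cb: () => Promise<T>): Promise<T> {
      const previous: Record<string, string | undefined> = {}
      for (const [key, value] of Object.entries(vars)) {
        previous[key] = process.env[key]
        process.env[key] = value
      }
      try {
        return await cb()
      } finally {
        for (const [key, value] of Object.entries(previous)) {
          if (value === undefined) {
            delete process.env[key]
          } else {
            process.env[key] = value
          }
        }
      }
    }

    // Usage mirroring the tests above: turn the SQS flag on for every tenant.
    void withEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, async () => {
      // config.init() / test body would run here
    })
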
@@ -1,6 +1,5 @@
 // This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
 
-import { env } from "@budibase/backend-core"
 import { AppMigration } from "."
 
 import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"

@@ -10,6 +9,5 @@ export const MIGRATIONS: AppMigration[] = [
   {
     id: "20240604153647_initial_sqs",
     func: m20240604153647_initial_sqs,
-    disabled: !(env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE),
   },
 ]

@@ -1,4 +1,4 @@
-import { context, env } from "@budibase/backend-core"
+import { context } from "@budibase/backend-core"
 import { allLinkDocs } from "../../db/utils"
 import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
 import sdk from "../../sdk"

|
@ -36,16 +36,6 @@ const migration = async () => {
|
||||||
|
|
||||||
// at the end make sure design doc is ready
|
// at the end make sure design doc is ready
|
||||||
await sdk.tables.sqs.syncDefinition()
|
await sdk.tables.sqs.syncDefinition()
|
||||||
// only do initial search if environment is using SQS already
|
|
||||||
// initial search makes sure that all the indexes have been created
|
|
||||||
// and are ready to use, avoiding any initial waits for large tables
|
|
||||||
if (env.SQS_MIGRATION_ENABLE || env.SQS_SEARCH_ENABLE) {
|
|
||||||
const tables = await sdk.tables.getAllInternalTables()
|
|
||||||
// do these one by one - running in parallel could cause problems
|
|
||||||
for (let table of tables) {
|
|
||||||
await db.sql(`select * from ${table._id} limit 1`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export default migration
|
export default migration
|
||||||
|
|
|
@@ -18,7 +18,7 @@ import {
 } from "../../../db/utils"
 import { processMigrations } from "../../migrationsProcessor"
 import migration from "../20240604153647_initial_sqs"
-import { AppMigration } from "src/appMigrations"
+import { AppMigration, updateAppMigrationMetadata } from "../../"
 import sdk from "../../../sdk"
 
 const MIGRATIONS: AppMigration[] = [

@@ -70,72 +70,74 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
   }
 }
 
-type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"
-
-async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
+async function sqsDisabled(cb: () => Promise<void>) {
+  await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:!SQS" }, cb)
 }
 
-async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await withCoreEnv(
-    { [envVar]: "1", SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()] },
-    cb
-  )
+async function sqsEnabled(cb: () => Promise<void>) {
+  await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, cb)
 }
 
-describe.each(["SQS_MIGRATION_ENABLE", "SQS_SEARCH_ENABLE"] as SQSEnvVar[])(
-  "SQS migration with (%s)",
-  envVar => {
-    beforeAll(async () => {
-      await sqsDisabled(envVar, async () => {
-        await config.init()
-        const table = await config.api.table.save(basicTable())
-        tableId = table._id!
-        const db = dbCore.getDB(config.appId!)
-        // old link document
-        await db.put(oldLinkDocument())
-      })
-    })
-
-    it("test migration runs as expected against an older DB", async () => {
-      const db = dbCore.getDB(config.appId!)
-      // confirm nothing exists initially
-      await sqsDisabled(envVar, async () => {
-        let error: any | undefined
-        try {
-          await db.get(SQLITE_DESIGN_DOC_ID)
-        } catch (err: any) {
-          error = err
-        }
-        expect(error).toBeDefined()
-        expect(error.status).toBe(404)
-      })
-      await sqsEnabled(envVar, async () => {
-        await processMigrations(config.appId!, MIGRATIONS)
-        const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
-        expect(designDoc.sql.tables).toBeDefined()
-        const mainTableDef = designDoc.sql.tables[tableId]
-        expect(mainTableDef).toBeDefined()
-        expect(mainTableDef.fields[prefix("name")]).toEqual({
-          field: "name",
-          type: SQLiteType.TEXT,
-        })
-        expect(mainTableDef.fields[prefix("description")]).toEqual({
-          field: "description",
-          type: SQLiteType.TEXT,
-        })
-
-        const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
-        const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
-        expect(linkDoc.tableId).toEqual(
-          generateJunctionTableID(tableId1, tableId2)
-        )
-        // should have swapped the documents
-        expect(linkDoc.doc1.tableId).toEqual(tableId2)
-        expect(linkDoc.doc1.rowId).toEqual(rowId2)
-        expect(linkDoc.doc2.tableId).toEqual(tableId1)
-        expect(linkDoc.doc2.rowId).toEqual(rowId1)
-      })
-    })
-  }
-)
+describe("SQS migration", () => {
+  beforeAll(async () => {
+    await sqsDisabled(async () => {
+      await config.init()
+      const table = await config.api.table.save(basicTable())
+      tableId = table._id!
+
+      const db = dbCore.getDB(config.appId!)
+      // old link document
+      await db.put(oldLinkDocument())
+    })
+  })
+
+  beforeEach(async () => {
+    await config.doInTenant(async () => {
+      await updateAppMigrationMetadata({
+        appId: config.getAppId(),
+        version: "",
+      })
+    })
+  })
+
+  it("test migration runs as expected against an older DB", async () => {
+    const db = dbCore.getDB(config.appId!)
+    // confirm nothing exists initially
+    await sqsDisabled(async () => {
+      let error: any | undefined
+      try {
+        await db.get(SQLITE_DESIGN_DOC_ID)
+      } catch (err: any) {
+        error = err
+      }
+      expect(error).toBeDefined()
+      expect(error.status).toBe(404)
+    })
+
+    await sqsEnabled(async () => {
+      await processMigrations(config.appId!, MIGRATIONS)
+      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+      expect(designDoc.sql.tables).toBeDefined()
+      const mainTableDef = designDoc.sql.tables[tableId]
+      expect(mainTableDef).toBeDefined()
+      expect(mainTableDef.fields[prefix("name")]).toEqual({
+        field: "name",
+        type: SQLiteType.TEXT,
+      })
+      expect(mainTableDef.fields[prefix("description")]).toEqual({
+        field: "description",
+        type: SQLiteType.TEXT,
+      })
+
+      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
+      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
+      expect(linkDoc.tableId).toEqual(
+        generateJunctionTableID(tableId1, tableId2)
+      )
+      // should have swapped the documents
+      expect(linkDoc.doc1.tableId).toEqual(tableId2)
+      expect(linkDoc.doc1.rowId).toEqual(rowId2)
+      expect(linkDoc.doc2.tableId).toEqual(tableId1)
+      expect(linkDoc.doc2.rowId).toEqual(rowId1)
+    })
+  })
+})

@@ -12,7 +12,7 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
 import sdk from "../../index"
 import { searchInputMapping } from "./search/utils"
-import { db as dbCore } from "@budibase/backend-core"
+import { features } from "@budibase/backend-core"
 import tracer from "dd-trace"
 import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
 

|
||||||
if (isExternalTable) {
|
if (isExternalTable) {
|
||||||
span?.addTags({ searchType: "external" })
|
span?.addTags({ searchType: "external" })
|
||||||
result = await external.search(options, table)
|
result = await external.search(options, table)
|
||||||
} else if (dbCore.isSqsEnabledForTenant()) {
|
} else if (await features.flags.isEnabled("SQS")) {
|
||||||
span?.addTags({ searchType: "sqs" })
|
span?.addTags({ searchType: "sqs" })
|
||||||
result = await internal.sqs.search(options, table)
|
result = await internal.sqs.search(options, table)
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@@ -41,14 +41,13 @@ describe.each([
   let table: Table
 
   beforeAll(async () => {
-    await withCoreEnv({ SQS_SEARCH_ENABLE: isSqs ? "true" : "false" }, () =>
+    await withCoreEnv({ TENANT_FEATURE_FLAGS: isSqs ? "*:SQS" : "" }, () =>
       config.init()
     )
 
     if (isSqs) {
       envCleanup = setCoreEnv({
-        SQS_SEARCH_ENABLE: "true",
-        SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
+        TENANT_FEATURE_FLAGS: "*:SQS",
       })
     }
 

@@ -1,4 +1,4 @@
-import { context, db as dbCore, env } from "@budibase/backend-core"
+import { context, features } from "@budibase/backend-core"
 import { getTableParams } from "../../../db/utils"
 import {
   breakExternalTableId,

|
||||||
import datasources from "../datasources"
|
import datasources from "../datasources"
|
||||||
import sdk from "../../../sdk"
|
import sdk from "../../../sdk"
|
||||||
|
|
||||||
export function processTable(table: Table): Table {
|
export async function processTable(table: Table): Promise<Table> {
|
||||||
if (!table) {
|
if (!table) {
|
||||||
return table
|
return table
|
||||||
}
|
}
|
||||||
|
@@ -33,20 +33,21 @@ export function processTable(table: Table): Table {
       sourceId: table.sourceId || INTERNAL_TABLE_SOURCE_ID,
       sourceType: TableSourceType.INTERNAL,
     }
-    if (dbCore.isSqsEnabledForTenant()) {
-      processed.sql = !!env.SQS_SEARCH_ENABLE
+    const sqsEnabled = await features.flags.isEnabled("SQS")
+    if (sqsEnabled) {
+      processed.sql = true
     }
     return processed
   }
 }
 
-export function processTables(tables: Table[]): Table[] {
-  return tables.map(table => processTable(table))
+export async function processTables(tables: Table[]): Promise<Table[]> {
+  return await Promise.all(tables.map(table => processTable(table)))
 }
 
-function processEntities(tables: Record<string, Table>) {
+async function processEntities(tables: Record<string, Table>) {
   for (let key of Object.keys(tables)) {
-    tables[key] = processTable(tables[key])
+    tables[key] = await processTable(tables[key])
   }
   return tables
 }

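Because the feature-flag check is asynchronous, processTable and every helper built on it become async in the hunks above and below. A minimal, self-contained sketch of the resulting map-and-await-all pattern; isSqsEnabled and TableLike below are stand-ins, not the real Budibase APIs:

    interface TableLike {
      name: string
      sql?: boolean
    }

    // Stand-in for features.flags.isEnabled("SQS")
    async function isSqsEnabled(): Promise<boolean> {
      return process.env.TENANT_FEATURE_FLAGS?.includes("SQS") ?? false
    }

    async function processTable(table: TableLike): Promise<TableLike> {
      const processed = { ...table }
      if (await isSqsEnabled()) {
        processed.sql = true
      }
      return processed
    }

    async function processTables(tables: TableLike[]): Promise<TableLike[]> {
      // Promise.all preserves order and awaits every per-table transform
      return await Promise.all(tables.map(table => processTable(table)))
    }
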
@@ -60,7 +61,7 @@ export async function getAllInternalTables(db?: Database): Promise<Table[]> {
       include_docs: true,
     })
   )
-  return processTables(internalTables.rows.map(row => row.doc!))
+  return await processTables(internalTables.rows.map(row => row.doc!))
 }
 
 async function getAllExternalTables(): Promise<Table[]> {

async function getAllExternalTables(): Promise<Table[]> {
|
async function getAllExternalTables(): Promise<Table[]> {
|
||||||
|
@ -72,7 +73,7 @@ async function getAllExternalTables(): Promise<Table[]> {
|
||||||
final = final.concat(Object.values(entities))
|
final = final.concat(Object.values(entities))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return processTables(final)
|
return await processTables(final)
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getExternalTable(
|
export async function getExternalTable(
|
||||||
|
@@ -97,7 +98,7 @@ export async function getTable(tableId: string): Promise<Table> {
   } else {
     output = await db.get<Table>(tableId)
   }
-  return processTable(output)
+  return await processTable(output)
 }
 
 export async function getAllTables() {

@@ -105,7 +106,7 @@ export async function getAllTables() {
     getAllInternalTables(),
     getAllExternalTables(),
   ])
-  return processTables([...internal, ...external])
+  return await processTables([...internal, ...external])
 }
 
 export async function getExternalTablesInDatasource(

@@ -115,7 +116,7 @@ export async function getExternalTablesInDatasource(
   if (!datasource || !datasource.entities) {
     throw new Error("Datasource is not configured fully.")
   }
-  return processEntities(datasource.entities)
+  return await processEntities(datasource.entities)
 }
 
 export async function getTables(tableIds: string[]): Promise<Table[]> {

@@ -139,7 +140,7 @@ export async function getTables(tableIds: string[]): Promise<Table[]> {
     })
     tables = tables.concat(internalTables)
   }
-  return processTables(tables)
+  return await processTables(tables)
 }
 
 export async function enrichViewSchemas(table: Table): Promise<TableResponse> {

@@ -3,6 +3,8 @@ import { tmpdir } from "os"
 process.env.SELF_HOSTED = "1"
 process.env.NODE_ENV = "jest"
 process.env.MULTI_TENANCY = "1"
+process.env.APP_PORT = "0"
+process.env.WORKER_PORT = "0"
 // @ts-ignore
 process.env.BUDIBASE_DIR = tmpdir("budibase-unittests")
 process.env.LOG_LEVEL = process.env.LOG_LEVEL || "error"

@@ -3,7 +3,7 @@ import { fixAutoColumnSubType, processFormulas } from "./utils"
 import {
   cache,
   context,
-  db,
+  features,
   HTTPError,
   objectStore,
   utils,

@@ -350,7 +350,7 @@ export async function outputProcessing<T extends Row[] | Row>(
   }
   // remove null properties to match internal API
   const isExternal = isExternalTableID(table._id!)
-  if (isExternal || db.isSqsEnabledForTenant()) {
+  if (isExternal || (await features.flags.isEnabled("SQS"))) {
     for (const row of enriched) {
       for (const key of Object.keys(row)) {
         if (row[key] === null) {

@@ -8,15 +8,9 @@ import {
 } from "@budibase/types"
 import { outputProcessing } from ".."
 import { generator, structures } from "@budibase/backend-core/tests"
+import { setEnv as setCoreEnv } from "@budibase/backend-core"
 import * as bbReferenceProcessor from "../bbReferenceProcessor"
+import TestConfiguration from "../../../tests/utilities/TestConfiguration"
-
-jest.mock("@budibase/backend-core", () => ({
-  ...jest.requireActual("@budibase/backend-core"),
-  db: {
-    ...jest.requireActual("@budibase/backend-core").db,
-    isSqsEnabledForTenant: () => true,
-  },
-}))
 
 jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
   processInputBBReference: jest.fn(),

@@ -26,8 +20,24 @@ jest.mock("../bbReferenceProcessor", (): typeof bbReferenceProcessor => ({
 }))
 
 describe("rowProcessor - outputProcessing", () => {
+  const config = new TestConfiguration()
+  let cleanupEnv: () => void = () => {}
+
+  beforeAll(async () => {
+    await config.init()
+  })
+
+  afterAll(async () => {
+    config.end()
+  })
+
   beforeEach(() => {
     jest.resetAllMocks()
+    cleanupEnv = setCoreEnv({ TENANT_FEATURE_FLAGS: "*SQS" })
+  })
+
+  afterEach(() => {
+    cleanupEnv()
   })
 
   const processOutputBBReferenceMock =

@@ -36,266 +46,276 @@ describe("rowProcessor - outputProcessing", () => {
     bbReferenceProcessor.processOutputBBReferences as jest.Mock
 
   it("fetches single user references given a populated field", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        name: {
-          type: FieldType.STRING,
-          name: "name",
-          constraints: {
-            presence: true,
-            type: "string",
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: {
+              presence: true,
+              type: "string",
+            },
+          },
+          user: {
+            type: FieldType.BB_REFERENCE_SINGLE,
+            subtype: BBReferenceFieldSubType.USER,
+            name: "user",
+            constraints: {
+              presence: false,
+              type: "string",
+            },
           },
         },
-        user: {
-          type: FieldType.BB_REFERENCE_SINGLE,
-          subtype: BBReferenceFieldSubType.USER,
-          name: "user",
-          constraints: {
-            presence: false,
-            type: "string",
-          },
-        },
-      },
-    }
+      }
 
       const row = {
         name: "Jack",
         user: "123",
       }
 
       const user = structures.users.user()
       processOutputBBReferenceMock.mockResolvedValue(user)
 
       const result = await outputProcessing(table, row, { squash: false })
 
       expect(result).toEqual({ name: "Jack", user })
 
-    expect(bbReferenceProcessor.processOutputBBReference).toHaveBeenCalledTimes(
-      1
-    )
-    expect(bbReferenceProcessor.processOutputBBReference).toHaveBeenCalledWith(
-      "123",
-      BBReferenceFieldSubType.USER
-    )
+      expect(
+        bbReferenceProcessor.processOutputBBReference
+      ).toHaveBeenCalledTimes(1)
+      expect(
+        bbReferenceProcessor.processOutputBBReference
+      ).toHaveBeenCalledWith("123", BBReferenceFieldSubType.USER)
+    })
   })
 
   it("fetches users references given a populated field", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        name: {
-          type: FieldType.STRING,
-          name: "name",
-          constraints: {
-            presence: true,
-            type: "string",
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: {
+              presence: true,
+              type: "string",
+            },
+          },
+          users: {
+            type: FieldType.BB_REFERENCE,
+            subtype: BBReferenceFieldSubType.USER,
+            name: "users",
+            constraints: {
+              presence: false,
+              type: "string",
+            },
          },
        },
-        users: {
-          type: FieldType.BB_REFERENCE,
-          subtype: BBReferenceFieldSubType.USER,
-          name: "users",
-          constraints: {
-            presence: false,
-            type: "string",
-          },
-        },
-      },
-    }
+      }
 
       const row = {
         name: "Jack",
         users: "123",
       }
 
       const users = [structures.users.user()]
       processOutputBBReferencesMock.mockResolvedValue(users)
 
       const result = await outputProcessing(table, row, { squash: false })
 
       expect(result).toEqual({ name: "Jack", users })
 
       expect(
         bbReferenceProcessor.processOutputBBReferences
       ).toHaveBeenCalledTimes(1)
-    expect(bbReferenceProcessor.processOutputBBReferences).toHaveBeenCalledWith(
-      "123",
-      BBReferenceFieldSubType.USER
-    )
+      expect(
+        bbReferenceProcessor.processOutputBBReferences
+      ).toHaveBeenCalledWith("123", BBReferenceFieldSubType.USER)
+    })
   })
 
   it("should handle attachment list correctly", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        attach: {
-          type: FieldType.ATTACHMENTS,
-          name: "attach",
-          constraints: {},
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          attach: {
+            type: FieldType.ATTACHMENTS,
+            name: "attach",
+            constraints: {},
+          },
         },
-      },
-    }
+      }
 
       const row: { attach: RowAttachment[] } = {
         attach: [
           {
+            size: 10,
+            name: "test",
+            extension: "jpg",
+            key: "test.jpg",
+          },
+        ],
+      }
+
+      const output = await outputProcessing(table, row, { squash: false })
+      expect(output.attach[0].url?.split("?")[0]).toBe(
+        "/files/signed/prod-budi-app-assets/test.jpg"
+      )
+
+      row.attach[0].url = ""
+      const output2 = await outputProcessing(table, row, { squash: false })
+      expect(output2.attach[0].url?.split("?")[0]).toBe(
+        "/files/signed/prod-budi-app-assets/test.jpg"
+      )
+
+      row.attach[0].url = "aaaa"
+      const output3 = await outputProcessing(table, row, { squash: false })
+      expect(output3.attach[0].url).toBe("aaaa")
+    })
+  })
+
+  it("should handle single attachment correctly", async () => {
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          attach: {
+            type: FieldType.ATTACHMENT_SINGLE,
+            name: "attach",
+            constraints: {},
+          },
+        },
+      }
+
+      const row: { attach: RowAttachment } = {
+        attach: {
           size: 10,
           name: "test",
           extension: "jpg",
           key: "test.jpg",
         },
-      ],
-    }
+      }
 
       const output = await outputProcessing(table, row, { squash: false })
-    expect(output.attach[0].url?.split("?")[0]).toBe(
+      expect(output.attach.url?.split("?")[0]).toBe(
         "/files/signed/prod-budi-app-assets/test.jpg"
       )
 
-    row.attach[0].url = ""
+      row.attach.url = ""
       const output2 = await outputProcessing(table, row, { squash: false })
-    expect(output2.attach[0].url?.split("?")[0]).toBe(
+      expect(output2.attach?.url?.split("?")[0]).toBe(
         "/files/signed/prod-budi-app-assets/test.jpg"
       )
 
-    row.attach[0].url = "aaaa"
+      row.attach.url = "aaaa"
       const output3 = await outputProcessing(table, row, { squash: false })
-    expect(output3.attach[0].url).toBe("aaaa")
+      expect(output3.attach.url).toBe("aaaa")
+    })
   })
 
-  it("should handle single attachment correctly", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        attach: {
-          type: FieldType.ATTACHMENT_SINGLE,
-          name: "attach",
-          constraints: {},
-        },
-      },
-    }
-
-    const row: { attach: RowAttachment } = {
-      attach: {
-        size: 10,
-        name: "test",
-        extension: "jpg",
-        key: "test.jpg",
-      },
-    }
-
-    const output = await outputProcessing(table, row, { squash: false })
-    expect(output.attach.url?.split("?")[0]).toBe(
-      "/files/signed/prod-budi-app-assets/test.jpg"
-    )
-
-    row.attach.url = ""
-    const output2 = await outputProcessing(table, row, { squash: false })
-    expect(output2.attach?.url?.split("?")[0]).toBe(
-      "/files/signed/prod-budi-app-assets/test.jpg"
-    )
-
-    row.attach.url = "aaaa"
-    const output3 = await outputProcessing(table, row, { squash: false })
-    expect(output3.attach.url).toBe("aaaa")
   })
 
   it("process output even when the field is not empty", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        name: {
-          type: FieldType.STRING,
-          name: "name",
-          constraints: {
-            presence: true,
-            type: "string",
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: {
+              presence: true,
+              type: "string",
+            },
+          },
+          user: {
+            type: FieldType.BB_REFERENCE,
+            subtype: BBReferenceFieldSubType.USER,
+            name: "user",
+            constraints: {
+              presence: false,
+              type: "string",
+            },
          },
        },
-        user: {
-          type: FieldType.BB_REFERENCE,
-          subtype: BBReferenceFieldSubType.USER,
-          name: "user",
-          constraints: {
-            presence: false,
-            type: "string",
-          },
-        },
-      },
-    }
+      }
 
       const row = {
         name: "Jack",
       }
 
       const result = await outputProcessing(table, row, { squash: false })
 
       expect(result).toEqual({ name: "Jack" })
 
       expect(
         bbReferenceProcessor.processOutputBBReferences
       ).toHaveBeenCalledTimes(1)
+    })
   })
 
   it("does not fetch bb references when not in the schema", async () => {
-    const table: Table = {
-      _id: generator.guid(),
-      name: "TestTable",
-      type: "table",
-      sourceId: INTERNAL_TABLE_SOURCE_ID,
-      sourceType: TableSourceType.INTERNAL,
-      schema: {
-        name: {
-          type: FieldType.STRING,
-          name: "name",
-          constraints: {
-            presence: true,
-            type: "string",
+    await config.doInContext(config.getAppId(), async () => {
+      const table: Table = {
+        _id: generator.guid(),
+        name: "TestTable",
+        type: "table",
+        sourceId: INTERNAL_TABLE_SOURCE_ID,
+        sourceType: TableSourceType.INTERNAL,
+        schema: {
+          name: {
+            type: FieldType.STRING,
+            name: "name",
+            constraints: {
+              presence: true,
+              type: "string",
+            },
+          },
+          user: {
+            type: FieldType.NUMBER,
+            name: "user",
+            constraints: {
+              presence: false,
+              type: "string",
+            },
          },
        },
-        user: {
-          type: FieldType.NUMBER,
-          name: "user",
-          constraints: {
-            presence: false,
-            type: "string",
-          },
-        },
-      },
-    }
+      }
 
       const row = {
         name: "Jack",
         user: "123",
       }
 
       const result = await outputProcessing(table, row, { squash: false })
 
       expect(result).toEqual({ name: "Jack", user: "123" })
 
       expect(
         bbReferenceProcessor.processOutputBBReferences
       ).not.toHaveBeenCalled()
+    })
   })
 })

@@ -16,7 +16,6 @@ import { gridSocket } from "./index"
 import { clearLock, updateLock } from "../utilities/redis"
 import { Socket } from "socket.io"
 import { BuilderSocketEvent } from "@budibase/shared-core"
-import { processTable } from "../sdk/app/tables/getters"
 
 export default class BuilderSocket extends BaseSocket {
   constructor(app: Koa, server: http.Server) {

|
||||||
}
|
}
|
||||||
|
|
||||||
emitTableUpdate(ctx: any, table: Table, options?: EmitOptions) {
|
emitTableUpdate(ctx: any, table: Table, options?: EmitOptions) {
|
||||||
// This was added to make sure that sourceId is always present when
|
if (table.sourceId == null || table.sourceId === "") {
|
||||||
// sending this message to clients. Without this, tables without a
|
throw new Error("Table sourceId is not set")
|
||||||
// sourceId (e.g. ta_users) won't get correctly updated client-side.
|
}
|
||||||
table = processTable(table)
|
|
||||||
|
|
||||||
this.emitToRoom(
|
this.emitToRoom(
|
||||||
ctx,
|
ctx,
|
||||||
|
|
|
@@ -30,7 +30,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
-    SQS_SEARCH_ENABLE: "1",
+    TENANT_FEATURE_FLAGS: "*:SQS",
   }
 
   config = { ...config, ...existingConfig }

@@ -1,6 +1,6 @@
 import { Ctx, MaintenanceType } from "@budibase/types"
 import env from "../../../environment"
-import { env as coreEnv, db as dbCore } from "@budibase/backend-core"
+import { env as coreEnv, db as dbCore, features } from "@budibase/backend-core"
 import nodeFetch from "node-fetch"
 
 let sqsAvailable: boolean

|
||||||
}
|
}
|
||||||
|
|
||||||
async function isSqsMissing() {
|
async function isSqsMissing() {
|
||||||
return coreEnv.SQS_SEARCH_ENABLE && !(await isSqsAvailable())
|
return (await features.flags.isEnabled("SQS")) && !(await isSqsAvailable())
|
||||||
}
|
}
|
||||||
|
|
||||||
export const fetch = async (ctx: Ctx) => {
|
export const fetch = async (ctx: Ctx) => {
|
||||||
|
|
|
@@ -4,12 +4,8 @@ const compress = require("koa-compress")
 
 import zlib from "zlib"
 import { routes } from "./routes"
-import { middleware as pro, sdk } from "@budibase/pro"
-import { auth, middleware, env } from "@budibase/backend-core"
-
-if (env.SQS_SEARCH_ENABLE) {
-  sdk.auditLogs.useSQLSearch()
-}
+import { middleware as pro } from "@budibase/pro"
+import { auth, middleware } from "@budibase/backend-core"
 
 const PUBLIC_ENDPOINTS = [
   // deprecated single tenant sso callback

@@ -1,7 +1,6 @@
 import { mocks, structures } from "@budibase/backend-core/tests"
-import { context, events } from "@budibase/backend-core"
+import { context, events, setEnv as setCoreEnv } from "@budibase/backend-core"
 import { Event, IdentityType } from "@budibase/types"
-import { auditLogs } from "@budibase/pro"
 import { TestConfiguration } from "../../../../tests"
 
 mocks.licenses.useAuditLogs()

|
||||||
|
|
||||||
describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
|
describe.each(["lucene", "sql"])("/api/global/auditlogs (%s)", method => {
|
||||||
const config = new TestConfiguration()
|
const config = new TestConfiguration()
|
||||||
|
let envCleanup: (() => void) | undefined
|
||||||
|
|
||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
if (method === "sql") {
|
if (method === "sql") {
|
||||||
auditLogs.useSQLSearch()
|
envCleanup = setCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" })
|
||||||
}
|
}
|
||||||
await config.beforeAll()
|
await config.beforeAll()
|
||||||
})
|
})
|
||||||
|
|
||||||
afterAll(async () => {
|
afterAll(async () => {
|
||||||
|
envCleanup?.()
|
||||||
await config.afterAll()
|
await config.afterAll()
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|
|
@@ -9,6 +9,7 @@ export class EnvironmentAPI extends TestAPI {
   getEnvironment = () => {
     return this.request
       .get(`/api/system/environment`)
+      .set(this.config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
   }