diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index bf5215a724..b68cba5fd9 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -45,7 +45,7 @@
     "passport-oauth2-refresh": "^2.1.0",
     "pino": "8.11.0",
     "pino-http": "8.3.3",
-    "posthog-node": "1.3.0",
+    "posthog-node": "4.0.1",
     "pouchdb": "7.3.0",
     "pouchdb-find": "7.2.2",
     "redlock": "4.2.0",
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 64e3187956..5091a4971a 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -1,5 +1,6 @@
 import { existsSync, readFileSync } from "fs"
 import { ServiceType } from "@budibase/types"
+import { cloneDeep } from "lodash"
 
 function isTest() {
   return isJest()
@@ -208,6 +209,32 @@ const environment = {
   OPENAI_API_KEY: process.env.OPENAI_API_KEY,
 }
 
+export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
+  const oldEnv = cloneDeep(environment)
+
+  let key: keyof typeof newEnvVars
+  for (key in newEnvVars) {
+    environment._set(key, newEnvVars[key])
+  }
+
+  return () => {
+    for (const [key, value] of Object.entries(oldEnv)) {
+      environment._set(key, value)
+    }
+  }
+}
+
+export function withEnv<T>(envVars: Partial<typeof environment>, f: () => T) {
+  const cleanup = setEnv(envVars)
+  const result = f()
+  if (result instanceof Promise) {
+    return result.finally(cleanup)
+  } else {
+    cleanup()
+    return result
+  }
+}
+
 type EnvironmentKey = keyof typeof environment
 export const SECRETS: EnvironmentKey[] = [
   "API_ENCRYPTION_KEY",
diff --git a/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts
index d37b85a9b8..12d2bb7e2c 100644
--- a/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts
+++ b/packages/backend-core/src/events/processors/posthog/PosthogProcessor.ts
@@ -1,4 +1,4 @@
-import PostHog from "posthog-node"
+import { PostHog } from "posthog-node"
 import { Event, Identity, Group, BaseEvent } from "@budibase/types"
 import { EventProcessor } from "../types"
 import env from "../../../environment"
diff --git a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts
index d9a5504073..1d9e341ffe 100644
--- a/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts
+++ b/packages/backend-core/src/events/processors/posthog/tests/PosthogProcessor.spec.ts
@@ -1,9 +1,7 @@
 import { testEnv } from "../../../../../tests/extra"
 import PosthogProcessor from "../PosthogProcessor"
 import { Event, IdentityType, Hosting } from "@budibase/types"
-
-const tk = require("timekeeper")
-
+import tk from "timekeeper"
 import * as cache from "../../../../cache/generic"
 import { CacheKey } from "../../../../cache/generic"
 import * as context from "../../../../context"
@@ -32,27 +30,30 @@ describe("PosthogProcessor", () => {
   describe("processEvent", () => {
     it("processes event", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
       const identity = newIdentity()
       const properties = {}
 
       await processor.processEvent(Event.APP_CREATED, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
+      expect(spy).toHaveBeenCalledTimes(1)
     })
 
     it("honours exclusions", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
       const identity = newIdentity()
       const properties = {}
 
       await processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
+      expect(spy).toHaveBeenCalledTimes(0)
     })
 
     it("removes audited information", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
       const identity = newIdentity()
 
       const properties = {
@@ -63,7 +64,7 @@ describe("PosthogProcessor", () => {
       }
 
       await processor.processEvent(Event.USER_CREATED, identity, properties)
-      expect(processor.posthog.capture).toHaveBeenCalled()
+      expect(spy).toHaveBeenCalled()
       // @ts-ignore
       const call = processor.posthog.capture.mock.calls[0][0]
       expect(call.properties.audited).toBeUndefined()
@@ -73,6 +74,8 @@ describe("PosthogProcessor", () => {
   describe("rate limiting", () => {
     it("sends daily event once in same day", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
+
       const identity = newIdentity()
       const properties = {}
 
@@ -82,11 +85,12 @@ describe("PosthogProcessor", () => {
       tk.freeze(new Date(2022, 0, 1, 15, 0))
       await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
+      expect(spy).toHaveBeenCalledTimes(1)
     })
 
     it("sends daily event once per unique day", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
       const identity = newIdentity()
       const properties = {}
 
@@ -102,11 +106,13 @@ describe("PosthogProcessor", () => {
       tk.freeze(new Date(2022, 0, 3, 6, 0))
       await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(3)
+      expect(spy).toHaveBeenCalledTimes(3)
     })
 
     it("sends event again after cache expires", async () => {
      const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
+
       const identity = newIdentity()
       const properties = {}
 
@@ -120,11 +126,12 @@ describe("PosthogProcessor", () => {
       tk.freeze(new Date(2022, 0, 1, 14, 0))
       await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(2)
+      expect(spy).toHaveBeenCalledTimes(2)
     })
 
     it("sends per app events once per day per app", async () => {
       const processor = new PosthogProcessor("test")
+      const spy = jest.spyOn(processor.posthog, "capture")
       const identity = newIdentity()
       const properties = {}
 
@@ -160,10 +167,10 @@ describe("PosthogProcessor", () => {
       }
 
       await runAppEvents("app_1")
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(4)
+      expect(spy).toHaveBeenCalledTimes(4)
 
       await runAppEvents("app_2")
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(8)
+      expect(spy).toHaveBeenCalledTimes(8)
     })
   })
 })
diff --git a/packages/backend-core/src/features/index.ts b/packages/backend-core/src/features/index.ts
index d7f7c76436..7a77eb8b56 100644
--- a/packages/backend-core/src/features/index.ts
+++ b/packages/backend-core/src/features/index.ts
@@ -1,13 +1,40 @@
 import env from "../environment"
 import * as context from "../context"
 import { cloneDeep } from "lodash"
+import { PostHog } from "posthog-node"
+import { IdentityType } from "@budibase/types"
 
-class Flag<T> {
-  static withDefault<T>(value: T) {
-    return new Flag(value)
+let posthog: PostHog | undefined
+export function init() {
+  if (env.POSTHOG_TOKEN) {
+    posthog = new PostHog(env.POSTHOG_TOKEN, {
+      host: "https://us.i.posthog.com",
+    })
+  }
+}
+
+abstract class Flag<T> {
+  static boolean(defaultValue: boolean): Flag<boolean> {
+    return new BooleanFlag(defaultValue)
   }
 
-  private constructor(public defaultValue: T) {}
+  protected constructor(public defaultValue: T) {}
+
+  abstract parse(value: any): T
+}
+
+class BooleanFlag extends Flag<boolean> {
+  parse(value: any) {
+    if (typeof value === "string") {
+      return ["true", "t", "1"].includes(value.toLowerCase())
+    }
+
+    if (typeof value === "boolean") {
+      return value
+    }
+
+    throw new Error(`could not parse value "${value}" as boolean`)
+  }
 }
 
 // This is the primary source of truth for feature flags. If you want to add a
@@ -15,10 +42,10 @@ class Flag<T> {
 // All of the machinery in this file is to make sure that flags have their
 // default values set correctly and their types flow through the system.
 const FLAGS = {
-  LICENSING: Flag.withDefault(false),
-  GOOGLE_SHEETS: Flag.withDefault(false),
-  USER_GROUPS: Flag.withDefault(false),
-  ONBOARDING_TOUR: Flag.withDefault(false),
+  LICENSING: Flag.boolean(false),
+  GOOGLE_SHEETS: Flag.boolean(false),
+  USER_GROUPS: Flag.boolean(false),
+  ONBOARDING_TOUR: Flag.boolean(false),
 }
 
 const DEFAULTS = Object.keys(FLAGS).reduce((acc, key) => {
@@ -53,9 +80,10 @@ function isFlagName(name: string): name is keyof Flags {
  * they will be accessed through this function as well.
  */
 export async function fetch(): Promise<Flags> {
-  const currentTenantId = context.getTenantId()
   const flags = defaultFlags()
 
+  const currentTenantId = context.getTenantId()
+
   const split = (env.TENANT_FEATURE_FLAGS || "")
     .split(",")
     .map(x => x.split(":"))
@@ -79,11 +107,33 @@ export async function fetch(): Promise<Flags> {
         throw new Error(`Feature: ${feature} is not a boolean`)
       }
 
-      // @ts-ignore
       flags[feature] = value
     }
   }
 
+  const identity = context.getIdentity()
+  if (posthog && identity?.type === IdentityType.USER) {
+    const posthogFlags = await posthog.getAllFlagsAndPayloads(identity._id)
+    for (const [name, value] of Object.entries(posthogFlags)) {
+      const key = name as keyof typeof FLAGS
+      const flag = FLAGS[key]
+      if (!flag) {
+        // We don't want an unexpected PostHog flag to break the app, so we
+        // just log it and continue.
+        console.warn(`Unexpected posthog flag "${name}": ${value}`)
+        continue
+      }
+
+      try {
+        flags[key] = flag.parse(value)
+      } catch (err) {
+        // We don't want an invalid PostHog flag to break the app, so we just
+        // log it and continue.
+        console.warn(`Error parsing posthog flag "${name}": ${value}`, err)
+      }
+    }
+  }
+
   return flags
 }
diff --git a/packages/backend-core/src/features/tests/features.spec.ts b/packages/backend-core/src/features/tests/features.spec.ts
index 83a89940b8..9022d568ee 100644
--- a/packages/backend-core/src/features/tests/features.spec.ts
+++ b/packages/backend-core/src/features/tests/features.spec.ts
@@ -1,16 +1,8 @@
-import { defaultFlags, fetch, get, Flags } from "../"
+import { IdentityContext, IdentityType } from "@budibase/types"
+import { defaultFlags, fetch, get, Flags, init } from "../"
 import { context } from "../.."
-import env from "../../environment"
-
-async function withFlags<T>(flags: string, f: () => T): Promise<T> {
-  const oldFlags = env.TENANT_FEATURE_FLAGS
-  env._set("TENANT_FEATURE_FLAGS", flags)
-  try {
-    return await f()
-  } finally {
-    env._set("TENANT_FEATURE_FLAGS", oldFlags)
-  }
-}
+import { setEnv, withEnv } from "../../environment"
+import nock from "nock"
 
 describe("feature flags", () => {
   interface TestCase {
@@ -48,8 +40,8 @@ describe("feature flags", () => {
   ])(
     'should find flags $expected for $tenant with string "$flags"',
     ({ tenant, flags, expected }) =>
-      context.doInTenant(tenant, () =>
-        withFlags(flags, async () => {
+      context.doInTenant(tenant, async () =>
+        withEnv({ TENANT_FEATURE_FLAGS: flags }, async () => {
          const flags = await fetch()
           expect(flags).toMatchObject(expected)
 
@@ -75,12 +67,51 @@ describe("feature flags", () => {
     },
   ])(
     "should fail with message \"$expected\" for $tenant with string '$flags'",
-    async ({ tenant, flags, expected }) => {
+    ({ tenant, flags, expected }) =>
       context.doInTenant(tenant, () =>
-        withFlags(flags, async () => {
-          await expect(fetch()).rejects.toThrow(expected)
-        })
+        withEnv({ TENANT_FEATURE_FLAGS: flags }, () =>
+          expect(fetch()).rejects.toThrow(expected)
+        )
       )
-    }
   )
+
+  // describe("posthog", () => {
+  //   const identity: IdentityContext = {
+  //     _id: "us_1234",
+  //     tenantId: "budibase",
+  //     type: IdentityType.USER,
+  //     email: "test@example.com",
+  //     firstName: "Test",
+  //     lastName: "User",
+  //   }
+
+  //   let cleanup: () => void
+
+  //   beforeAll(() => {
+  //     cleanup = setEnv({ POSTHOG_TOKEN: "test" })
+  //     init()
+  //   })
+
+  //   afterAll(() => {
+  //     cleanup()
+  //   })
+
+  //   beforeEach(() => {
+  //     nock.cleanAll()
+  //   })
+
+  //   it("should be able to read flags from posthog", () =>
+  //     context.doInIdentityContext(identity, async () => {
+  //       nock("https://app.posthog.com")
+  //         .get("/api/feature_flags/tenant/budibase")
+  //         .reply(200, {
+  //           flags: {
+  //             "budibase:onboardingTour": true,
+  //           },
+  //         })
+
+  //       const flags = await fetch()
+  //       expect(flags.ONBOARDING_TOUR).toBe(true)
+  //     }))
+  // })
 })
diff --git a/packages/backend-core/src/middleware/passport/sso/tests/oidc.spec.ts b/packages/backend-core/src/middleware/passport/sso/tests/oidc.spec.ts
index a705739bd6..594e197204 100644
--- a/packages/backend-core/src/middleware/passport/sso/tests/oidc.spec.ts
+++ b/packages/backend-core/src/middleware/passport/sso/tests/oidc.spec.ts
@@ -1,4 +1,4 @@
-import { generator, mocks, structures } from "../../../../../tests"
+import { generator, structures } from "../../../../../tests"
 import {
   JwtClaims,
   OIDCInnerConfig,
@@ -7,6 +7,7 @@ import {
 } from "@budibase/types"
 import * as _sso from "../sso"
 import * as oidc from "../oidc"
+import nock from "nock"
 
 jest.mock("@techpass/passport-openidconnect")
 const mockStrategy = require("@techpass/passport-openidconnect").Strategy
@@ -22,16 +23,9 @@ describe("oidc", () => {
   const oidcConfig: OIDCInnerConfig = structures.sso.oidcConfig()
   const wellKnownConfig = structures.sso.oidcWellKnownConfig()
 
-  function mockRetrieveWellKnownConfig() {
-    // mock the request to retrieve the oidc configuration
-    mocks.fetch.mockReturnValue({
-      ok: true,
-      json: () => wellKnownConfig,
-    })
-  }
-
   beforeEach(() => {
-    mockRetrieveWellKnownConfig()
+    nock.cleanAll()
+    nock(oidcConfig.configUrl).get("/").reply(200, wellKnownConfig)
   })
 
   describe("strategyFactory", () => {
@@ -42,8 +36,6 @@ describe("oidc", () => {
       )
       await oidc.strategyFactory(strategyConfiguration, mockSaveUser)
 
-      expect(mocks.fetch).toHaveBeenCalledWith(oidcConfig.configUrl)
-
       const expectedOptions = {
         issuer: wellKnownConfig.issuer,
         authorizationURL: wellKnownConfig.authorization_endpoint,
diff --git a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts
index ea9584c284..9fa82b6594 100644
--- a/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts
+++ b/packages/backend-core/src/middleware/passport/sso/tests/sso.spec.ts
@@ -5,6 +5,7 @@ import { SSOAuthDetails, User } from "@budibase/types"
 import { HTTPError } from "../../../../errors"
 import * as sso from "../sso"
 import * as context from "../../../../context"
+import nock from "nock"
 
 const mockDone = jest.fn()
 const mockSaveUser = jest.fn()
@@ -23,6 +24,7 @@ describe("sso", () => {
   beforeEach(() => {
     jest.clearAllMocks()
     testEnv.singleTenant()
+    nock.cleanAll()
   })
 
   describe("validation", () => {
@@ -51,15 +53,6 @@ describe("sso", () => {
     })
   })
 
-  function mockGetProfilePicture() {
-    mocks.fetch.mockReturnValueOnce(
-      Promise.resolve({
-        status: 200,
-        headers: { get: () => "image/" },
-      })
-    )
-  }
-
   describe("when the user doesn't exist", () => {
     let user: User
     let details: SSOAuthDetails
@@ -68,7 +61,10 @@ describe("sso", () => {
       users.getById.mockImplementationOnce(() => {
        throw new HTTPError("", 404)
       })
-      mockGetProfilePicture()
+
+      nock("http://example.com").get("/").reply(200, undefined, {
+        "Content-Type": "image/png",
+      })
 
       user = structures.users.user()
       delete user._rev
@@ -131,7 +127,9 @@ describe("sso", () => {
       existingUser = structures.users.user()
       existingUser._id = structures.uuid()
       details = structures.sso.authDetails(existingUser)
-      mockGetProfilePicture()
+      nock("http://example.com").get("/").reply(200, undefined, {
+        "Content-Type": "image/png",
+      })
     })
 
     describe("exists by email", () => {
diff --git a/packages/backend-core/src/plugin/tests/validation.spec.ts b/packages/backend-core/src/plugin/tests/validation.spec.ts
index 0fea009645..6f1a3e300b 100644
--- a/packages/backend-core/src/plugin/tests/validation.spec.ts
+++ b/packages/backend-core/src/plugin/tests/validation.spec.ts
@@ -1,12 +1,129 @@
 import { validate } from "../utils"
 import fetch from "node-fetch"
 import { PluginType } from "@budibase/types"
+import nock from "nock"
 
-const repoUrl =
-  "https://raw.githubusercontent.com/Budibase/budibase-skeleton/master"
-const automationLink = `${repoUrl}/automation/schema.json.hbs`
-const componentLink = `${repoUrl}/component/schema.json.hbs`
-const datasourceLink = `${repoUrl}/datasource/schema.json.hbs`
+const automationLink = `http://example.com/automation/schema.json`
+const componentLink = `http://example.com/component/schema.json`
+const datasourceLink = `http://example.com/datasource/schema.json`
+
+function mockDatasourceSchema() {
+  nock("http://example.com")
+    .get("/datasource/schema.json")
+    .reply(200, {
+      type: "datasource",
+      metadata: {},
+      schema: {
+        docs: "https://docs.budibase.com",
+        friendlyName: "Basic HTTP",
+        type: "API",
+        description: "Performs a basic HTTP calls to a URL",
+        datasource: {
+          url: {
+            type: "string",
+            required: true,
+          },
+          cookie: {
+            type: "string",
+            required: false,
+          },
+        },
+        query: {
+          create: {
+            type: "json",
+          },
+          read: {
+            type: "fields",
+            fields: {
+              queryString: {
+                display: "Query string",
+                type: "string",
+                required: false,
+              },
+            },
+          },
+          update: {
+            type: "json",
+          },
+          delete: {
+            type: "fields",
+            fields: {
+              id: {
+                type: "string",
+                required: true,
+              },
+            },
+          },
+        },
+      },
+    })
+}
+
+function mockAutomationSchema() {
+  nock("http://example.com")
+    .get("/automation/schema.json")
+    .reply(200, {
+      type: "automation",
+      metadata: {},
+      schema: {
+        name: "{{ name }}",
+        tagline: "{{ description }}",
+        icon: "Actions",
+        description: "{{ description }}",
+        type: "action",
+        stepId: "{{ name }}",
+        inputs: {
+          text: "",
+        },
+        schema: {
+          inputs: {
+            properties: {
+              text: {
+                type: "string",
+                title: "Log",
+              },
+            },
+            required: ["text"],
+          },
+          outputs: {
+            properties: {
+              success: {
+                type: "boolean",
+                description: "Whether the action was successful",
+              },
+              message: {
+                type: "string",
+                description: "What was output",
+              },
+            },
+            required: ["success", "message"],
+          },
+        },
+      },
+    })
+}
+
+function mockComponentSchema() {
+  nock("http://example.com")
+    .get("/component/schema.json")
+    .reply(200, {
+      type: "component",
+      metadata: {},
+      schema: {
+        name: "{{ name }}",
+        friendlyName: "{{ name }}",
+        description: "{{ description }}",
+        icon: "Text",
+        settings: [
+          {
+            type: "text",
+            key: "text",
+            label: "Text",
+          },
+        ],
+      },
+    })
+}
 
 async function getSchema(link: string) {
   const response = await fetch(link)
@@ -31,53 +148,62 @@ async function runTest(opts: { link?: string; schema?: any }) {
   return error
 }
 
-describe("it should be able to validate an automation schema", () => {
-  it("should return automation skeleton schema is valid", async () => {
-    const error = await runTest({ link: automationLink })
-    expect(error).toBeUndefined()
+describe("plugin validation", () => {
+  beforeEach(() => {
+    nock.cleanAll()
+    mockAutomationSchema()
+    mockComponentSchema()
+    mockDatasourceSchema()
   })
 
-  it("should fail given invalid automation schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.AUTOMATION,
-        schema: {},
-      },
+  describe("it should be able to validate an automation schema", () => {
+    it("should return automation skeleton schema is valid", async () => {
+      const error = await runTest({ link: automationLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid automation schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.AUTOMATION,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
+    })
+  })
+
+  describe("it should be able to validate a component schema", () => {
+    it("should return component skeleton schema is valid", async () => {
+      const error = await runTest({ link: componentLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid component schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.COMPONENT,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
+    })
+  })
+
+  describe("it should be able to validate a datasource schema", () => {
+    it("should return datasource skeleton schema is valid", async () => {
+      const error = await runTest({ link: datasourceLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid datasource schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.DATASOURCE,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
     })
-    expect(error).toBeDefined()
-  })
-})
-
-describe("it should be able to validate a component schema", () => {
-  it("should return component skeleton schema is valid", async () => {
-    const error = await runTest({ link: componentLink })
-    expect(error).toBeUndefined()
-  })
-
-  it("should fail given invalid component schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.COMPONENT,
-        schema: {},
-      },
-    })
-    expect(error).toBeDefined()
-  })
-})
-
-describe("it should be able to validate a datasource schema", () => {
-  it("should return datasource skeleton schema is valid", async () => {
-    const error = await runTest({ link: datasourceLink })
-    expect(error).toBeUndefined()
-  })
-
-  it("should fail given invalid datasource schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.DATASOURCE,
-        schema: {},
-      },
-    })
-    expect(error).toBeDefined()
   })
 })
diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts
index 79f75421d3..1271086fc5 100644
--- a/packages/backend-core/src/redis/redis.ts
+++ b/packages/backend-core/src/redis/redis.ts
@@ -111,6 +111,10 @@ function init(selectDb = DEFAULT_SELECT_DB) {
   CLIENTS[selectDb] = client
 }
 
+export function closeAll() {
+  Object.values(CLIENTS).forEach(client => client.disconnect())
+}
+
 function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
   return new Promise(resolve => {
     if (pickClient(selectDb) == null) {
diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts
index 9160c6a6dd..376afbfab7 100644
--- a/packages/backend-core/src/redis/tests/redis.spec.ts
+++ b/packages/backend-core/src/redis/tests/redis.spec.ts
@@ -1,6 +1,6 @@
 import { GenericContainer, StartedTestContainer } from "testcontainers"
 import { generator, structures } from "../../../tests"
-import RedisWrapper from "../redis"
+import RedisWrapper, { closeAll } from "../redis"
 import { env } from "../.."
 import { randomUUID } from "crypto"
 
@@ -23,7 +23,10 @@ describe("redis", () => {
     env._set("REDIS_PASSWORD", 0)
   })
 
-  afterAll(() => container?.stop())
+  afterAll(() => {
+    container?.stop()
+    closeAll()
+  })
 
   beforeEach(async () => {
     redis = new RedisWrapper(structures.db.id())
diff --git a/packages/backend-core/tests/core/utilities/mocks/fetch.ts b/packages/backend-core/tests/core/utilities/mocks/fetch.ts
deleted file mode 100644
index f7447d2c47..0000000000
--- a/packages/backend-core/tests/core/utilities/mocks/fetch.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-const mockFetch = jest.fn((url: any, opts: any) => {
-  const fetch = jest.requireActual("node-fetch")
-  const env = jest.requireActual("../../../../src/environment").default
-  if (url.includes(env.COUCH_DB_URL) || url.includes("raw.github")) {
-    return fetch(url, opts)
-  }
-  return undefined
-})
-
-const enable = () => {
-  jest.mock("node-fetch", () => mockFetch)
-}
-
-export default {
-  ...mockFetch,
-  enable,
-}
diff --git a/packages/backend-core/tests/core/utilities/mocks/index.ts b/packages/backend-core/tests/core/utilities/mocks/index.ts
index 8705e563cb..ef7304d64e 100644
--- a/packages/backend-core/tests/core/utilities/mocks/index.ts
+++ b/packages/backend-core/tests/core/utilities/mocks/index.ts
@@ -5,7 +5,5 @@ export const accounts = jest.mocked(_accounts)
 export * as date from "./date"
 export * as licenses from "./licenses"
-export { default as fetch } from "./fetch"
 export * from "./alerts"
 
 import "./events"
-import "./posthog"
diff --git a/packages/backend-core/tests/core/utilities/mocks/posthog.ts b/packages/backend-core/tests/core/utilities/mocks/posthog.ts
deleted file mode 100644
index e9cc653ccc..0000000000
--- a/packages/backend-core/tests/core/utilities/mocks/posthog.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-jest.mock("posthog-node", () => {
-  return jest.fn().mockImplementation(() => {
-    return {
-      capture: jest.fn(),
-    }
-  })
-})
diff --git a/packages/backend-core/tests/jestSetup.ts b/packages/backend-core/tests/jestSetup.ts
index e5d144290b..e7f2a6cc98 100644
--- a/packages/backend-core/tests/jestSetup.ts
+++ b/packages/backend-core/tests/jestSetup.ts
@@ -2,14 +2,21 @@ import "./core/logging"
 import env from "../src/environment"
 import { cleanup } from "../src/timers"
 import { mocks, testContainerUtils } from "./core/utilities"
-
-// must explicitly enable fetch mock
-mocks.fetch.enable()
+import nock from "nock"
 
 // mock all dates to 2020-01-01T00:00:00.000Z
 // use tk.reset() to use real dates in individual tests
 import tk from "timekeeper"
 
+nock.disableNetConnect()
+nock.enableNetConnect(host => {
+  return (
+    host.includes("localhost") ||
+    host.includes("127.0.0.1") ||
+    host.includes("::1")
+  )
+})
+
 tk.freeze(mocks.date.MOCK_DATE)
 
 if (!process.env.DEBUG) {
diff --git a/packages/server/src/startup/index.ts b/packages/server/src/startup/index.ts
index 5bb1f9aa0f..53c4f884cc 100644
--- a/packages/server/src/startup/index.ts
+++ b/packages/server/src/startup/index.ts
@@ -9,6 +9,7 @@ import {
   users,
   cache,
   env as coreEnv,
+  features,
 } from "@budibase/backend-core"
 import { watch } from "../watch"
 import * as automations from "../automations"
@@ -96,6 +97,9 @@ export async function startup(
   console.log("Initialising events")
   eventInit()
 
+  console.log("Initialising feature flags")
+  features.init()
+
   if (app && server) {
     console.log("Initialising websockets")
     initialiseWebsockets(app, server)
diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts
index 85e5d6ad2e..d59d8d96ef 100644
--- a/packages/worker/src/index.ts
+++ b/packages/worker/src/index.ts
@@ -18,6 +18,7 @@ import {
   timers,
   redis,
   cache,
+  features,
 } from "@budibase/backend-core"
 
 db.init()
@@ -99,6 +100,7 @@ export default server.listen(parseInt(env.PORT || "4002"), async () => {
   // configure events to use the pro audit log write
   // can't integrate directly into backend-core due to cyclic issues
   await events.processors.init(proSdk.auditLogs.write)
+  features.init()
 })
 
 process.on("uncaughtException", err => {
diff --git a/yarn.lock b/yarn.lock
index 0195f19a2a..123eec3dd9 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2073,7 +2073,7 @@
     passport-oauth2-refresh "^2.1.0"
     pino "8.11.0"
     pino-http "8.3.3"
-    posthog-node "1.3.0"
+    posthog-node "4.0.1"
     pouchdb "7.3.0"
     pouchdb-find "7.2.2"
     redlock "4.2.0"
@@ -7343,7 +7343,7 @@ axios-retry@^3.1.9:
     "@babel/runtime" "^7.15.4"
     is-retry-allowed "^2.2.0"
 
-axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0:
+axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
   version "1.6.3"
   resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
   integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@@ -18110,6 +18110,14 @@ posthog-node@1.3.0:
     remove-trailing-slash "^0.1.1"
     uuid "^8.3.2"
 
+posthog-node@4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-4.0.1.tgz#eb8b6cdf68c3fdd0dc2b75e8aab2e0ec3727fb2a"
+  integrity sha512-rtqm2h22QxLGBrW2bLYzbRhliIrqgZ0k+gF0LkQ1SNdeD06YE5eilV0MxZppFSxC8TfH0+B0cWCuebEnreIDgQ==
+  dependencies:
+    axios "^1.6.2"
+    rusha "^0.8.14"
+
 pouch-stream@^0.4.0:
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/pouch-stream/-/pouch-stream-0.4.1.tgz#0c6d8475c9307677627991a2f079b301c3b89bdd"
@@ -19574,6 +19582,11 @@ run-parallel@^1.1.9:
   dependencies:
     queue-microtask "^1.2.2"
 
+rusha@^0.8.14:
+  version "0.8.14"
+  resolved "https://registry.yarnpkg.com/rusha/-/rusha-0.8.14.tgz#a977d0de9428406138b7bb90d3de5dcd024e2f68"
+  integrity sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==
+
 rxjs@^6.6.6:
   version "6.6.7"
   resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9"