Merge branch 'master' into Fix-user-access-roles-from-displaying-business
commit 8cbb003d44
@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.30.2",
+  "version": "2.30.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -45,7 +45,7 @@
     "passport-oauth2-refresh": "^2.1.0",
     "pino": "8.11.0",
     "pino-http": "8.3.3",
-    "posthog-node": "1.3.0",
+    "posthog-node": "4.0.1",
     "pouchdb": "7.3.0",
     "pouchdb-find": "7.2.2",
     "redlock": "4.2.0",
@@ -1,5 +1,6 @@
 import { existsSync, readFileSync } from "fs"
 import { ServiceType } from "@budibase/types"
+import { cloneDeep } from "lodash"
 
 function isTest() {
   return isJest()
@@ -144,6 +145,8 @@ const environment = {
   COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
   PLATFORM_URL: process.env.PLATFORM_URL || "",
   POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
+  POSTHOG_PERSONAL_TOKEN: process.env.POSTHOG_PERSONAL_TOKEN,
+  POSTHOG_API_HOST: process.env.POSTHOG_API_HOST || "https://us.i.posthog.com",
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
   TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
   CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN,
@@ -208,6 +211,32 @@ const environment = {
   OPENAI_API_KEY: process.env.OPENAI_API_KEY,
 }
 
+export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
+  const oldEnv = cloneDeep(environment)
+
+  let key: keyof typeof newEnvVars
+  for (key in newEnvVars) {
+    environment._set(key, newEnvVars[key])
+  }
+
+  return () => {
+    for (const [key, value] of Object.entries(oldEnv)) {
+      environment._set(key, value)
+    }
+  }
+}
+
+export function withEnv<T>(envVars: Partial<typeof environment>, f: () => T) {
+  const cleanup = setEnv(envVars)
+  const result = f()
+  if (result instanceof Promise) {
+    return result.finally(cleanup)
+  } else {
+    cleanup()
+    return result
+  }
+}
+
 type EnvironmentKey = keyof typeof environment
 export const SECRETS: EnvironmentKey[] = [
   "API_ENCRYPTION_KEY",
@@ -1,4 +1,4 @@
-import PostHog from "posthog-node"
+import { PostHog } from "posthog-node"
 import { Event, Identity, Group, BaseEvent } from "@budibase/types"
 import { EventProcessor } from "../types"
 import env from "../../../environment"
@@ -1,9 +1,7 @@
 import { testEnv } from "../../../../../tests/extra"
 import PosthogProcessor from "../PosthogProcessor"
 import { Event, IdentityType, Hosting } from "@budibase/types"
-
-const tk = require("timekeeper")
-
+import tk from "timekeeper"
 import * as cache from "../../../../cache/generic"
 import { CacheKey } from "../../../../cache/generic"
 import * as context from "../../../../context"
@@ -18,6 +16,9 @@ const newIdentity = () => {
 }
 
 describe("PosthogProcessor", () => {
+  let processor: PosthogProcessor
+  let spy: jest.SpyInstance
+
   beforeAll(() => {
     testEnv.singleTenant()
   })
@@ -27,33 +28,29 @@ describe("PosthogProcessor", () => {
     await cache.bustCache(
       `${CacheKey.EVENTS_RATE_LIMIT}:${Event.SERVED_BUILDER}`
     )
 
+    processor = new PosthogProcessor("test")
+    spy = jest.spyOn(processor.posthog, "capture")
   })
 
   describe("processEvent", () => {
     it("processes event", async () => {
-      const processor = new PosthogProcessor("test")
-
       const identity = newIdentity()
       const properties = {}
 
       await processor.processEvent(Event.APP_CREATED, identity, properties)
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
+      expect(spy).toHaveBeenCalledTimes(1)
     })
 
     it("honours exclusions", async () => {
-      const processor = new PosthogProcessor("test")
-
       const identity = newIdentity()
       const properties = {}
 
       await processor.processEvent(Event.AUTH_SSO_UPDATED, identity, properties)
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(0)
+      expect(spy).toHaveBeenCalledTimes(0)
     })
 
     it("removes audited information", async () => {
-      const processor = new PosthogProcessor("test")
-
       const identity = newIdentity()
       const properties = {
         email: "test",
@@ -63,7 +60,8 @@ describe("PosthogProcessor", () => {
       }
 
       await processor.processEvent(Event.USER_CREATED, identity, properties)
-      expect(processor.posthog.capture).toHaveBeenCalled()
+      expect(spy).toHaveBeenCalled()
+
       // @ts-ignore
       const call = processor.posthog.capture.mock.calls[0][0]
       expect(call.properties.audited).toBeUndefined()
@@ -72,7 +70,6 @@ describe("PosthogProcessor", () => {
 
   describe("rate limiting", () => {
     it("sends daily event once in same day", async () => {
-      const processor = new PosthogProcessor("test")
       const identity = newIdentity()
       const properties = {}
 
|
||||||
tk.freeze(new Date(2022, 0, 1, 15, 0))
|
tk.freeze(new Date(2022, 0, 1, 15, 0))
|
||||||
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
|
||||||
|
|
||||||
expect(processor.posthog.capture).toHaveBeenCalledTimes(1)
|
expect(spy).toHaveBeenCalledTimes(1)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("sends daily event once per unique day", async () => {
|
it("sends daily event once per unique day", async () => {
|
||||||
const processor = new PosthogProcessor("test")
|
|
||||||
const identity = newIdentity()
|
const identity = newIdentity()
|
||||||
const properties = {}
|
const properties = {}
|
||||||
|
|
||||||
|
@@ -102,11 +98,10 @@ describe("PosthogProcessor", () => {
      tk.freeze(new Date(2022, 0, 3, 6, 0))
       await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(3)
+      expect(spy).toHaveBeenCalledTimes(3)
     })
 
     it("sends event again after cache expires", async () => {
-      const processor = new PosthogProcessor("test")
       const identity = newIdentity()
       const properties = {}
 
@@ -120,11 +115,10 @@ describe("PosthogProcessor", () => {
       tk.freeze(new Date(2022, 0, 1, 14, 0))
       await processor.processEvent(Event.SERVED_BUILDER, identity, properties)
 
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(2)
+      expect(spy).toHaveBeenCalledTimes(2)
     })
 
     it("sends per app events once per day per app", async () => {
-      const processor = new PosthogProcessor("test")
       const identity = newIdentity()
       const properties = {}
 
@@ -160,10 +154,10 @@ describe("PosthogProcessor", () => {
       }
 
       await runAppEvents("app_1")
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(4)
+      expect(spy).toHaveBeenCalledTimes(4)
 
       await runAppEvents("app_2")
-      expect(processor.posthog.capture).toHaveBeenCalledTimes(8)
+      expect(spy).toHaveBeenCalledTimes(8)
     })
   })
 })
@@ -1,108 +1,258 @@
 import env from "../environment"
 import * as context from "../context"
-import { cloneDeep } from "lodash"
+import { PostHog, PostHogOptions } from "posthog-node"
+import { IdentityType, UserCtx } from "@budibase/types"
+import tracer from "dd-trace"
 
-class Flag<T> {
-  static withDefault<T>(value: T) {
-    return new Flag(value)
+let posthog: PostHog | undefined
+export function init(opts?: PostHogOptions) {
+  if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST) {
+    console.log("initializing posthog client...")
+    posthog = new PostHog(env.POSTHOG_TOKEN, {
+      host: env.POSTHOG_API_HOST,
+      personalApiKey: env.POSTHOG_PERSONAL_TOKEN,
+      ...opts,
+    })
+  } else {
+    console.log("posthog disabled")
+  }
+}
+
+export abstract class Flag<T> {
+  static boolean(defaultValue: boolean): Flag<boolean> {
+    return new BooleanFlag(defaultValue)
   }
 
-  private constructor(public defaultValue: T) {}
+  static string(defaultValue: string): Flag<string> {
+    return new StringFlag(defaultValue)
+  }
+
+  static number(defaultValue: number): Flag<number> {
+    return new NumberFlag(defaultValue)
+  }
+
+  protected constructor(public defaultValue: T) {}
+
+  abstract parse(value: any): T
+}
+
+type UnwrapFlag<F> = F extends Flag<infer U> ? U : never
+
+export type FlagValues<T> = {
+  [K in keyof T]: UnwrapFlag<T[K]>
+}
+
+type KeysOfType<T, U> = {
+  [K in keyof T]: T[K] extends Flag<U> ? K : never
+}[keyof T]
+
+class BooleanFlag extends Flag<boolean> {
+  parse(value: any) {
+    if (typeof value === "string") {
+      return ["true", "t", "1"].includes(value.toLowerCase())
+    }
+
+    if (typeof value === "boolean") {
+      return value
+    }
+
+    throw new Error(`could not parse value "${value}" as boolean`)
+  }
+}
+
+class StringFlag extends Flag<string> {
+  parse(value: any) {
+    if (typeof value === "string") {
+      return value
+    }
+    throw new Error(`could not parse value "${value}" as string`)
+  }
+}
+
+class NumberFlag extends Flag<number> {
+  parse(value: any) {
+    if (typeof value === "number") {
+      return value
+    }
+
+    if (typeof value === "string") {
+      const parsed = parseFloat(value)
+      if (!isNaN(parsed)) {
+        return parsed
+      }
+    }
+
+    throw new Error(`could not parse value "${value}" as number`)
+  }
+}
+
+export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
+  constructor(private readonly flagSchema: T) {}
+
+  defaults(): FlagValues<T> {
+    return Object.keys(this.flagSchema).reduce((acc, key) => {
+      const typedKey = key as keyof T
+      acc[typedKey] = this.flagSchema[key].defaultValue
+      return acc
+    }, {} as FlagValues<T>)
+  }
+
+  isFlagName(name: string | number | symbol): name is keyof T {
+    return this.flagSchema[name as keyof T] !== undefined
+  }
+
+  async get<K extends keyof T>(
+    key: K,
+    ctx?: UserCtx
+  ): Promise<FlagValues<T>[K]> {
+    const flags = await this.fetch(ctx)
+    return flags[key]
+  }
+
+  async isEnabled<K extends KeysOfType<T, boolean>>(
+    key: K,
+    ctx?: UserCtx
+  ): Promise<boolean> {
+    const flags = await this.fetch(ctx)
+    return flags[key]
+  }
+
+  async fetch(ctx?: UserCtx): Promise<FlagValues<T>> {
+    return await tracer.trace("features.fetch", async span => {
+      const tags: Record<string, any> = {}
+      const flagValues = this.defaults()
+      const currentTenantId = context.getTenantId()
+      const specificallySetFalse = new Set<string>()
+
+      const split = (env.TENANT_FEATURE_FLAGS || "")
+        .split(",")
+        .map(x => x.split(":"))
+      for (const [tenantId, ...features] of split) {
+        if (!tenantId || (tenantId !== "*" && tenantId !== currentTenantId)) {
+          continue
+        }
+
+        tags[`readFromEnvironmentVars`] = true
+
+        for (let feature of features) {
+          let value = true
+          if (feature.startsWith("!")) {
+            feature = feature.slice(1)
+            value = false
+            specificallySetFalse.add(feature)
+          }
+
+          if (!this.isFlagName(feature)) {
+            throw new Error(`Feature: ${feature} is not an allowed option`)
+          }
+
+          if (typeof flagValues[feature] !== "boolean") {
+            throw new Error(`Feature: ${feature} is not a boolean`)
+          }
+
+          // @ts-expect-error - TS does not like you writing into a generic type,
+          // but we know that it's okay in this case because it's just an object.
+          flagValues[feature] = value
+          tags[`flags.${feature}.source`] = "environment"
+        }
+      }
+
+      const license = ctx?.user?.license
+      if (license) {
+        tags[`readFromLicense`] = true
+
+        for (const feature of license.features) {
+          if (!this.isFlagName(feature)) {
+            continue
+          }
+
+          if (
+            flagValues[feature] === true ||
+            specificallySetFalse.has(feature)
+          ) {
+            // If the flag is already set to true through environment variables,
+            // we don't want to override it back to false here.
+            continue
+          }
+
+          // @ts-expect-error - TS does not like you writing into a generic type,
+          // but we know that it's okay in this case because it's just an object.
+          flagValues[feature] = true
+          tags[`flags.${feature}.source`] = "license"
+        }
+      }
+
+      const identity = context.getIdentity()
+      tags[`identity.type`] = identity?.type
+      tags[`identity.tenantId`] = identity?.tenantId
+      tags[`identity._id`] = identity?._id
+
+      // Until we're confident this performs well, we're only enabling it in QA
+      // and test environments.
+      const usePosthog = env.isTest() || env.isQA()
+      if (usePosthog && posthog && identity?.type === IdentityType.USER) {
+        tags[`readFromPostHog`] = true
+
+        const personProperties: Record<string, string> = {}
+        if (identity.tenantId) {
+          personProperties.tenantId = identity.tenantId
+        }
+
+        const posthogFlags = await posthog.getAllFlagsAndPayloads(
+          identity._id,
+          {
+            personProperties,
+          }
+        )
+        console.log("posthog flags", JSON.stringify(posthogFlags))
+
+        for (const [name, value] of Object.entries(posthogFlags.featureFlags)) {
+          if (!this.isFlagName(name)) {
+            // We don't want an unexpected PostHog flag to break the app, so we
+            // just log it and continue.
+            console.warn(`Unexpected posthog flag "${name}": ${value}`)
+            continue
+          }
+
+          if (flagValues[name] === true || specificallySetFalse.has(name)) {
+            // If the flag is already set to true through environment variables,
+            // we don't want to override it back to false here.
+            continue
+          }
+
+          const payload = posthogFlags.featureFlagPayloads?.[name]
+          const flag = this.flagSchema[name]
+          try {
+            // @ts-expect-error - TS does not like you writing into a generic
+            // type, but we know that it's okay in this case because it's just
+            // an object.
+            flagValues[name] = flag.parse(payload || value)
+            tags[`flags.${name}.source`] = "posthog"
+          } catch (err) {
+            // We don't want an invalid PostHog flag to break the app, so we just
+            // log it and continue.
+            console.warn(`Error parsing posthog flag "${name}": ${value}`, err)
+          }
+        }
+      }
+
+      for (const [key, value] of Object.entries(flagValues)) {
+        tags[`flags.${key}.value`] = value
+      }
+      span?.addTags(tags)
+
+      return flagValues
+    })
+  }
 }
 
 // This is the primary source of truth for feature flags. If you want to add a
 // new flag, add it here and use the `fetch` and `get` functions to access it.
 // All of the machinery in this file is to make sure that flags have their
 // default values set correctly and their types flow through the system.
-const FLAGS = {
-  LICENSING: Flag.withDefault(false),
-  GOOGLE_SHEETS: Flag.withDefault(false),
-  USER_GROUPS: Flag.withDefault(false),
-  ONBOARDING_TOUR: Flag.withDefault(false),
-}
-
-const DEFAULTS = Object.keys(FLAGS).reduce((acc, key) => {
-  const typedKey = key as keyof typeof FLAGS
-  // @ts-ignore
-  acc[typedKey] = FLAGS[typedKey].defaultValue
-  return acc
-}, {} as Flags)
-
-type UnwrapFlag<F> = F extends Flag<infer U> ? U : never
-export type Flags = {
-  [K in keyof typeof FLAGS]: UnwrapFlag<(typeof FLAGS)[K]>
-}
-
-// Exported for use in tests, should not be used outside of this file.
-export function defaultFlags(): Flags {
-  return cloneDeep(DEFAULTS)
-}
-
-function isFlagName(name: string): name is keyof Flags {
-  return FLAGS[name as keyof typeof FLAGS] !== undefined
-}
-
-/**
- * Reads the TENANT_FEATURE_FLAGS environment variable and returns a Flags object
- * populated with the flags for the current tenant, filling in the default values
- * if the flag is not set.
- *
- * Check the tests for examples of how TENANT_FEATURE_FLAGS should be formatted.
- *
- * In future we plan to add more ways of setting feature flags, e.g. PostHog, and
- * they will be accessed through this function as well.
- */
-export async function fetch(): Promise<Flags> {
-  const currentTenantId = context.getTenantId()
-  const flags = defaultFlags()
-
-  const split = (env.TENANT_FEATURE_FLAGS || "")
-    .split(",")
-    .map(x => x.split(":"))
-  for (const [tenantId, ...features] of split) {
-    if (!tenantId || (tenantId !== "*" && tenantId !== currentTenantId)) {
-      continue
-    }
-
-    for (let feature of features) {
-      let value = true
-      if (feature.startsWith("!")) {
-        feature = feature.slice(1)
-        value = false
-      }
-
-      if (!isFlagName(feature)) {
-        throw new Error(`Feature: ${feature} is not an allowed option`)
-      }
-
-      if (typeof flags[feature] !== "boolean") {
-        throw new Error(`Feature: ${feature} is not a boolean`)
-      }
-
-      // @ts-ignore
-      flags[feature] = value
-    }
-  }
-
-  return flags
-}
-
-// Gets a single feature flag value. This is a convenience function for
-// `fetch().then(flags => flags[name])`.
-export async function get<K extends keyof Flags>(name: K): Promise<Flags[K]> {
-  const flags = await fetch()
-  return flags[name]
-}
-
-type BooleanFlags = {
-  [K in keyof typeof FLAGS]: (typeof FLAGS)[K] extends Flag<boolean> ? K : never
-}[keyof typeof FLAGS]
-
-// Convenience function for boolean flag values. This makes callsites more
-// readable for boolean flags.
-export async function isEnabled<K extends BooleanFlags>(
-  name: K
-): Promise<boolean> {
-  const flags = await fetch()
-  return flags[name]
-}
+export const flags = new FlagSet({
+  LICENSING: Flag.boolean(false),
+  GOOGLE_SHEETS: Flag.boolean(false),
+  USER_GROUPS: Flag.boolean(false),
+  ONBOARDING_TOUR: Flag.boolean(false),
+})
@@ -1,86 +1,230 @@
-import { defaultFlags, fetch, get, Flags } from "../"
+import { IdentityContext, IdentityType, UserCtx } from "@budibase/types"
+import { Flag, FlagSet, FlagValues, init } from "../"
 import { context } from "../.."
-import env from "../../environment"
+import environment, { withEnv } from "../../environment"
+import nodeFetch from "node-fetch"
+import nock from "nock"
 
-async function withFlags<T>(flags: string, f: () => T): Promise<T> {
-  const oldFlags = env.TENANT_FEATURE_FLAGS
-  env._set("TENANT_FEATURE_FLAGS", flags)
-  try {
-    return await f()
-  } finally {
-    env._set("TENANT_FEATURE_FLAGS", oldFlags)
-  }
+const schema = {
+  TEST_BOOLEAN: Flag.boolean(false),
+  TEST_STRING: Flag.string("default value"),
+  TEST_NUMBER: Flag.number(0),
+}
+const flags = new FlagSet(schema)
+
+interface TestCase {
+  it: string
+  identity?: Partial<IdentityContext>
+  environmentFlags?: string
+  posthogFlags?: PostHogFlags
+  licenseFlags?: Array<string>
+  expected?: Partial<FlagValues<typeof schema>>
+  errorMessage?: string | RegExp
+}
+
+interface PostHogFlags {
+  featureFlags?: Record<string, boolean>
+  featureFlagPayloads?: Record<string, string>
+}
+
+function mockPosthogFlags(flags: PostHogFlags) {
+  nock("https://us.i.posthog.com")
+    .post("/decide/?v=3", body => {
+      return body.token === "test" && body.distinct_id === "us_1234"
+    })
+    .reply(200, flags)
+    .persist()
 }
 
 describe("feature flags", () => {
-  interface TestCase {
-    tenant: string
-    flags: string
-    expected: Partial<Flags>
-  }
+  beforeEach(() => {
+    nock.cleanAll()
+  })
 
   it.each<TestCase>([
     {
-      tenant: "tenant1",
-      flags: "tenant1:ONBOARDING_TOUR",
-      expected: { ONBOARDING_TOUR: true },
+      it: "should find a simple boolean flag in the environment",
+      environmentFlags: "default:TEST_BOOLEAN",
+      expected: { TEST_BOOLEAN: true },
     },
     {
-      tenant: "tenant1",
-      flags: "tenant1:!ONBOARDING_TOUR",
-      expected: { ONBOARDING_TOUR: false },
+      it: "should find a simple negative boolean flag in the environment",
+      environmentFlags: "default:!TEST_BOOLEAN",
+      expected: { TEST_BOOLEAN: false },
     },
     {
-      tenant: "tenant1",
-      flags: "*:ONBOARDING_TOUR",
-      expected: { ONBOARDING_TOUR: true },
+      it: "should match stars in the environment",
+      environmentFlags: "*:TEST_BOOLEAN",
+      expected: { TEST_BOOLEAN: true },
     },
     {
-      tenant: "tenant1",
-      flags: "tenant2:ONBOARDING_TOUR",
-      expected: { ONBOARDING_TOUR: false },
+      it: "should not match a different tenant's flags",
+      environmentFlags: "otherTenant:TEST_BOOLEAN",
+      expected: { TEST_BOOLEAN: false },
     },
     {
-      tenant: "tenant1",
-      flags: "",
-      expected: defaultFlags(),
+      it: "should return the defaults when no flags are set",
+      expected: flags.defaults(),
+    },
+    {
+      it: "should fail when an environment flag is not recognised",
+      environmentFlags: "default:TEST_BOOLEAN,default:FOO",
+      errorMessage: "Feature: FOO is not an allowed option",
+    },
+    {
+      it: "should be able to read boolean flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_BOOLEAN: true },
+      },
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should be able to read string flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_STRING: true },
+        featureFlagPayloads: { TEST_STRING: "test" },
+      },
+      expected: { TEST_STRING: "test" },
+    },
+    {
+      it: "should be able to read numeric flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_NUMBER: true },
+        featureFlagPayloads: { TEST_NUMBER: "123" },
+      },
+      expected: { TEST_NUMBER: 123 },
+    },
+    {
+      it: "should not be able to override a negative environment flag from PostHog",
+      environmentFlags: "default:!TEST_BOOLEAN",
+      posthogFlags: {
+        featureFlags: { TEST_BOOLEAN: true },
+      },
+      expected: { TEST_BOOLEAN: false },
+    },
+    {
+      it: "should not be able to override a positive environment flag from PostHog",
+      environmentFlags: "default:TEST_BOOLEAN",
+      posthogFlags: {
+        featureFlags: {
+          TEST_BOOLEAN: false,
+        },
+      },
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should be able to set boolean flags through the license",
+      licenseFlags: ["TEST_BOOLEAN"],
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should not be able to override a negative environment flag from license",
+      environmentFlags: "default:!TEST_BOOLEAN",
+      licenseFlags: ["TEST_BOOLEAN"],
+      expected: { TEST_BOOLEAN: false },
+    },
+    {
+      it: "should not error on unrecognised PostHog flag",
+      posthogFlags: {
+        featureFlags: { UNDEFINED: true },
+      },
+      expected: flags.defaults(),
+    },
+    {
+      it: "should not error on unrecognised license flag",
+      licenseFlags: ["UNDEFINED"],
+      expected: flags.defaults(),
     },
   ])(
-    'should find flags $expected for $tenant with string "$flags"',
-    ({ tenant, flags, expected }) =>
-      context.doInTenant(tenant, () =>
-        withFlags(flags, async () => {
-          const flags = await fetch()
-          expect(flags).toMatchObject(expected)
-
-          for (const [key, expectedValue] of Object.entries(expected)) {
-            const value = await get(key as keyof Flags)
-            expect(value).toBe(expectedValue)
-          }
-        })
-      )
-  )
-
-  interface FailedTestCase {
-    tenant: string
-    flags: string
-    expected: string | RegExp
-  }
-
-  it.each<FailedTestCase>([
-    {
-      tenant: "tenant1",
-      flags: "tenant1:ONBOARDING_TOUR,tenant1:FOO",
-      expected: "Feature: FOO is not an allowed option",
-    },
-  ])(
-    "should fail with message \"$expected\" for $tenant with string '$flags'",
-    async ({ tenant, flags, expected }) => {
-      context.doInTenant(tenant, () =>
-        withFlags(flags, async () => {
-          await expect(fetch()).rejects.toThrow(expected)
-        })
-      )
-    }
-  )
+    "$it",
+    async ({
+      identity,
+      environmentFlags,
+      posthogFlags,
+      licenseFlags,
+      expected,
+      errorMessage,
+    }) => {
+      const env: Partial<typeof environment> = {
+        TENANT_FEATURE_FLAGS: environmentFlags,
+      }
+
+      if (posthogFlags) {
+        mockPosthogFlags(posthogFlags)
+        env.POSTHOG_TOKEN = "test"
+        env.POSTHOG_API_HOST = "https://us.i.posthog.com"
+        env.POSTHOG_PERSONAL_TOKEN = "test"
+      }
+
+      const ctx = { user: { license: { features: licenseFlags || [] } } }
+
+      await withEnv(env, async () => {
+        // We need to pass in node-fetch here otherwise nock won't get used
+        // because posthog-node uses axios under the hood.
+        init({
+          fetch: (url, opts) => {
+            return nodeFetch(url, opts)
+          },
+        })
+
+        const fullIdentity: IdentityContext = {
+          _id: "us_1234",
+          tenantId: "default",
+          type: IdentityType.USER,
+          email: "test@example.com",
+          firstName: "Test",
+          lastName: "User",
+          ...identity,
+        }
+
+        await context.doInIdentityContext(fullIdentity, async () => {
+          if (errorMessage) {
+            await expect(flags.fetch(ctx as UserCtx)).rejects.toThrow(
+              errorMessage
+            )
+          } else if (expected) {
+            const values = await flags.fetch(ctx as UserCtx)
+            expect(values).toMatchObject(expected)
+
+            for (const [key, expectedValue] of Object.entries(expected)) {
+              const value = await flags.get(
+                key as keyof typeof schema,
+                ctx as UserCtx
+              )
+              expect(value).toBe(expectedValue)
+            }
+          } else {
+            throw new Error("No expected value")
+          }
+        })
+      })
+    }
+  )
+
+  it("should not error if PostHog is down", async () => {
+    const identity: IdentityContext = {
+      _id: "us_1234",
+      tenantId: "default",
+      type: IdentityType.USER,
+      email: "test@example.com",
+      firstName: "Test",
+      lastName: "User",
+    }
+
+    nock("https://us.i.posthog.com")
+      .post("/decide/?v=3", body => {
+        return body.token === "test" && body.distinct_id === "us_1234"
+      })
+      .reply(503)
+      .persist()
+
+    await withEnv(
+      { POSTHOG_TOKEN: "test", POSTHOG_API_HOST: "https://us.i.posthog.com" },
+      async () => {
+        await context.doInIdentityContext(identity, async () => {
+          await flags.fetch()
+        })
+      }
+    )
+  })
 })
@@ -27,7 +27,7 @@ export * as locks from "./redis/redlockImpl"
 export * as utils from "./utils"
 export * as errors from "./errors"
 export * as timers from "./timers"
-export { default as env } from "./environment"
+export { default as env, withEnv, setEnv } from "./environment"
 export * as blacklist from "./blacklist"
 export * as docUpdates from "./docUpdates"
 export * from "./utils/Duration"
@@ -1,4 +1,4 @@
-import { generator, mocks, structures } from "../../../../../tests"
+import { generator, structures } from "../../../../../tests"
 import {
   JwtClaims,
   OIDCInnerConfig,
@@ -7,6 +7,7 @@ import {
 } from "@budibase/types"
 import * as _sso from "../sso"
 import * as oidc from "../oidc"
+import nock from "nock"
 
 jest.mock("@techpass/passport-openidconnect")
 const mockStrategy = require("@techpass/passport-openidconnect").Strategy
@@ -22,16 +23,9 @@ describe("oidc", () => {
   const oidcConfig: OIDCInnerConfig = structures.sso.oidcConfig()
   const wellKnownConfig = structures.sso.oidcWellKnownConfig()
 
-  function mockRetrieveWellKnownConfig() {
-    // mock the request to retrieve the oidc configuration
-    mocks.fetch.mockReturnValue({
-      ok: true,
-      json: () => wellKnownConfig,
-    })
-  }
-
   beforeEach(() => {
-    mockRetrieveWellKnownConfig()
+    nock.cleanAll()
+    nock(oidcConfig.configUrl).get("/").reply(200, wellKnownConfig)
   })
 
   describe("strategyFactory", () => {
@@ -42,8 +36,6 @@ describe("oidc", () => {
       )
       await oidc.strategyFactory(strategyConfiguration, mockSaveUser)
 
-      expect(mocks.fetch).toHaveBeenCalledWith(oidcConfig.configUrl)
-
       const expectedOptions = {
         issuer: wellKnownConfig.issuer,
         authorizationURL: wellKnownConfig.authorization_endpoint,
@@ -1,10 +1,11 @@
-import { structures, mocks } from "../../../../../tests"
+import { structures } from "../../../../../tests"
 import { testEnv } from "../../../../../tests/extra"
 import { SSOAuthDetails, User } from "@budibase/types"
 
 import { HTTPError } from "../../../../errors"
 import * as sso from "../sso"
 import * as context from "../../../../context"
+import nock from "nock"
 
 const mockDone = jest.fn()
 const mockSaveUser = jest.fn()
@@ -23,6 +24,7 @@ describe("sso", () => {
   beforeEach(() => {
     jest.clearAllMocks()
     testEnv.singleTenant()
+    nock.cleanAll()
   })
 
   describe("validation", () => {
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
function mockGetProfilePicture() {
|
|
||||||
mocks.fetch.mockReturnValueOnce(
|
|
||||||
Promise.resolve({
|
|
||||||
status: 200,
|
|
||||||
headers: { get: () => "image/" },
|
|
||||||
})
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
describe("when the user doesn't exist", () => {
|
describe("when the user doesn't exist", () => {
|
||||||
let user: User
|
let user: User
|
||||||
let details: SSOAuthDetails
|
let details: SSOAuthDetails
|
||||||
|
@@ -68,7 +61,10 @@ describe("sso", () => {
       users.getById.mockImplementationOnce(() => {
         throw new HTTPError("", 404)
       })
-      mockGetProfilePicture()
+
+      nock("http://example.com").get("/").reply(200, undefined, {
+        "Content-Type": "image/png",
+      })
 
       user = structures.users.user()
       delete user._rev
@@ -131,7 +127,9 @@ describe("sso", () => {
       existingUser = structures.users.user()
       existingUser._id = structures.uuid()
       details = structures.sso.authDetails(existingUser)
-      mockGetProfilePicture()
+      nock("http://example.com").get("/").reply(200, undefined, {
+        "Content-Type": "image/png",
+      })
     })
 
     describe("exists by email", () => {
@@ -1,12 +1,129 @@
 import { validate } from "../utils"
 import fetch from "node-fetch"
 import { PluginType } from "@budibase/types"
+import nock from "nock"
 
-const repoUrl =
-  "https://raw.githubusercontent.com/Budibase/budibase-skeleton/master"
-const automationLink = `${repoUrl}/automation/schema.json.hbs`
-const componentLink = `${repoUrl}/component/schema.json.hbs`
-const datasourceLink = `${repoUrl}/datasource/schema.json.hbs`
+const automationLink = `http://example.com/automation/schema.json`
+const componentLink = `http://example.com/component/schema.json`
+const datasourceLink = `http://example.com/datasource/schema.json`
+
+function mockDatasourceSchema() {
+  nock("http://example.com")
+    .get("/datasource/schema.json")
+    .reply(200, {
+      type: "datasource",
+      metadata: {},
+      schema: {
+        docs: "https://docs.budibase.com",
+        friendlyName: "Basic HTTP",
+        type: "API",
+        description: "Performs a basic HTTP calls to a URL",
+        datasource: {
+          url: {
+            type: "string",
+            required: true,
+          },
+          cookie: {
+            type: "string",
+            required: false,
+          },
+        },
+        query: {
+          create: {
+            type: "json",
+          },
+          read: {
+            type: "fields",
+            fields: {
+              queryString: {
+                display: "Query string",
+                type: "string",
+                required: false,
+              },
+            },
+          },
+          update: {
+            type: "json",
+          },
+          delete: {
+            type: "fields",
+            fields: {
+              id: {
+                type: "string",
+                required: true,
+              },
+            },
+          },
+        },
+      },
+    })
+}
+
+function mockAutomationSchema() {
+  nock("http://example.com")
+    .get("/automation/schema.json")
+    .reply(200, {
+      type: "automation",
+      metadata: {},
+      schema: {
+        name: "{{ name }}",
+        tagline: "{{ description }}",
+        icon: "Actions",
+        description: "{{ description }}",
+        type: "action",
+        stepId: "{{ name }}",
+        inputs: {
+          text: "",
+        },
+        schema: {
+          inputs: {
+            properties: {
+              text: {
+                type: "string",
+                title: "Log",
+              },
+            },
+            required: ["text"],
+          },
+          outputs: {
+            properties: {
+              success: {
+                type: "boolean",
+                description: "Whether the action was successful",
+              },
+              message: {
+                type: "string",
+                description: "What was output",
+              },
+            },
+            required: ["success", "message"],
+          },
+        },
+      },
+    })
+}
+
+function mockComponentSchema() {
+  nock("http://example.com")
+    .get("/component/schema.json")
+    .reply(200, {
+      type: "component",
+      metadata: {},
+      schema: {
+        name: "{{ name }}",
+        friendlyName: "{{ name }}",
+        description: "{{ description }}",
+        icon: "Text",
+        settings: [
+          {
+            type: "text",
+            key: "text",
+            label: "Text",
+          },
+        ],
+      },
+    })
+}
 
 async function getSchema(link: string) {
   const response = await fetch(link)
@@ -31,53 +148,62 @@ async function runTest(opts: { link?: string; schema?: any }) {
   return error
 }
 
-describe("it should be able to validate an automation schema", () => {
-  it("should return automation skeleton schema is valid", async () => {
-    const error = await runTest({ link: automationLink })
-    expect(error).toBeUndefined()
-  })
-
-  it("should fail given invalid automation schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.AUTOMATION,
-        schema: {},
-      },
-    })
-    expect(error).toBeDefined()
-  })
-})
-
-describe("it should be able to validate a component schema", () => {
-  it("should return component skeleton schema is valid", async () => {
-    const error = await runTest({ link: componentLink })
-    expect(error).toBeUndefined()
-  })
-
-  it("should fail given invalid component schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.COMPONENT,
-        schema: {},
-      },
-    })
-    expect(error).toBeDefined()
-  })
-})
-
-describe("it should be able to validate a datasource schema", () => {
-  it("should return datasource skeleton schema is valid", async () => {
-    const error = await runTest({ link: datasourceLink })
-    expect(error).toBeUndefined()
-  })
-
-  it("should fail given invalid datasource schema", async () => {
-    const error = await runTest({
-      schema: {
-        type: PluginType.DATASOURCE,
-        schema: {},
-      },
-    })
-    expect(error).toBeDefined()
+describe("plugin validation", () => {
+  beforeEach(() => {
+    nock.cleanAll()
+    mockAutomationSchema()
+    mockComponentSchema()
+    mockDatasourceSchema()
+  })
+
+  describe("it should be able to validate an automation schema", () => {
+    it("should return automation skeleton schema is valid", async () => {
+      const error = await runTest({ link: automationLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid automation schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.AUTOMATION,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
+    })
+  })
+
+  describe("it should be able to validate a component schema", () => {
+    it("should return component skeleton schema is valid", async () => {
+      const error = await runTest({ link: componentLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid component schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.COMPONENT,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
+    })
+  })
+
+  describe("it should be able to validate a datasource schema", () => {
+    it("should return datasource skeleton schema is valid", async () => {
+      const error = await runTest({ link: datasourceLink })
+      expect(error).toBeUndefined()
+    })
+
+    it("should fail given invalid datasource schema", async () => {
+      const error = await runTest({
+        schema: {
+          type: PluginType.DATASOURCE,
+          schema: {},
+        },
+      })
+      expect(error).toBeDefined()
+    })
   })
 })
@@ -111,6 +111,10 @@ function init(selectDb = DEFAULT_SELECT_DB) {
   CLIENTS[selectDb] = client
 }
 
+export function closeAll() {
+  Object.values(CLIENTS).forEach(client => client.disconnect())
+}
+
 function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
   return new Promise(resolve => {
     if (pickClient(selectDb) == null) {
@@ -1,6 +1,6 @@
 import { GenericContainer, StartedTestContainer } from "testcontainers"
 import { generator, structures } from "../../../tests"
-import RedisWrapper from "../redis"
+import RedisWrapper, { closeAll } from "../redis"
 import { env } from "../.."
 import { randomUUID } from "crypto"
 
@@ -23,7 +23,10 @@ describe("redis", () => {
     env._set("REDIS_PASSWORD", 0)
   })
 
-  afterAll(() => container?.stop())
+  afterAll(() => {
+    container?.stop()
+    closeAll()
+  })
 
   beforeEach(async () => {
     redis = new RedisWrapper(structures.db.id())
@@ -28,16 +28,25 @@ function generateSchema(
   oldTable: null | Table = null,
   renamed?: RenameColumn
 ) {
-  let primaryKey = table && table.primary ? table.primary[0] : null
+  let primaryKeys = table && table.primary ? table.primary : []
   const columns = Object.values(table.schema)
   // all columns in a junction table will be meta
   let metaCols = columns.filter(col => (col as NumberFieldMetadata).meta)
   let isJunction = metaCols.length === columns.length
+  let columnTypeSet: string[] = []
+
   // can't change primary once it's set for now
-  if (primaryKey && !oldTable && !isJunction) {
-    schema.increments(primaryKey).primary()
-  } else if (!oldTable && isJunction) {
-    schema.primary(metaCols.map(col => col.name))
+  if (!oldTable) {
+    // junction tables are special - we have an expected format
+    if (isJunction) {
+      schema.primary(metaCols.map(col => col.name))
+    } else if (primaryKeys.length === 1) {
+      schema.increments(primaryKeys[0]).primary()
+      // note that we've set its type
+      columnTypeSet.push(primaryKeys[0])
+    } else {
+      schema.primary(primaryKeys)
+    }
   }
 
   // check if any columns need added
@@ -49,7 +58,7 @@ function generateSchema(
     const oldColumn = oldTable ? oldTable.schema[key] : null
     if (
       (oldColumn && oldColumn.type) ||
-      (primaryKey === key && !isJunction) ||
+      columnTypeSet.includes(key) ||
       renamed?.updated === key
     ) {
       continue
@@ -61,7 +70,12 @@ function generateSchema(
       case FieldType.LONGFORM:
       case FieldType.BARCODEQR:
       case FieldType.BB_REFERENCE_SINGLE:
-        schema.text(key)
+        // primary key strings have to have a length in some DBs
+        if (primaryKeys.includes(key)) {
+          schema.string(key, 255)
+        } else {
+          schema.text(key)
+        }
         break
       case FieldType.NUMBER:
         // if meta is specified then this is a junction table entry
@@ -1,17 +0,0 @@
-const mockFetch = jest.fn((url: any, opts: any) => {
-  const fetch = jest.requireActual("node-fetch")
-  const env = jest.requireActual("../../../../src/environment").default
-  if (url.includes(env.COUCH_DB_URL) || url.includes("raw.github")) {
-    return fetch(url, opts)
-  }
-  return undefined
-})
-
-const enable = () => {
-  jest.mock("node-fetch", () => mockFetch)
-}
-
-export default {
-  ...mockFetch,
-  enable,
-}
@@ -5,7 +5,5 @@ export const accounts = jest.mocked(_accounts)
 
 export * as date from "./date"
 export * as licenses from "./licenses"
-export { default as fetch } from "./fetch"
 export * from "./alerts"
 import "./events"
-import "./posthog"
@@ -1,7 +0,0 @@
-jest.mock("posthog-node", () => {
-  return jest.fn().mockImplementation(() => {
-    return {
-      capture: jest.fn(),
-    }
-  })
-})
@@ -2,14 +2,21 @@ import "./core/logging"
 import env from "../src/environment"
 import { cleanup } from "../src/timers"
 import { mocks, testContainerUtils } from "./core/utilities"
+import nock from "nock"
 
-// must explicitly enable fetch mock
-mocks.fetch.enable()
-
 // mock all dates to 2020-01-01T00:00:00.000Z
 // use tk.reset() to use real dates in individual tests
 import tk from "timekeeper"
 
+nock.disableNetConnect()
+nock.enableNetConnect(host => {
+  return (
+    host.includes("localhost") ||
+    host.includes("127.0.0.1") ||
+    host.includes("::1")
+  )
+})
+
 tk.freeze(mocks.date.MOCK_DATE)
 
 if (!process.env.DEBUG) {
|
||||||
helpers,
|
helpers,
|
||||||
PROTECTED_INTERNAL_COLUMNS,
|
PROTECTED_INTERNAL_COLUMNS,
|
||||||
PROTECTED_EXTERNAL_COLUMNS,
|
PROTECTED_EXTERNAL_COLUMNS,
|
||||||
|
canBeDisplayColumn,
|
||||||
|
canHaveDefaultColumn,
|
||||||
} from "@budibase/shared-core"
|
} from "@budibase/shared-core"
|
||||||
import { createEventDispatcher, getContext, onMount } from "svelte"
|
import { createEventDispatcher, getContext, onMount } from "svelte"
|
||||||
import { cloneDeep } from "lodash/fp"
|
import { cloneDeep } from "lodash/fp"
|
||||||
|
@@ -44,6 +46,7 @@
   import { RowUtils } from "@budibase/frontend-core"
   import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
   import OptionsEditor from "./OptionsEditor.svelte"
+  import { isEnabled, TENANT_FEATURE_FLAGS } from "helpers/featureFlags"
 
   const AUTO_TYPE = FieldType.AUTO
   const FORMULA_TYPE = FieldType.FORMULA
@@ -133,7 +136,9 @@
   }
   $: initialiseField(field, savingColumn)
   $: checkConstraints(editableColumn)
-  $: required = !!editableColumn?.constraints?.presence || primaryDisplay
+  $: required = hasDefault
+    ? false
+    : !!editableColumn?.constraints?.presence || primaryDisplay
   $: uneditable =
     $tables.selected?._id === TableNames.USERS &&
     UNEDITABLE_USER_FIELDS.includes(editableColumn.name)
@ -161,15 +166,17 @@
|
||||||
: availableAutoColumns
|
: availableAutoColumns
|
||||||
// used to select what different options can be displayed for column type
|
// used to select what different options can be displayed for column type
|
||||||
$: canBeDisplay =
|
$: canBeDisplay =
|
||||||
editableColumn?.type !== LINK_TYPE &&
|
canBeDisplayColumn(editableColumn.type) && !editableColumn.autocolumn
|
||||||
editableColumn?.type !== AUTO_TYPE &&
|
$: canHaveDefault =
|
||||||
editableColumn?.type !== JSON_TYPE &&
|
isEnabled(TENANT_FEATURE_FLAGS.DEFAULT_VALUES) &&
|
||||||
!editableColumn.autocolumn
|
canHaveDefaultColumn(editableColumn.type)
|
||||||
$: canBeRequired =
|
$: canBeRequired =
|
||||||
editableColumn?.type !== LINK_TYPE &&
|
editableColumn?.type !== LINK_TYPE &&
|
||||||
!uneditable &&
|
!uneditable &&
|
||||||
editableColumn?.type !== AUTO_TYPE &&
|
editableColumn?.type !== AUTO_TYPE &&
|
||||||
!editableColumn.autocolumn
|
!editableColumn.autocolumn
|
||||||
|
$: hasDefault =
|
||||||
|
editableColumn?.default != null && editableColumn?.default !== ""
|
||||||
$: externalTable = table.sourceType === DB_TYPE_EXTERNAL
|
$: externalTable = table.sourceType === DB_TYPE_EXTERNAL
|
||||||
// in the case of internal tables the sourceId will just be undefined
|
// in the case of internal tables the sourceId will just be undefined
|
||||||
$: tableOptions = $tables.list.filter(
|
$: tableOptions = $tables.list.filter(
|
||||||
|
@@ -349,12 +356,15 @@
     }
   }

-  function onChangeRequired(e) {
-    const req = e.detail
+  function setRequired(req) {
     editableColumn.constraints.presence = req ? { allowEmpty: false } : false
     required = req
   }

+  function onChangeRequired(e) {
+    setRequired(e.detail)
+  }
+
   function openJsonSchemaEditor() {
     jsonSchemaModal.show()
   }

@@ -748,13 +758,37 @@
         <Toggle
           value={required}
           on:change={onChangeRequired}
-          disabled={primaryDisplay}
+          disabled={primaryDisplay || hasDefault}
           thin
           text="Required"
         />
       {/if}
     </div>
   {/if}

+  {#if canHaveDefault}
+    <div>
+      <ModalBindableInput
+        panel={ServerBindingPanel}
+        title="Default"
+        label="Default"
+        value={editableColumn.default}
+        on:change={e => {
+          editableColumn = {
+            ...editableColumn,
+            default: e.detail,
+          }
+
+          if (e.detail) {
+            setRequired(false)
+          }
+        }}
+        bindings={getBindings({ table })}
+        allowJS
+        context={rowGoldenSample}
+      />
+    </div>
+  {/if}
 </Layout>

 <div class="action-buttons">
@@ -6,6 +6,7 @@ export const TENANT_FEATURE_FLAGS = {
   USER_GROUPS: "USER_GROUPS",
   ONBOARDING_TOUR: "ONBOARDING_TOUR",
   GOOGLE_SHEETS: "GOOGLE_SHEETS",
+  DEFAULT_VALUES: "DEFAULT_VALUES",
 }

 export const isEnabled = featureFlag => {
@@ -16,7 +16,7 @@

   export let showOnboardingTypeModal

-  const password = Math.random().toString(36).substring(2, 22)
+  const password = generatePassword(12)
   let disabled
   let userGroups = []

@@ -44,7 +44,7 @@
     {
       email: "",
       role: "appUser",
-      password: Math.random().toString(36).substring(2, 22),
+      password: generatePassword(12),
       forceResetPassword: true,
       error: null,
     },
@@ -69,6 +69,14 @@
     return userData[index].error == null
   }

+  function generatePassword(length) {
+    const array = new Uint8Array(length)
+    window.crypto.getRandomValues(array)
+    return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
+      .join("")
+      .slice(0, length)
+  }
+
   const onConfirm = () => {
     let valid = true
     userData.forEach((input, index) => {
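Note: the onboarding flows now derive passwords from the Web Crypto API instead of Math.random(), which is not cryptographically secure. A standalone sketch of the helper added above (the sample output is illustrative only):

    function generatePassword(length) {
      // One random byte per requested character; each byte renders as two
      // zero-padded base-36 characters, then the string is cut to length.
      const array = new Uint8Array(length)
      window.crypto.getRandomValues(array)
      return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
        .join("")
        .slice(0, length)
    }

    const password = generatePassword(12) // e.g. "5k0q3a1z0r2m"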
@@ -216,7 +216,7 @@
       const newUser = {
         email: email,
         role: usersRole,
-        password: Math.random().toString(36).substring(2, 22),
+        password: generatePassword(12),
         forceResetPassword: true,
       }

@@ -288,6 +288,14 @@
     }
   }

+  const generatePassword = length => {
+    const array = new Uint8Array(length)
+    window.crypto.getRandomValues(array)
+    return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
+      .join("")
+      .slice(0, length)
+  }
+
   onMount(async () => {
     try {
       await groups.actions.init()
@@ -1 +1 @@
-Subproject commit 62ef0e2d6e83522b6732fb3c61338de303f06ff0
+Subproject commit 94747fd5bb67c218244bb60b9540f3a6f1c3f6f1
@@ -144,6 +144,7 @@
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",
+    "jest-extended": "^4.0.2",
     "jest-openapi": "0.14.2",
     "nock": "13.5.4",
     "nodemon": "2.0.15",
@@ -195,12 +195,13 @@ export class ExternalRequest<T extends Operation> {
     if (filters) {
       // need to map over the filters and make sure the _id field isn't present
       let prefix = 1
-      for (const operator of Object.values(filters)) {
+      for (const [operatorType, operator] of Object.entries(filters)) {
+        const isArrayOp = sdk.rows.utils.isArrayFilter(operatorType)
         for (const field of Object.keys(operator || {})) {
           if (dbCore.removeKeyNumbering(field) === "_id") {
             if (primary) {
               const parts = breakRowIdField(operator[field])
-              if (primary.length > 1) {
+              if (primary.length > 1 && isArrayOp) {
                 operator[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR] = {
                   id: primary,
                   values: parts[0],
@@ -71,8 +71,7 @@ export function basicProcessing({
 }): Row {
   const thisRow: Row = {}
   // filter the row down to what is actually the row (not joined)
-  for (let field of Object.values(table.schema)) {
-    const fieldName = field.name
+  for (let fieldName of Object.keys(table.schema)) {
     let value = extractFieldValue({
       row,
       tableName: table.name,
@@ -1,11 +1,12 @@
-const setup = require("./utilities")
-const { checkBuilderEndpoint } = require("./utilities/TestFunctions")
+import { withEnv } from "../../../environment"
+import { getRequest, getConfig, afterAll as _afterAll } from "./utilities"
+import { checkBuilderEndpoint } from "./utilities/TestFunctions"

 describe("/api/keys", () => {
-  let request = setup.getRequest()
-  let config = setup.getConfig()
+  let request = getRequest()
+  let config = getConfig()

-  afterAll(setup.afterAll)
+  afterAll(_afterAll)

   beforeAll(async () => {
     await config.init()

@@ -13,7 +14,7 @@ describe("/api/keys", () => {

   describe("fetch", () => {
     it("should allow fetching", async () => {
-      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
+      await withEnv({ SELF_HOSTED: "true" }, async () => {
         const res = await request
           .get(`/api/keys`)
           .set(config.defaultHeaders())

@@ -34,7 +35,7 @@ describe("/api/keys", () => {

   describe("update", () => {
     it("should allow updating a value", async () => {
-      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
+      await withEnv({ SELF_HOSTED: "true" }, async () => {
         const res = await request
           .put(`/api/keys/TEST`)
           .send({
@@ -14,7 +14,12 @@ jest.mock("../../../utilities/redis", () => ({
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import * as setup from "./utilities"
 import { AppStatus } from "../../../db/utils"
-import { events, utils, context } from "@budibase/backend-core"
+import {
+  events,
+  utils,
+  context,
+  withEnv as withCoreEnv,
+} from "@budibase/backend-core"
 import env from "../../../environment"
 import { type App } from "@budibase/types"
 import tk from "timekeeper"

@@ -353,7 +358,7 @@ describe("/applications", () => {
       .delete(`/api/global/roles/${prodAppId}`)
       .reply(200, {})

-    await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
+    await withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
       await config.api.application.delete(app.appId)
     })
   })
@@ -1,3 +1,4 @@
+import { withEnv } from "../../../environment"
 import * as setup from "./utilities"
 import { APIError } from "@budibase/types"

@@ -28,7 +29,7 @@ describe("/api/applications/:appId/sync", () => {
   })

   it("should reject an upload with a malicious file extension", async () => {
-    await config.withEnv({ SELF_HOSTED: undefined }, async () => {
+    await withEnv({ SELF_HOSTED: undefined }, async () => {
       let resp = (await config.api.attachment.process(
         "ohno.exe",
         Buffer.from([0]),

@@ -39,7 +40,7 @@ describe("/api/applications/:appId/sync", () => {
   })

   it("should reject an upload with a malicious uppercase file extension", async () => {
-    await config.withEnv({ SELF_HOSTED: undefined }, async () => {
+    await withEnv({ SELF_HOSTED: undefined }, async () => {
       let resp = (await config.api.attachment.process(
         "OHNO.EXE",
         Buffer.from([0]),
@@ -1,5 +1,5 @@
 import * as setup from "./utilities"
-import { checkBuilderEndpoint } from "./utilities/TestFunctions"
+import { checkBuilderEndpoint, allowUndefined } from "./utilities/TestFunctions"
 import { getCachedVariable } from "../../../threads/utils"
 import { context, events } from "@budibase/backend-core"
 import sdk from "../../../sdk"

@@ -380,21 +380,24 @@ describe("/datasources", () => {
       persisted?.entities &&
       Object.entries(persisted.entities).reduce<Record<string, Table>>(
         (acc, [tableName, table]) => {
-          acc[tableName] = {
+          acc[tableName] = expect.objectContaining({
             ...table,
             primaryDisplay: expect.not.stringMatching(
               new RegExp(`^${table.primaryDisplay || ""}$`)
             ),
             schema: Object.entries(table.schema).reduce<TableSchema>(
               (acc, [fieldName, field]) => {
-                acc[fieldName] = expect.objectContaining({
+                acc[fieldName] = {
                   ...field,
-                })
+                  externalType: allowUndefined(expect.any(String)),
+                  constraints: allowUndefined(expect.any(Object)),
+                  autocolumn: allowUndefined(expect.any(Boolean)),
+                }
                 return acc
               },
               {}
             ),
-          }
+          })
           return acc
         },
         {}
@@ -40,6 +40,7 @@ import _, { merge } from "lodash"
 import * as uuid from "uuid"
 import { Knex } from "knex"
 import { InternalTables } from "../../../db/utils"
+import { withEnv } from "../../../environment"

 const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
 tk.freeze(timestamp)

@@ -1688,7 +1689,7 @@ describe.each([
       }
       const row = await config.api.row.save(testTable._id!, draftRow)

-      await config.withEnv({ SELF_HOSTED: "true" }, async () => {
+      await withEnv({ SELF_HOSTED: "true" }, async () => {
         return context.doInAppContext(config.getAppId(), async () => {
           const enriched: Row[] = await outputProcessing(table, [row])
           const [targetRow] = enriched

@@ -2456,7 +2457,7 @@ describe.each([

   describe("Formula JS protection", () => {
     it("should time out JS execution if a single cell takes too long", async () => {
-      await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
+      await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
        const js = Buffer.from(
          `
          let i = 0;

@@ -2494,7 +2495,7 @@ describe.each([
     })

     it("should time out JS execution if a multiple cells take too long", async () => {
-      await config.withEnv(
+      await withEnv(
         {
           JS_PER_INVOCATION_TIMEOUT_MS: 40,
           JS_PER_REQUEST_TIMEOUT_MS: 80,
@@ -11,6 +11,8 @@ import {
   MIN_VALID_DATE,
   SQLITE_DESIGN_DOC_ID,
   utils,
+  withEnv as withCoreEnv,
+  setEnv as setCoreEnv,
 } from "@budibase/backend-core"

 import * as setup from "./utilities"

@@ -39,6 +41,7 @@ import { dataFilters } from "@budibase/shared-core"
 import { Knex } from "knex"
 import { structures } from "@budibase/backend-core/tests"
 import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"
+import { generateRowIdField } from "../../../integrations/utils"

 describe.each([
   ["in-memory", undefined],

@@ -64,9 +67,9 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
-    await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
+    await withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, () => config.init())
     if (isSqs) {
-      envCleanup = config.setCoreEnv({
+      envCleanup = setCoreEnv({
         SQS_SEARCH_ENABLE: "true",
         SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
       })
@@ -2353,6 +2356,35 @@ describe.each([
       })
     })

+    describe("Invalid column definitions", () => {
+      beforeAll(async () => {
+        // need to create an invalid table - means ignoring typescript
+        table = await createTable({
+          // @ts-ignore
+          invalid: {
+            type: FieldType.STRING,
+          },
+          name: {
+            name: "name",
+            type: FieldType.STRING,
+          },
+        })
+        await createRows([
+          { name: "foo", invalid: "id1" },
+          { name: "bar", invalid: "id2" },
+        ])
+      })
+
+      it("can get rows with all table data", async () => {
+        await expectSearch({
+          query: {},
+        }).toContain([
+          { name: "foo", invalid: "id1" },
+          { name: "bar", invalid: "id2" },
+        ])
+      })
+    })
+
     describe.each(["data_name_test", "name_data_test", "name_test_data_"])(
       "special (%s) case",
       column => {

@@ -2619,6 +2651,42 @@ describe.each([
       })
     })

+  !isInternal &&
+    describe("search by composite key", () => {
+      beforeAll(async () => {
+        table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            schema: {
+              idColumn1: {
+                name: "idColumn1",
+                type: FieldType.NUMBER,
+              },
+              idColumn2: {
+                name: "idColumn2",
+                type: FieldType.NUMBER,
+              },
+            },
+            primary: ["idColumn1", "idColumn2"],
+          })
+        )
+        await createRows([{ idColumn1: 1, idColumn2: 2 }])
+      })
+
+      it("can filter by the row ID with limit 1", async () => {
+        await expectSearch({
+          query: {
+            equal: { _id: generateRowIdField([1, 2]) },
+          },
+          limit: 1,
+        }).toContain([
+          {
+            idColumn1: 1,
+            idColumn2: 2,
+          },
+        ])
+      })
+    })
+
   isSql &&
     describe("pagination edge case with relationships", () => {
       let mainRows: Row[] = []
|
||||||
})
|
})
|
||||||
|
|
||||||
it("can still page when the hard limit is hit", async () => {
|
it("can still page when the hard limit is hit", async () => {
|
||||||
await config.withCoreEnv(
|
await withCoreEnv(
|
||||||
{
|
{
|
||||||
SQL_MAX_ROWS: "6",
|
SQL_MAX_ROWS: "6",
|
||||||
},
|
},
|
||||||
|
|
|
@@ -8,23 +8,24 @@ jest.mock("aws-sdk", () => ({
   })),
 }))

-const setup = require("./utilities")
-const { constants } = require("@budibase/backend-core")
+import { Datasource, SourceName } from "@budibase/types"
+import { setEnv } from "../../../environment"
+import { getRequest, getConfig, afterAll as _afterAll } from "./utilities"
+import { constants } from "@budibase/backend-core"

 describe("/static", () => {
-  let request = setup.getRequest()
-  let config = setup.getConfig()
-  let app
-  let cleanupEnv
+  let request = getRequest()
+  let config = getConfig()
+  let cleanupEnv: () => void

   afterAll(() => {
-    setup.afterAll()
+    _afterAll()
     cleanupEnv()
   })

   beforeAll(async () => {
-    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
-    app = await config.init()
+    cleanupEnv = setEnv({ SELF_HOSTED: "true" })
+    await config.init()
   })

   describe("/app", () => {

@@ -49,7 +50,7 @@ describe("/static", () => {
       delete headers[constants.Header.APP_ID]

       const res = await request
-        .get(`/app${config.prodApp.url}`)
+        .get(`/app${config.getProdApp().url}`)
         .set(headers)
         .expect(200)

@@ -68,14 +69,14 @@ describe("/static", () => {

   describe("/attachments", () => {
     describe("generateSignedUrls", () => {
-      let datasource
+      let datasource: Datasource

       beforeEach(async () => {
         datasource = await config.createDatasource({
           datasource: {
             type: "datasource",
             name: "Test",
-            source: "S3",
+            source: SourceName.S3,
             config: {},
           },
         })
@@ -2,6 +2,7 @@ import * as setup from "./utilities"
 import path from "path"
 import nock from "nock"
 import { generator } from "@budibase/backend-core/tests"
+import { withEnv as withCoreEnv } from "@budibase/backend-core"

 interface App {
   background: string

@@ -89,7 +90,7 @@ describe("/templates", () => {
         SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
       }

-      await config.withCoreEnv(env, async () => {
+      await withCoreEnv(env, async () => {
         const name = generator.guid().replaceAll("-", "")
         const url = `/${name}`
@@ -184,3 +184,7 @@ export const runInProd = async (func: any) => {
   env._set("NODE_ENV", nodeEnv)
   env._set("JEST_WORKER_ID", workerId)
 }
+
+export function allowUndefined(expectation: jest.Expect) {
+  return expect.toBeOneOf([expectation, undefined, null])
+}
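Note: allowUndefined leans on jest-extended's toBeOneOf asymmetric matcher (added to devDependencies earlier in this diff) so an assertion can accept a typed value, undefined, or null. A hedged usage sketch:

    // Passes whether or not the fetched column reports an externalType.
    expect(column).toEqual(
      expect.objectContaining({
        externalType: allowUndefined(expect.any(String)),
      })
    )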
@@ -24,7 +24,12 @@ import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
-import { db, roles } from "@budibase/backend-core"
+import {
+  db,
+  roles,
+  withEnv as withCoreEnv,
+  setEnv as setCoreEnv,
+} from "@budibase/backend-core"

 describe.each([
   ["lucene", undefined],

@@ -89,12 +94,11 @@ describe.each([
   }

   beforeAll(async () => {
-    await config.withCoreEnv(
-      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
-      () => config.init()
+    await withCoreEnv({ SQS_SEARCH_ENABLE: isSqs ? "true" : "false" }, () =>
+      config.init()
     )
     if (isSqs) {
-      envCleanup = config.setCoreEnv({
+      envCleanup = setCoreEnv({
         SQS_SEARCH_ENABLE: "true",
         SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
       })
@@ -2,6 +2,7 @@ import { Webhook } from "@budibase/types"
 import * as setup from "./utilities"
 import { checkBuilderEndpoint } from "./utilities/TestFunctions"
 import { mocks } from "@budibase/backend-core/tests"
+import { setEnv } from "../../../environment"

 const { basicWebhook, basicAutomation, collectAutomation } = setup.structures

@@ -17,7 +18,7 @@ describe("/webhooks", () => {
   })

   const setupTest = async () => {
-    cleanupEnv = config.setEnv({ SELF_HOSTED: "true" })
+    cleanupEnv = setEnv({ SELF_HOSTED: "true" })
     await config.init()
     const autoConfig = basicAutomation()
     autoConfig.definition.trigger.schema = {
@@ -1,6 +1,10 @@
 import * as setup from "../../../api/routes/tests/utilities"
 import { basicTable } from "../../../tests/utilities/structures"
-import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
+import {
+  db as dbCore,
+  SQLITE_DESIGN_DOC_ID,
+  withEnv as withCoreEnv,
+} from "@budibase/backend-core"
 import {
   LinkDocument,
   DocumentType,

@@ -69,11 +73,11 @@ function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
 type SQSEnvVar = "SQS_MIGRATION_ENABLE" | "SQS_SEARCH_ENABLE"

 async function sqsDisabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
+  await withCoreEnv({ [envVar]: "", SQS_SEARCH_ENABLE_TENANTS: [] }, cb)
 }

 async function sqsEnabled(envVar: SQSEnvVar, cb: () => Promise<void>) {
-  await config.withCoreEnv(
+  await withCoreEnv(
     { [envVar]: "1", SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()] },
     cb
   )
@@ -1,5 +1,9 @@
 import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
 import { OpenAI } from "openai"
+import {
+  withEnv as withCoreEnv,
+  setEnv as setCoreEnv,
+} from "@budibase/backend-core"

 jest.mock("openai", () => ({
   OpenAI: jest.fn().mockImplementation(() => ({

@@ -32,7 +36,7 @@ describe("test the openai action", () => {
   })

   beforeEach(() => {
-    resetEnv = config.setCoreEnv({ OPENAI_API_KEY: "abc123" })
+    resetEnv = setCoreEnv({ OPENAI_API_KEY: "abc123" })
   })

   afterEach(() => {

@@ -42,7 +46,7 @@ describe("test the openai action", () => {
   afterAll(_afterAll)

   it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
-    await config.withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
+    await withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
       let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
       expect(res.response).toEqual(
         "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
@@ -1,5 +1,6 @@
 import { env as coreEnv } from "@budibase/backend-core"
 import { ServiceType } from "@budibase/types"
+import cloneDeep from "lodash/cloneDeep"

 coreEnv._set("SERVICE_TYPE", ServiceType.APPS)
 import { join } from "path"

@@ -133,6 +134,32 @@ const environment = {
   },
 }

+export function setEnv(newEnvVars: Partial<typeof environment>): () => void {
+  const oldEnv = cloneDeep(environment)
+
+  let key: keyof typeof newEnvVars
+  for (key in newEnvVars) {
+    environment._set(key, newEnvVars[key])
+  }
+
+  return () => {
+    for (const [key, value] of Object.entries(oldEnv)) {
+      environment._set(key, value)
+    }
+  }
+}
+
+export function withEnv<T>(envVars: Partial<typeof environment>, f: () => T) {
+  const cleanup = setEnv(envVars)
+  const result = f()
+  if (result instanceof Promise) {
+    return result.finally(cleanup)
+  } else {
+    cleanup()
+    return result
+  }
+}
+
 function cleanVariables() {
   // clean up any environment variable edge cases
   for (let [key, value] of Object.entries(environment)) {
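Note: the server package now exposes the same setEnv/withEnv helpers that backend-core does, so tests scope environment overrides instead of hand-rolling save/restore logic. A usage sketch under that assumption (the import path depends on the calling file):

    import { setEnv, withEnv } from "../environment"

    // Scoped override: cleanup runs even for async callbacks, because
    // withEnv chains it onto the returned promise with finally().
    await withEnv({ SELF_HOSTED: "true" }, async () => {
      // ...assertions that depend on SELF_HOSTED...
    })

    // Manual form: setEnv hands back the cleanup function for afterAll().
    const cleanup = setEnv({ SELF_HOSTED: "true" })
    cleanup()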
@@ -1,3 +1,4 @@
+import { setEnv as setCoreEnv } from "@budibase/backend-core"
 import type { GoogleSpreadsheetWorksheet } from "google-spreadsheet"
 import nock from "nock"

@@ -40,7 +41,7 @@ describe("Google Sheets Integration", () => {
   let cleanupEnv: () => void

   beforeAll(() => {
-    cleanupEnv = config.setCoreEnv({
+    cleanupEnv = setCoreEnv({
       GOOGLE_CLIENT_ID: "test",
       GOOGLE_CLIENT_SECRET: "test",
     })
@@ -15,6 +15,7 @@ import { helpers, utils } from "@budibase/shared-core"
 import { pipeline } from "stream/promises"
 import tmp from "tmp"
 import fs from "fs"
+import { merge, cloneDeep } from "lodash"

 type PrimitiveTypes =
   | FieldType.STRING

@@ -291,10 +292,16 @@ function copyExistingPropsOver(
       const fetchedColumnDefinition: FieldSchema | undefined =
         table.schema[key]
       table.schema[key] = {
-        ...existingTableSchema[key],
+        // merge the properties - anything missing will be filled in, old definition preferred
+        // have to clone due to the way merge works
+        ...merge(
+          cloneDeep(fetchedColumnDefinition),
+          existingTableSchema[key]
+        ),
+        // always take externalType and autocolumn from the fetched definition
         externalType:
           existingTableSchema[key].externalType ||
-          table.schema[key]?.externalType,
+          fetchedColumnDefinition?.externalType,
         autocolumn: fetchedColumnDefinition?.autocolumn,
       } as FieldSchema
       // check constraints which can be fetched from the DB (they could be updated)
@@ -73,13 +73,14 @@ function buildInternalFieldList(
     fieldList = fieldList.concat(
       PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
     )
-  for (let col of Object.values(table.schema)) {
+  for (let key of Object.keys(table.schema)) {
+    const col = table.schema[key]
     const isRelationship = col.type === FieldType.LINK
     if (!opts?.relationships && isRelationship) {
       continue
     }
     if (!isRelationship) {
-      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+      fieldList.push(`${table._id}.${mapToUserColumn(key)}`)
     } else {
       const linkCol = col as RelationshipFieldMetadata
       const relatedTable = tables.find(table => table._id === linkCol.tableId)
@@ -3,6 +3,10 @@ import { Datasource, FieldType, Row, Table } from "@budibase/types"
 import TestConfiguration from "../../../../../tests/utilities/TestConfiguration"
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"
+import {
+  withEnv as withCoreEnv,
+  setEnv as setCoreEnv,
+} from "@budibase/backend-core"
 import {
   DatabaseName,
   getDatasource,

@@ -31,13 +35,12 @@ describe.each([
   let rows: Row[]

   beforeAll(async () => {
-    await config.withCoreEnv(
-      { SQS_SEARCH_ENABLE: isSqs ? "true" : "false" },
-      () => config.init()
+    await withCoreEnv({ SQS_SEARCH_ENABLE: isSqs ? "true" : "false" }, () =>
+      config.init()
     )

     if (isSqs) {
-      envCleanup = config.setCoreEnv({
+      envCleanup = setCoreEnv({
         SQS_SEARCH_ENABLE: "true",
         SQS_SEARCH_ENABLE_TENANTS: [config.getTenantId()],
       })
@@ -12,6 +12,7 @@ import {
   Table,
   TableSchema,
   SqlClient,
+  ArrayOperator,
 } from "@budibase/types"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { Format } from "../../../api/controllers/view/exporters"

@@ -311,3 +312,8 @@ function validateTimeOnlyField(

   return res
 }
+
+// type-guard check
+export function isArrayFilter(operator: any): operator is ArrayOperator {
+  return Object.values(ArrayOperator).includes(operator)
+}
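Note: isArrayFilter is a TypeScript type guard, so once it returns true the compiler narrows the checked key to ArrayOperator. A minimal sketch of how the external-request change earlier in this diff uses it:

    for (const operatorType of Object.keys(filters)) {
      if (isArrayFilter(operatorType)) {
        // operatorType: ArrayOperator here - only array-style operators
        // (such as oneOf) may match a multi-part composite row ID.
      }
    }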
@@ -9,6 +9,7 @@ import {
   users,
   cache,
   env as coreEnv,
+  features,
 } from "@budibase/backend-core"
 import { watch } from "../watch"
 import * as automations from "../automations"

@@ -96,6 +97,9 @@ export async function startup(
   console.log("Initialising events")
   eventInit()

+  console.log("Initialising feature flags")
+  features.init()
+
   if (app && server) {
     console.log("Initialising websockets")
     initialiseWebsockets(app, server)
@@ -1,6 +1,12 @@
+import { withEnv } from "../../environment"
 import TestConfiguration from "../../tests/utilities/TestConfiguration"
 import { startup } from "../index"
-import { users, utils, tenancy } from "@budibase/backend-core"
+import {
+  users,
+  utils,
+  tenancy,
+  withEnv as withCoreEnv,
+} from "@budibase/backend-core"
 import nock from "nock"

 describe("check BB_ADMIN environment variables", () => {
@@ -23,13 +29,13 @@ describe("check BB_ADMIN environment variables", () => {
     const EMAIL = "budibase@budibase.com",
       PASSWORD = "budibase"
     await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {
-      await config.withEnv(
+      await withEnv(
         {
           MULTI_TENANCY: "0",
           SELF_HOSTED: "1",
         },
         () =>
-          config.withCoreEnv(
+          withCoreEnv(
             {
               BB_ADMIN_USER_EMAIL: EMAIL,
               BB_ADMIN_USER_PASSWORD: PASSWORD,
@@ -1,8 +1,10 @@
 import env from "../environment"
+import * as matchers from "jest-extended"
 import { env as coreEnv, timers } from "@budibase/backend-core"
 import { testContainerUtils } from "@budibase/backend-core/tests"
 import nock from "nock"

+expect.extend(matchers)
 if (!process.env.CI) {
   // set a longer timeout in dev for debugging 100 seconds
   jest.setTimeout(100 * 1000)
@@ -70,7 +70,6 @@ import {
 } from "@budibase/types"

 import API from "./api"
-import { cloneDeep } from "lodash"
 import jwt, { Secret } from "jsonwebtoken"
 import { Server } from "http"

@@ -247,65 +246,6 @@ export default class TestConfiguration {
     }
   }

-  async withEnv<T>(newEnvVars: Partial<typeof env>, f: () => Promise<T>) {
-    let cleanup = this.setEnv(newEnvVars)
-    try {
-      return await f()
-    } finally {
-      cleanup()
-    }
-  }
-
-  /*
-   * Sets the environment variables to the given values and returns a function
-   * that can be called to reset the environment variables to their original values.
-   */
-  setEnv(newEnvVars: Partial<typeof env>): () => void {
-    const oldEnv = cloneDeep(env)
-
-    let key: keyof typeof newEnvVars
-    for (key in newEnvVars) {
-      env._set(key, newEnvVars[key])
-    }
-
-    return () => {
-      for (const [key, value] of Object.entries(oldEnv)) {
-        env._set(key, value)
-      }
-    }
-  }
-
-  async withCoreEnv<T>(
-    newEnvVars: Partial<typeof coreEnv>,
-    f: () => Promise<T>
-  ) {
-    let cleanup = this.setCoreEnv(newEnvVars)
-    try {
-      return await f()
-    } finally {
-      cleanup()
-    }
-  }
-
-  /*
-   * Sets the environment variables to the given values and returns a function
-   * that can be called to reset the environment variables to their original values.
-   */
-  setCoreEnv(newEnvVars: Partial<typeof coreEnv>): () => void {
-    const oldEnv = cloneDeep(coreEnv)
-
-    let key: keyof typeof newEnvVars
-    for (key in newEnvVars) {
-      coreEnv._set(key, newEnvVars[key])
-    }
-
-    return () => {
-      for (const [key, value] of Object.entries(oldEnv)) {
-        coreEnv._set(key, value)
-      }
-    }
-  }
-
   async withUser(user: User, f: () => Promise<void>) {
     const oldUser = this.user
     this.user = user
@@ -146,7 +146,8 @@ export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}

-    Object.entries(row).forEach(([columnName, columnData]) => {
+    Object.keys(row).forEach(columnName => {
+      const columnData = row[columnName]
       const schema = table.schema
       if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
@@ -11,6 +11,7 @@ const allowDisplayColumnByType: Record<FieldType, boolean> = {
   [FieldType.AUTO]: true,
   [FieldType.INTERNAL]: true,
   [FieldType.BARCODEQR]: true,
+
   [FieldType.BIGINT]: true,
   [FieldType.BOOLEAN]: false,
   [FieldType.ARRAY]: false,

@@ -35,6 +36,30 @@ const allowSortColumnByType: Record<FieldType, boolean> = {
   [FieldType.BIGINT]: true,
   [FieldType.BOOLEAN]: true,
   [FieldType.JSON]: true,
+
+  [FieldType.FORMULA]: false,
+  [FieldType.ATTACHMENTS]: false,
+  [FieldType.ATTACHMENT_SINGLE]: false,
+  [FieldType.SIGNATURE_SINGLE]: false,
+  [FieldType.ARRAY]: false,
+  [FieldType.LINK]: false,
+  [FieldType.BB_REFERENCE]: false,
+  [FieldType.BB_REFERENCE_SINGLE]: false,
+}
+
+const allowDefaultColumnByType: Record<FieldType, boolean> = {
+  [FieldType.NUMBER]: true,
+  [FieldType.JSON]: true,
+  [FieldType.DATETIME]: true,
+  [FieldType.LONGFORM]: true,
+  [FieldType.STRING]: true,
+
+  [FieldType.OPTIONS]: false,
+  [FieldType.AUTO]: false,
+  [FieldType.INTERNAL]: false,
+  [FieldType.BARCODEQR]: false,
+  [FieldType.BIGINT]: false,
+  [FieldType.BOOLEAN]: false,
   [FieldType.FORMULA]: false,
   [FieldType.ATTACHMENTS]: false,
   [FieldType.ATTACHMENT_SINGLE]: false,

@@ -53,6 +78,10 @@ export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }

+export function canHaveDefaultColumn(type: FieldType): boolean {
+  return !!allowDefaultColumnByType[type]
+}
+
 export function findDuplicateInternalColumns(table: Table): string[] {
   // maintains the case of keys
   const casedKeys = Object.keys(table.schema)
@@ -114,7 +114,6 @@ export interface FormulaFieldMetadata extends BaseFieldSchema {
   type: FieldType.FORMULA
   formula: string
   formulaType?: FormulaType
-  default?: string
 }

 export interface BBReferenceFieldMetadata

@@ -104,7 +104,7 @@ export async function getSelf(ctx: any) {
   ctx.body = await groups.enrichUserRolesFromGroups(user)

   // add the feature flags for this tenant
-  const flags = await features.fetch()
+  const flags = await features.flags.fetch()
   ctx.body.flags = flags

   addSessionAttributesToUser(ctx)
@@ -41,6 +41,14 @@ import { BpmStatusKey, BpmStatusValue } from "@budibase/shared-core"

 const MAX_USERS_UPLOAD_LIMIT = 1000

+const generatePassword = (length: number) => {
+  const array = new Uint8Array(length)
+  crypto.getRandomValues(array)
+  return Array.from(array, byte => byte.toString(36).padStart(2, "0"))
+    .join("")
+    .slice(0, length)
+}
+
 export const save = async (ctx: UserCtx<User, SaveUserResponse>) => {
   try {
     const currentUserId = ctx.user?._id
@@ -296,7 +304,7 @@ export const onboardUsers = async (

   let createdPasswords: Record<string, string> = {}
   const users: User[] = ctx.request.body.map(invite => {
-    let password = Math.random().toString(36).substring(2, 22)
+    const password = generatePassword(12)
     createdPasswords[invite.email] = password

     return {
@@ -292,9 +292,9 @@ describe("/api/global/auth", () => {
     it("redirects to auth provider", async () => {
       nock("http://someconfigurl").get("/").times(1).reply(200, {
         issuer: "test",
-        authorization_endpoint: "http://localhost/auth",
-        token_endpoint: "http://localhost/token",
-        userinfo_endpoint: "http://localhost/userinfo",
+        authorization_endpoint: "http://example.com/auth",
+        token_endpoint: "http://example.com/token",
+        userinfo_endpoint: "http://example.com/userinfo",
       })

       const configId = await generateOidcConfig()

@@ -305,7 +305,7 @@ describe("/api/global/auth", () => {
       const location: string = res.get("location")
       expect(
         location.startsWith(
-          `http://localhost/auth?response_type=code&client_id=clientId&redirect_uri=http%3A%2F%2Flocalhost%3A10000%2Fapi%2Fglobal%2Fauth%2F${config.tenantId}%2Foidc%2Fcallback&scope=openid%20profile%20email%20offline_access`
+          `http://example.com/auth?response_type=code&client_id=clientId&redirect_uri=http%3A%2F%2Flocalhost%3A10000%2Fapi%2Fglobal%2Fauth%2F${config.tenantId}%2Foidc%2Fcallback&scope=openid%20profile%20email%20offline_access`
         )
       ).toBe(true)
     })

@@ -313,11 +313,13 @@ describe("/api/global/auth", () => {

   describe("GET /api/global/auth/:tenantId/oidc/callback", () => {
     it("logs in", async () => {
+      const email = `${generator.guid()}@example.com`
+
       nock("http://someconfigurl").get("/").times(2).reply(200, {
         issuer: "test",
-        authorization_endpoint: "http://localhost/auth",
-        token_endpoint: "http://localhost/token",
-        userinfo_endpoint: "http://localhost/userinfo",
+        authorization_endpoint: "http://example.com/auth",
+        token_endpoint: "http://example.com/token",
+        userinfo_endpoint: "http://example.com/userinfo",
       })

       const token = jwt.sign(

@@ -326,20 +328,20 @@ describe("/api/global/auth", () => {
           sub: "sub",
           aud: "clientId",
           exp: Math.floor(Date.now() / 1000) + 60 * 60,
-          email: "oauth@example.com",
+          email,
         },
         "secret"
       )

-      nock("http://localhost").post("/token").reply(200, {
+      nock("http://example.com").post("/token").reply(200, {
         access_token: "access",
         refresh_token: "refresh",
         id_token: token,
       })

-      nock("http://localhost").get("/userinfo?schema=openid").reply(200, {
+      nock("http://example.com").get("/userinfo?schema=openid").reply(200, {
         sub: "sub",
-        email: "oauth@example.com",
+        email,
       })

       const configId = await generateOidcConfig()

@@ -351,10 +353,7 @@ describe("/api/global/auth", () => {
         )
       }

-      expect(events.auth.login).toHaveBeenCalledWith(
-        "oidc",
-        "oauth@example.com"
-      )
+      expect(events.auth.login).toHaveBeenCalledWith("oidc", email)
       expect(events.auth.login).toHaveBeenCalledTimes(1)
       expect(res.status).toBe(302)
       const location: string = res.get("location")
@@ -12,6 +12,33 @@ const nodemailer = require("nodemailer")
 // for the real email tests give them a long time to try complete/fail
 jest.setTimeout(30000)

+function cancelableTimeout(timeout: number): [Promise<unknown>, () => void] {
+  let timeoutId: NodeJS.Timeout
+  return [
+    new Promise((resolve, reject) => {
+      timeoutId = setTimeout(() => {
+        reject({
+          status: 301,
+          errno: "ETIME",
+        })
+      }, timeout)
+    }),
+    () => {
+      clearTimeout(timeoutId)
+    },
+  ]
+}
+
+async function withTimeout<T>(
+  timeout: number,
+  promise: Promise<T>
+): Promise<T> {
+  const [timeoutPromise, cancel] = cancelableTimeout(timeout)
+  const result = (await Promise.race([promise, timeoutPromise])) as T
+  cancel()
+  return result
+}
+
 describe("/api/global/email", () => {
   const config = new TestConfiguration()
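Note: withTimeout races the real promise against a cancelable timer, then clears the timer so a stray setTimeout cannot keep the Jest worker alive after the work completes. A usage sketch (the 20s budget mirrors the rewritten test below):

    // Rejects with { status: 301, errno: "ETIME" } after 20 seconds;
    // otherwise the timer is cancelled and the resolved value returned.
    await withTimeout(20000, config.saveEtherealSmtpConfig())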
@@ -30,19 +57,8 @@ describe("/api/global/email", () => {
   ) {
     let response, text
     try {
-      const timeout = () =>
-        new Promise((resolve, reject) =>
-          setTimeout(
-            () =>
-              reject({
-                status: 301,
-                errno: "ETIME",
-              }),
-            20000
-          )
-        )
-      await Promise.race([config.saveEtherealSmtpConfig(), timeout()])
-      await Promise.race([config.saveSettingsConfig(), timeout()])
+      await withTimeout(20000, config.saveEtherealSmtpConfig())
+      await withTimeout(20000, config.saveSettingsConfig())
       let res
       if (attachments) {
         res = await config.api.emails
@@ -18,6 +18,7 @@ import {
   timers,
   redis,
   cache,
+  features,
 } from "@budibase/backend-core"

 db.init()

@@ -95,6 +96,7 @@ export default server.listen(parseInt(env.PORT || "4002"), async () => {
   console.log(startupLog)
   await initPro()
   await redis.clients.init()
+  features.init()
   cache.docWritethrough.init()
   // configure events to use the pro audit log write
   // can't integrate directly into backend-core due to cyclic issues
@@ -40,7 +40,7 @@ export class ConfigAPI extends TestAPI {
     const sessionContent = JSON.parse(
       Buffer.from(koaSession, "base64").toString("utf-8")
     )
-    const handle = sessionContent["openidconnect:localhost"].state.handle
+    const handle = sessionContent["openidconnect:example.com"].state.handle
     return this.request
       .get(`/api/global/auth/${this.config.getTenantId()}/oidc/callback`)
       .query({ code: "test", state: handle })
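This works because koa-session serialises its state as base64-encoded JSON in the cookie, so the OIDC state handle can be lifted straight out of it. A self-contained sketch of the decode step (the cookie value here is fabricated for illustration):

    // A hypothetical koa session cookie value captured from Set-Cookie.
    const koaSession = Buffer.from(
      JSON.stringify({
        "openidconnect:example.com": { state: { handle: "abc123" } },
      })
    ).toString("base64")

    const sessionContent = JSON.parse(
      Buffer.from(koaSession, "base64").toString("utf-8")
    )
    console.log(sessionContent["openidconnect:example.com"].state.handle) // "abc123"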
@@ -1,13 +1,21 @@
 import { mocks, testContainerUtils } from "@budibase/backend-core/tests"
 import env from "../environment"
 import { env as coreEnv, timers } from "@budibase/backend-core"
-// must explicitly enable fetch mock
-mocks.fetch.enable()
+import nock from "nock"

 // mock all dates to 2020-01-01T00:00:00.000Z
 // use tk.reset() to use real dates in individual tests
-const tk = require("timekeeper")
+import tk from "timekeeper"

+nock.disableNetConnect()
+nock.enableNetConnect(host => {
+  return (
+    host.includes("localhost") ||
+    host.includes("127.0.0.1") ||
+    host.includes("::1") ||
+    host.includes("ethereal.email") // used in realEmail.spec.ts
+  )
+})
+
 tk.freeze(mocks.date.MOCK_DATE)
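With net connect disabled plus an allow-list, any test that reaches an unlisted host fails fast instead of silently talking to the network. A sketch of the failure mode, using nock's documented NetConnectNotAllowedError (the target URL is a placeholder):

    import nock from "nock"
    import http from "http"

    nock.disableNetConnect()
    nock.enableNetConnect(host => host.includes("localhost"))

    // Anything outside the allow-list errors out before a socket is opened.
    http.get("http://api.example.com/", () => {}).on("error", err => {
      console.log(err.name) // "NetConnectNotAllowedError"
    })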
yarn.lock (29 changes)
@@ -2073,7 +2073,7 @@
     passport-oauth2-refresh "^2.1.0"
     pino "8.11.0"
     pino-http "8.3.3"
-    posthog-node "1.3.0"
+    posthog-node "4.0.1"
     pouchdb "7.3.0"
     pouchdb-find "7.2.2"
     redlock "4.2.0"
@@ -7343,7 +7343,7 @@ axios-retry@^3.1.9:
     "@babel/runtime" "^7.15.4"
     is-retry-allowed "^2.2.0"

-axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0:
+axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
   version "1.6.3"
   resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
   integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@@ -13614,7 +13614,7 @@ jest-config@^29.7.0:
     slash "^3.0.0"
     strip-json-comments "^3.1.1"

-"jest-diff@>=29.4.3 < 30", jest-diff@^29.4.1, jest-diff@^29.7.0:
+"jest-diff@>=29.4.3 < 30", jest-diff@^29.0.0, jest-diff@^29.4.1, jest-diff@^29.7.0:
   version "29.7.0"
   resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a"
   integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==
@@ -13673,12 +13673,20 @@ jest-environment-node@^29.7.0:
     jest-mock "^29.7.0"
     jest-util "^29.7.0"

+jest-extended@^4.0.2:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/jest-extended/-/jest-extended-4.0.2.tgz#d23b52e687cedf66694e6b2d77f65e211e99e021"
+  integrity sha512-FH7aaPgtGYHc9mRjriS0ZEHYM5/W69tLrFTIdzm+yJgeoCmmrSB/luSfMSqWP9O29QWHPEmJ4qmU6EwsZideog==
+  dependencies:
+    jest-diff "^29.0.0"
+    jest-get-type "^29.0.0"
+
 jest-get-type@^26.3.0:
   version "26.3.0"
   resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0"
   integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig==

-jest-get-type@^29.6.3:
+jest-get-type@^29.0.0, jest-get-type@^29.6.3:
   version "29.6.3"
   resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1"
   integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==
@@ -18110,6 +18118,14 @@ posthog-node@1.3.0:
     remove-trailing-slash "^0.1.1"
     uuid "^8.3.2"

+posthog-node@4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-4.0.1.tgz#eb8b6cdf68c3fdd0dc2b75e8aab2e0ec3727fb2a"
+  integrity sha512-rtqm2h22QxLGBrW2bLYzbRhliIrqgZ0k+gF0LkQ1SNdeD06YE5eilV0MxZppFSxC8TfH0+B0cWCuebEnreIDgQ==
+  dependencies:
+    axios "^1.6.2"
+    rusha "^0.8.14"
+
 pouch-stream@^0.4.0:
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/pouch-stream/-/pouch-stream-0.4.1.tgz#0c6d8475c9307677627991a2f079b301c3b89bdd"
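One consequence of the 1.x to 4.x jump worth noting: posthog-node 1.x shipped the client as a default export, while 4.x exposes a named PostHog class, so consumer imports change shape roughly like this (token and host values are placeholders, not the project's real configuration):

    // posthog-node 1.x (before):
    //   import PostHog from "posthog-node"

    // posthog-node 4.x (after):
    import { PostHog } from "posthog-node"

    const client = new PostHog("phc_example_token", {
      host: "https://us.i.posthog.com", // placeholder ingestion host
    })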
@@ -19574,6 +19590,11 @@ run-parallel@^1.1.9:
   dependencies:
     queue-microtask "^1.2.2"

+rusha@^0.8.14:
+  version "0.8.14"
+  resolved "https://registry.yarnpkg.com/rusha/-/rusha-0.8.14.tgz#a977d0de9428406138b7bb90d3de5dcd024e2f68"
+  integrity sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==
+
 rxjs@^6.6.6:
   version "6.6.7"
   resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9"