Merge branch 'master' of github.com:budibase/budibase into budi-8433-default-value-column-ui
This commit is contained in: commit 63789208b7
@@ -1,12 +1,12 @@
 import env from "../environment"
 import * as context from "../context"
 import { PostHog, PostHogOptions } from "posthog-node"
-import { IdentityType } from "@budibase/types"
+import { IdentityType, UserCtx } from "@budibase/types"
 import tracer from "dd-trace"
 
 let posthog: PostHog | undefined
 export function init(opts?: PostHogOptions) {
-  if (env.POSTHOG_TOKEN) {
+  if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST) {
    posthog = new PostHog(env.POSTHOG_TOKEN, {
      host: env.POSTHOG_API_HOST,
      ...opts,
@@ -83,40 +83,40 @@ class NumberFlag extends Flag<number> {
 }
 
 export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
-  private readonly flags: T
-
-  constructor(flags: T) {
-    this.flags = flags
-  }
+  constructor(private readonly flagSchema: T) {}
 
   defaults(): FlagValues<T> {
-    return Object.keys(this.flags).reduce((acc, key) => {
+    return Object.keys(this.flagSchema).reduce((acc, key) => {
       const typedKey = key as keyof T
-      acc[typedKey] = this.flags[key].defaultValue
+      acc[typedKey] = this.flagSchema[key].defaultValue
       return acc
     }, {} as FlagValues<T>)
   }
 
   isFlagName(name: string | number | symbol): name is keyof T {
-    return this.flags[name as keyof T] !== undefined
+    return this.flagSchema[name as keyof T] !== undefined
   }
 
-  async get<K extends keyof T>(key: K): Promise<FlagValues<T>[K]> {
-    const flags = await this.fetch()
+  async get<K extends keyof T>(
+    key: K,
+    ctx?: UserCtx
+  ): Promise<FlagValues<T>[K]> {
+    const flags = await this.fetch(ctx)
     return flags[key]
   }
 
-  async isEnabled<K extends KeysOfType<T, boolean>>(key: K): Promise<boolean> {
-    const flags = await this.fetch()
+  async isEnabled<K extends KeysOfType<T, boolean>>(
+    key: K,
+    ctx?: UserCtx
+  ): Promise<boolean> {
+    const flags = await this.fetch(ctx)
    return flags[key]
   }
 
-  async fetch(): Promise<FlagValues<T>> {
+  async fetch(ctx?: UserCtx): Promise<FlagValues<T>> {
     return await tracer.trace("features.fetch", async span => {
       const tags: Record<string, any> = {}
-      const flags = this.defaults()
+      const flagValues = this.defaults()
 
       const currentTenantId = context.getTenantId()
       const specificallySetFalse = new Set<string>()
 
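The hunk above changes the public surface of FlagSet: get, isEnabled and fetch now accept an optional UserCtx. A minimal sketch of the new call sites, assuming a FlagSet instance named flags whose schema defines a boolean TEST_BOOLEAN flag (the wrapper function is illustrative, only the signatures come from this diff):

    async function example(ctx: UserCtx) {
      // unchanged behaviour: resolve a flag without a request context
      const plain = await flags.isEnabled("TEST_BOOLEAN")
      // new: pass the request context so license features can switch flags on
      const forRequest = await flags.isEnabled("TEST_BOOLEAN", ctx)
      // fetch follows the same pattern and resolves every flag at once
      const all = await flags.fetch(ctx)
      return { plain, forRequest, all }
    }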
@@ -140,39 +140,64 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
           throw new Error(`Feature: ${feature} is not an allowed option`)
         }
 
-        if (typeof flags[feature] !== "boolean") {
+        if (typeof flagValues[feature] !== "boolean") {
           throw new Error(`Feature: ${feature} is not a boolean`)
         }
 
-        // @ts-ignore
-        flags[feature] = value
+        // @ts-expect-error - TS does not like you writing into a generic type,
+        // but we know that it's okay in this case because it's just an object.
+        flagValues[feature] = value
         tags[`flags.${feature}.source`] = "environment"
       }
     }
 
+      const license = ctx?.user?.license
+      if (license) {
+        for (const feature of license.features) {
+          if (!this.isFlagName(feature)) {
+            continue
+          }
+
+          if (
+            flagValues[feature] === true ||
+            specificallySetFalse.has(feature)
+          ) {
+            // If the flag is already set to through environment variables, we
+            // don't want to override it back to false here.
+            continue
+          }
+
+          // @ts-expect-error - TS does not like you writing into a generic type,
+          // but we know that it's okay in this case because it's just an object.
+          flagValues[feature] = true
+          tags[`flags.${feature}.source`] = "license"
+        }
+      }
+
       const identity = context.getIdentity()
       if (posthog && identity?.type === IdentityType.USER) {
         const posthogFlags = await posthog.getAllFlagsAndPayloads(identity._id)
         for (const [name, value] of Object.entries(posthogFlags.featureFlags)) {
-          const flag = this.flags[name]
-          if (!flag) {
+          if (!this.isFlagName(name)) {
             // We don't want an unexpected PostHog flag to break the app, so we
             // just log it and continue.
             console.warn(`Unexpected posthog flag "${name}": ${value}`)
             continue
           }
 
-          if (flags[name] === true || specificallySetFalse.has(name)) {
+          if (flagValues[name] === true || specificallySetFalse.has(name)) {
            // If the flag is already set to through environment variables, we
            // don't want to override it back to false here.
            continue
          }
 
           const payload = posthogFlags.featureFlagPayloads?.[name]
+          const flag = this.flagSchema[name]
           try {
-            // @ts-ignore
-            flags[name] = flag.parse(payload || value)
+            // @ts-expect-error - TS does not like you writing into a generic
+            // type, but we know that it's okay in this case because it's just
+            // an object.
+            flagValues[name] = flag.parse(payload || value)
             tags[`flags.${name}.source`] = "posthog"
           } catch (err) {
             // We don't want an invalid PostHog flag to break the app, so we just
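With the license block added above, flag resolution layers its sources in order: defaults, then TENANT_FEATURE_FLAGS from the environment, then license features from the passed-in ctx, then PostHog; a value that is already switched on, or explicitly switched off via the environment, is never overridden by a later source. A small sketch under those assumptions, with the flag name taken from the tests further down:

    // a request context whose license enables TEST_BOOLEAN
    const ctx = { user: { license: { features: ["TEST_BOOLEAN"] } } } as UserCtx
    const values = await flags.fetch(ctx)
    // values.TEST_BOOLEAN is true here, unless the environment contains
    // "default:!TEST_BOOLEAN", in which case the environment keeps it false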
@@ -182,12 +207,12 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
         }
       }
 
-      for (const [key, value] of Object.entries(flags)) {
+      for (const [key, value] of Object.entries(flagValues)) {
         tags[`flags.${key}.value`] = value
       }
       span?.addTags(tags)
 
-      return flags
+      return flagValues
     })
   }
 }
@@ -1,7 +1,7 @@
-import { IdentityContext, IdentityType } from "@budibase/types"
+import { IdentityContext, IdentityType, UserCtx } from "@budibase/types"
 import { Flag, FlagSet, FlagValues, init } from "../"
 import { context } from "../.."
-import { setEnv, withEnv } from "../../environment"
+import environment, { withEnv } from "../../environment"
 import nodeFetch from "node-fetch"
 import nock from "nock"
 
@@ -12,207 +12,214 @@ const schema = {
 }
 const flags = new FlagSet(schema)
 
+interface TestCase {
+  it: string
+  identity?: Partial<IdentityContext>
+  environmentFlags?: string
+  posthogFlags?: PostHogFlags
+  licenseFlags?: Array<string>
+  expected?: Partial<FlagValues<typeof schema>>
+  errorMessage?: string | RegExp
+}
+
+interface PostHogFlags {
+  featureFlags?: Record<string, boolean>
+  featureFlagPayloads?: Record<string, string>
+}
+
+function mockPosthogFlags(flags: PostHogFlags) {
+  nock("https://us.i.posthog.com")
+    .post("/decide/?v=3", body => {
+      return body.token === "test" && body.distinct_id === "us_1234"
+    })
+    .reply(200, flags)
+    .persist()
+}
+
 describe("feature flags", () => {
-  interface TestCase {
-    tenant: string
-    flags: string
-    expected: Partial<FlagValues<typeof schema>>
-  }
+  beforeEach(() => {
+    nock.cleanAll()
+  })
 
   it.each<TestCase>([
     {
-      tenant: "tenant1",
-      flags: "tenant1:TEST_BOOLEAN",
+      it: "should should find a simple boolean flag in the environment",
+      environmentFlags: "default:TEST_BOOLEAN",
       expected: { TEST_BOOLEAN: true },
     },
     {
-      tenant: "tenant1",
-      flags: "tenant1:!TEST_BOOLEAN",
+      it: "should should find a simple netgative boolean flag in the environment",
+      environmentFlags: "default:!TEST_BOOLEAN",
       expected: { TEST_BOOLEAN: false },
     },
     {
-      tenant: "tenant1",
-      flags: "*:TEST_BOOLEAN",
+      it: "should should match stars in the environment",
+      environmentFlags: "*:TEST_BOOLEAN",
       expected: { TEST_BOOLEAN: true },
     },
     {
-      tenant: "tenant1",
-      flags: "tenant2:TEST_BOOLEAN",
+      it: "should not match a different tenant's flags",
+      environmentFlags: "otherTenant:TEST_BOOLEAN",
       expected: { TEST_BOOLEAN: false },
     },
     {
-      tenant: "tenant1",
-      flags: "",
+      it: "should return the defaults when no flags are set",
+      expected: flags.defaults(),
+    },
+    {
+      it: "should fail when an environment flag is not recognised",
+      environmentFlags: "default:TEST_BOOLEAN,default:FOO",
+      errorMessage: "Feature: FOO is not an allowed option",
+    },
+    {
+      it: "should be able to read boolean flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_BOOLEAN: true },
+      },
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should be able to read string flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_STRING: true },
+        featureFlagPayloads: { TEST_STRING: "test" },
+      },
+      expected: { TEST_STRING: "test" },
+    },
+    {
+      it: "should be able to read numeric flags from PostHog",
+      posthogFlags: {
+        featureFlags: { TEST_NUMBER: true },
+        featureFlagPayloads: { TEST_NUMBER: "123" },
+      },
+      expected: { TEST_NUMBER: 123 },
+    },
+    {
+      it: "should not be able to override a negative environment flag from PostHog",
+      environmentFlags: "default:!TEST_BOOLEAN",
+      posthogFlags: {
+        featureFlags: { TEST_BOOLEAN: true },
+      },
+      expected: { TEST_BOOLEAN: false },
+    },
+    {
+      it: "should not be able to override a positive environment flag from PostHog",
+      environmentFlags: "default:TEST_BOOLEAN",
+      posthogFlags: {
+        featureFlags: {
+          TEST_BOOLEAN: false,
+        },
+      },
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should be able to set boolean flags through the license",
+      licenseFlags: ["TEST_BOOLEAN"],
+      expected: { TEST_BOOLEAN: true },
+    },
+    {
+      it: "should not be able to override a negative environment flag from license",
+      environmentFlags: "default:!TEST_BOOLEAN",
+      licenseFlags: ["TEST_BOOLEAN"],
+      expected: { TEST_BOOLEAN: false },
+    },
+    {
+      it: "should not error on unrecognised PostHog flag",
+      posthogFlags: {
+        featureFlags: { UNDEFINED: true },
+      },
+      expected: flags.defaults(),
+    },
+    {
+      it: "should not error on unrecognised license flag",
+      licenseFlags: ["UNDEFINED"],
       expected: flags.defaults(),
     },
   ])(
-    'should find flags $expected for $tenant with string "$flags"',
-    ({ tenant, flags: envFlags, expected }) =>
-      context.doInTenant(tenant, async () =>
-        withEnv({ TENANT_FEATURE_FLAGS: envFlags }, async () => {
-          const values = await flags.fetch()
-          expect(values).toMatchObject(expected)
-
-          for (const [key, expectedValue] of Object.entries(expected)) {
-            const value = await flags.get(key as keyof typeof schema)
-            expect(value).toBe(expectedValue)
-          }
-        })
-      )
+    "$it",
+    async ({
+      identity,
+      environmentFlags,
+      posthogFlags,
+      licenseFlags,
+      expected,
+      errorMessage,
+    }) => {
+      const env: Partial<typeof environment> = {
+        TENANT_FEATURE_FLAGS: environmentFlags,
+      }
+
+      if (posthogFlags) {
+        mockPosthogFlags(posthogFlags)
+        env.POSTHOG_TOKEN = "test"
+        env.POSTHOG_API_HOST = "https://us.i.posthog.com"
+      }
+
+      const ctx = { user: { license: { features: licenseFlags || [] } } }
+
+      await withEnv(env, async () => {
+        // We need to pass in node-fetch here otherwise nock won't get used
+        // because posthog-node uses axios under the hood.
+        init({ fetch: nodeFetch })
+
+        const fullIdentity: IdentityContext = {
+          _id: "us_1234",
+          tenantId: "default",
+          type: IdentityType.USER,
+          email: "test@example.com",
+          firstName: "Test",
+          lastName: "User",
+          ...identity,
+        }
+
+        await context.doInIdentityContext(fullIdentity, async () => {
+          if (errorMessage) {
+            await expect(flags.fetch(ctx as UserCtx)).rejects.toThrow(
+              errorMessage
+            )
+          } else if (expected) {
+            const values = await flags.fetch(ctx as UserCtx)
+            expect(values).toMatchObject(expected)
+
+            for (const [key, expectedValue] of Object.entries(expected)) {
+              const value = await flags.get(
+                key as keyof typeof schema,
+                ctx as UserCtx
+              )
+              expect(value).toBe(expectedValue)
+            }
+          } else {
+            throw new Error("No expected value")
+          }
+        })
+      })
+    }
   )
 
-  interface FailedTestCase {
-    tenant: string
-    flags: string
-    expected: string | RegExp
-  }
-
-  it.each<FailedTestCase>([
-    {
-      tenant: "tenant1",
-      flags: "tenant1:TEST_BOOLEAN,tenant1:FOO",
-      expected: "Feature: FOO is not an allowed option",
-    },
-  ])(
-    "should fail with message \"$expected\" for $tenant with string '$flags'",
-    ({ tenant, flags: envFlags, expected }) =>
-      context.doInTenant(tenant, () =>
-        withEnv({ TENANT_FEATURE_FLAGS: envFlags }, () =>
-          expect(flags.fetch()).rejects.toThrow(expected)
-        )
-      )
-  )
-
-  describe("posthog", () => {
+  it("should not error if PostHog is down", async () => {
     const identity: IdentityContext = {
       _id: "us_1234",
-      tenantId: "tenant1",
+      tenantId: "default",
       type: IdentityType.USER,
       email: "test@example.com",
       firstName: "Test",
       lastName: "User",
     }
 
-    let cleanup: () => void
+    nock("https://us.i.posthog.com")
+      .post("/decide/?v=3", body => {
+        return body.token === "test" && body.distinct_id === "us_1234"
+      })
+      .reply(503)
+      .persist()
 
-    beforeAll(() => {
-      cleanup = setEnv({ POSTHOG_TOKEN: "test" })
-    })
-
-    afterAll(() => {
-      cleanup()
-    })
-
-    beforeEach(() => {
-      nock.cleanAll()
-
-      // We need to pass in node-fetch here otherwise nock won't get used
-      // because posthog-node uses axios under the hood.
-      init({ fetch: nodeFetch })
-    })
-
-    function mockFlags(flags: {
-      featureFlags?: Record<string, boolean>
-      featureFlagPayloads?: Record<string, any>
-    }) {
-      nock("https://us.i.posthog.com")
-        .post("/decide/?v=3", body => {
-          return body.token === "test" && body.distinct_id === "us_1234"
-        })
-        .reply(200, flags)
-    }
-
-    it("should be able to read flags from posthog", async () => {
-      mockFlags({
-        featureFlags: {
-          TEST_BOOLEAN: true,
-        },
-      })
-
-      await context.doInIdentityContext(identity, async () => {
-        const values = await flags.fetch()
-        expect(values.TEST_BOOLEAN).toBe(true)
-      })
-    })
-
-    it("should be able to read flags from posthog with payloads", async () => {
-      mockFlags({
-        featureFlags: {
-          TEST_STRING: true,
-        },
-        featureFlagPayloads: {
-          TEST_STRING: "test payload",
-        },
-      })
-
-      await context.doInIdentityContext(identity, async () => {
-        const values = await flags.fetch()
-        expect(values.TEST_STRING).toBe("test payload")
-      })
-    })
-
-    it("should be able to read flags from posthog with numbers", async () => {
-      mockFlags({
-        featureFlags: {
-          TEST_NUMBER: true,
-        },
-        featureFlagPayloads: {
-          TEST_NUMBER: 123,
-        },
-      })
-
-      await context.doInIdentityContext(identity, async () => {
-        const values = await flags.fetch()
-        expect(values.TEST_NUMBER).toBe(123)
-      })
-    })
-
-    it("should not fail when a flag is not known", async () => {
-      mockFlags({
-        featureFlags: {
-          _SOME_RANDOM_FLAG: true,
-        },
-      })
-
-      await context.doInIdentityContext(identity, async () => {
-        await expect(flags.fetch()).resolves.not.toThrow()
-      })
-    })
-
-    it("should not override flags set in the environment", async () => {
-      mockFlags({
-        featureFlags: {
-          TEST_BOOLEAN: false,
-        },
-      })
-
-      await withEnv(
-        { TENANT_FEATURE_FLAGS: `${identity.tenantId}:TEST_BOOLEAN` },
-        async () => {
-          await context.doInIdentityContext(identity, async () => {
-            const values = await flags.fetch()
-            expect(values.TEST_BOOLEAN).toBe(true)
-          })
-        }
-      )
-    })
-
-    it("should not override flags set in the environment with a ! prefix", async () => {
-      mockFlags({
-        featureFlags: {
-          TEST_BOOLEAN: true,
-        },
-      })
-
-      await withEnv(
-        { TENANT_FEATURE_FLAGS: `${identity.tenantId}:!TEST_BOOLEAN` },
-        async () => {
-          await context.doInIdentityContext(identity, async () => {
-            const values = await flags.fetch()
-            expect(values.TEST_BOOLEAN).toBe(false)
-          })
-        }
-      )
-    })
+    await withEnv(
+      { POSTHOG_TOKEN: "test", POSTHOG_API_HOST: "https://us.i.posthog.com" },
+      async () => {
+        await context.doInIdentityContext(identity, async () => {
+          await flags.fetch()
+        })
+      }
+    )
   })
 })
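The rewritten suite above is table-driven: every case is an object, and the test title is just "$it", which Jest interpolates from each case's it property. The same mechanism in isolation, with a hypothetical mini-suite:

    it.each([
      { it: "adds positives", a: 1, b: 2, sum: 3 },
      { it: "adds negatives", a: -1, b: -2, sum: -3 },
    ])("$it", ({ a, b, sum }) => {
      expect(a + b).toBe(sum)
    })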
@@ -28,16 +28,25 @@ function generateSchema(
   oldTable: null | Table = null,
   renamed?: RenameColumn
 ) {
-  let primaryKey = table && table.primary ? table.primary[0] : null
+  let primaryKeys = table && table.primary ? table.primary : []
   const columns = Object.values(table.schema)
   // all columns in a junction table will be meta
   let metaCols = columns.filter(col => (col as NumberFieldMetadata).meta)
   let isJunction = metaCols.length === columns.length
+  let columnTypeSet: string[] = []
 
   // can't change primary once its set for now
-  if (primaryKey && !oldTable && !isJunction) {
-    schema.increments(primaryKey).primary()
-  } else if (!oldTable && isJunction) {
-    schema.primary(metaCols.map(col => col.name))
+  if (!oldTable) {
+    // junction tables are special - we have an expected format
+    if (isJunction) {
+      schema.primary(metaCols.map(col => col.name))
+    } else if (primaryKeys.length === 1) {
+      schema.increments(primaryKeys[0]).primary()
+      // note that we've set its type
+      columnTypeSet.push(primaryKeys[0])
+    } else {
+      schema.primary(primaryKeys)
+    }
   }
 
   // check if any columns need added
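The new branching above distinguishes junction tables, single-column keys and composite keys. In plain Knex terms the non-junction shapes look roughly like this (a sketch with made-up table and column names, not the Budibase wrapper):

    import knex from "knex"

    const db = knex({ client: "pg", connection: {} })

    // single primary key: an auto-incrementing column, as in the primaryKeys.length === 1 branch
    await db.schema.createTable("single_pk", table => {
      table.increments("id").primary()
      table.text("name")
    })

    // composite primary key: declare the columns, then the compound constraint
    await db.schema.createTable("composite_pk", table => {
      table.integer("idColumn1")
      table.integer("idColumn2")
      table.primary(["idColumn1", "idColumn2"])
    })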
@@ -49,7 +58,7 @@ function generateSchema(
     const oldColumn = oldTable ? oldTable.schema[key] : null
     if (
       (oldColumn && oldColumn.type) ||
-      (primaryKey === key && !isJunction) ||
+      columnTypeSet.includes(key) ||
       renamed?.updated === key
     ) {
       continue
@@ -61,7 +70,12 @@ function generateSchema(
     case FieldType.LONGFORM:
     case FieldType.BARCODEQR:
     case FieldType.BB_REFERENCE_SINGLE:
-      schema.text(key)
+      // primary key strings have to have a length in some DBs
+      if (primaryKeys.includes(key)) {
+        schema.string(key, 255)
+      } else {
+        schema.text(key)
+      }
       break
     case FieldType.NUMBER:
       // if meta is specified then this is a junction table entry
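The string case now emits a bounded column for primary keys because some engines (MySQL in particular) will not index an unbounded TEXT column as a key without an explicit prefix length. A short illustration of the distinction, reusing the db handle from the sketch above (assumed setup, not the exact Budibase call):

    // key column: VARCHAR(255), which the engine can index directly;
    // non-key string columns can stay as unbounded TEXT
    await db.schema.createTable("string_pk", table => {
      table.string("externalId", 255).primary()
      table.text("description")
    })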
@@ -1 +1 @@
-Subproject commit 7fe713e51afea77c8024579582a4e1a4ec1b55b3
+Subproject commit 94747fd5bb67c218244bb60b9540f3a6f1c3f6f1
@@ -144,6 +144,7 @@
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",
+    "jest-extended": "^4.0.2",
     "jest-openapi": "0.14.2",
     "nock": "13.5.4",
     "nodemon": "2.0.15",
@@ -195,12 +195,13 @@ export class ExternalRequest<T extends Operation> {
     if (filters) {
       // need to map over the filters and make sure the _id field isn't present
       let prefix = 1
-      for (const operator of Object.values(filters)) {
+      for (const [operatorType, operator] of Object.entries(filters)) {
+        const isArrayOp = sdk.rows.utils.isArrayFilter(operatorType)
         for (const field of Object.keys(operator || {})) {
           if (dbCore.removeKeyNumbering(field) === "_id") {
             if (primary) {
               const parts = breakRowIdField(operator[field])
-              if (primary.length > 1) {
+              if (primary.length > 1 && isArrayOp) {
                 operator[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR] = {
                   id: primary,
                   values: parts[0],
@@ -71,8 +71,7 @@ export function basicProcessing({
 }): Row {
   const thisRow: Row = {}
   // filter the row down to what is actually the row (not joined)
-  for (let field of Object.values(table.schema)) {
-    const fieldName = field.name
+  for (let fieldName of Object.keys(table.schema)) {
     let value = extractFieldValue({
       row,
       tableName: table.name,
@@ -1,5 +1,5 @@
 import * as setup from "./utilities"
-import { checkBuilderEndpoint } from "./utilities/TestFunctions"
+import { checkBuilderEndpoint, allowUndefined } from "./utilities/TestFunctions"
 import { getCachedVariable } from "../../../threads/utils"
 import { context, events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
@@ -380,21 +380,24 @@ describe("/datasources", () => {
         persisted?.entities &&
         Object.entries(persisted.entities).reduce<Record<string, Table>>(
           (acc, [tableName, table]) => {
-            acc[tableName] = {
+            acc[tableName] = expect.objectContaining({
               ...table,
               primaryDisplay: expect.not.stringMatching(
                 new RegExp(`^${table.primaryDisplay || ""}$`)
               ),
               schema: Object.entries(table.schema).reduce<TableSchema>(
                 (acc, [fieldName, field]) => {
-                  acc[fieldName] = expect.objectContaining({
+                  acc[fieldName] = {
                     ...field,
-                  })
+                    externalType: allowUndefined(expect.any(String)),
+                    constraints: allowUndefined(expect.any(Object)),
+                    autocolumn: allowUndefined(expect.any(Boolean)),
+                  }
                   return acc
                 },
                 {}
               ),
-            }
+            })
             return acc
           },
           {}
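The assertion above was loosened from a literal object to expect.objectContaining plus allowUndefined, so extra keys coming back from the datasource no longer fail the comparison. The difference in isolation, with a hypothetical field object:

    it("tolerates extra keys on a fetched definition", () => {
      const field = { name: "name", externalType: "varchar", extra: "ignored" }
      // passes: only the listed keys are checked
      expect(field).toEqual(expect.objectContaining({ name: "name" }))
      // a literal expect(field).toEqual({ name: "name" }) would fail on the extra keys
    })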
@@ -41,6 +41,7 @@ import { dataFilters } from "@budibase/shared-core"
 import { Knex } from "knex"
 import { structures } from "@budibase/backend-core/tests"
 import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"
+import { generateRowIdField } from "../../../integrations/utils"
 
 describe.each([
   ["in-memory", undefined],
@@ -2355,6 +2356,35 @@ describe.each([
     })
   })
 
+  describe("Invalid column definitions", () => {
+    beforeAll(async () => {
+      // need to create an invalid table - means ignoring typescript
+      table = await createTable({
+        // @ts-ignore
+        invalid: {
+          type: FieldType.STRING,
+        },
+        name: {
+          name: "name",
+          type: FieldType.STRING,
+        },
+      })
+      await createRows([
+        { name: "foo", invalid: "id1" },
+        { name: "bar", invalid: "id2" },
+      ])
+    })
+
+    it("can get rows with all table data", async () => {
+      await expectSearch({
+        query: {},
+      }).toContain([
+        { name: "foo", invalid: "id1" },
+        { name: "bar", invalid: "id2" },
+      ])
+    })
+  })
+
   describe.each(["data_name_test", "name_data_test", "name_test_data_"])(
     "special (%s) case",
     column => {
@@ -2621,6 +2651,42 @@ describe.each([
       })
     })
 
+  !isInternal &&
+    describe("search by composite key", () => {
+      beforeAll(async () => {
+        table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            schema: {
+              idColumn1: {
+                name: "idColumn1",
+                type: FieldType.NUMBER,
+              },
+              idColumn2: {
+                name: "idColumn2",
+                type: FieldType.NUMBER,
+              },
+            },
+            primary: ["idColumn1", "idColumn2"],
+          })
+        )
+        await createRows([{ idColumn1: 1, idColumn2: 2 }])
+      })
+
+      it("can filter by the row ID with limit 1", async () => {
+        await expectSearch({
+          query: {
+            equal: { _id: generateRowIdField([1, 2]) },
+          },
+          limit: 1,
+        }).toContain([
+          {
+            idColumn1: 1,
+            idColumn2: 2,
+          },
+        ])
+      })
+    })
+
   isSql &&
     describe("pagination edge case with relationships", () => {
       let mainRows: Row[] = []
@@ -184,3 +184,7 @@ export const runInProd = async (func: any) => {
   env._set("NODE_ENV", nodeEnv)
   env._set("JEST_WORKER_ID", workerId)
 }
+
+export function allowUndefined(expectation: jest.Expect) {
+  return expect.toBeOneOf([expectation, undefined, null])
+}
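allowUndefined wraps a matcher so the field may also be missing entirely; it relies on jest-extended's toBeOneOf being registered through expect.extend (see the test setup change further down). A hedged usage sketch:

    it("accepts a matching value, undefined or null", () => {
      const field = { autocolumn: undefined }
      expect(field).toEqual({ autocolumn: allowUndefined(expect.any(Boolean)) })
    })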
@@ -15,6 +15,7 @@ import { helpers, utils } from "@budibase/shared-core"
 import { pipeline } from "stream/promises"
 import tmp from "tmp"
 import fs from "fs"
+import { merge, cloneDeep } from "lodash"
 
 type PrimitiveTypes =
   | FieldType.STRING
@@ -291,10 +292,16 @@ function copyExistingPropsOver(
       const fetchedColumnDefinition: FieldSchema | undefined =
         table.schema[key]
       table.schema[key] = {
-        ...existingTableSchema[key],
+        // merge the properties - anything missing will be filled in, old definition preferred
+        // have to clone due to the way merge works
+        ...merge(
+          cloneDeep(fetchedColumnDefinition),
+          existingTableSchema[key]
+        ),
+        // always take externalType and autocolumn from the fetched definition
         externalType:
           existingTableSchema[key].externalType ||
-          table.schema[key]?.externalType,
+          fetchedColumnDefinition?.externalType,
         autocolumn: fetchedColumnDefinition?.autocolumn,
       } as FieldSchema
       // check constraints which can be fetched from the DB (they could be updated)
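The cloneDeep matters because lodash's merge mutates its first argument: cloning the fetched definition keeps the raw column metadata intact while still letting the stored definition win on conflicting keys. In isolation, with hypothetical objects:

    import { merge, cloneDeep } from "lodash"

    const fetched = { type: "string", externalType: "varchar(255)" }
    const stored = { type: "string", constraints: { presence: true } }

    // merge(fetched, stored) would overwrite `fetched` in place; merging into a
    // clone leaves it untouched while producing the combined definition
    const combined = merge(cloneDeep(fetched), stored)
    // combined: { type: "string", externalType: "varchar(255)", constraints: { presence: true } }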
@@ -73,13 +73,14 @@ function buildInternalFieldList(
     fieldList = fieldList.concat(
       PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
     )
-  for (let col of Object.values(table.schema)) {
+  for (let key of Object.keys(table.schema)) {
+    const col = table.schema[key]
     const isRelationship = col.type === FieldType.LINK
     if (!opts?.relationships && isRelationship) {
       continue
     }
     if (!isRelationship) {
-      fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
+      fieldList.push(`${table._id}.${mapToUserColumn(key)}`)
     } else {
       const linkCol = col as RelationshipFieldMetadata
       const relatedTable = tables.find(table => table._id === linkCol.tableId)
@@ -12,6 +12,7 @@ import {
   Table,
   TableSchema,
   SqlClient,
+  ArrayOperator,
 } from "@budibase/types"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { Format } from "../../../api/controllers/view/exporters"
@@ -311,3 +312,8 @@ function validateTimeOnlyField(
 
   return res
 }
+
+// type-guard check
+export function isArrayFilter(operator: any): operator is ArrayOperator {
+  return Object.values(ArrayOperator).includes(operator)
+}
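isArrayFilter is a conventional string-enum type guard: Object.values of the enum yields the set of valid operator strings, and the `operator is ArrayOperator` return type lets TypeScript narrow the value at the call site, which is what ExternalRequest does with operatorType above. The same pattern with a simplified stand-in enum:

    enum ExampleOperator {
      ONE_OF = "oneOf",
      CONTAINS = "contains",
    }

    function isExampleOperator(operator: any): operator is ExampleOperator {
      return Object.values(ExampleOperator).includes(operator)
    }

    const op: string = "oneOf"
    if (isExampleOperator(op)) {
      // op is narrowed to ExampleOperator inside this branch
      console.log(`array operator: ${op}`)
    }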
@@ -1,8 +1,10 @@
 import env from "../environment"
+import * as matchers from "jest-extended"
 import { env as coreEnv, timers } from "@budibase/backend-core"
 import { testContainerUtils } from "@budibase/backend-core/tests"
 import nock from "nock"
 
+expect.extend(matchers)
 if (!process.env.CI) {
   // set a longer timeout in dev for debugging 100 seconds
   jest.setTimeout(100 * 1000)
@@ -146,7 +146,8 @@ export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}
 
-    Object.entries(row).forEach(([columnName, columnData]) => {
+    Object.keys(row).forEach(columnName => {
+      const columnData = row[columnName]
       const schema = table.schema
       if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
yarn.lock (12 changed lines)
@@ -13614,7 +13614,7 @@ jest-config@^29.7.0:
     slash "^3.0.0"
     strip-json-comments "^3.1.1"
 
-"jest-diff@>=29.4.3 < 30", jest-diff@^29.4.1, jest-diff@^29.7.0:
+"jest-diff@>=29.4.3 < 30", jest-diff@^29.0.0, jest-diff@^29.4.1, jest-diff@^29.7.0:
   version "29.7.0"
   resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a"
   integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==
@@ -13673,12 +13673,20 @@ jest-environment-node@^29.7.0:
     jest-mock "^29.7.0"
     jest-util "^29.7.0"
 
+jest-extended@^4.0.2:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/jest-extended/-/jest-extended-4.0.2.tgz#d23b52e687cedf66694e6b2d77f65e211e99e021"
+  integrity sha512-FH7aaPgtGYHc9mRjriS0ZEHYM5/W69tLrFTIdzm+yJgeoCmmrSB/luSfMSqWP9O29QWHPEmJ4qmU6EwsZideog==
+  dependencies:
+    jest-diff "^29.0.0"
+    jest-get-type "^29.0.0"
+
 jest-get-type@^26.3.0:
   version "26.3.0"
   resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-26.3.0.tgz#e97dc3c3f53c2b406ca7afaed4493b1d099199e0"
   integrity sha512-TpfaviN1R2pQWkIihlfEanwOXK0zcxrKEE4MlU6Tn7keoXdN6/3gK/xl0yEh8DOunn5pOVGKf8hB4R9gVh04ig==
 
-jest-get-type@^29.6.3:
+jest-get-type@^29.0.0, jest-get-type@^29.6.3:
   version "29.6.3"
   resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1"
   integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==