Merge pull request #14959 from Budibase/flag-ai-correctly-for-premium

Flag AI correctly for premium
Martin McKeaveney 2024-11-04 23:30:06 +00:00 committed by GitHub
commit 2d8a321421
6 changed files with 54 additions and 21 deletions

View File

@@ -9,7 +9,7 @@
   } from "@budibase/bbui"
   import { onMount, createEventDispatcher } from "svelte"
   import { flags } from "stores/builder"
-  import { featureFlags } from "stores/portal"
+  import { featureFlags, licensing } from "stores/portal"
   import { API } from "api"
   import MagicWand from "../../../../assets/MagicWand.svelte"
@@ -26,7 +26,9 @@
   let aiCronPrompt = ""
   let loadingAICronExpression = false
-  $: aiEnabled = $featureFlags.AI_CUSTOM_CONFIGS || $featureFlags.BUDIBASE_AI
+  $: aiEnabled =
+    ($featureFlags.AI_CUSTOM_CONFIGS && $licensing.customAIConfigsEnabled) ||
+    ($featureFlags.BUDIBASE_AI && $licensing.budibaseAIEnabled)
   $: {
     if (cronExpression) {
       try {

View File

@@ -1,7 +1,7 @@
 import { it, expect, describe, vi } from "vitest"
 import AISettings from "./index.svelte"
 import { render, fireEvent } from "@testing-library/svelte"
-import { admin, licensing } from "stores/portal"
+import { admin, licensing, featureFlags } from "stores/portal"
 import { notifications } from "@budibase/bbui"
 
 vi.spyOn(notifications, "error").mockImplementation(vi.fn)
@@ -12,12 +12,17 @@ const Hosting = {
   Self: "self",
 }
 
-function setupEnv(hosting, features = {}) {
+function setupEnv(hosting, features = {}, flags = {}) {
   const defaultFeatures = {
     budibaseAIEnabled: false,
     customAIConfigsEnabled: false,
     ...features,
   }
+  const defaultFlags = {
+    BUDIBASE_AI: false,
+    AI_CUSTOM_CONFIGS: false,
+    ...flags,
+  }
   admin.subscribe = vi.fn().mockImplementation(callback => {
     callback({ cloud: hosting === Hosting.Cloud })
     return () => {}
@@ -26,6 +31,10 @@ function setupEnv(hosting, features = {}) {
     callback(defaultFeatures)
     return () => {}
   })
+  featureFlags.subscribe = vi.fn().mockImplementation(callback => {
+    callback(defaultFlags)
+    return () => {}
+  })
 }
 
 describe("AISettings", () => {
@@ -72,7 +81,11 @@ describe("AISettings", () => {
       let addConfigurationButton
       let configModal
 
-      setupEnv(Hosting.Cloud, { customAIConfigsEnabled: true })
+      setupEnv(
+        Hosting.Cloud,
+        { customAIConfigsEnabled: true },
+        { AI_CUSTOM_CONFIGS: true }
+      )
       instance = render(AISettings)
       addConfigurationButton = instance.queryByText("Add configuration")
       expect(addConfigurationButton).toBeInTheDocument()
@@ -85,7 +98,11 @@ describe("AISettings", () => {
       let addConfigurationButton
      let configModal
 
-      setupEnv(Hosting.Self, { customAIConfigsEnabled: true })
+      setupEnv(
+        Hosting.Self,
+        { customAIConfigsEnabled: true },
+        { AI_CUSTOM_CONFIGS: true }
+      )
       instance = render(AISettings)
       addConfigurationButton = instance.queryByText("Add configuration")
       expect(addConfigurationButton).toBeInTheDocument()
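
The widened setupEnv signature keeps licence entitlements and feature flags as separate inputs, matching the new "flag AND licence" gating in the component. A minimal extra usage sketch, not part of this diff, assuming the page keeps hiding the button whenever the combined check is false:

    // Hypothetical case: flag on, licence entitlement still off
    setupEnv(Hosting.Self, { customAIConfigsEnabled: false }, { AI_CUSTOM_CONFIGS: true })
    const instance = render(AISettings)
    // With customAIConfigsEnabled gated on flag && licence, the button should stay hidden
    expect(instance.queryByText("Add configuration")).not.toBeInTheDocument()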

View File

@@ -12,7 +12,7 @@
     Tags,
     Tag,
   } from "@budibase/bbui"
-  import { admin, licensing } from "stores/portal"
+  import { admin, licensing, featureFlags } from "stores/portal"
   import { API } from "api"
   import AIConfigModal from "./ConfigModal.svelte"
   import AIConfigTile from "./AIConfigTile.svelte"
@@ -27,7 +27,8 @@
   let editingUuid
 
   $: isCloud = $admin.cloud
-  $: customAIConfigsEnabled = $licensing.customAIConfigsEnabled
+  $: customAIConfigsEnabled =
+    $featureFlags.AI_CUSTOM_CONFIGS && $licensing.customAIConfigsEnabled
 
   async function fetchAIConfig() {
     try {

View File

@@ -1,8 +1,8 @@
 <script>
   import { redirect } from "@roxi/routify"
-  import { licensing } from "stores/portal"
+  import { licensing, featureFlags } from "stores/portal"
 
-  if ($licensing.customAIConfigsEnabled) {
+  if ($featureFlags.AI_CUSTOM_CONFIGS && $licensing.customAIConfigsEnabled) {
     $redirect("./ai")
   } else {
     $redirect("./auth")

View File

@@ -4,8 +4,15 @@ import {
   processAIColumns,
   processFormulas,
 } from "../../../utilities/rowProcessor"
-import { context } from "@budibase/backend-core"
-import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
+import { context, features } from "@budibase/backend-core"
+import {
+  Table,
+  Row,
+  FeatureFlag,
+  FormulaType,
+  FieldType,
+  ViewV2,
+} from "@budibase/types"
 import * as linkRows from "../../../db/linkedRows"
 import isEqual from "lodash/isEqual"
 import { cloneDeep } from "lodash/fp"
@@ -145,8 +152,10 @@ export async function finaliseRow(
     contextRows: [enrichedRow],
   })
   const aiEnabled =
-    (await pro.features.isBudibaseAIEnabled()) ||
-    (await pro.features.isAICustomConfigsEnabled())
+    ((await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
+      (await pro.features.isBudibaseAIEnabled())) ||
+    ((await features.flags.isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) &&
+      (await pro.features.isAICustomConfigsEnabled()))
   if (aiEnabled) {
     row = await processAIColumns(table, row, {
       contextRows: [enrichedRow],
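
Both server call sites (row finalisation here and the OpenAI automation step below) now apply the same two-part gate: the tenant feature flag must be enabled and the licence must include the feature. A minimal sketch of that pattern pulled out into a standalone helper; the helper name is hypothetical, while the individual calls are the ones used in the diff:

    import { features } from "@budibase/backend-core"
    import { FeatureFlag } from "@budibase/types"
    import * as pro from "@budibase/pro"

    // Hypothetical helper: AI runs only when a feature flag AND the matching
    // licence entitlement are both satisfied for the current tenant.
    async function isAIEnabled(): Promise<boolean> {
      const budibaseAI =
        (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
        (await pro.features.isBudibaseAIEnabled())
      const customConfigs =
        (await features.flags.isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) &&
        (await pro.features.isAICustomConfigsEnabled())
      return budibaseAI || customConfigs
    }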

View File

@@ -7,17 +7,17 @@ import {
   AutomationIOType,
   OpenAIStepInputs,
   OpenAIStepOutputs,
+  FeatureFlag,
 } from "@budibase/types"
-import { env } from "@budibase/backend-core"
+import { env, features } from "@budibase/backend-core"
 import * as automationUtils from "../automationUtils"
 import * as pro from "@budibase/pro"
 
 enum Model {
-  GPT_35_TURBO = "gpt-3.5-turbo",
-  // will only work with api keys that have access to the GPT4 API
-  GPT_4 = "gpt-4",
-  GPT_4O = "gpt-4o",
   GPT_4O_MINI = "gpt-4o-mini",
+  GPT_4O = "gpt-4o",
+  GPT_4 = "gpt-4",
+  GPT_35_TURBO = "gpt-3.5-turbo",
 }
 
 export const definition: AutomationStepDefinition = {
@@ -99,8 +99,12 @@ export async function run({
   try {
     let response
-    const customConfigsEnabled = await pro.features.isAICustomConfigsEnabled()
-    const budibaseAIEnabled = await pro.features.isBudibaseAIEnabled()
+    const customConfigsEnabled =
+      (await features.flags.isEnabled(FeatureFlag.AI_CUSTOM_CONFIGS)) &&
+      (await pro.features.isAICustomConfigsEnabled())
+    const budibaseAIEnabled =
+      (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
+      (await pro.features.isBudibaseAIEnabled())
     if (budibaseAIEnabled || customConfigsEnabled) {
       const llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)