Merge remote-tracking branch 'origin/master' into automation-branching-ux-updates
commit 5f31166bf3
@@ -13,7 +13,6 @@ on:
        options:
          - patch
          - minor
          - major
        required: true

jobs:
@@ -1,6 +1,6 @@
{
  "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.0.3",
+  "version": "3.1.0",
  "npmClient": "yarn",
  "packages": [
    "packages/*",
@@ -272,7 +272,6 @@ export const flags = new FlagSet({
  [FeatureFlag.SQS]: Flag.boolean(true),
  [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
  [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(env.isDev()),
-  [FeatureFlag.TABLES_DEFAULT_ADMIN]: Flag.boolean(env.isDev()),
  [FeatureFlag.BUDIBASE_AI]: Flag.boolean(env.isDev()),
})
@@ -21,7 +21,7 @@
  let editing = false
  const dispatch = createEventDispatcher()

-  $: blockRefs = $selectedAutomation.blockRefs || {}
+  $: blockRefs = $selectedAutomation?.blockRefs || {}
  $: stepNames = automation?.definition.stepNames
  $: allSteps = automation?.definition.steps || []
  $: automationName = itemName || stepNames?.[block.id] || block?.name || ""
@@ -645,7 +645,7 @@
  <!-- Custom Layouts -->
  {#if stepLayouts[block.stepId]}
    {#each Object.keys(stepLayouts[block.stepId] || {}) as key}
-      {#if canShowField(key, stepLayouts[block.stepId].schema)}
+      {#if canShowField(stepLayouts[block.stepId].schema)}
        {#each stepLayouts[block.stepId][key].content as config}
          {#if config.title}
            <PropField label={config.title} labelTooltip={config.tooltip}>
@@ -670,7 +670,7 @@
  {:else}
    <!-- Default Schema Property Layout -->
    {#each schemaProperties as [key, value]}
-      {#if canShowField(key, value)}
+      {#if canShowField(value)}
        {@const label = getFieldLabel(key, value)}
        <div class:block-field={shouldRenderField(value)}>
          {#if key !== "fields" && value.type !== "boolean" && shouldRenderField(value)}
@@ -693,7 +693,7 @@
          </div>
        {/if}
        <div>
-          {#if value.type === "string" && value.enum && canShowField(key, value)}
+          {#if value.type === "string" && value.enum && canShowField(value)}
            <Select
              on:change={e => onChange({ [key]: e.detail })}
              value={inputData[key]}
@@ -65,7 +65,7 @@
  let tableOptions
  let errorChecker = new RelationshipErrorChecker(
    invalidThroughTable,
-    relationshipExists
+    manyToManyRelationshipExistsFn
  )
  let errors = {}
  let fromPrimary, fromForeign, fromColumn, toColumn
@@ -125,7 +125,7 @@
    }
    return false
  }
-  function relationshipExists() {
+  function manyToManyRelationshipExistsFn() {
    if (
      originalFromTable &&
      originalToTable &&
@@ -141,16 +141,14 @@
      datasource.entities[getTable(toId).name].schema
    ).filter(value => value.through)

-    const matchAgainstUserInput = (fromTableId, toTableId) =>
-      (fromTableId === fromId && toTableId === toId) ||
-      (fromTableId === toId && toTableId === fromId)
+    const matchAgainstUserInput = link =>
+      (link.throughTo === throughToKey &&
+        link.throughFrom === throughFromKey) ||
+      (link.throughTo === throughFromKey && link.throughFrom === throughToKey)

-    return !!fromThroughLinks.find(from =>
-      toThroughLinks.find(
-        to =>
-          from.through === to.through &&
-          matchAgainstUserInput(from.tableId, to.tableId)
-      )
+    const allLinks = [...fromThroughLinks, ...toThroughLinks]
+    return !!allLinks.find(
+      link => link.through === throughId && matchAgainstUserInput(link)
    )
  }
@@ -181,16 +179,15 @@
    relationshipType: errorChecker.relationshipTypeSet(relationshipType),
    fromTable:
      errorChecker.tableSet(fromTable) ||
-      errorChecker.doesRelationshipExists() ||
      errorChecker.differentTables(fromId, toId, throughId),
    toTable:
      errorChecker.tableSet(toTable) ||
-      errorChecker.doesRelationshipExists() ||
      errorChecker.differentTables(toId, fromId, throughId),
    throughTable:
      errorChecker.throughTableSet(throughTable) ||
      errorChecker.throughIsNullable() ||
-      errorChecker.differentTables(throughId, fromId, toId),
+      errorChecker.differentTables(throughId, fromId, toId) ||
+      errorChecker.doesRelationshipExists(),
    throughFromKey:
      errorChecker.manyForeignKeySet(throughFromKey) ||
      errorChecker.manyTypeMismatch(
@@ -198,7 +195,8 @@
        throughTable,
        fromTable.primary[0],
        throughToKey
-      ),
+      ) ||
+      errorChecker.differentColumns(throughFromKey, throughToKey),
    throughToKey:
      errorChecker.manyForeignKeySet(throughToKey) ||
      errorChecker.manyTypeMismatch(
@@ -372,6 +370,16 @@
      fromColumn = selectedFromTable.name
      fromPrimary = selectedFromTable?.primary[0] || null
    }
+    if (relationshipType === RelationshipType.MANY_TO_MANY) {
+      relationshipPart1 = PrettyRelationshipDefinitions.MANY
+      relationshipPart2 = PrettyRelationshipDefinitions.MANY
+    } else if (relationshipType === RelationshipType.MANY_TO_ONE) {
+      relationshipPart1 = PrettyRelationshipDefinitions.ONE
+      relationshipPart2 = PrettyRelationshipDefinitions.MANY
+    } else {
+      relationshipPart1 = PrettyRelationshipDefinitions.MANY
+      relationshipPart2 = PrettyRelationshipDefinitions.ONE
+    }
  })
</script>
@@ -3,6 +3,7 @@ import { RelationshipType } from "@budibase/types"
const typeMismatch = "Column type of the foreign key must match the primary key"
const columnBeingUsed = "Column name cannot be an existing column"
const mustBeDifferentTables = "From/to/through tables must be different"
+const mustBeDifferentColumns = "Foreign keys must be different"
const primaryKeyNotSet = "Please pick the primary key"
const throughNotNullable =
  "Ensure non-key columns are nullable or auto-generated"
@@ -30,9 +31,9 @@ function typeMismatchCheck(fromTable, toTable, primary, foreign) {
}

export class RelationshipErrorChecker {
-  constructor(invalidThroughTableFn, relationshipExistsFn) {
+  constructor(invalidThroughTableFn, manyToManyRelationshipExistsFn) {
    this.invalidThroughTable = invalidThroughTableFn
-    this.relationshipExists = relationshipExistsFn
+    this.manyToManyRelationshipExists = manyToManyRelationshipExistsFn
  }

  setType(type) {
@@ -72,7 +73,7 @@ export class RelationshipErrorChecker {
  }

  doesRelationshipExists() {
-    return this.isMany() && this.relationshipExists()
+    return this.isMany() && this.manyToManyRelationshipExists()
      ? relationshipAlreadyExists
      : null
  }
@@ -83,6 +84,11 @@ export class RelationshipErrorChecker {
    return error ? mustBeDifferentTables : null
  }

+  differentColumns(columnA, columnB) {
+    const error = columnA && columnB && columnA === columnB
+    return error ? mustBeDifferentColumns : null
+  }
+
  columnBeingUsed(table, column, ogName) {
    return isColumnNameBeingUsed(table, column, ogName) ? columnBeingUsed : null
  }
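Taken together, the relationship-modal and error-checker hunks above rename the generic relationshipExists callback to manyToManyRelationshipExistsFn, add a differentColumns guard, and move the "already exists" validation onto the through table. A rough sketch of how the renamed pieces fit together, reusing the modal's local state from the hunks above rather than standing alone:

// Sketch only: every identifier comes from the hunks above; the surrounding
// Svelte modal state (errors, throughTable, throughId, fromId, toId,
// throughFromKey, throughToKey) is assumed to exist as shown there.
const errorChecker = new RelationshipErrorChecker(
  invalidThroughTable,            // unchanged callback
  manyToManyRelationshipExistsFn  // renamed: only checks existing junction links
)

errors.throughTable =
  errorChecker.throughTableSet(throughTable) ||
  errorChecker.throughIsNullable() ||
  errorChecker.differentTables(throughId, fromId, toId) ||
  errorChecker.doesRelationshipExists()

errors.throughFromKey =
  errorChecker.manyForeignKeySet(throughFromKey) ||
  errorChecker.differentColumns(throughFromKey, throughToKey)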
@@ -1,8 +1,8 @@
<script>
  import { redirect } from "@roxi/routify"
-  import { licensing, featureFlags } from "stores/portal"
+  import { featureFlags } from "stores/portal"

-  if ($featureFlags.AI_CUSTOM_CONFIGS && $licensing.customAIConfigsEnabled) {
+  if ($featureFlags.AI_CUSTOM_CONFIGS) {
    $redirect("./ai")
  } else {
    $redirect("./auth")
@@ -402,7 +402,6 @@ const automationActions = store => ({
  traverse: (blockRefs, automation) => {
    let blocks = []
    if (!automation || !blockRefs) {
      console.error("Need a valid automation")
      return
    }
    if (automation.definition?.trigger) {
@@ -1 +1 @@
-Subproject commit 2ab8536b6005576684810d774f1ac22239218546
+Subproject commit 04bee88597edb1edb88ed299d0597b587f0362ec
@@ -56,6 +56,7 @@ jest.mock("@budibase/pro", () => ({
  ai: {
    LargeLanguageModel: {
      forCurrentTenant: async () => ({
+        initialised: true,
        run: jest.fn(() => `Mock LLM Response`),
        buildPromptFromAIOperation: jest.fn(),
      }),
@@ -54,6 +54,7 @@ jest.mock("@budibase/pro", () => ({
  ai: {
    LargeLanguageModel: {
      forCurrentTenant: async () => ({
+        initialised: true,
        run: jest.fn(() => `Mock LLM Response`),
        buildPromptFromAIOperation: jest.fn(),
      }),
@@ -106,21 +106,15 @@ export async function run({
    (await features.flags.isEnabled(FeatureFlag.BUDIBASE_AI)) &&
    (await pro.features.isBudibaseAIEnabled())

+  let llm
  if (budibaseAIEnabled || customConfigsEnabled) {
-    const llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
-    response = await llm.run(inputs.prompt)
-  } else {
-    // fallback to the default that uses the environment variable for backwards compat
-    if (!env.OPENAI_API_KEY) {
-      return {
-        success: false,
-        response:
-          "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
-      }
-    }
-    response = await legacyOpenAIPrompt(inputs)
+    llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
  }

+  response = llm?.initialised
+    ? await llm.run(inputs.prompt)
+    : await legacyOpenAIPrompt(inputs)

  return {
    response,
    success: true,
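The OPENAI step hunk above collapses the previous two code paths into one: the tenant LLM is only constructed when Budibase AI or custom AI configs are enabled, and the legacy OPENAI_API_KEY-based prompt remains as the fallback whenever that model never initialised. A compressed sketch of the resulting control flow, using only names visible in the diff (the rest of the step's plumbing is assumed):

// Sketch of the new shape of the OPENAI automation step, not the full implementation.
let llm
if (budibaseAIEnabled || customConfigsEnabled) {
  // Tenant-scoped model from @budibase/pro; it may report initialised: false
  // when no usable key or config is available.
  llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
}

const response = llm?.initialised
  ? await llm.run(inputs.prompt)       // pro-managed model
  : await legacyOpenAIPrompt(inputs)   // OPENAI_API_KEY fallback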
@@ -1,9 +1,6 @@
import { getConfig, runStep, afterAll as _afterAll } from "./utilities"
import { OpenAI } from "openai"
-import {
-  withEnv as withCoreEnv,
-  setEnv as setCoreEnv,
-} from "@budibase/backend-core"
+import { setEnv as setCoreEnv } from "@budibase/backend-core"
import * as pro from "@budibase/pro"

jest.mock("openai", () => ({
@@ -28,6 +25,7 @@ jest.mock("@budibase/pro", () => ({
  ai: {
    LargeLanguageModel: {
      forCurrentTenant: jest.fn().mockImplementation(() => ({
+        initialised: true,
        init: jest.fn(),
        run: jest.fn(),
      })),
@@ -63,16 +61,6 @@ describe("test the openai action", () => {

  afterAll(_afterAll)

-  it("should present the correct error message when the OPENAI_API_KEY variable isn't set", async () => {
-    await withCoreEnv({ OPENAI_API_KEY: "" }, async () => {
-      let res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
-      expect(res.response).toEqual(
-        "OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable."
-      )
-      expect(res.success).toBeFalsy()
-    })
-  })

  it("should be able to receive a response from ChatGPT given a prompt", async () => {
    const res = await runStep("OPENAI", { prompt: OPENAI_PROMPT })
    expect(res.response).toEqual("This is a test")
@@ -1,10 +1,10 @@
-import { FeatureFlag, Row, Table } from "@budibase/types"
+import { Row, Table } from "@budibase/types"

import * as external from "./external"
import * as internal from "./internal"
import { isExternal } from "./utils"
import { setPermissions } from "../permissions"
-import { features, roles } from "@budibase/backend-core"
+import { roles } from "@budibase/backend-core"

export async function create(
  table: Omit<Table, "_id" | "_rev">,
@@ -18,16 +18,10 @@ export async function create(
    createdTable = await internal.create(table, rows, userId)
  }

-  const setExplicitPermission = await features.flags.isEnabled(
-    FeatureFlag.TABLES_DEFAULT_ADMIN
-  )
-
-  if (setExplicitPermission) {
-    await setPermissions(createdTable._id!, {
-      writeRole: roles.BUILTIN_ROLE_IDS.ADMIN,
-      readRole: roles.BUILTIN_ROLE_IDS.ADMIN,
-    })
-  }
+  await setPermissions(createdTable._id!, {
+    writeRole: roles.BUILTIN_ROLE_IDS.ADMIN,
+    readRole: roles.BUILTIN_ROLE_IDS.ADMIN,
+  })

  return createdTable
}
@@ -2,7 +2,6 @@ import {
  BBReferenceFieldSubType,
  CalculationType,
  canGroupBy,
-  FeatureFlag,
  FieldType,
  isNumeric,
  PermissionLevel,
@@ -16,7 +15,7 @@ import {
  ViewV2ColumnEnriched,
  ViewV2Enriched,
} from "@budibase/types"
-import { context, docIds, features, HTTPError } from "@budibase/backend-core"
+import { context, docIds, HTTPError } from "@budibase/backend-core"
import {
  helpers,
  PROTECTED_EXTERNAL_COLUMNS,
@@ -287,17 +286,12 @@ export async function create(
  await guardViewSchema(tableId, viewRequest)
  const view = await pickApi(tableId).create(tableId, viewRequest)

-  const setExplicitPermission = await features.flags.isEnabled(
-    FeatureFlag.TABLES_DEFAULT_ADMIN
-  )
-  if (setExplicitPermission) {
-    // Set permissions to be the same as the table
-    const tablePerms = await sdk.permissions.getResourcePerms(tableId)
-    await sdk.permissions.setPermissions(view.id, {
-      writeRole: tablePerms[PermissionLevel.WRITE].role,
-      readRole: tablePerms[PermissionLevel.READ].role,
-    })
-  }
+  // Set permissions to be the same as the table
+  const tablePerms = await sdk.permissions.getResourcePerms(tableId)
+  await sdk.permissions.setPermissions(view.id, {
+    writeRole: tablePerms[PermissionLevel.WRITE].role,
+    readRole: tablePerms[PermissionLevel.READ].role,
+  })

  return view
}
@@ -400,7 +400,7 @@ class Orchestrator {
      )
    } catch (err) {
      this.updateContextAndOutput(
-        pathStepIdx,
+        pathStepIdx + 1,
        steps[stepToLoopIndex],
        {},
        {
@@ -420,7 +420,7 @@ class Orchestrator {
      (loopStep.inputs.iterations && loopStepIndex === maxIterations)
    ) {
      this.updateContextAndOutput(
-        pathStepIdx,
+        pathStepIdx + 1,
        steps[stepToLoopIndex],
        {
          items: this.loopStepOutputs,
@@ -447,7 +447,7 @@ class Orchestrator {

    if (isFailure) {
      this.updateContextAndOutput(
-        pathStepIdx,
+        pathStepIdx + 1,
        steps[stepToLoopIndex],
        {
          items: this.loopStepOutputs,
@@ -18,6 +18,7 @@ jest.mock("@budibase/pro", () => ({
  ai: {
    LargeLanguageModel: {
      forCurrentTenant: async () => ({
+        initialised: true,
        run: jest.fn(() => "response from LLM"),
        buildPromptFromAIOperation: buildPromptMock,
      }),
@@ -108,7 +108,7 @@ export async function processAIColumns<T extends Row | Row[]>(
    span?.addTags({ table_id: table._id, numRows })
    const rows = Array.isArray(inputRows) ? inputRows : [inputRows]
    const llm = await pro.ai.LargeLanguageModel.forCurrentTenant("gpt-4o-mini")
-    if (rows) {
+    if (rows && llm.initialised) {
      // Ensure we have snippet context
      await context.ensureSnippetContext()
@@ -6,7 +6,6 @@ export enum FeatureFlag {
  AI_CUSTOM_CONFIGS = "AI_CUSTOM_CONFIGS",
  DEFAULT_VALUES = "DEFAULT_VALUES",
  ENRICHED_RELATIONSHIPS = "ENRICHED_RELATIONSHIPS",
-  TABLES_DEFAULT_ADMIN = "TABLES_DEFAULT_ADMIN",
  BUDIBASE_AI = "BUDIBASE_AI",
}
@@ -12,27 +12,29 @@ import {
} from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users"
import {
+  AIConfig,
+  AIInnerConfig,
  Config,
  ConfigType,
  Ctx,
  GetPublicOIDCConfigResponse,
  GetPublicSettingsResponse,
  GoogleInnerConfig,
+  isAIConfig,
  isGoogleConfig,
  isOIDCConfig,
  isSettingsConfig,
  isSMTPConfig,
  OIDCConfigs,
+  OIDCLogosConfig,
+  PASSWORD_REPLACEMENT,
+  QuotaUsageType,
  SettingsBrandingConfig,
  SettingsInnerConfig,
  SSOConfig,
  SSOConfigType,
+  StaticQuotaName,
  UserCtx,
-  OIDCLogosConfig,
-  AIConfig,
-  PASSWORD_REPLACEMENT,
-  isAIConfig,
-  AIInnerConfig,
} from "@budibase/types"
import * as pro from "@budibase/pro"
@@ -83,6 +85,12 @@ const getEventFns = async (config: Config, existing?: Config) => {
      fns.push(events.email.SMTPUpdated)
    } else if (isAIConfig(config)) {
      fns.push(() => events.ai.AIConfigUpdated)
+      if (
+        Object.keys(existing.config).length > Object.keys(config.config).length
+      ) {
+        fns.push(() => pro.quotas.removeCustomAIConfig())
+      }
+      fns.push(() => pro.quotas.addCustomAIConfig())
    } else if (isGoogleConfig(config)) {
      fns.push(() => events.auth.SSOUpdated(ConfigType.GOOGLE))
      if (!existing.config.activated && config.config.activated) {
@@ -248,7 +256,6 @@ export async function save(ctx: UserCtx<Config>) {
        if (existingConfig) {
          await verifyAIConfig(config, existingConfig)
        }
-        await pro.quotas.addCustomAIConfig()
        break
      }
  } catch (err: any) {
@@ -518,7 +525,11 @@ export async function destroy(ctx: UserCtx) {
    await db.remove(id, rev)
    await cache.destroy(cache.CacheKey.CHECKLIST)
    if (id === configs.generateConfigID(ConfigType.AI)) {
-      await pro.quotas.removeCustomAIConfig()
+      await pro.quotas.set(
+        StaticQuotaName.AI_CUSTOM_CONFIGS,
+        QuotaUsageType.STATIC,
+        0
+      )
    }
    ctx.body = { message: "Config deleted successfully" }
  } catch (err: any) {