Merge branch 'master' of github.com:Budibase/budibase into feature/role-multi-inheritance

mike12345567 2024-09-24 16:33:45 +01:00
commit 36587691d2
41 changed files with 668 additions and 489 deletions

View File

@ -108,7 +108,7 @@ jobs:
- name: Pull testcontainers images
run: |
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.3.3 &
docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
docker pull redis &
wait $(jobs -p)
@ -179,7 +179,7 @@ jobs:
docker pull minio/minio &
docker pull redis &
docker pull testcontainers/ryuk:0.5.1 &
docker pull budibase/couchdb:v3.3.3 &
docker pull budibase/couchdb:v3.3.3-sqs-v2.1.1 &
wait $(jobs -p)

View File

@ -641,7 +641,7 @@ couchdb:
# @ignore
repository: budibase/couchdb
# @ignore
tag: v3.3.3
tag: v3.3.3-sqs-v2.1.1
# @ignore
pullPolicy: Always

View File

@ -10,7 +10,7 @@
},
"dependencies": {
"bulma": "^0.9.3",
"next": "14.1.1",
"next": "14.2.10",
"node-fetch": "^3.2.10",
"sass": "^1.52.3",
"react": "17.0.2",

View File

@ -46,10 +46,10 @@
resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
"@next/env@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.1.tgz#80150a8440eb0022a73ba353c6088d419b908bac"
integrity sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA==
"@next/env@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.10.tgz#1d3178340028ced2d679f84140877db4f420333c"
integrity sha512-dZIu93Bf5LUtluBXIv4woQw2cZVZ2DJTjax5/5DOs3lzEOeKLy7GxRSr4caK9/SCPdaW6bCgpye6+n4Dh9oJPw==
"@next/eslint-plugin-next@12.1.0":
version "12.1.0"
@ -58,50 +58,50 @@
dependencies:
glob "7.1.7"
"@next/swc-darwin-arm64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz#b74ba7c14af7d05fa2848bdeb8ee87716c939b64"
integrity sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==
"@next/swc-darwin-arm64@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.10.tgz#49d10ca4086fbd59ee68e204f75d7136eda2aa80"
integrity sha512-V3z10NV+cvMAfxQUMhKgfQnPbjw+Ew3cnr64b0lr8MDiBJs3eLnM6RpGC46nhfMZsiXgQngCJKWGTC/yDcgrDQ==
"@next/swc-darwin-x64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz#82c3e67775e40094c66e76845d1a36cc29c9e78b"
integrity sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==
"@next/swc-darwin-x64@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.10.tgz#0ebeae3afb8eac433882b79543295ab83624a1a8"
integrity sha512-Y0TC+FXbFUQ2MQgimJ/7Ina2mXIKhE7F+GUe1SgnzRmwFY3hX2z8nyVCxE82I2RicspdkZnSWMn4oTjIKz4uzA==
"@next/swc-linux-arm64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz#4f4134457b90adc5c3d167d07dfb713c632c0caa"
integrity sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==
"@next/swc-linux-arm64-gnu@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.10.tgz#7e602916d2fb55a3c532f74bed926a0137c16f20"
integrity sha512-ZfQ7yOy5zyskSj9rFpa0Yd7gkrBnJTkYVSya95hX3zeBG9E55Z6OTNPn1j2BTFWvOVVj65C3T+qsjOyVI9DQpA==
"@next/swc-linux-arm64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz#594bedafaeba4a56db23a48ffed2cef7cd09c31a"
integrity sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==
"@next/swc-linux-arm64-musl@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.10.tgz#6b143f628ccee490b527562e934f8de578d4be47"
integrity sha512-n2i5o3y2jpBfXFRxDREr342BGIQCJbdAUi/K4q6Env3aSx8erM9VuKXHw5KNROK9ejFSPf0LhoSkU/ZiNdacpQ==
"@next/swc-linux-x64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz#cb4e75f1ff2b9bcadf2a50684605928ddfc58528"
integrity sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==
"@next/swc-linux-x64-gnu@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.10.tgz#086f2f16a0678890a1eb46518c4dda381b046082"
integrity sha512-GXvajAWh2woTT0GKEDlkVhFNxhJS/XdDmrVHrPOA83pLzlGPQnixqxD8u3bBB9oATBKB//5e4vpACnx5Vaxdqg==
"@next/swc-linux-x64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz#15f26800df941b94d06327f674819ab64b272e25"
integrity sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==
"@next/swc-linux-x64-musl@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.10.tgz#1befef10ed8dbcc5047b5d637a25ae3c30a0bfc3"
integrity sha512-opFFN5B0SnO+HTz4Wq4HaylXGFV+iHrVxd3YvREUX9K+xfc4ePbRrxqOuPOFjtSuiVouwe6uLeDtabjEIbkmDA==
"@next/swc-win32-arm64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz#060c134fa7fa843666e3e8574972b2b723773dd9"
integrity sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==
"@next/swc-win32-arm64-msvc@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.10.tgz#731f52c3ae3c56a26cf21d474b11ae1529531209"
integrity sha512-9NUzZuR8WiXTvv+EiU/MXdcQ1XUvFixbLIMNQiVHuzs7ZIFrJDLJDaOF1KaqttoTujpcxljM/RNAOmw1GhPPQQ==
"@next/swc-win32-ia32-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz#5c06889352b1f77e3807834a0d0afd7e2d2d1da2"
integrity sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==
"@next/swc-win32-ia32-msvc@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.10.tgz#32723ef7f04e25be12af357cc72ddfdd42fd1041"
integrity sha512-fr3aEbSd1GeW3YUMBkWAu4hcdjZ6g4NBl1uku4gAn661tcxd1bHs1THWYzdsbTRLcCKLjrDZlNp6j2HTfrw+Bg==
"@next/swc-win32-x64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz#d38c63a8f9b7f36c1470872797d3735b4a9c5c52"
integrity sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==
"@next/swc-win32-x64-msvc@14.2.10":
version "14.2.10"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.10.tgz#ee1d036cb5ec871816f96baee7991035bb242455"
integrity sha512-UjeVoRGKNL2zfbcQ6fscmgjBAS/inHBh63mjIlfPg/NG8Yn2ztqylXt5qilYb6hoHIwaU2ogHknHWWmahJjgZQ==
"@nodelib/fs.scandir@2.1.5":
version "2.1.5"
@ -129,11 +129,17 @@
resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.0.tgz#7f698254aadf921e48dda8c0a6b304026b8a9323"
integrity sha512-JLo+Y592QzIE+q7Dl2pMUtt4q8SKYI5jDrZxrozEQxnGVOyYE+GWK9eLkwTaeN9DDctlaRAQ3TBmzZ1qdLE30A==
"@swc/helpers@0.5.2":
version "0.5.2"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d"
integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==
"@swc/counter@^0.1.3":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.3.tgz#cc7463bd02949611c6329596fccd2b0ec782b0e9"
integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==
"@swc/helpers@0.5.5":
version "0.5.5"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.5.tgz#12689df71bfc9b21c4f4ca00ae55f2f16c8b77c0"
integrity sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==
dependencies:
"@swc/counter" "^0.1.3"
tslib "^2.4.0"
"@types/json5@^0.0.29":
@ -1245,28 +1251,28 @@ natural-compare@^1.4.0:
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
next@14.1.1:
version "14.1.1"
resolved "https://registry.yarnpkg.com/next/-/next-14.1.1.tgz#92bd603996c050422a738e90362dff758459a171"
integrity sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==
next@14.2.10:
version "14.2.10"
resolved "https://registry.yarnpkg.com/next/-/next-14.2.10.tgz#331981a4fecb1ae8af1817d4db98fc9687ee1cb6"
integrity sha512-sDDExXnh33cY3RkS9JuFEKaS4HmlWmDKP1VJioucCG6z5KuA008DPsDZOzi8UfqEk3Ii+2NCQSJrfbEWtZZfww==
dependencies:
"@next/env" "14.1.1"
"@swc/helpers" "0.5.2"
"@next/env" "14.2.10"
"@swc/helpers" "0.5.5"
busboy "1.6.0"
caniuse-lite "^1.0.30001579"
graceful-fs "^4.2.11"
postcss "8.4.31"
styled-jsx "5.1.1"
optionalDependencies:
"@next/swc-darwin-arm64" "14.1.1"
"@next/swc-darwin-x64" "14.1.1"
"@next/swc-linux-arm64-gnu" "14.1.1"
"@next/swc-linux-arm64-musl" "14.1.1"
"@next/swc-linux-x64-gnu" "14.1.1"
"@next/swc-linux-x64-musl" "14.1.1"
"@next/swc-win32-arm64-msvc" "14.1.1"
"@next/swc-win32-ia32-msvc" "14.1.1"
"@next/swc-win32-x64-msvc" "14.1.1"
"@next/swc-darwin-arm64" "14.2.10"
"@next/swc-darwin-x64" "14.2.10"
"@next/swc-linux-arm64-gnu" "14.2.10"
"@next/swc-linux-arm64-musl" "14.2.10"
"@next/swc-linux-x64-gnu" "14.2.10"
"@next/swc-linux-x64-musl" "14.2.10"
"@next/swc-win32-arm64-msvc" "14.2.10"
"@next/swc-win32-ia32-msvc" "14.2.10"
"@next/swc-win32-x64-msvc" "14.2.10"
node-domexception@^1.0.0:
version "1.0.0"

View File

@ -46,7 +46,7 @@ export default async function setup() {
await killContainers(containers)
try {
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3")
const couchdb = new GenericContainer("budibase/couchdb:v3.3.3-sqs-v2.1.1")
.withExposedPorts(5984, 4984)
.withEnvironment({
COUCHDB_PASSWORD: "budibase",

View File

@ -1,4 +1,4 @@
ARG BASEIMG=budibase/couchdb:v3.3.3
ARG BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1
FROM node:20-slim as build
# install node-gyp dependencies

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.6",
"version": "2.32.7",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@ -11,6 +11,7 @@ export interface DeletedApp {
}
const EXPIRY_SECONDS = 3600
const INVALID_EXPIRY_SECONDS = 60
/**
* The default populate app metadata function
@ -48,9 +49,8 @@ export async function getAppMetadata(appId: string): Promise<App | DeletedApp> {
// app DB left around, but no metadata, it is invalid
if (err && err.status === 404) {
metadata = { state: AppState.INVALID }
// don't expire the reference to an invalid app, it'll only be
// updated if a metadata doc actually gets stored (app is remade/reverted)
expiry = undefined
// expire invalid apps regularly, in case it was only briefly invalid
expiry = INVALID_EXPIRY_SECONDS
} else {
throw err
}

View File

@ -1,4 +1,5 @@
import {
AIConfig,
Config,
ConfigType,
GoogleConfig,
@ -254,3 +255,9 @@ export async function getSCIMConfig(): Promise<SCIMInnerConfig | undefined> {
const config = await getConfig<SCIMConfig>(ConfigType.SCIM)
return config?.config
}
// AI
export async function getAIConfig(): Promise<AIConfig | undefined> {
return getConfig<AIConfig>(ConfigType.AI)
}

View File

@ -43,6 +43,9 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
}
type DBCall<T> = () => Promise<T>
type DBCallback<T> = (
db: Nano.DocumentScope<any>
) => Promise<DBCall<T>> | DBCall<T>
class CouchDBError extends Error implements DBError {
status: number
@ -171,8 +174,8 @@ export class DatabaseImpl implements Database {
}
// this function fetches the DB and handles DB creation if needed
private async performCall<T>(
call: (db: Nano.DocumentScope<any>) => Promise<DBCall<T>> | DBCall<T>
private async performCallWithDBCreation<T>(
call: DBCallback<T>
): Promise<any> {
const db = this.getDb()
const fnc = await call(db)
@ -181,13 +184,24 @@ export class DatabaseImpl implements Database {
} catch (err: any) {
if (err.statusCode === 404 && err.reason === DATABASE_NOT_FOUND) {
await this.checkAndCreateDb()
return await this.performCall(call)
return await this.performCallWithDBCreation(call)
}
// strip the error down to the props which are safe/useful, drop everything else
throw new CouchDBError(`CouchDB error: ${err.message}`, err)
}
}
private async performCall<T>(call: DBCallback<T>): Promise<any> {
const db = this.getDb()
const fnc = await call(db)
try {
return await fnc()
} catch (err: any) {
// strip the error down to the props which are safe/useful, drop everything else
throw new CouchDBError(`CouchDB error: ${err.message}`, err)
}
}
async get<T extends Document>(id?: string): Promise<T> {
return this.performCall(db => {
if (!id) {
@ -227,6 +241,7 @@ export class DatabaseImpl implements Database {
}
async remove(idOrDoc: string | Document, rev?: string) {
// not a read call - but don't create a DB to delete a document
return this.performCall(db => {
let _id: string
let _rev: string
@ -286,7 +301,7 @@ export class DatabaseImpl implements Database {
if (!document._id) {
throw new Error("Cannot store document without _id field.")
}
return this.performCall(async db => {
return this.performCallWithDBCreation(async db => {
if (!document.createdAt) {
document.createdAt = new Date().toISOString()
}
@ -309,7 +324,7 @@ export class DatabaseImpl implements Database {
async bulkDocs(documents: AnyDocument[]) {
const now = new Date().toISOString()
return this.performCall(db => {
return this.performCallWithDBCreation(db => {
return () =>
db.bulk({
docs: documents.map(d => ({ createdAt: now, ...d, updatedAt: now })),
@ -321,7 +336,21 @@ export class DatabaseImpl implements Database {
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
return this.performCall(db => {
return () => db.list(params)
return async () => {
try {
return (await db.list(params)) as AllDocsResponse<T>
} catch (err: any) {
if (err.reason === DATABASE_NOT_FOUND) {
return {
offset: 0,
total_rows: 0,
rows: [],
}
} else {
throw err
}
}
}
})
}

View File

@ -150,6 +150,7 @@ class InternalBuilder {
return `"${str}"`
case SqlClient.MS_SQL:
return `[${str}]`
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
return `\`${str}\``
}
@ -559,7 +560,10 @@ class InternalBuilder {
)}${wrap}, FALSE)`
)
})
} else if (this.client === SqlClient.MY_SQL) {
} else if (
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, (q, key, value) => {
return q[rawFnc](
@ -930,7 +934,8 @@ class InternalBuilder {
}
const relatedTable = meta.tables?.[toTable]
const toAlias = aliases?.[toTable] || toTable,
fromAlias = aliases?.[fromTable] || fromTable
fromAlias = aliases?.[fromTable] || fromTable,
throughAlias = (throughTable && aliases?.[throughTable]) || throughTable
let toTableWithSchema = this.tableNameWithSchema(toTable, {
alias: toAlias,
schema: endpoint.schema,
@ -957,38 +962,36 @@ class InternalBuilder {
const primaryKey = `${toAlias}.${toPrimary || toKey}`
let subQuery: Knex.QueryBuilder = knex
.from(toTableWithSchema)
.limit(getRelationshipLimit())
// add sorting to get consistent order
.orderBy(primaryKey)
// many-to-many relationship with junction table
if (throughTable && toPrimary && fromPrimary) {
const throughAlias = aliases?.[throughTable] || throughTable
const isManyToMany = throughTable && toPrimary && fromPrimary
let correlatedTo = isManyToMany
? `${throughAlias}.${fromKey}`
: `${toAlias}.${toKey}`,
correlatedFrom = isManyToMany
? `${fromAlias}.${fromPrimary}`
: `${fromAlias}.${fromKey}`
// many-to-many relationship needs junction table join
if (isManyToMany) {
let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
alias: throughAlias,
schema: endpoint.schema,
})
subQuery = subQuery
.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
})
.where(
`${throughAlias}.${fromKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromPrimary}`))
)
}
// one-to-many relationship with foreign key
else {
subQuery = subQuery.where(
`${toAlias}.${toKey}`,
"=",
knex.raw(this.quotedIdentifier(`${fromAlias}.${fromKey}`))
)
subQuery = subQuery.join(throughTableWithSchema, function () {
this.on(`${toAlias}.${toPrimary}`, "=", `${throughAlias}.${toKey}`)
})
}
// add the correlation to the overall query
subQuery = subQuery.where(
correlatedTo,
"=",
knex.raw(this.quotedIdentifier(correlatedFrom))
)
const standardWrap = (select: string): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`)
subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
// @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({
[toAlias]: subQuery,
@ -1008,11 +1011,15 @@ class InternalBuilder {
`json_agg(json_build_object(${fieldList}))`
)
break
case SqlClient.MY_SQL:
case SqlClient.MARIADB:
// can't use the standard wrap due to correlated sub-query limitations in MariaDB
wrapperQuery = subQuery.select(
knex.raw(`json_arrayagg(json_object(${fieldList}))`)
knex.raw(
`json_arrayagg(json_object(${fieldList}) LIMIT ${getRelationshipLimit()})`
)
)
break
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))`
@ -1024,7 +1031,9 @@ class InternalBuilder {
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery.select(`${toAlias}.*`),
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})} FOR JSON PATH))`
)
break
@ -1179,7 +1188,8 @@ class InternalBuilder {
if (
this.client === SqlClient.POSTGRES ||
this.client === SqlClient.SQL_LITE ||
this.client === SqlClient.MY_SQL
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const primary = this.table.primary
if (!primary) {
@ -1326,12 +1336,11 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
_query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] {
const sqlClient = this.getSqlClient()
const config: Knex.Config = {
client: sqlClient,
client: this.getBaseSqlClient(),
}
if (sqlClient === SqlClient.SQL_LITE || sqlClient === SqlClient.ORACLE) {
config.useNullAsDefault = true
}
const client = knex(config)
let query: Knex.QueryBuilder
const builder = new InternalBuilder(sqlClient, client, json)
@ -1440,7 +1449,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
let id
if (sqlClient === SqlClient.MS_SQL) {
id = results?.[0].id
} else if (sqlClient === SqlClient.MY_SQL) {
} else if (
sqlClient === SqlClient.MY_SQL ||
sqlClient === SqlClient.MARIADB
) {
id = results?.insertId
}
row = processFn(

View File

@ -210,16 +210,27 @@ function buildDeleteTable(knex: SchemaBuilder, table: Table): SchemaBuilder {
class SqlTableQueryBuilder {
private readonly sqlClient: SqlClient
private extendedSqlClient: SqlClient | undefined
// pass through client to get flavour of SQL
constructor(client: SqlClient) {
this.sqlClient = client
}
getSqlClient(): SqlClient {
getBaseSqlClient(): SqlClient {
return this.sqlClient
}
getSqlClient(): SqlClient {
return this.extendedSqlClient || this.sqlClient
}
// if working in a database like MySQL with many variants (MariaDB)
// we can set another client which overrides the base one
setExtendedSqlClient(client: SqlClient) {
this.extendedSqlClient = client
}
/**
* @param json the input JSON structure from which an SQL query will be built.
* @return the operation that was found in the JSON.

View File

@ -102,10 +102,6 @@ export const useAppBuilders = () => {
return useFeature(Feature.APP_BUILDERS)
}
export const useViewReadonlyColumns = () => {
return useFeature(Feature.VIEW_READONLY_COLUMNS)
}
// QUOTAS
export const setAutomationLogsQuota = (value: number) => {

View File

@ -16,9 +16,11 @@
export let enableNaming = true
let validRegex = /^[A-Za-z0-9_\s]+$/
let typing = false
let editing = false
const dispatch = createEventDispatcher()
$: stepNames = $selectedAutomation?.definition.stepNames
$: allSteps = $selectedAutomation?.definition.steps || []
$: automationName = stepNames?.[block.id] || block?.name || ""
$: automationNameError = getAutomationNameError(automationName)
$: status = updateStatus(testResult)
@ -56,10 +58,18 @@
}
}
const getAutomationNameError = name => {
if (stepNames) {
const duplicateError =
"This name already exists, please enter a unique name"
if (stepNames && editing) {
for (const [key, value] of Object.entries(stepNames)) {
if (name === value && key !== block.id) {
return "This name already exists, please enter a unique name"
if (name !== block.name && name === value && key !== block.id) {
return duplicateError
}
}
for (const step of allSteps) {
if (step.id !== block.id && name === step.name) {
return duplicateError
}
}
}
@ -67,15 +77,11 @@
if (name !== block.name && name?.length > 0) {
let invalidRoleName = !validRegex.test(name)
if (invalidRoleName) {
return "Please enter a role name consisting of only alphanumeric symbols and underscores"
return "Please enter a name consisting of only alphanumeric symbols and underscores"
}
return null
}
}
const startTyping = async () => {
typing = true
return null
}
const saveName = async () => {
@ -89,13 +95,28 @@
await automationStore.actions.saveAutomationName(block.id, automationName)
}
}
const startEditing = () => {
editing = true
typing = true
}
const stopEditing = async () => {
editing = false
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
}
}
</script>
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<div
class:typing={typing && !automationNameError}
class:typing-error={automationNameError}
class:typing={typing && !automationNameError && editing}
class:typing-error={automationNameError && editing}
class="blockSection"
on:click={() => dispatch("toggle")}
>
@ -132,7 +153,7 @@
<input
class="input-text"
disabled={!enableNaming}
placeholder="Enter some text"
placeholder="Enter step name"
name="name"
autocomplete="off"
value={automationName}
@ -141,26 +162,14 @@
}}
on:click={e => {
e.stopPropagation()
startTyping()
startEditing()
}}
on:keydown={async e => {
if (e.key === "Enter") {
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
}
}
}}
on:blur={async () => {
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
await stopEditing()
}
}}
on:blur={stopEditing}
/>
{:else}
<div class="input-text">
@ -222,7 +231,7 @@
/>
{/if}
</div>
{#if automationNameError}
{#if automationNameError && editing}
<div class="error-container">
<AbsTooltip type="negative" text={automationNameError}>
<div class="error-icon">

View File

@ -643,8 +643,8 @@
runtimeName = `loop.${name}`
} else if (block.name.startsWith("JS")) {
runtimeName = hasUserDefinedName
? `stepsByName[${bindingName}].${name}`
: `steps[${idx - loopBlockCount}].${name}`
? `stepsByName["${bindingName}"].${name}`
: `steps["${idx - loopBlockCount}"].${name}`
} else {
runtimeName = hasUserDefinedName
? `stepsByName.${bindingName}.${name}`
@ -752,13 +752,21 @@
: allSteps[idx].icon
if (wasLoopBlock) {
loopBlockCount++
schema = cloneDeep(allSteps[idx - 1]?.schema?.outputs?.properties)
}
Object.entries(schema).forEach(([name, value]) => {
addBinding(name, value, icon, idx, isLoopBlock, bindingName)
})
}
if (
allSteps[blockIdx - 1]?.stepId !== ActionStepID.LOOP &&
allSteps
.slice(0, blockIdx)
.some(step => step.stepId === ActionStepID.LOOP)
) {
bindings = bindings.filter(x => !x.readableBinding.includes("loop"))
}
return bindings
}

View File

@ -1,6 +1,6 @@
<script>
import { viewsV2 } from "stores/builder"
import { admin, licensing } from "stores/portal"
import { admin } from "stores/portal"
import { Grid } from "@budibase/frontend-core"
import { API } from "api"
import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
@ -30,7 +30,6 @@
showAvatars={false}
on:updatedatasource={handleGridViewUpdate}
isCloud={$admin.cloud}
allowViewReadonlyColumns={$licensing.isViewReadonlyColumnsEnabled}
canSetRelationshipSchemas={isEnabled(FeatureFlag.ENRICHED_RELATIONSHIPS)}
>
<svelte:fragment slot="filter">

View File

@ -56,10 +56,13 @@
} else {
// We don't store the default BB AI config in the DB
delete fullAIConfig.config.budibase_ai
// unset the default value from other configs if default is set
if (editingAIConfig.isDefault) {
for (let key in fullAIConfig.config) {
fullAIConfig.config[key].isDefault = false
if (key !== id) {
fullAIConfig.config[key].isDefault = false
}
}
}
// Add new or update existing custom AI Config

View File

@ -140,10 +140,6 @@ export const createLicensingStore = () => {
Constants.Features.VIEW_PERMISSIONS
)
const isViewReadonlyColumnsEnabled = license.features.includes(
Constants.Features.VIEW_READONLY_COLUMNS
)
const budibaseAIEnabled = license.features.includes(
Constants.Features.BUDIBASE_AI
)
@ -173,7 +169,6 @@ export const createLicensingStore = () => {
triggerAutomationRunEnabled,
isViewPermissionsEnabled,
perAppBuildersEnabled,
isViewReadonlyColumnsEnabled,
}
})
},

View File

@ -28,7 +28,7 @@
"inquirer": "8.0.0",
"lookpath": "1.1.0",
"node-fetch": "2.6.7",
"posthog-node": "1.3.0",
"posthog-node": "4.0.1",
"pouchdb": "7.3.0",
"@budibase/pouchdb-replication-stream": "1.2.11",
"randomstring": "1.1.5",

View File

@ -1,4 +1,4 @@
import PostHog from "posthog-node"
import { PostHog } from "posthog-node"
import { POSTHOG_TOKEN, AnalyticsEvent } from "../constants"
import { ConfigManager } from "../structures/ConfigManager"

View File

@ -4,16 +4,13 @@
import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
import { FieldPermissions } from "../../../constants"
export let allowViewReadonlyColumns = false
const { columns, datasource } = getContext("grid")
let open = false
let anchor
$: anyRestricted = $columns.filter(col => !col.visible || col.readonly).length
$: text = anyRestricted ? `Columns (${anyRestricted} restricted)` : "Columns"
$: text = anyRestricted ? `Columns: (${anyRestricted} restricted)` : "Columns"
$: permissions =
$datasource.type === "viewV2"
? [
@ -22,9 +19,6 @@
FieldPermissions.HIDDEN,
]
: [FieldPermissions.WRITABLE, FieldPermissions.HIDDEN]
$: disabledPermissions = allowViewReadonlyColumns
? []
: [FieldPermissions.READONLY]
</script>
<div bind:this={anchor}>
@ -41,9 +35,5 @@
</div>
<Popover bind:open {anchor} align="left">
<ColumnsSettingContent
columns={$columns}
{permissions}
{disabledPermissions}
/>
<ColumnsSettingContent columns={$columns} {permissions} />
</Popover>

View File

@ -58,7 +58,6 @@
export let buttons = null
export let darkMode
export let isCloud = null
export let allowViewReadonlyColumns = false
export let rowConditions = null
// Unique identifier for DOM nodes inside this instance
@ -115,7 +114,6 @@
buttons,
darkMode,
isCloud,
allowViewReadonlyColumns,
rowConditions,
})
@ -157,7 +155,7 @@
<div class="controls-left">
<slot name="filter" />
<SortButton />
<ColumnsSettingButton {allowViewReadonlyColumns} />
<ColumnsSettingButton />
<SizeButton />
<slot name="controls" />
</div>

@ -1 +1 @@
Subproject commit 922431260e90d558a1ca55398475412e75088057
Subproject commit e2fe0f9cc856b4ee1a97df96d623b2d87d4e8733

View File

@ -101,7 +101,7 @@
"mysql2": "3.9.8",
"node-fetch": "2.6.7",
"object-sizeof": "2.6.1",
"openai": "^4.52.1",
"openai": "4.59.0",
"openapi-types": "9.3.1",
"oracledb": "6.5.1",
"pg": "8.10.0",

View File

@ -39,9 +39,10 @@ import tk from "timekeeper"
import { encodeJSBinding } from "@budibase/string-templates"
import { dataFilters } from "@budibase/shared-core"
import { Knex } from "knex"
import { structures } from "@budibase/backend-core/tests"
import { generator, structures } from "@budibase/backend-core/tests"
import { DEFAULT_EMPLOYEE_TABLE_SCHEMA } from "../../../db/defaultData/datasource_bb_default"
import { generateRowIdField } from "../../../integrations/utils"
import { cloneDeep } from "lodash/fp"
describe.each([
["in-memory", undefined],
@ -66,6 +67,36 @@ describe.each([
let table: Table
let rows: Row[]
async function basicRelationshipTables(type: RelationshipType) {
const relatedTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
},
generator.guid().substring(0, 10)
)
table = await createTable(
{
name: { name: "name", type: FieldType.STRING },
//@ts-ignore - API accepts this structure, will build out rest of definition
productCat: {
type: FieldType.LINK,
relationshipType: type,
name: "productCat",
fieldName: "product",
tableId: relatedTable._id!,
constraints: {
type: "array",
},
},
},
generator.guid().substring(0, 10)
)
return {
relatedTable: await config.api.table.get(relatedTable._id!),
table,
}
}
beforeAll(async () => {
await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, () => config.init())
if (isLucene) {
@ -201,6 +232,7 @@ describe.each([
// rows returned by the query will also cause the assertion to fail.
async toMatchExactly(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -211,7 +243,7 @@ describe.each([
expect.objectContaining(this.popRow(expectedRow, foundRows))
)
)
return response
return cloned
}
// Asserts that the query returns rows matching exactly the set of rows
@ -219,6 +251,7 @@ describe.each([
// cause the assertion to fail.
async toContainExactly(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -231,7 +264,7 @@ describe.each([
)
)
)
return response
return cloned
}
// Asserts that the query returns some property values - this cannot be used
@ -239,6 +272,7 @@ describe.each([
// typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...)
async toMatch(properties: Record<string, any>) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const keys = Object.keys(properties) as Array<keyof SearchResponse<Row>>
for (let key of keys) {
// eslint-disable-next-line jest/no-standalone-expect
@ -248,17 +282,18 @@ describe.each([
expect(response[key]).toEqual(properties[key])
}
}
return response
return cloned
}
// Asserts that the query doesn't return a property, e.g. pagination parameters.
async toNotHaveProperty(properties: (keyof SearchResponse<Row>)[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
for (let property of properties) {
// eslint-disable-next-line jest/no-standalone-expect
expect(response[property]).toBeUndefined()
}
return response
return cloned
}
// Asserts that the query returns rows matching the set of rows passed in.
@ -266,6 +301,7 @@ describe.each([
// assertion to fail.
async toContain(expectedRows: any[]) {
const response = await this.performSearch()
const cloned = cloneDeep(response)
const foundRows = response.rows
// eslint-disable-next-line jest/no-standalone-expect
@ -276,7 +312,7 @@ describe.each([
)
)
)
return response
return cloned
}
async toFindNothing() {
@ -2196,28 +2232,10 @@ describe.each([
let productCategoryTable: Table, productCatRows: Row[]
beforeAll(async () => {
productCategoryTable = await createTable(
{
name: { name: "name", type: FieldType.STRING },
},
"productCategory"
)
table = await createTable(
{
name: { name: "name", type: FieldType.STRING },
productCat: {
type: FieldType.LINK,
relationshipType: RelationshipType.ONE_TO_MANY,
name: "productCat",
fieldName: "product",
tableId: productCategoryTable._id!,
constraints: {
type: "array",
},
},
},
"product"
const { relatedTable } = await basicRelationshipTables(
RelationshipType.ONE_TO_MANY
)
productCategoryTable = relatedTable
productCatRows = await Promise.all([
config.api.row.save(productCategoryTable._id!, { name: "foo" }),
@ -2250,7 +2268,7 @@ describe.each([
it("should be able to filter by relationship using table name", async () => {
await expectQuery({
equal: { ["productCategory.name"]: "foo" },
equal: { [`${productCategoryTable.name}.name`]: "foo" },
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
@ -2262,6 +2280,36 @@ describe.each([
}).toContainExactly([{ name: "baz", productCat: undefined }])
})
})
isSql &&
describe("big relations", () => {
beforeAll(async () => {
const { relatedTable } = await basicRelationshipTables(
RelationshipType.MANY_TO_ONE
)
const mainRow = await config.api.row.save(table._id!, {
name: "foo",
})
for (let i = 0; i < 11; i++) {
await config.api.row.save(relatedTable._id!, {
name: i,
product: [mainRow._id!],
})
}
})
it("can only pull 10 related rows", async () => {
await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => {
const response = await expectQuery({}).toContain([{ name: "foo" }])
expect(response.rows[0].productCat).toBeArrayOfSize(10)
})
})
it("can pull max rows when env not set (defaults to 500)", async () => {
const response = await expectQuery({}).toContain([{ name: "foo" }])
expect(response.rows[0].productCat).toBeArrayOfSize(11)
})
})
;(isSqs || isLucene) &&
describe("relations to same table", () => {
let relatedTable: Table, relatedRows: Row[]

View File

@ -309,10 +309,6 @@ describe.each([
})
describe("readonly fields", () => {
beforeEach(() => {
mocks.licenses.useViewReadonlyColumns()
})
it("readonly fields are persisted", async () => {
const table = await config.api.table.save(
saveTableRequest({
@ -436,7 +432,7 @@ describe.each([
})
})
it("readonly fields cannot be used on free license", async () => {
it("readonly fields can be used on free license", async () => {
mocks.licenses.useCloudFree()
const table = await config.api.table.save(
saveTableRequest({
@ -466,11 +462,7 @@ describe.each([
}
await config.api.viewV2.create(newView, {
status: 400,
body: {
message: "Readonly fields are not enabled",
status: 400,
},
status: 201,
})
})
})
@ -513,7 +505,6 @@ describe.each([
})
it("display fields can be readonly", async () => {
mocks.licenses.useViewReadonlyColumns()
const table = await config.api.table.save(
saveTableRequest({
schema: {
@ -588,7 +579,6 @@ describe.each([
})
it("can update all fields", async () => {
mocks.licenses.useViewReadonlyColumns()
const tableId = table._id!
const updatedData: Required<UpdateViewRequest> = {
@ -802,71 +792,6 @@ describe.each([
)
})
it("cannot update views with readonly on on free license", async () => {
mocks.licenses.useViewReadonlyColumns()
view = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: true,
},
},
})
mocks.licenses.useCloudFree()
await config.api.viewV2.update(view, {
status: 400,
body: {
message: "Readonly fields are not enabled",
},
})
})
it("can remove readonly config after license downgrade", async () => {
mocks.licenses.useViewReadonlyColumns()
view = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: true,
},
Category: {
visible: true,
readonly: true,
},
},
})
mocks.licenses.useCloudFree()
const res = await config.api.viewV2.update({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: false,
},
},
})
expect(res).toEqual(
expect.objectContaining({
...view,
schema: {
id: { visible: true },
Price: {
visible: true,
readonly: false,
},
},
})
)
})
isInternal &&
it("updating schema will only validate modified field", async () => {
let view = await config.api.viewV2.create({
@ -1046,7 +971,6 @@ describe.each([
})
it("should be able to fetch readonly config after downgrades", async () => {
mocks.licenses.useViewReadonlyColumns()
const res = await config.api.viewV2.create({
name: generator.name(),
tableId: table._id!,
@ -1112,8 +1036,6 @@ describe.each([
})
it("rejects if field is readonly in any view", async () => {
mocks.licenses.useViewReadonlyColumns()
await config.api.viewV2.create({
name: "view a",
tableId: table._id!,
@ -1538,7 +1460,6 @@ describe.each([
})
it("can't persist readonly columns", async () => {
mocks.licenses.useViewReadonlyColumns()
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
@ -1607,7 +1528,6 @@ describe.each([
})
it("can't update readonly columns", async () => {
mocks.licenses.useViewReadonlyColumns()
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),

View File

@ -10,6 +10,7 @@ import {
} from "@budibase/types"
import { env } from "@budibase/backend-core"
import * as automationUtils from "../automationUtils"
import * as pro from "@budibase/pro"
enum Model {
GPT_35_TURBO = "gpt-3.5-turbo",
@ -62,19 +63,33 @@ export const definition: AutomationStepDefinition = {
},
}
/**
* Maintains backward compatibility with automation steps created before the introduction
* of custom configurations and Budibase AI
* @param inputs - automation inputs from the OpenAI automation step.
*/
async function legacyOpenAIPrompt(inputs: OpenAIStepInputs) {
const openai = new OpenAI({
apiKey: env.OPENAI_API_KEY,
})
const completion = await openai.chat.completions.create({
model: inputs.model,
messages: [
{
role: "user",
content: inputs.prompt,
},
],
})
return completion?.choices[0]?.message?.content
}
export async function run({
inputs,
}: {
inputs: OpenAIStepInputs
}): Promise<OpenAIStepOutputs> {
if (!env.OPENAI_API_KEY) {
return {
success: false,
response:
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
}
}
if (inputs.prompt == null) {
return {
success: false,
@ -83,20 +98,24 @@ export async function run({
}
try {
const openai = new OpenAI({
apiKey: env.OPENAI_API_KEY,
})
let response
const customConfigsEnabled = await pro.features.isAICustomConfigsEnabled()
const budibaseAIEnabled = await pro.features.isBudibaseAIEnabled()
const completion = await openai.chat.completions.create({
model: inputs.model,
messages: [
{
role: "user",
content: inputs.prompt,
},
],
})
const response = completion?.choices[0]?.message?.content
if (budibaseAIEnabled || customConfigsEnabled) {
const llm = await pro.ai.LargeLanguageModel.forCurrentTenant(inputs.model)
response = await llm.run(inputs.prompt)
} else {
// fallback to the default that uses the environment variable for backwards compat
if (!env.OPENAI_API_KEY) {
return {
success: false,
response:
"OpenAI API Key not configured - please add the OPENAI_API_KEY environment variable.",
}
}
response = await legacyOpenAIPrompt(inputs)
}
return {
response,

View File

@ -4,6 +4,7 @@ import {
withEnv as withCoreEnv,
setEnv as setCoreEnv,
} from "@budibase/backend-core"
import * as pro from "@budibase/pro"
jest.mock("openai", () => ({
OpenAI: jest.fn().mockImplementation(() => ({
@ -22,7 +23,23 @@ jest.mock("openai", () => ({
},
})),
}))
jest.mock("@budibase/pro", () => ({
...jest.requireActual("@budibase/pro"),
ai: {
LargeLanguageModel: {
forCurrentTenant: jest.fn().mockImplementation(() => ({
init: jest.fn(),
run: jest.fn(),
})),
},
},
features: {
isAICustomConfigsEnabled: jest.fn(),
isBudibaseAIEnabled: jest.fn(),
},
}))
const mockedPro = jest.mocked(pro)
const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>
const OPENAI_PROMPT = "What is the meaning of life?"
@ -41,6 +58,7 @@ describe("test the openai action", () => {
afterEach(() => {
resetEnv()
jest.clearAllMocks()
})
afterAll(_afterAll)
@ -94,4 +112,25 @@ describe("test the openai action", () => {
)
expect(res.success).toBeFalsy()
})
it("should ensure that the pro AI module is called when the budibase AI features are enabled", async () => {
jest.spyOn(pro.features, "isBudibaseAIEnabled").mockResolvedValue(true)
jest.spyOn(pro.features, "isAICustomConfigsEnabled").mockResolvedValue(true)
const prompt = "What is the meaning of life?"
await runStep("OPENAI", {
model: "gpt-4o-mini",
prompt,
})
expect(pro.ai.LargeLanguageModel.forCurrentTenant).toHaveBeenCalledWith(
"gpt-4o-mini"
)
const llmInstance =
mockedPro.ai.LargeLanguageModel.forCurrentTenant.mock.results[0].value
// init does not appear to be called currently
// expect(llmInstance.init).toHaveBeenCalled()
expect(llmInstance.run).toHaveBeenCalledWith(prompt)
})
})

View File

@ -581,16 +581,15 @@ export class GoogleSheetsIntegration implements DatasourcePlus {
rows = await sheet.getRows()
}
if (hasFilters && query.paginate) {
rows = rows.slice(offset, offset + limit)
}
const headerValues = sheet.headerValues
let response = rows.map(row =>
this.buildRowObject(headerValues, row.toObject(), row.rowNumber)
this.buildRowObject(sheet.headerValues, row.toObject(), row.rowNumber)
)
response = dataFilters.runQuery(response, query.filters || {})
if (hasFilters && query.paginate) {
response = response.slice(offset, offset + limit)
}
if (query.sort) {
if (Object.keys(query.sort).length !== 1) {
console.warn("Googlesheets does not support multiple sorting", {

View File

@ -241,6 +241,16 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async connect() {
this.client = await mysql.createConnection(this.config)
const res = await this.internalQuery(
{
sql: "SELECT VERSION();",
},
{ connect: false }
)
const version = res?.[0]?.["VERSION()"]
if (version?.toLowerCase().includes("mariadb")) {
this.setExtendedSqlClient(SqlClient.MARIADB)
}
}
async disconnect() {

View File

@ -5,6 +5,7 @@ import TestConfiguration from "../../tests/utilities/TestConfiguration"
import {
Datasource,
FieldType,
Row,
SourceName,
Table,
TableSourceType,
@ -598,4 +599,193 @@ describe("Google Sheets Integration", () => {
)
})
})
describe("search", () => {
let table: Table
beforeEach(async () => {
table = await config.api.table.save({
name: "Test Table",
type: "table",
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
type: "string",
},
},
},
})
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Foo",
},
{
name: "Bar",
},
{
name: "Baz",
},
],
})
})
it("should be able to find rows with equals filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
equal: {
name: "Foo",
},
},
})
expect(response.rows).toHaveLength(1)
expect(response.rows[0].name).toEqual("Foo")
})
it("should be able to find rows with not equals filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
notEqual: {
name: "Foo",
},
},
})
expect(response.rows).toHaveLength(2)
expect(response.rows[0].name).toEqual("Bar")
expect(response.rows[1].name).toEqual("Baz")
})
it("should be able to find rows with empty filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
empty: {
name: null,
},
},
})
expect(response.rows).toHaveLength(0)
})
it("should be able to find rows with not empty filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
notEmpty: {
name: null,
},
},
})
expect(response.rows).toHaveLength(3)
})
it("should be able to find rows with one of filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
oneOf: {
name: ["Foo", "Bar"],
},
},
})
expect(response.rows).toHaveLength(2)
expect(response.rows[0].name).toEqual("Foo")
expect(response.rows[1].name).toEqual("Bar")
})
it("should be able to find rows with fuzzy filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
fuzzy: {
name: "oo",
},
},
})
expect(response.rows).toHaveLength(1)
expect(response.rows[0].name).toEqual("Foo")
})
it("should be able to find rows with range filter", async () => {
const response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: {
range: {
name: {
low: "A",
high: "C",
},
},
},
})
expect(response.rows).toHaveLength(2)
expect(response.rows[0].name).toEqual("Bar")
expect(response.rows[1].name).toEqual("Baz")
})
it("should paginate correctly", async () => {
await config.api.row.bulkImport(table._id!, {
rows: Array.from({ length: 50 }, () => ({
name: `Unique value!`,
})),
})
await config.api.row.bulkImport(table._id!, {
rows: Array.from({ length: 50 }, () => ({
name: `Non-unique value!`,
})),
})
let response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: { equal: { name: "Unique value!" } },
paginate: true,
limit: 10,
})
let rows: Row[] = response.rows
while (response.hasNextPage) {
response = await config.api.row.search(table._id!, {
tableId: table._id!,
query: { equal: { name: "Unique value!" } },
paginate: true,
limit: 10,
bookmark: response.bookmark,
})
expect(response.rows.length).toBeLessThanOrEqual(10)
rows = rows.concat(response.rows)
}
// Make sure we only get rows matching the query.
expect(rows.length).toEqual(50)
expect(rows.map(row => row.name)).toEqual(
expect.arrayContaining(
Array.from({ length: 50 }, () => "Unique value!")
)
)
// Make sure all of the rows have a unique ID.
const ids = Object.keys(
rows.reduce((acc, row) => {
acc[row._id!] = true
return acc
}, {})
)
expect(ids.length).toEqual(50)
})
})
})

View File

@ -440,6 +440,8 @@ export class GoogleSheetsMock {
endColumnIndex: 0,
})
sheet.properties.gridProperties.rowCount = sheet.data[0].rowData.length
return {
spreadsheetId: this.spreadsheet.spreadsheetId,
tableRange: range,

View File

@ -198,12 +198,15 @@ export async function save(
}
}
generateRelatedSchema(schema, relatedTable, tableToSave, relatedColumnName)
tables[relatedTable.name] = relatedTable
schema.main = true
}
// add in the new table for relationship purposes
tables[tableToSave.name] = tableToSave
cleanupRelationships(tableToSave, tables, oldTable)
if (oldTable) {
cleanupRelationships(tableToSave, tables, { oldTable })
}
const operation = tableId ? Operation.UPDATE_TABLE : Operation.CREATE_TABLE
await makeTableRequest(
@ -231,7 +234,10 @@ export async function save(
// remove the rename prop
delete tableToSave._rename
datasource.entities[tableToSave.name] = tableToSave
datasource.entities = {
...datasource.entities,
...tables,
}
// store it into couch now for budibase reference
await db.put(populateExternalTableSchemas(datasource))
@ -255,7 +261,7 @@ export async function destroy(datasourceId: string, table: Table) {
const operation = Operation.DELETE_TABLE
if (tables) {
await makeTableRequest(datasource, operation, table, tables)
cleanupRelationships(table, tables)
cleanupRelationships(table, tables, { deleting: true })
delete tables[table.name]
datasource.entities = tables
}

View File

@ -20,14 +20,26 @@ import { cloneDeep } from "lodash/fp"
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
oldTable?: Table
) {
opts: { oldTable: Table }
): void
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
opts: { deleting: boolean }
): void
export function cleanupRelationships(
table: Table,
tables: Record<string, Table>,
opts?: { oldTable?: Table; deleting?: boolean }
): void {
const oldTable = opts?.oldTable
const tableToIterate = oldTable ? oldTable : table
// clean up relationships in couch table schemas
for (let [key, schema] of Object.entries(tableToIterate.schema)) {
if (
schema.type === FieldType.LINK &&
(!oldTable || table.schema[key] == null)
(opts?.deleting || oldTable?.schema[key] != null) &&
table.schema[key] == null
) {
const schemaTableId = schema.tableId
const relatedTable = Object.values(tables).find(

View File

@ -5,13 +5,11 @@ import {
Table,
TableSchema,
View,
ViewFieldMetadata,
ViewV2,
ViewV2ColumnEnriched,
ViewV2Enriched,
} from "@budibase/types"
import { HTTPError } from "@budibase/backend-core"
import { features } from "@budibase/pro"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@ -59,13 +57,6 @@ async function guardViewSchema(
}
if (viewSchema[field].readonly) {
if (
!(await features.isViewReadonlyColumnsEnabled()) &&
!(tableSchemaField as ViewFieldMetadata).readonly
) {
throw new HTTPError(`Readonly fields are not enabled`, 400)
}
if (!viewSchema[field].visible) {
throw new HTTPError(
`Field "${field}" must be visible if you want to make it readonly`,

View File

@ -111,7 +111,7 @@ export interface SCIMInnerConfig {
export interface SCIMConfig extends Config<SCIMInnerConfig> {}
type AIProvider = "OpenAI" | "Anthropic" | "AzureOpenAI" | "Custom"
export type AIProvider = "OpenAI" | "Anthropic" | "TogetherAI" | "Custom"
export interface AIInnerConfig {
[key: string]: {

View File

@ -195,6 +195,7 @@ export enum SqlClient {
MS_SQL = "mssql",
POSTGRES = "pg",
MY_SQL = "mysql2",
MARIADB = "mariadb",
ORACLE = "oracledb",
SQL_LITE = "sqlite3",
}

View File

@ -253,6 +253,7 @@ export async function save(ctx: UserCtx<Config>) {
if (existingConfig) {
await verifyAIConfig(config, existingConfig)
}
await pro.quotas.updateCustomAIConfigCount(Object.keys(config).length)
break
}
} catch (err: any) {
@ -334,32 +335,6 @@ function enrichOIDCLogos(oidcLogos: OIDCLogosConfig) {
)
}
async function enrichAIConfig(aiConfig: AIConfig) {
// Strip out the API Keys from the response so they don't show in the UI
for (const key in aiConfig.config) {
if (aiConfig.config[key].apiKey) {
aiConfig.config[key].apiKey = PASSWORD_REPLACEMENT
}
}
// Return the Budibase AI data source as part of the response if licensing allows
const budibaseAIEnabled = await pro.features.isBudibaseAIEnabled()
const defaultConfigExists = Object.keys(aiConfig.config).some(
key => aiConfig.config[key].isDefault
)
if (budibaseAIEnabled) {
aiConfig.config["budibase_ai"] = {
provider: "OpenAI",
active: true,
isDefault: !defaultConfigExists,
defaultModel: env.BUDIBASE_AI_DEFAULT_MODEL || "",
name: "Budibase AI",
}
}
return aiConfig
}
export async function find(ctx: UserCtx) {
try {
// Find the config with the most granular scope based on context
@ -372,7 +347,13 @@ export async function find(ctx: UserCtx) {
}
if (type === ConfigType.AI) {
await enrichAIConfig(scopedConfig)
await pro.sdk.ai.enrichAIConfig(scopedConfig)
// Strip out the API Keys from the response so they don't show in the UI
for (const key in scopedConfig.config) {
if (scopedConfig.config[key].apiKey) {
scopedConfig.config[key].apiKey = PASSWORD_REPLACEMENT
}
}
}
ctx.body = scopedConfig
} else {

View File

@ -1,4 +1,3 @@
import * as pro from "@budibase/pro"
import { verifyAIConfig } from "../configs"
import { TestConfiguration, structures } from "../../../../tests"
import { AIInnerConfig } from "@budibase/types"
@ -35,55 +34,6 @@ describe("Global configs controller", () => {
})
})
it("Should return the default BB AI config when the feature is turned on", async () => {
jest
.spyOn(pro.features, "isBudibaseAIEnabled")
.mockImplementation(() => Promise.resolve(true))
const data = structures.configs.ai()
await config.api.configs.saveConfig(data)
const response = await config.api.configs.getAIConfig()
expect(response.body.config).toEqual({
budibase_ai: {
provider: "OpenAI",
active: true,
isDefault: true,
name: "Budibase AI",
defaultModel: "",
},
ai: {
active: true,
apiKey: "--secret-value--",
baseUrl: "https://api.example.com",
defaultModel: "gpt4",
isDefault: false,
name: "Test",
provider: "OpenAI",
},
})
})
it("Should not not return the default Budibase AI config when on self host", async () => {
jest
.spyOn(pro.features, "isBudibaseAIEnabled")
.mockImplementation(() => Promise.resolve(false))
const data = structures.configs.ai()
await config.api.configs.saveConfig(data)
const response = await config.api.configs.getAIConfig()
expect(response.body.config).toEqual({
ai: {
active: true,
apiKey: "--secret-value--",
baseUrl: "https://api.example.com",
defaultModel: "gpt4",
isDefault: false,
name: "Test",
provider: "OpenAI",
},
})
})
it("Should not update existing secrets when updating an existing AI Config", async () => {
const data = structures.configs.ai()
await config.api.configs.saveConfig(data)

View File

@ -2,4 +2,4 @@
yarn build:apps
version=$(./scripts/getCurrentVersion.sh)
docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs .
docker build -f hosting/single/Dockerfile -t budibase:sqs --build-arg BUDIBASE_VERSION=$version --build-arg TARGETBUILD=single --build-arg BASEIMG=budibase/couchdb:v3.3.3-sqs-v2.1.1 .

yarn.lock (126 changed lines)
View File

@ -33,6 +33,19 @@
"@jridgewell/gen-mapping" "^0.3.5"
"@jridgewell/trace-mapping" "^0.3.24"
"@anthropic-ai/sdk@^0.27.3":
version "0.27.3"
resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.27.3.tgz#592cdd873c85ffab9589ae6f2e250cbf150e1475"
integrity sha512-IjLt0gd3L4jlOfilxVXTifn42FnVffMgDC04RJK1KDZpmkBWLv0XC92MVVmkxrFZNS/7l3xWgP/I3nqtX1sQHw==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
"@apidevtools/json-schema-ref-parser@^9.0.6":
version "9.1.2"
resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.2.tgz#8ff5386b365d4c9faa7c8b566ff16a46a577d9b8"
@ -2053,7 +2066,7 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@2.32.6":
"@budibase/backend-core@2.32.5":
version "0.0.0"
dependencies:
"@budibase/nano" "10.1.5"
@ -2134,14 +2147,14 @@
through2 "^2.0.0"
"@budibase/pro@npm:@budibase/pro@latest":
version "2.32.6"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.32.6.tgz#02ddef737ee8f52dafd8fab8f8f277dfc89cd33f"
integrity sha512-+XEv4JtMvUKZWyllcw+iFOh44zxsoJLmUdShu4bAjj5zXWgElF6LjFpK51IrQzM6xKfQxn7N2vmxu7175u5dDQ==
version "2.32.5"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.32.5.tgz#2beecf566da972a92200faddc97bc152ea2bbdea"
integrity sha512-afrklI2A8P7pfl/3KxysqO2Sjr0l2yQ1+jyuouEZliEklLxV8AFlzrODr4V2SK3J8E1xk8wG5ztYQS2uT7TnuA==
dependencies:
"@budibase/backend-core" "2.32.6"
"@budibase/shared-core" "2.32.6"
"@budibase/string-templates" "2.32.6"
"@budibase/types" "2.32.6"
"@budibase/backend-core" "2.32.5"
"@budibase/shared-core" "2.32.5"
"@budibase/string-templates" "2.32.5"
"@budibase/types" "2.32.5"
"@koa/router" "8.0.8"
bull "4.10.1"
dd-trace "5.2.0"
@ -2153,13 +2166,13 @@
scim-patch "^0.8.1"
scim2-parse-filter "^0.2.8"
"@budibase/shared-core@2.32.6":
"@budibase/shared-core@2.32.5":
version "0.0.0"
dependencies:
"@budibase/types" "0.0.0"
cron-validate "1.4.5"
"@budibase/string-templates@2.32.6":
"@budibase/string-templates@2.32.5":
version "0.0.0"
dependencies:
"@budibase/handlebars-helpers" "^0.13.2"
@ -2167,7 +2180,7 @@
handlebars "^4.7.8"
lodash.clonedeep "^4.5.0"
"@budibase/types@2.32.6":
"@budibase/types@2.32.5":
version "0.0.0"
dependencies:
scim-patch "^0.8.1"
@ -6117,6 +6130,11 @@
resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.7.tgz#63bb7d067db107cc1e457c303bc25d511febf6cb"
integrity sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==
"@types/qs@^6.9.15":
version "6.9.16"
resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.16.tgz#52bba125a07c0482d26747d5d4947a64daf8f794"
integrity sha512-7i+zxXdPD0T4cKDuxCUXJ4wHcsJLwENa6Z3dCu8cfCK743OGy5Nu1RmAGqDPsoTDINVEcdXKRvR/zre+P2Ku1A==
"@types/range-parser@*":
version "1.2.4"
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
@ -7517,15 +7535,7 @@ aws4@^1.8.0:
resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59"
integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==
axios-retry@^3.1.9:
version "3.4.0"
resolved "https://registry.yarnpkg.com/axios-retry/-/axios-retry-3.4.0.tgz#f464dbe9408e5aa78fa319afd38bb69b533d8854"
integrity sha512-VdgaP+gHH4iQYCCNUWF2pcqeciVOdGrBBAYUfTY+wPcO5Ltvp/37MLFNCmJKo7Gj3SHvCSdL8ouI1qLYJN3liA==
dependencies:
"@babel/runtime" "^7.15.4"
is-retry-allowed "^2.2.0"
axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2:
version "1.6.3"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@ -8378,11 +8388,6 @@ chardet@^0.7.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
charenc@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==
cheap-watch@^1.0.2, cheap-watch@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/cheap-watch/-/cheap-watch-1.0.4.tgz#0bcb4a3a8fbd9d5327936493f6b56baa668d8fef"
@ -8769,11 +8774,6 @@ component-emitter@^1.3.0:
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0"
integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==
component-type@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-type/-/component-type-1.2.1.tgz#8a47901700238e4fc32269771230226f24b415a9"
integrity sha512-Kgy+2+Uwr75vAi6ChWXgHuLvd+QLD7ssgpaRq2zCvt80ptvAfMc/hijcJxXkBa2wMlEZcJvC2H8Ubo+A9ATHIg==
compress-commons@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.2.tgz#6542e59cb63e1f46a8b21b0e06f9a32e4c8b06df"
@ -9192,11 +9192,6 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
shebang-command "^2.0.0"
which "^2.0.1"
crypt@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
crypto-browserify@^3.11.0:
version "3.12.0"
resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
@ -13239,11 +13234,6 @@ is-boolean-object@^1.1.0:
call-bind "^1.0.2"
has-tostringtag "^1.0.0"
is-buffer@~1.1.6:
version "1.1.6"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
is-builtin-module@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169"
@ -13528,11 +13518,6 @@ is-retry-allowed@^1.1.0:
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4"
integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==
is-retry-allowed@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-2.2.0.tgz#88f34cbd236e043e71b6932d09b0c65fb7b4d71d"
integrity sha512-XVm7LOeLpTW4jV19QSH38vkswxoLud8sQ57YwJVTPWdiaI9I8keEhGFpBlslyVsgdQy4Opg8QOLb8YRgsyZiQg==
is-self-closing@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-self-closing/-/is-self-closing-1.0.1.tgz#5f406b527c7b12610176320338af0fa3896416e4"
@ -14289,11 +14274,6 @@ joi@^17.13.1:
"@sideway/formula" "^3.0.1"
"@sideway/pinpoint" "^2.0.0"
join-component@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/join-component/-/join-component-1.1.0.tgz#b8417b750661a392bee2c2537c68b2a9d4977cd5"
integrity sha512-bF7vcQxbODoGK1imE2P9GS9aw4zD0Sd+Hni68IMZLj7zRnquH7dXUmMw9hDI5S/Jzt7q+IyTXN0rSg2GI0IKhQ==
joycon@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03"
@ -15783,15 +15763,6 @@ md5.js@^1.3.4:
inherits "^2.0.1"
safe-buffer "^5.1.2"
md5@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/md5/-/md5-2.3.0.tgz#c3da9a6aae3a30b46b7b0c349b87b110dc3bda4f"
integrity sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==
dependencies:
charenc "0.0.2"
crypt "0.0.2"
is-buffer "~1.1.6"
mdn-data@2.0.14:
version "2.0.14"
resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.14.tgz#7113fc4281917d63ce29b43446f701e68c25ba50"
@ -16253,7 +16224,7 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.0.0, ms@^2.1.1, ms@^2.1.3:
ms@^2.0.0, ms@^2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@ -17097,19 +17068,20 @@ open@^8.0.0, open@^8.4.0, open@~8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
openai@^4.52.1:
version "4.52.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.1.tgz#44acc362a844fa2927b0cfa1fb70fb51e388af65"
integrity sha512-kv2hevAWZZ3I/vd2t8znGO2rd8wkowncsfcYpo8i+wU9ML+JEcdqiViANXXjWWGjIhajFNixE6gOY1fEgqILAg==
openai@4.59.0:
version "4.59.0"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.59.0.tgz#3961d11a9afb5920e1bd475948a87969e244fc08"
integrity sha512-3bn7FypMt2R1ZDuO0+GcXgBEnVFhIzrpUkb47pQRoYvyfdZ2fQXcuP14aOc4C8F9FvCtZ/ElzJmVzVqnP4nHNg==
dependencies:
"@types/node" "^18.11.18"
"@types/node-fetch" "^2.6.4"
"@types/qs" "^6.9.15"
abort-controller "^3.0.0"
agentkeepalive "^4.2.1"
form-data-encoder "1.7.2"
formdata-node "^4.3.2"
node-fetch "^2.6.7"
web-streams-polyfill "^3.2.1"
qs "^6.10.3"
openapi-response-validator@^9.2.0:
version "9.3.1"
@ -18421,20 +18393,6 @@ posthog-js@^1.13.4:
preact "^10.19.3"
web-vitals "^4.0.1"
posthog-node@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7"
integrity sha512-2+VhqiY/rKIqKIXyvemBFHbeijHE25sP7eKltnqcFqAssUE6+sX6vusN9A4luzToOqHQkUZexiCKxvuGagh7JA==
dependencies:
axios "0.24.0"
axios-retry "^3.1.9"
component-type "^1.2.1"
join-component "^1.1.0"
md5 "^2.3.0"
ms "^2.1.3"
remove-trailing-slash "^0.1.1"
uuid "^8.3.2"
posthog-node@4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-4.0.1.tgz#eb8b6cdf68c3fdd0dc2b75e8aab2e0ec3727fb2a"
@ -19475,11 +19433,6 @@ remixicon@2.5.0:
resolved "https://registry.yarnpkg.com/remixicon/-/remixicon-2.5.0.tgz#b5e245894a1550aa23793f95daceadbf96ad1a41"
integrity sha512-q54ra2QutYDZpuSnFjmeagmEiN9IMo56/zz5dDNitzKD23oFRw77cWo4TsrAdmdkPiEn8mxlrTqxnkujDbEGww==
remove-trailing-slash@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/remove-trailing-slash/-/remove-trailing-slash-0.1.1.tgz#be2285a59f39c74d1bce4f825950061915e3780d"
integrity sha512-o4S4Qh6L2jpnCy83ysZDau+VORNvnFw07CKSAymkd6ICNVEPisMyzlc00KlvvicsxKck94SEwhDnMNdICzO+tA==
request@^2.88.0:
version "2.88.2"
resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3"
@ -22599,11 +22552,6 @@ web-streams-polyfill@4.0.0-beta.3:
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38"
integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==
web-streams-polyfill@^3.2.1:
version "3.3.3"
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b"
integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==
web-vitals@^4.0.1:
version "4.2.3"
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-4.2.3.tgz#270c4baecfbc6ec6fc15da1989e465e5f9b94fb7"