Merge branches 'master' and 'budi-8742-add-a-baseurl-binding-inside-automations' of github.com:budibase/budibase into budi-8742-add-a-baseurl-binding-inside-automations

This commit is contained in: commit 78af5cdc67
@@ -27,7 +27,7 @@ export function doInUserContext(user: User, ctx: Ctx, task: any) {
hostInfo: {
ipAddress: ctx.request.ip,
// filled in by koa-useragent package
userAgent: ctx.userAgent._agent.source,
userAgent: ctx.userAgent.source,
},
}
return doInIdentityContext(userContext, task)

@@ -1,20 +1,26 @@
import { Cookie, Header } from "../constants"
import {
getCookie,
clearCookie,
openJwt,
getCookie,
isValidInternalAPIKey,
openJwt,
} from "../utils"
import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
import { getGlobalDB, doInTenant } from "../context"
import { queryGlobalView, SEPARATOR, ViewName } from "../db"
import { doInTenant, getGlobalDB } from "../context"
import { decrypt } from "../security/encryption"
import * as identity from "../context/identity"
import env from "../environment"
import { Ctx, EndpointMatcher, SessionCookie, User } from "@budibase/types"
import { InvalidAPIKeyError, ErrorCode } from "../errors"
import {
Ctx,
EndpointMatcher,
LoginMethod,
SessionCookie,
User,
} from "@budibase/types"
import { ErrorCode, InvalidAPIKeyError } from "../errors"
import tracer from "dd-trace"

const ONE_MINUTE = env.SESSION_UPDATE_PERIOD

@@ -26,16 +32,18 @@ interface FinaliseOpts {
internal?: boolean
publicEndpoint?: boolean
version?: string
user?: any
user?: User | { tenantId: string }
loginMethod?: LoginMethod
}

function timeMinusOneMinute() {
return new Date(Date.now() - ONE_MINUTE).toISOString()
}

function finalise(ctx: any, opts: FinaliseOpts = {}) {
function finalise(ctx: Ctx, opts: FinaliseOpts = {}) {
ctx.publicEndpoint = opts.publicEndpoint || false
ctx.isAuthenticated = opts.authenticated || false
ctx.loginMethod = opts.loginMethod
ctx.user = opts.user
ctx.internal = opts.internal || false
ctx.version = opts.version

@@ -120,9 +128,10 @@ export default function (
}

const tenantId = ctx.request.headers[Header.TENANT_ID]
let authenticated = false,
user = null,
internal = false
let authenticated: boolean = false,
user: User | { tenantId: string } | undefined = undefined,
internal: boolean = false,
loginMethod: LoginMethod | undefined = undefined
if (authCookie && !apiKey) {
const sessionId = authCookie.sessionId
const userId = authCookie.userId

@@ -146,6 +155,7 @@ export default function (
}
// @ts-ignore
user.csrfToken = session.csrfToken
loginMethod = LoginMethod.COOKIE

if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use

@@ -170,17 +180,16 @@ export default function (
apiKey,
populateUser
)
if (valid && foundUser) {
if (valid) {
authenticated = true
loginMethod = LoginMethod.API_KEY
user = foundUser
} else if (valid) {
authenticated = true
internal = true
internal = !foundUser
}
}
if (!user && tenantId) {
user = { tenantId }
} else if (user) {
} else if (user && "password" in user) {
delete user.password
}
// be explicit

@@ -204,7 +213,14 @@ export default function (
}

// isAuthenticated is a function, so use a variable to be able to check authed state
finalise(ctx, { authenticated, user, internal, version, publicEndpoint })
finalise(ctx, {
authenticated,
user,
internal,
version,
publicEndpoint,
loginMethod,
})

if (isUser(user)) {
return identity.doInUserContext(user, ctx, next)

@@ -13,6 +13,7 @@ import SqlTableQueryBuilder from "./sqlTable"
import {
Aggregation,
AnySearchFilter,
ArrayFilter,
ArrayOperator,
BasicOperator,
BBReferenceFieldMetadata,

@@ -98,6 +99,23 @@ function isSqs(table: Table): boolean {
)
}

function escapeQuotes(value: string, quoteChar = '"'): string {
return value.replace(new RegExp(quoteChar, "g"), `${quoteChar}${quoteChar}`)
}

function wrap(value: string, quoteChar = '"'): string {
return `${quoteChar}${escapeQuotes(value, quoteChar)}${quoteChar}`
}

function stringifyArray(value: any[], quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = wrap(value[i], quoteStyle)
}
}
return `[${value.join(",")}]`
}

const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
[BasicOperator.EQUAL]: false,
[BasicOperator.NOT_EQUAL]: true,

@@ -152,30 +170,30 @@ class InternalBuilder {
return this.query.meta.table
}

get knexClient(): Knex.Client {
return this.knex.client as Knex.Client
}

getFieldSchema(key: string): FieldSchema | undefined {
const { column } = this.splitter.run(key)
return this.table.schema[column]
}

private quoteChars(): [string, string] {
switch (this.client) {
case SqlClient.ORACLE:
case SqlClient.POSTGRES:
return ['"', '"']
case SqlClient.MS_SQL:
return ["[", "]"]
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
case SqlClient.SQL_LITE:
return ["`", "`"]
}
private supportsILike(): boolean {
return !(
this.client === SqlClient.ORACLE || this.client === SqlClient.SQL_LITE
)
}

// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
private quoteChars(): [string, string] {
const wrapped = this.knexClient.wrapIdentifier("foo", {})
return [wrapped[0], wrapped[wrapped.length - 1]]
}

// Takes a string like foo and returns a quoted string like [foo] for SQL
// Server and "foo" for Postgres.
private quote(str: string): string {
const [start, end] = this.quoteChars()
return `${start}${str}${end}`
return this.knexClient.wrapIdentifier(str, {})
}

private isQuoted(key: string): boolean {

@@ -193,6 +211,30 @@ class InternalBuilder {
return key.map(part => this.quote(part)).join(".")
}

private quotedValue(value: string): string {
const formatter = this.knexClient.formatter(this.knexClient.queryBuilder())
return formatter.wrap(value, false)
}

private rawQuotedValue(value: string): Knex.Raw {
return this.knex.raw(this.quotedValue(value))
}

// Unfortuantely we cannot rely on knex's identifier escaping because it trims
// the identifier string before escaping it, which breaks cases for us where
// columns that start or end with a space aren't referenced correctly anymore.
//
// So whenever you're using an identifier binding in knex, e.g. knex.raw("??
// as ?", ["foo", "bar"]), you need to make sure you call this:
//
// knex.raw("?? as ?", [this.quotedIdentifier("foo"), "bar"])
//
// Issue we filed against knex about this:
// https://github.com/knex/knex/issues/6143
private rawQuotedIdentifier(key: string): Knex.Raw {
return this.knex.raw(this.quotedIdentifier(key))
}

// Turns an identifier like a.b.c or `a`.`b`.`c` into ["a", "b", "c"]
private splitIdentifier(key: string): string[] {
const [start, end] = this.quoteChars()

@@ -236,7 +278,7 @@ class InternalBuilder {
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)]
return [this.knex.raw("??", [`${alias}.*`])]
}
// get just the fields for this table
return resource.fields

@@ -258,30 +300,40 @@ class InternalBuilder {
const columnSchema = schema[column]

if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
return this.knex.raw(
`${this.quotedIdentifier(
[table, column].join(".")
)}::money::numeric as ${this.quote(field)}`
)
// TODO: figure out how to express this safely without string
// interpolation.
return this.knex.raw(`??::money::numeric as "${field}"`, [
this.rawQuotedIdentifier([table, column].join(".")),
field,
])
}

if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
// Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format
return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)

// TODO: figure out how to express this safely without string
// interpolation.
return this.knex.raw(`CONVERT(varchar, ??, 108) as "${field}"`, [
this.rawQuotedIdentifier(field),
])
}

const quoted = table
? `${this.quote(table)}.${this.quote(column)}`
: this.quote(field)
return this.knex.raw(quoted)
if (table) {
return this.rawQuotedIdentifier(`${table}.${column}`)
} else {
return this.rawQuotedIdentifier(field)
}
})
}

// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
// so when we use them we need to wrap them in to_char(). This function
// converts a field name to the appropriate identifier.
private convertClobs(field: string, opts?: { forSelect?: boolean }): string {
private convertClobs(
field: string,
opts?: { forSelect?: boolean }
): Knex.Raw {
if (this.client !== SqlClient.ORACLE) {
throw new Error(
"you've called convertClobs on a DB that's not Oracle, this is a mistake"

@@ -290,7 +342,7 @@ class InternalBuilder {
const parts = this.splitIdentifier(field)
const col = parts.pop()!
const schema = this.table.schema[col]
let identifier = this.quotedIdentifier(field)
let identifier = this.rawQuotedIdentifier(field)

if (
schema.type === FieldType.STRING ||

@@ -301,9 +353,12 @@ class InternalBuilder {
schema.type === FieldType.BARCODEQR
) {
if (opts?.forSelect) {
identifier = `to_char(${identifier}) as ${this.quotedIdentifier(col)}`
identifier = this.knex.raw("to_char(??) as ??", [
identifier,
this.rawQuotedIdentifier(col),
])
} else {
identifier = `to_char(${identifier})`
identifier = this.knex.raw("to_char(??)", [identifier])
}
}
return identifier

@@ -427,7 +482,6 @@ class InternalBuilder {
filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const mainKnex = this.knex
const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName

@@ -449,8 +503,8 @@ class InternalBuilder {
relationship.to &&
relationship.tableName
) {
const joinTable = mainKnex
.select(mainKnex.raw(1))
const joinTable = this.knex
.select(this.knex.raw(1))
.from({ [toAlias]: relatedTableName })
let subQuery = joinTable.clone()
const manyToMany = validateManyToMany(relationship)

@@ -485,9 +539,7 @@ class InternalBuilder {
.where(
`${throughAlias}.${manyToMany.from}`,
"=",
mainKnex.raw(
this.quotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
this.rawQuotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
// in SQS the same junction table is used for different many-to-many relationships between the
// two same tables, this is needed to avoid rows ending up in all columns

@@ -516,7 +568,7 @@ class InternalBuilder {
subQuery = subQuery.where(
toKey,
"=",
mainKnex.raw(this.quotedIdentifier(foreignKey))
this.rawQuotedIdentifier(foreignKey)
)

query = query.where(q => {

@@ -546,7 +598,7 @@ class InternalBuilder {
filters = this.parseFilters({ ...filters })
const aliases = this.query.tableAliases
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
const shouldOr = filters.allOr
const isSqlite = this.client === SqlClient.SQL_LITE
const tableName = isSqlite ? this.table._id! : this.table.name

@@ -610,7 +662,7 @@ class InternalBuilder {
value
)
} else if (shouldProcessRelationship) {
if (allOr) {
if (shouldOr) {
query = query.or
}
query = builder.addRelationshipForFilter(

@@ -626,85 +678,102 @@ class InternalBuilder {
}

const like = (q: Knex.QueryBuilder, key: string, value: any) => {
const fuzzyOr = filters?.fuzzyOr
const fnc = fuzzyOr || allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
return q[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
if (filters?.fuzzyOr || shouldOr) {
q = q.or
}
if (
this.client === SqlClient.ORACLE ||
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`LOWER(??) LIKE ?`, [
this.rawQuotedIdentifier(key),
`%${value.toLowerCase()}%`,
])
}
return q.whereILike(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key),
this.knex.raw("?", [`%${value}%`])
)
}

const contains = (mode: AnySearchFilter, any: boolean = false) => {
const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
const not = mode === filters?.notContains ? "NOT " : ""
function stringifyArray(value: Array<any>, quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `${quoteStyle}${value[i]}${quoteStyle}`
}
const contains = (mode: ArrayFilter, any = false) => {
function addModifiers<T extends {}, Q>(q: Knex.QueryBuilder<T, Q>) {
if (shouldOr || mode === filters?.containsAny) {
q = q.or
}
return `[${value.join(",")}]`
if (mode === filters?.notContains) {
q = q.not
}
return q
}

if (this.client === SqlClient.POSTGRES) {
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
const wrap = any ? "" : "'"
const op = any ? "\\?| array" : "@>"
const fieldNames = key.split(/\./g)
const table = fieldNames[0]
const col = fieldNames[1]
return q[rawFnc](
`${not}COALESCE("${table}"."${col}"::jsonb ${op} ${wrap}${stringifyArray(
value,
any ? "'" : '"'
)}${wrap}, FALSE)`
)
q = addModifiers(q)
if (any) {
return q.whereRaw(`COALESCE(??::jsonb \\?| array??, FALSE)`, [
this.rawQuotedIdentifier(key),
this.knex.raw(stringifyArray(value, "'")),
])
} else {
return q.whereRaw(`COALESCE(??::jsonb @> '??', FALSE)`, [
this.rawQuotedIdentifier(key),
this.knex.raw(stringifyArray(value)),
])
}
})
} else if (
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
return q[rawFnc](
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
value
)}'), FALSE)`
)
return addModifiers(q).whereRaw(`COALESCE(?(??, ?), FALSE)`, [
this.knex.raw(any ? "JSON_OVERLAPS" : "JSON_CONTAINS"),
this.rawQuotedIdentifier(key),
this.knex.raw(wrap(stringifyArray(value))),
])
})
} else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
let statement = ""
const identifier = this.quotedIdentifier(key)
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `%"${value[i].toLowerCase()}"%`
} else {
value[i] = `%${value[i]}%`
}
statement += `${
statement ? andOr : ""
}COALESCE(LOWER(${identifier}), '') LIKE ?`
}

if (statement === "") {
if (value.length === 0) {
return q
}

if (not) {
return q[rawFnc](
`(NOT (${statement}) OR ${identifier} IS NULL)`,
value
)
} else {
return q[rawFnc](statement, value)
}
q = q.where(subQuery => {
if (mode === filters?.notContains) {
subQuery = subQuery.not
}

subQuery = subQuery.where(subSubQuery => {
for (const elem of value) {
if (mode === filters?.containsAny) {
subSubQuery = subSubQuery.or
} else {
subSubQuery = subSubQuery.and
}

const lower =
typeof elem === "string" ? `"${elem.toLowerCase()}"` : elem

subSubQuery = subSubQuery.whereLike(
// @ts-expect-error knex types are wrong, raw is fine here
this.knex.raw(`COALESCE(LOWER(??), '')`, [
this.rawQuotedIdentifier(key),
]),
`%${lower}%`
)
}
})
if (mode === filters?.notContains) {
subQuery = subQuery.or.whereNull(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key)
)
}
return subQuery
})
return q
})
}
}

@@ -730,45 +799,46 @@ class InternalBuilder {
}

if (filters.oneOf) {
const fnc = allOr ? "orWhereIn" : "whereIn"
iterate(
filters.oneOf,
ArrayOperator.ONE_OF,
(q, key: string, array) => {
if (this.client === SqlClient.ORACLE) {
key = this.convertClobs(key)
array = Array.isArray(array) ? array : [array]
const binding = new Array(array.length).fill("?").join(",")
return q.whereRaw(`${key} IN (${binding})`, array)
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.convertClobs(key)
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
},
(q, key: string[], array) => {
if (this.client === SqlClient.ORACLE) {
const keyStr = `(${key.map(k => this.convertClobs(k)).join(",")})`
const binding = `(${array
.map((a: any) => `(${new Array(a.length).fill("?").join(",")})`)
.join(",")})`
return q.whereRaw(`${keyStr} IN ${binding}`, array.flat())
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = key.map(k => this.convertClobs(k))
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
}
)
}
if (filters.string) {
iterate(filters.string, BasicOperator.STRING, (q, key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
return q[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
if (shouldOr) {
q = q.or
}
if (
this.client === SqlClient.ORACLE ||
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`LOWER(??) LIKE ?`, [
this.rawQuotedIdentifier(key),
`${value.toLowerCase()}%`,
])
} else {
return q.whereILike(key, `${value}%`)
}
})
}

@@ -795,67 +865,59 @@ class InternalBuilder {

const schema = this.getFieldSchema(key)

let rawKey: string | Knex.Raw = key
let high = value.high
let low = value.low

if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.knex.raw(this.convertClobs(key))
rawKey = this.convertClobs(key)
} else if (
this.client === SqlClient.SQL_LITE &&
schema?.type === FieldType.BIGINT
) {
rawKey = this.knex.raw("CAST(?? AS INTEGER)", [
this.rawQuotedIdentifier(key),
])
high = this.knex.raw("CAST(? AS INTEGER)", [value.high])
low = this.knex.raw("CAST(? AS INTEGER)", [value.low])
}

if (shouldOr) {
q = q.or
}

if (lowValid && highValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[value.low, value.high]
)
} else {
const fnc = allOr ? "orWhereBetween" : "whereBetween"
return q[fnc](key, [value.low, value.high])
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.whereBetween(rawKey, [low, high])
} else if (lowValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
value.low,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, ">=", value.low)
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.where(rawKey, ">=", low)
} else if (highValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
value.high,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, "<=", value.high)
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.where(rawKey, "<=", high)
}
return q
})
}
if (filters.equal) {
iterate(filters.equal, BasicOperator.EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) {
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`,
[value]
)
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q[fnc](`(${identifier} IS NOT NULL AND ${identifier} = ?)`, [
return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 1`, [
this.rawQuotedIdentifier(key),
value,
])
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q.where(subq =>
// @ts-expect-error knex types are wrong, raw is fine here
subq.whereNotNull(identifier).andWhere(identifier, value)
)
} else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`, [
return q.whereRaw(`COALESCE(?? = ?, FALSE)`, [
this.rawQuotedIdentifier(key),
value,
])
}

@@ -863,20 +925,30 @@ class InternalBuilder {
}
if (filters.notEqual) {
iterate(filters.notEqual, BasicOperator.NOT_EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) {
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`,
[value]
)
return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 0`, [
this.rawQuotedIdentifier(key),
value,
])
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q[fnc](
`(${identifier} IS NOT NULL AND ${identifier} != ?) OR ${identifier} IS NULL`,
[value]
return (
q
.where(subq =>
subq.not
// @ts-expect-error knex types are wrong, raw is fine here
.whereNull(identifier)
.and.where(identifier, "!=", value)
)
// @ts-expect-error knex types are wrong, raw is fine here
.or.whereNull(identifier)
)
} else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`, [
return q.whereRaw(`COALESCE(?? != ?, TRUE)`, [
this.rawQuotedIdentifier(key),
value,
])
}

@@ -884,14 +956,18 @@ class InternalBuilder {
}
if (filters.empty) {
iterate(filters.empty, BasicOperator.EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNull" : "whereNull"
return q[fnc](key)
if (shouldOr) {
q = q.or
}
return q.whereNull(key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, BasicOperator.NOT_EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
return q[fnc](key)
if (shouldOr) {
q = q.or
}
return q.whereNotNull(key)
})
}
if (filters.contains) {

@@ -976,9 +1052,7 @@ class InternalBuilder {
const selectFields = qualifiedFields.map(field =>
this.convertClobs(field, { forSelect: true })
)
query = query
.groupByRaw(groupByFields.join(", "))
.select(this.knex.raw(selectFields.join(", ")))
query = query.groupBy(groupByFields).select(selectFields)
} else {
query = query.groupBy(qualifiedFields).select(qualifiedFields)
}

@@ -990,11 +1064,10 @@ class InternalBuilder {
if (this.client === SqlClient.ORACLE) {
const field = this.convertClobs(`${tableName}.${aggregation.field}`)
query = query.select(
this.knex.raw(
`COUNT(DISTINCT ${field}) as ${this.quotedIdentifier(
aggregation.name
)}`
)
this.knex.raw(`COUNT(DISTINCT ??) as ??`, [
field,
aggregation.name,
])
)
} else {
query = query.countDistinct(

@@ -1059,9 +1132,11 @@ class InternalBuilder {
} else {
let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw(
`${this.convertClobs(composite)} ${direction} nulls ${nulls}`
)
query = query.orderByRaw(`?? ?? nulls ??`, [
this.convertClobs(composite),
this.knex.raw(direction),
this.knex.raw(nulls as string),
])
} else {
query = query.orderBy(composite, direction, nulls)
}

@@ -1091,17 +1166,22 @@ class InternalBuilder {

private buildJsonField(field: string): string {
const parts = field.split(".")
let tableField: string, unaliased: string
let unaliased: string

let tableField: string
if (parts.length > 1) {
const alias = parts.shift()!
unaliased = parts.join(".")
tableField = `${this.quote(alias)}.${this.quote(unaliased)}`
tableField = `${alias}.${unaliased}`
} else {
unaliased = parts.join(".")
tableField = this.quote(unaliased)
tableField = unaliased
}

const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return `'${unaliased}'${separator}${tableField}`
return this.knex
.raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
.toString()
}

maxFunctionParameters() {

@@ -1197,13 +1277,13 @@ class InternalBuilder {
subQuery = subQuery.where(
correlatedTo,
"=",
knex.raw(this.quotedIdentifier(correlatedFrom))
this.rawQuotedIdentifier(correlatedFrom)
)

const standardWrap = (select: string): Knex.QueryBuilder => {
const standardWrap = (select: Knex.Raw): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
// @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({
return knex.select(select).from({
[toAlias]: subQuery,
})
}

@@ -1213,12 +1293,12 @@ class InternalBuilder {
// need to check the junction table document is to the right column, this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap(
`json_group_array(json_object(${fieldList}))`
this.knex.raw(`json_group_array(json_object(${fieldList}))`)
)
break
case SqlClient.POSTGRES:
wrapperQuery = standardWrap(
`json_agg(json_build_object(${fieldList}))`
this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
)
break
case SqlClient.MARIADB:

@@ -1232,21 +1312,25 @@ class InternalBuilder {
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))`
this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
)
break
case SqlClient.MS_SQL:
case SqlClient.MS_SQL: {
const comparatorQuery = knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})

wrapperQuery = knex.raw(
`(SELECT ${this.quote(toAlias)} = (${knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})} FOR JSON PATH))`
`(SELECT ?? = (${comparatorQuery} FOR JSON PATH))`,
[this.rawQuotedIdentifier(toAlias)]
)
break
}
default:
throw new Error(`JSON relationships not implement for ${sqlClient}`)
}

@@ -1 +1 @@
Subproject commit 297fdc937e9c650b4964fc1a942b60022b195865
Subproject commit f6aebba94451ce47bba551926e5ad72bd75f71c6

@@ -20,19 +20,15 @@ const options = {
{
url: "https://budibase.app/api/public/v1",
description: "Budibase Cloud API",
},
{
url: "{protocol}://{hostname}/api/public/v1",
description: "Budibase self hosted API",
variables: {
protocol: {
default: "http",
description:
"Whether HTTP or HTTPS should be used to communicate with your Budibase instance.",
apiKey: {
default: "<user API key>",
description: "The API key of the user to assume for API call.",
},
hostname: {
default: "localhost:10000",
description: "The URL of your Budibase instance.",
appId: {
default: "<App ID>",
description:
"The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development).",
},
},
},

@@ -8,19 +8,15 @@
"servers": [
{
"url": "https://budibase.app/api/public/v1",
"description": "Budibase Cloud API"
},
{
"url": "{protocol}://{hostname}/api/public/v1",
"description": "Budibase self hosted API",
"description": "Budibase Cloud API",
"variables": {
"protocol": {
"default": "http",
"description": "Whether HTTP or HTTPS should be used to communicate with your Budibase instance."
"apiKey": {
"default": "<user API key>",
"description": "The API key of the user to assume for API call."
},
"hostname": {
"default": "localhost:10000",
"description": "The URL of your Budibase instance."
"appId": {
"default": "<App ID>",
"description": "The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development)."
}
}
}

@@ -51,6 +47,7 @@
"required": true,
"description": "The ID of the app which this request is targeting.",
"schema": {
"default": "{{ appId }}",
"type": "string"
}
},

@@ -60,6 +57,7 @@
"required": true,
"description": "The ID of the app which this request is targeting.",
"schema": {
"default": "{{ appId }}",
"type": "string"
}
},

@@ -833,7 +831,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}

@@ -857,6 +856,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",

@@ -1042,7 +1042,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}

@@ -1066,6 +1067,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",

@@ -1262,7 +1264,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}

@@ -1286,6 +1289,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",

@@ -6,16 +6,14 @@ info:
servers:
- url: https://budibase.app/api/public/v1
description: Budibase Cloud API
- url: "{protocol}://{hostname}/api/public/v1"
description: Budibase self hosted API
variables:
protocol:
default: http
description: Whether HTTP or HTTPS should be used to communicate with your
Budibase instance.
hostname:
default: localhost:10000
description: The URL of your Budibase instance.
apiKey:
default: <user API key>
description: The API key of the user to assume for API call.
appId:
default: <App ID>
description: The ID of the app the calls will be executed within the context of,
this should start with app_ (production) or app_dev (development).
components:
parameters:
tableId:

@@ -38,6 +36,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
type: string
appIdUrl:
in: path

@@ -45,6 +44,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
type: string
queryId:
in: path

@@ -761,6 +761,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:

@@ -779,6 +780,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr

@@ -929,6 +931,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:

@@ -947,6 +950,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr

@@ -1104,6 +1108,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:

@@ -1122,6 +1127,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr

@@ -24,6 +24,7 @@ export const appId = {
required: true,
description: "The ID of the app which this request is targeting.",
schema: {
default: "{{ appId }}",
type: "string",
},
}

@@ -34,6 +35,7 @@ export const appIdUrl = {
required: true,
description: "The ID of the app which this request is targeting.",
schema: {
default: "{{ appId }}",
type: "string",
},
}

@@ -0,0 +1,110 @@
import { User, Table, SearchFilters, Row } from "@budibase/types"
import { HttpMethod, MakeRequestResponse, generateMakeRequest } from "./utils"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Expectations } from "../../../../tests/utilities/api/base"

type RequestOpts = { internal?: boolean; appId?: string }

type PublicAPIExpectations = Omit<Expectations, "headers" | "headersNotPresent">

export class PublicAPIRequest {
private makeRequest: MakeRequestResponse | undefined
private appId: string | undefined
private _tables: PublicTableAPI | undefined
private _rows: PublicRowAPI | undefined
private _apiKey: string | undefined

async init(config: TestConfiguration, user: User, opts?: RequestOpts) {
this._apiKey = await config.generateApiKey(user._id)
this.makeRequest = generateMakeRequest(this.apiKey, opts)
this.appId = opts?.appId
this._tables = new PublicTableAPI(this)
this._rows = new PublicRowAPI(this)
return this
}

opts(opts: RequestOpts) {
if (opts.appId) {
this.appId = opts.appId
}
this.makeRequest = generateMakeRequest(this.apiKey, opts)
}

async send(
method: HttpMethod,
endpoint: string,
body?: any,
expectations?: PublicAPIExpectations
) {
if (!this.makeRequest) {
throw new Error("Init has not been called")
}
const res = await this.makeRequest(method, endpoint, body, this.appId)
if (expectations?.status) {
expect(res.status).toEqual(expectations.status)
}
if (expectations?.body) {
expect(res.body).toEqual(expectations?.body)
}
return res.body
}

get apiKey(): string {
if (!this._apiKey) {
throw new Error("Init has not been called")
}
return this._apiKey
}

get tables(): PublicTableAPI {
if (!this._tables) {
throw new Error("Init has not been called")
}
return this._tables
}

get rows(): PublicRowAPI {
if (!this._rows) {
throw new Error("Init has not been called")
}
return this._rows
}
}

export class PublicTableAPI {
request: PublicAPIRequest

constructor(request: PublicAPIRequest) {
this.request = request
}

async create(
table: Table,
expectations?: PublicAPIExpectations
): Promise<{ data: Table }> {
return this.request.send("post", "/tables", table, expectations)
}
}

export class PublicRowAPI {
request: PublicAPIRequest

constructor(request: PublicAPIRequest) {
this.request = request
}

async search(
tableId: string,
query: SearchFilters,
expectations?: PublicAPIExpectations
): Promise<{ data: Row[] }> {
return this.request.send(
"post",
`/tables/${tableId}/rows/search`,
{
query,
},
expectations
)
}
}

@@ -1,4 +1,4 @@
const setup = require("../../tests/utilities")
import * as setup from "../../tests/utilities"

describe("/metrics", () => {
let request = setup.getRequest()

@@ -0,0 +1,71 @@
import * as setup from "../../tests/utilities"
import { roles } from "@budibase/backend-core"
import { basicTable } from "../../../../tests/utilities/structures"
import { Table, User } from "@budibase/types"
import { PublicAPIRequest } from "./Request"

describe("check public API security", () => {
const config = setup.getConfig()
let builderRequest: PublicAPIRequest,
appUserRequest: PublicAPIRequest,
table: Table,
appUser: User

beforeAll(async () => {
await config.init()
const builderUser = await config.globalUser()
appUser = await config.globalUser({
builder: { global: false },
roles: {
[config.getProdAppId()]: roles.BUILTIN_ROLE_IDS.BASIC,
},
})
builderRequest = await new PublicAPIRequest().init(config, builderUser)
appUserRequest = await new PublicAPIRequest().init(config, appUser)
table = (await builderRequest.tables.create(basicTable())).data
})

it("should allow with builder API key", async () => {
const res = await builderRequest.rows.search(
table._id!,
{},
{
status: 200,
}
)
expect(res.data.length).toEqual(0)
})

it("should 403 when from browser, but API key", async () => {
await appUserRequest.rows.search(
table._id!,
{},
{
status: 403,
}
)
})

it("should re-direct when using cookie", async () => {
const headers = await config.login({
userId: appUser._id!,
builder: false,
prodApp: false,
})
await config.withHeaders(
{
...headers,
"User-Agent": config.browserUserAgent(),
},
async () => {
await config.api.row.search(
table._id!,
{ query: {} },
{
status: 302,
}
)
}
)
})
})

@@ -21,17 +21,19 @@ export type MakeRequestWithFormDataResponse = (
function base(
apiKey: string,
endpoint: string,
intAppId: string | null,
isInternal: boolean
opts?: {
intAppId?: string
internal?: boolean
}
) {
const extraHeaders: any = {
"x-budibase-api-key": apiKey,
}
if (intAppId) {
extraHeaders["x-budibase-app-id"] = intAppId
if (opts?.intAppId) {
extraHeaders["x-budibase-app-id"] = opts.intAppId
}

const url = isInternal
const url = opts?.internal
? endpoint
: checkSlashesInUrl(`/api/public/v1/${endpoint}`)
return { headers: extraHeaders, url }

@@ -39,7 +41,7 @@ function base(

export function generateMakeRequest(
apiKey: string,
isInternal = false
opts?: { internal?: boolean }
): MakeRequestResponse {
const request = setup.getRequest()!
const config = setup.getConfig()!

@@ -47,9 +49,12 @@ export function generateMakeRequest(
method: HttpMethod,
endpoint: string,
body?: any,
intAppId: string | null = config.getAppId()
intAppId: string | undefined = config.getAppId()
) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal)
const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
if (body && typeof body !== "string") {
headers["Content-Type"] = "application/json"
}
const req = request[method](url).set(config.defaultHeaders(headers))
if (body) {
req.send(body)

@@ -62,7 +67,7 @@ export function generateMakeRequest(

export function generateMakeRequestWithFormData(
apiKey: string,
isInternal = false
opts?: { internal?: boolean; browser?: boolean }
): MakeRequestWithFormDataResponse {
const request = setup.getRequest()!
const config = setup.getConfig()!

@@ -70,9 +75,9 @@ export function generateMakeRequestWithFormData(
method: HttpMethod,
endpoint: string,
fields: Record<string, string | { path: string }>,
intAppId: string | null = config.getAppId()
intAppId: string | undefined = config.getAppId()
) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal)
const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
const req = request[method](url).set(config.defaultHeaders(headers))
for (let [field, value] of Object.entries(fields)) {
if (typeof value === "string") {

@@ -1,9 +1,10 @@
const setup = require("./utilities")
const { basicScreen, powerScreen } = setup.structures
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
const { roles } = require("@budibase/backend-core")
const { BUILTIN_ROLE_IDS } = roles
import * as setup from "./utilities"
import { checkBuilderEndpoint, runInProd } from "./utilities/TestFunctions"
import { roles } from "@budibase/backend-core"
import { Screen } from "@budibase/types"

const { BUILTIN_ROLE_IDS } = roles
const { basicScreen, powerScreen } = setup.structures
const route = "/test"

// there are checks which are disabled in test env,

@@ -12,7 +13,7 @@ const route = "/test"
describe("/routing", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let basic, power
let basic: Screen, power: Screen

afterAll(setup.afterAll)

@@ -25,26 +26,40 @@ describe("/routing", () => {

describe("fetch", () => {
it("prevents a public user from accessing development app", async () => {
await runInProd(() => {
return request
.get(`/api/routing/client`)
.set(config.publicHeaders({ prodApp: false }))
.expect(302)
})
await config.withHeaders(
{
"User-Agent": config.browserUserAgent(),
},
async () => {
await runInProd(() => {
return request
.get(`/api/routing/client`)
.set(config.publicHeaders({ prodApp: false }))
.expect(302)
})
}
)
})

it("prevents a non builder from accessing development app", async () => {
await runInProd(async () => {
return request
.get(`/api/routing/client`)
.set(
await config.roleHeaders({
roleId: BUILTIN_ROLE_IDS.BASIC,
prodApp: false,
})
)
.expect(302)
})
await config.withHeaders(
{
"User-Agent": config.browserUserAgent(),
},
async () => {
await runInProd(async () => {
return request
.get(`/api/routing/client`)
.set(
await config.roleHeaders({
roleId: BUILTIN_ROLE_IDS.BASIC,
prodApp: false,
})
)
.expect(302)
})
}
)
})
it("returns the correct routing for basic user", async () => {
const res = await request

@@ -7,6 +7,7 @@ import {
import {
context,
db as dbCore,
docIds,
features,
MAX_VALID_DATE,
MIN_VALID_DATE,

@@ -61,6 +62,7 @@ describe.each([
const isLucene = name === "lucene"
const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory
const isOracle = name === DatabaseName.ORACLE
const isSql = !isInMemory && !isLucene
const config = setup.getConfig()

@@ -129,14 +131,14 @@ describe.each([
}
})

async function createTable(schema: TableSchema) {
async function createTable(schema?: TableSchema) {
const table = await config.api.table.save(
tableForDatasource(datasource, { schema })
)
return table._id!
}

async function createView(tableId: string, schema: ViewV2Schema) {
async function createView(tableId: string, schema?: ViewV2Schema) {
const view = await config.api.viewV2.create({
tableId: tableId,
name: generator.guid(),

@@ -153,22 +155,51 @@ describe.each([
rows = await config.api.row.fetch(tableOrViewId)
}

async function getTable(tableOrViewId: string): Promise<Table> {
if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
return await config.api.table.get(view.tableId)
} else {
return await config.api.table.get(tableOrViewId)
}
}

async function assertTableExists(nameOrTable: string | Table) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
expect(await client!.schema.hasTable(name)).toBeTrue()
}

async function assertTableNumRows(
nameOrTable: string | Table,
numRows: number
) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
const row = await client!.from(name).count()
const count = parseInt(Object.values(row[0])[0] as string)
expect(count).toEqual(numRows)
}

describe.each([
["table", createTable],
[
"view",
async (schema: TableSchema) => {
async (schema?: TableSchema) => {
const tableId = await createTable(schema)
const viewId = await createView(
tableId,
Object.keys(schema).reduce<ViewV2Schema>((viewSchema, fieldName) => {
const field = schema[fieldName]
viewSchema[fieldName] = {
visible: field.visible ?? true,
readonly: false,
}
return viewSchema
}, {})
Object.keys(schema || {}).reduce<ViewV2Schema>(
(viewSchema, fieldName) => {
const field = schema![fieldName]
viewSchema[fieldName] = {
visible: field.visible ?? true,
readonly: false,
}
return viewSchema
},
{}
)
)
return viewId
},

@@ -792,10 +823,11 @@ describe.each([
})
})

describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const
describe.each(stringTypes)("%s", type => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
name: { name: "name", type: FieldType.STRING },
name: { name: "name", type },
})
await createRows([{ name: "foo" }, { name: "bar" }])
})

@@ -1602,7 +1634,7 @@ describe.each([
})
})

describe.each([FieldType.ARRAY, FieldType.OPTIONS])("%s", () => {
describe("arrays", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
numbers: {

@@ -3470,5 +3502,105 @@ describe.each([
])
})
})

isSql &&
!isSqs &&
describe("SQL injection", () => {
const badStrings = [
"1; DROP TABLE %table_name%;",
"1; DELETE FROM %table_name%;",
"1; UPDATE %table_name% SET name = 'foo';",
"1; INSERT INTO %table_name% (name) VALUES ('foo');",
"' OR '1'='1' --",
"'; DROP TABLE %table_name%; --",
"' OR 1=1 --",
"' UNION SELECT null, null, null; --",
"' AND (SELECT COUNT(*) FROM %table_name%) > 0 --",
"\"; EXEC xp_cmdshell('dir'); --",
"\"' OR 'a'='a",
"OR 1=1;",
"'; SHUTDOWN --",
]

describe.each(badStrings)("bad string: %s", badStringTemplate => {
// The SQL that knex generates when you try to use a double quote in a
// field name is always invalid and never works, so we skip it for these
// tests.
const skipFieldNameCheck = isOracle && badStringTemplate.includes('"')

!skipFieldNameCheck &&
it("should not allow SQL injection as a field name", async () => {
const tableOrViewId = await createTableOrView()
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)

await config.api.table.save({
...table,
schema: {
...table.schema,
[badString]: { name: badString, type: FieldType.STRING },
},
})

if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
await config.api.viewV2.update({
...view,
schema: {
[badString]: { visible: true },
},
})
}

await config.api.row.save(tableOrViewId, { [badString]: "foo" })

await assertTableExists(table)
await assertTableNumRows(table, 1)

const { rows } = await config.api.row.search(
tableOrViewId,
{ query: {} },
{ status: 200 }
)

expect(rows).toHaveLength(1)

await assertTableExists(table)
await assertTableNumRows(table, 1)
})

it("should not allow SQL injection as a field value", async () => {
const tableOrViewId = await createTableOrView({
foo: {
name: "foo",
type: FieldType.STRING,
},
})
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)

await config.api.row.save(tableOrViewId, { foo: "foo" })

await assertTableExists(table)
await assertTableNumRows(table, 1)

const { rows } = await config.api.row.search(
tableOrViewId,
{ query: { equal: { foo: badString } } },
{ status: 200 }
)

expect(rows).toBeEmpty()
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
})
})
})
})

@@ -257,7 +257,7 @@ export interface components {
           * @description Defines whether this is a static or dynamic formula.
           * @enum {string}
           */
          formulaType?: "static" | "dynamic";
          formulaType?: "static" | "dynamic" | "ai";
        }
      | {
          /**
@@ -277,11 +277,14 @@ export interface components {
            | "link"
            | "formula"
            | "auto"
            | "ai"
            | "json"
            | "internal"
            | "barcodeqr"
            | "signature_single"
            | "bigint"
            | "bb_reference";
            | "bb_reference"
            | "bb_reference_single";
          /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
          constraints?: {
            /** @enum {string} */
@@ -366,7 +369,7 @@ export interface components {
           * @description Defines whether this is a static or dynamic formula.
           * @enum {string}
           */
          formulaType?: "static" | "dynamic";
          formulaType?: "static" | "dynamic" | "ai";
        }
      | {
          /**
@@ -386,11 +389,14 @@ export interface components {
            | "link"
            | "formula"
            | "auto"
            | "ai"
            | "json"
            | "internal"
            | "barcodeqr"
            | "signature_single"
            | "bigint"
            | "bb_reference";
            | "bb_reference"
            | "bb_reference_single";
          /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
          constraints?: {
            /** @enum {string} */
@@ -477,7 +483,7 @@ export interface components {
           * @description Defines whether this is a static or dynamic formula.
           * @enum {string}
           */
          formulaType?: "static" | "dynamic";
          formulaType?: "static" | "dynamic" | "ai";
        }
      | {
          /**
@@ -497,11 +503,14 @@ export interface components {
            | "link"
            | "formula"
            | "auto"
            | "ai"
            | "json"
            | "internal"
            | "barcodeqr"
            | "signature_single"
            | "bigint"
            | "bb_reference";
            | "bb_reference"
            | "bb_reference_single";
          /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
          constraints?: {
            /** @enum {string} */
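The spec changes above add two things to the public API surface: formula columns may now declare formulaType "ai", and "bb_reference_single" joins the list of column types. A hedged sketch of a table schema exercising both (field names and values are invented for illustration, not taken from the spec):

// Illustrative payload only; shapes beyond the two new enum members are assumed.
const tableSchema = {
  name: "contacts",
  schema: {
    summary: {
      name: "summary",
      type: "formula",
      formulaType: "ai", // newly allowed alongside "static" and "dynamic"
    },
    owner: {
      name: "owner",
      type: "bb_reference_single", // newly allowed single-reference column type
    },
  },
}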
@@ -212,7 +212,7 @@ describe("SQL query builder", () => {
    const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`]
    expect(query).toEqual({
      bindings: [...filterSet, limit],
      sql: `select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5`,
      sql: `select * from (select * from "test" where ((COALESCE(LOWER("test"."age"), '') like :1 and COALESCE(LOWER("test"."age"), '') like :2)) and ((COALESCE(LOWER("test"."name"), '') like :3 and COALESCE(LOWER("test"."name"), '') like :4)) order by "test"."id" asc) where rownum <= :5`,
    })

    query = new Sql(SqlClient.ORACLE, limit)._query(
@@ -244,7 +244,7 @@ describe("SQL query builder", () => {

    expect(query).toEqual({
      bindings: ["John", limit],
      sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
      sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
    })
  })

@@ -262,7 +262,7 @@ describe("SQL query builder", () => {

    expect(query).toEqual({
      bindings: ["John", limit],
      sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2`,
      sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") != :1) or to_char("test"."name") is null order by "test"."id" asc) where rownum <= :2`,
    })
  })
})
@@ -10,7 +10,7 @@ import {
import { generateUserMetadataID, isDevAppID } from "../db/utils"
import { getCachedSelf } from "../utilities/global"
import env from "../environment"
import { isWebhookEndpoint } from "./utils"
import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils"
import { UserCtx, ContextUser } from "@budibase/types"
import tracer from "dd-trace"

@@ -27,7 +27,7 @@ export default async (ctx: UserCtx, next: any) => {
  }

  // deny access to application preview
  if (!env.isTest()) {
  if (isBrowser(ctx) && !isApiKey(ctx)) {
    if (
      isDevAppID(requestAppId) &&
      !isWebhookEndpoint(ctx) &&
@@ -1,4 +1,6 @@
require("../../db").init()
import * as db from "../../db"

db.init()
mockAuthWithNoCookie()
mockWorker()
mockUserGroups()
@@ -45,7 +47,7 @@ function mockAuthWithNoCookie() {
    },
    cache: {
      user: {
        getUser: async id => {
        getUser: async () => {
          return {
            _id: "us_uuid1",
          }
@@ -82,7 +84,7 @@ function mockAuthWithCookie() {
    },
    cache: {
      user: {
        getUser: async id => {
        getUser: async () => {
          return {
            _id: "us_uuid1",
          }
@@ -94,6 +96,10 @@ function mockAuthWithCookie() {
}

class TestConfiguration {
  next: jest.MockedFunction<any>
  throw: jest.MockedFunction<any>
  ctx: any

  constructor() {
    this.next = jest.fn()
    this.throw = jest.fn()
@@ -130,7 +136,7 @@ class TestConfiguration {
}

describe("Current app middleware", () => {
  let config
  let config: TestConfiguration

  beforeEach(() => {
    config = new TestConfiguration()
@@ -192,7 +198,7 @@ describe("Current app middleware", () => {
    },
    cache: {
      user: {
        getUser: async id => {
        getUser: async () => {
          return {
            _id: "us_uuid1",
          }
@@ -1,9 +1,18 @@
import { BBContext } from "@budibase/types"
import { LoginMethod, UserCtx } from "@budibase/types"

const WEBHOOK_ENDPOINTS = new RegExp(
  ["webhooks/trigger", "webhooks/schema"].join("|")
)

export function isWebhookEndpoint(ctx: BBContext) {
export function isWebhookEndpoint(ctx: UserCtx) {
  return WEBHOOK_ENDPOINTS.test(ctx.request.url)
}

export function isBrowser(ctx: UserCtx) {
  const browser = ctx.userAgent?.browser
  return browser && browser !== "unknown"
}

export function isApiKey(ctx: UserCtx) {
  return ctx.loginMethod === LoginMethod.API_KEY
}
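isBrowser and isApiKey give middleware a cheap way to tell interactive browser traffic apart from programmatic access. A small sketch of how they combine (the contexts are hand-built for illustration; the import path assumes the same middleware package):

import { LoginMethod, UserCtx } from "@budibase/types"
import { isBrowser, isApiKey } from "./utils"

// Block preview access only for real browsers that did not present an API key.
function shouldBlockPreview(ctx: UserCtx) {
  return isBrowser(ctx) && !isApiKey(ctx)
}

const browserCtx = {
  userAgent: { browser: "Chrome" },
  loginMethod: LoginMethod.COOKIE,
} as unknown as UserCtx

const apiKeyCtx = {
  userAgent: { browser: "unknown" },
  loginMethod: LoginMethod.API_KEY,
} as unknown as UserCtx

shouldBlockPreview(browserCtx) // true - cookie-authenticated Chrome request
shouldBlockPreview(apiKeyCtx) // false - programmatic call with an API key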
@@ -423,6 +423,7 @@ export default class TestConfiguration {
        Accept: "application/json",
        Cookie: [`${constants.Cookie.Auth}=${authToken}`],
        [constants.Header.APP_ID]: appId,
        ...this.temporaryHeaders,
      }
    })
  }
@@ -527,6 +528,10 @@ export default class TestConfiguration {
    return this.login({ userId: email, roleId, builder, prodApp })
  }

  browserUserAgent() {
    return "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
  }

  // TENANCY

  tenantHost() {
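browserUserAgent and the temporaryHeaders spread exist so tests can make requests that look like they came from a browser. A rough sketch of the intent (assumes temporaryHeaders can be assigned directly in a test; the real test helper may expose a different setter):

// Sketch only: direct assignment to temporaryHeaders is an assumption.
config.temporaryHeaders = { "user-agent": config.browserUserAgent() }
// Requests made through the test config now carry a Chrome user agent,
// so middleware calling isBrowser(ctx) will treat them as browser traffic.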
@@ -19,7 +19,8 @@
    "@types/koa": "2.13.4",
    "@types/redlock": "4.0.7",
    "rimraf": "3.0.2",
    "typescript": "5.5.2"
    "typescript": "5.5.2",
    "koa-useragent": "^4.1.0"
  },
  "dependencies": {
    "scim-patch": "^0.8.1"
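koa-useragent is the package that fills in ctx.userAgent. It registers as ordinary Koa middleware; a minimal standalone sketch (illustrative app, not the Budibase server wiring):

import Koa from "koa"
import { userAgent } from "koa-useragent"

const app = new Koa()
app.use(userAgent) // parses the User-Agent header into ctx.userAgent

app.use(async ctx => {
  // ctx.userAgent.browser and ctx.userAgent.source are now available
  ctx.body = {
    browser: ctx.userAgent.browser,
    source: ctx.userAgent.source,
  }
})

app.listen(3000)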
@@ -2,6 +2,12 @@ import { Context, Request } from "koa"
import { User, Role, UserRoles, Account, ConfigType } from "../documents"
import { FeatureFlag, License } from "../sdk"
import { Files } from "formidable"
import { UserAgentContext } from "koa-useragent"

export enum LoginMethod {
  API_KEY = "api_key",
  COOKIE = "cookie",
}

export interface ContextUser extends Omit<User, "roles"> {
  globalId?: string
@@ -31,6 +37,7 @@ export interface BBRequest<RequestBody> extends Request {
export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
  request: BBRequest<RequestBody>
  body: ResponseBody
  userAgent: UserAgentContext["userAgent"]
}

/**
@@ -40,6 +47,7 @@ export interface UserCtx<RequestBody = any, ResponseBody = any>
  extends Ctx<RequestBody, ResponseBody> {
  user: ContextUser
  roleId?: string
  loginMethod?: LoginMethod
}

/**
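With userAgent typed on Ctx and loginMethod on UserCtx, handlers can read both without casts. A short sketch of a handler using the new fields (the helper itself is hypothetical; only the field types come from the definitions above):

import { LoginMethod, UserCtx } from "@budibase/types"

// Hypothetical audit helper relying on the newly typed context fields.
function describeRequest(ctx: UserCtx) {
  return {
    browser: ctx.userAgent?.browser, // typed via UserAgentContext["userAgent"]
    viaApiKey: ctx.loginMethod === LoginMethod.API_KEY,
    userId: ctx.user?._id,
  }
}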
@@ -49,7 +49,7 @@ type BasicFilter<T = any> = Record<string, T> & {
  [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
}

type ArrayFilter = Record<string, any[]> & {
export type ArrayFilter = Record<string, any[]> & {
  [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: {
    id: string[]
    values: string[]
@@ -44,9 +44,7 @@ const getEventFns = async (config: Config, existing?: Config) => {
      fns.push(events.email.SMTPCreated)
    } else if (isAIConfig(config)) {
      fns.push(() => events.ai.AIConfigCreated)
      fns.push(() =>
        pro.quotas.updateCustomAIConfigCount(Object.keys(config.config).length)
      )
      fns.push(() => pro.quotas.addCustomAIConfig())
    } else if (isGoogleConfig(config)) {
      fns.push(() => events.auth.SSOCreated(ConfigType.GOOGLE))
      if (config.config.activated) {
@@ -85,9 +83,6 @@ const getEventFns = async (config: Config, existing?: Config) => {
      fns.push(events.email.SMTPUpdated)
    } else if (isAIConfig(config)) {
      fns.push(() => events.ai.AIConfigUpdated)
      fns.push(() =>
        pro.quotas.updateCustomAIConfigCount(Object.keys(config.config).length)
      )
    } else if (isGoogleConfig(config)) {
      fns.push(() => events.auth.SSOUpdated(ConfigType.GOOGLE))
      if (!existing.config.activated && config.config.activated) {
@@ -253,7 +248,7 @@ export async function save(ctx: UserCtx<Config>) {
      if (existingConfig) {
        await verifyAIConfig(config, existingConfig)
      }
      await pro.quotas.updateCustomAIConfigCount(Object.keys(config).length)
      await pro.quotas.addCustomAIConfig()
      break
    }
  } catch (err: any) {
@@ -342,29 +337,43 @@ export async function find(ctx: UserCtx) {
    let scopedConfig = await configs.getConfig(type)

    if (scopedConfig) {
      if (type === ConfigType.OIDC_LOGOS) {
        enrichOIDCLogos(scopedConfig)
      }

      if (type === ConfigType.AI) {
        await pro.sdk.ai.enrichAIConfig(scopedConfig)
        // Strip out the API Keys from the response so they don't show in the UI
        for (const key in scopedConfig.config) {
          if (scopedConfig.config[key].apiKey) {
            scopedConfig.config[key].apiKey = PASSWORD_REPLACEMENT
          }
        }
      }
      ctx.body = scopedConfig
      await handleConfigType(type, scopedConfig)
    } else if (type === ConfigType.AI) {
      scopedConfig = { config: {} } as AIConfig
      await handleAIConfig(scopedConfig)
    } else {
      // don't throw an error, there simply is nothing to return
      // If no config found and not AI type, just return an empty body
      ctx.body = {}
      return
    }

    ctx.body = scopedConfig
  } catch (err: any) {
    ctx.throw(err?.status || 400, err)
  }
}

async function handleConfigType(type: ConfigType, config: Config) {
  if (type === ConfigType.OIDC_LOGOS) {
    enrichOIDCLogos(config)
  } else if (type === ConfigType.AI) {
    await handleAIConfig(config)
  }
}

async function handleAIConfig(config: AIConfig) {
  await pro.sdk.ai.enrichAIConfig(config)
  stripApiKeys(config)
}

function stripApiKeys(config: AIConfig) {
  for (const key in config?.config) {
    if (config.config[key].apiKey) {
      config.config[key].apiKey = PASSWORD_REPLACEMENT
    }
  }
}

export async function publicOidc(ctx: Ctx<void, GetPublicOIDCConfigResponse>) {
  try {
    // Find the config with the most granular scope based on context
@@ -508,6 +517,9 @@ export async function destroy(ctx: UserCtx) {
  try {
    await db.remove(id, rev)
    await cache.destroy(cache.CacheKey.CHECKLIST)
    if (id === configs.generateConfigID(ConfigType.AI)) {
      await pro.quotas.removeCustomAIConfig()
    }
    ctx.body = { message: "Config deleted successfully" }
  } catch (err: any) {
    ctx.throw(err.status, err)
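stripApiKeys is what keeps provider credentials out of responses when an AI config is fetched or defaulted. A quick illustration of its effect (provider names and key values are invented for the example):

// Illustration only; the shape matches the loop in stripApiKeys above.
const aiConfig = {
  config: {
    OpenAI: { provider: "OpenAI", apiKey: "sk-invented-123" },
    Custom: { provider: "Custom", apiKey: "another-invented-key" },
  },
} as any

stripApiKeys(aiConfig)
// Both apiKey fields now hold the PASSWORD_REPLACEMENT placeholder, so the
// enriched config can be returned to the UI without exposing credentials.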
@@ -13,10 +13,6 @@ describe("Global configs controller", () => {
    await config.afterAll()
  })

  afterEach(() => {
    jest.resetAllMocks()
  })

  it("Should strip secrets when pulling AI config", async () => {
    const data = structures.configs.ai()
    await config.api.configs.saveConfig(data)