Merge branch 'master' into chore/update-submodule

Michael Drury 2024-10-25 13:14:50 +01:00 committed by GitHub
commit 516ee275ce
26 changed files with 938 additions and 375 deletions

View File

@ -27,7 +27,7 @@ export function doInUserContext(user: User, ctx: Ctx, task: any) {
hostInfo: {
ipAddress: ctx.request.ip,
// filled in by koa-useragent package
userAgent: ctx.userAgent._agent.source,
userAgent: ctx.userAgent.source,
},
}
return doInIdentityContext(userContext, task)
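
(For context, a minimal sketch of how koa-useragent exposes the parsed agent on the Koa context; the app wiring below is illustrative, only ctx.userAgent.source is taken from this diff.)

import Koa from "koa"
import { userAgent } from "koa-useragent"

const app = new Koa()
app.use(userAgent) // attaches the parsed agent as ctx.userAgent

app.use(async ctx => {
  // the raw User-Agent header is exposed directly, so the private
  // `_agent` property is no longer needed
  ctx.body = {
    source: ctx.userAgent.source,
    browser: ctx.userAgent.browser,
  }
})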

View File

@ -1,20 +1,26 @@
import { Cookie, Header } from "../constants"
import {
getCookie,
clearCookie,
openJwt,
getCookie,
isValidInternalAPIKey,
openJwt,
} from "../utils"
import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR, queryGlobalView, ViewName } from "../db"
import { getGlobalDB, doInTenant } from "../context"
import { queryGlobalView, SEPARATOR, ViewName } from "../db"
import { doInTenant, getGlobalDB } from "../context"
import { decrypt } from "../security/encryption"
import * as identity from "../context/identity"
import env from "../environment"
import { Ctx, EndpointMatcher, SessionCookie, User } from "@budibase/types"
import { InvalidAPIKeyError, ErrorCode } from "../errors"
import {
Ctx,
EndpointMatcher,
LoginMethod,
SessionCookie,
User,
} from "@budibase/types"
import { ErrorCode, InvalidAPIKeyError } from "../errors"
import tracer from "dd-trace"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
@ -26,16 +32,18 @@ interface FinaliseOpts {
internal?: boolean
publicEndpoint?: boolean
version?: string
user?: any
user?: User | { tenantId: string }
loginMethod?: LoginMethod
}
function timeMinusOneMinute() {
return new Date(Date.now() - ONE_MINUTE).toISOString()
}
function finalise(ctx: any, opts: FinaliseOpts = {}) {
function finalise(ctx: Ctx, opts: FinaliseOpts = {}) {
ctx.publicEndpoint = opts.publicEndpoint || false
ctx.isAuthenticated = opts.authenticated || false
ctx.loginMethod = opts.loginMethod
ctx.user = opts.user
ctx.internal = opts.internal || false
ctx.version = opts.version
@ -120,9 +128,10 @@ export default function (
}
const tenantId = ctx.request.headers[Header.TENANT_ID]
let authenticated = false,
user = null,
internal = false
let authenticated: boolean = false,
user: User | { tenantId: string } | undefined = undefined,
internal: boolean = false,
loginMethod: LoginMethod | undefined = undefined
if (authCookie && !apiKey) {
const sessionId = authCookie.sessionId
const userId = authCookie.userId
@ -146,6 +155,7 @@ export default function (
}
// @ts-ignore
user.csrfToken = session.csrfToken
loginMethod = LoginMethod.COOKIE
if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use
@ -170,17 +180,16 @@ export default function (
apiKey,
populateUser
)
if (valid && foundUser) {
if (valid) {
authenticated = true
loginMethod = LoginMethod.API_KEY
user = foundUser
} else if (valid) {
authenticated = true
internal = true
internal = !foundUser
}
}
if (!user && tenantId) {
user = { tenantId }
} else if (user) {
} else if (user && "password" in user) {
delete user.password
}
// be explicit
@ -204,7 +213,14 @@ export default function (
}
// isAuthenticated is a function, so use a variable to be able to check authed state
finalise(ctx, { authenticated, user, internal, version, publicEndpoint })
finalise(ctx, {
authenticated,
user,
internal,
version,
publicEndpoint,
loginMethod,
})
if (isUser(user)) {
return identity.doInUserContext(user, ctx, next)
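
(A sketch of how the new loginMethod flag set by finalise can be consumed; isApiKey mirrors the helper added in middleware/utils later in this commit, while blockBrowserAccess is purely illustrative.)

import { LoginMethod, UserCtx } from "@budibase/types"

function isApiKey(ctx: UserCtx) {
  return ctx.loginMethod === LoginMethod.API_KEY
}

// example guard: reject browser cookie sessions but let API key calls through
async function blockBrowserAccess(ctx: UserCtx, next: () => Promise<void>) {
  if (ctx.loginMethod === LoginMethod.COOKIE && !isApiKey(ctx)) {
    ctx.throw(403, "Browser access is not permitted for this endpoint")
  }
  return next()
}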

View File

@ -13,6 +13,7 @@ import SqlTableQueryBuilder from "./sqlTable"
import {
Aggregation,
AnySearchFilter,
ArrayFilter,
ArrayOperator,
BasicOperator,
BBReferenceFieldMetadata,
@ -98,6 +99,23 @@ function isSqs(table: Table): boolean {
)
}
function escapeQuotes(value: string, quoteChar = '"'): string {
return value.replace(new RegExp(quoteChar, "g"), `${quoteChar}${quoteChar}`)
}
function wrap(value: string, quoteChar = '"'): string {
return `${quoteChar}${escapeQuotes(value, quoteChar)}${quoteChar}`
}
function stringifyArray(value: any[], quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = wrap(value[i], quoteStyle)
}
}
return `[${value.join(",")}]`
}
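
(For reference, a quick illustration of what these helpers produce, assuming they are in scope; the log calls are just for demonstration.)

console.log(wrap('he said "hi"')) // "he said ""hi"""
console.log(stringifyArray(["a", 'b"c'])) // ["a","b""c"]
console.log(stringifyArray([1, "x"], "'")) // [1,'x']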
const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
[BasicOperator.EQUAL]: false,
[BasicOperator.NOT_EQUAL]: true,
@ -152,30 +170,30 @@ class InternalBuilder {
return this.query.meta.table
}
get knexClient(): Knex.Client {
return this.knex.client as Knex.Client
}
getFieldSchema(key: string): FieldSchema | undefined {
const { column } = this.splitter.run(key)
return this.table.schema[column]
}
private quoteChars(): [string, string] {
switch (this.client) {
case SqlClient.ORACLE:
case SqlClient.POSTGRES:
return ['"', '"']
case SqlClient.MS_SQL:
return ["[", "]"]
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
case SqlClient.SQL_LITE:
return ["`", "`"]
}
private supportsILike(): boolean {
return !(
this.client === SqlClient.ORACLE || this.client === SqlClient.SQL_LITE
)
}
// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
private quoteChars(): [string, string] {
const wrapped = this.knexClient.wrapIdentifier("foo", {})
return [wrapped[0], wrapped[wrapped.length - 1]]
}
// Takes a string like foo and returns a quoted string like [foo] for SQL
// Server and "foo" for Postgres.
private quote(str: string): string {
const [start, end] = this.quoteChars()
return `${start}${str}${end}`
return this.knexClient.wrapIdentifier(str, {})
}
private isQuoted(key: string): boolean {
@ -193,6 +211,30 @@ class InternalBuilder {
return key.map(part => this.quote(part)).join(".")
}
private quotedValue(value: string): string {
const formatter = this.knexClient.formatter(this.knexClient.queryBuilder())
return formatter.wrap(value, false)
}
private rawQuotedValue(value: string): Knex.Raw {
return this.knex.raw(this.quotedValue(value))
}
// Unfortunately we cannot rely on knex's identifier escaping because it trims
// the identifier string before escaping it, which breaks cases for us where
// columns that start or end with a space aren't referenced correctly anymore.
//
// So whenever you're using an identifier binding in knex, e.g. knex.raw("??
// as ?", ["foo", "bar"]), you need to make sure you call this:
//
// knex.raw("?? as ?", [this.quotedIdentifier("foo"), "bar"])
//
// Issue we filed against knex about this:
// https://github.com/knex/knex/issues/6143
private rawQuotedIdentifier(key: string): Knex.Raw {
return this.knex.raw(this.quotedIdentifier(key))
}
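
(A quick sketch of the pattern the comment above prescribes, written as if inside InternalBuilder; the column and alias names are made up.)

// naive: knex trims " padded column " before escaping, losing the spaces
// this.knex.raw("?? as ?", [" padded column ", "alias"])

// instead, pre-quote the identifier and pass it through as a raw binding
this.knex.raw("?? as ?", [this.rawQuotedIdentifier(" padded column "), "alias"])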
// Turns an identifier like a.b.c or `a`.`b`.`c` into ["a", "b", "c"]
private splitIdentifier(key: string): string[] {
const [start, end] = this.quoteChars()
@ -236,7 +278,7 @@ class InternalBuilder {
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)]
return [this.knex.raw("??", [`${alias}.*`])]
}
// get just the fields for this table
return resource.fields
@ -258,30 +300,40 @@ class InternalBuilder {
const columnSchema = schema[column]
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
return this.knex.raw(
`${this.quotedIdentifier(
[table, column].join(".")
)}::money::numeric as ${this.quote(field)}`
)
// TODO: figure out how to express this safely without string
// interpolation.
return this.knex.raw(`??::money::numeric as "${field}"`, [
this.rawQuotedIdentifier([table, column].join(".")),
field,
])
}
if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
// Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format
return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
// TODO: figure out how to express this safely without string
// interpolation.
return this.knex.raw(`CONVERT(varchar, ??, 108) as "${field}"`, [
this.rawQuotedIdentifier(field),
])
}
const quoted = table
? `${this.quote(table)}.${this.quote(column)}`
: this.quote(field)
return this.knex.raw(quoted)
if (table) {
return this.rawQuotedIdentifier(`${table}.${column}`)
} else {
return this.rawQuotedIdentifier(field)
}
})
}
// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
// so when we use them we need to wrap them in to_char(). This function
// converts a field name to the appropriate identifier.
private convertClobs(field: string, opts?: { forSelect?: boolean }): string {
private convertClobs(
field: string,
opts?: { forSelect?: boolean }
): Knex.Raw {
if (this.client !== SqlClient.ORACLE) {
throw new Error(
"you've called convertClobs on a DB that's not Oracle, this is a mistake"
@ -290,7 +342,7 @@ class InternalBuilder {
const parts = this.splitIdentifier(field)
const col = parts.pop()!
const schema = this.table.schema[col]
let identifier = this.quotedIdentifier(field)
let identifier = this.rawQuotedIdentifier(field)
if (
schema.type === FieldType.STRING ||
@ -301,9 +353,12 @@ class InternalBuilder {
schema.type === FieldType.BARCODEQR
) {
if (opts?.forSelect) {
identifier = `to_char(${identifier}) as ${this.quotedIdentifier(col)}`
identifier = this.knex.raw("to_char(??) as ??", [
identifier,
this.rawQuotedIdentifier(col),
])
} else {
identifier = `to_char(${identifier})`
identifier = this.knex.raw("to_char(??)", [identifier])
}
}
return identifier
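
(For context on the generated SQL, the Oracle query builder tests later in this commit expect an equality filter on a string column to compile to the following, with the to_char() wrapping applied by convertClobs.)

(to_char("test"."name") is not null and to_char("test"."name") = :1)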
@ -427,7 +482,6 @@ class InternalBuilder {
filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder {
const mainKnex = this.knex
const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName
@ -449,8 +503,8 @@ class InternalBuilder {
relationship.to &&
relationship.tableName
) {
const joinTable = mainKnex
.select(mainKnex.raw(1))
const joinTable = this.knex
.select(this.knex.raw(1))
.from({ [toAlias]: relatedTableName })
let subQuery = joinTable.clone()
const manyToMany = validateManyToMany(relationship)
@ -485,9 +539,7 @@ class InternalBuilder {
.where(
`${throughAlias}.${manyToMany.from}`,
"=",
mainKnex.raw(
this.quotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
this.rawQuotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
// in SQS the same junction table is used for different many-to-many relationships between the
// same two tables; this is needed to avoid rows ending up in all columns
@ -516,7 +568,7 @@ class InternalBuilder {
subQuery = subQuery.where(
toKey,
"=",
mainKnex.raw(this.quotedIdentifier(foreignKey))
this.rawQuotedIdentifier(foreignKey)
)
query = query.where(q => {
@ -546,7 +598,7 @@ class InternalBuilder {
filters = this.parseFilters({ ...filters })
const aliases = this.query.tableAliases
// if allOr is specified in filters, then every condition is an OR
const allOr = filters.allOr
const shouldOr = filters.allOr
const isSqlite = this.client === SqlClient.SQL_LITE
const tableName = isSqlite ? this.table._id! : this.table.name
@ -610,7 +662,7 @@ class InternalBuilder {
value
)
} else if (shouldProcessRelationship) {
if (allOr) {
if (shouldOr) {
query = query.or
}
query = builder.addRelationshipForFilter(
@ -626,85 +678,102 @@ class InternalBuilder {
}
const like = (q: Knex.QueryBuilder, key: string, value: any) => {
const fuzzyOr = filters?.fuzzyOr
const fnc = fuzzyOr || allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
return q[fnc](key, "ilike", `%${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
if (filters?.fuzzyOr || shouldOr) {
q = q.or
}
if (
this.client === SqlClient.ORACLE ||
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`LOWER(??) LIKE ?`, [
this.rawQuotedIdentifier(key),
`%${value.toLowerCase()}%`,
])
}
return q.whereILike(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key),
this.knex.raw("?", [`%${value}%`])
)
}
const contains = (mode: AnySearchFilter, any: boolean = false) => {
const rawFnc = allOr ? "orWhereRaw" : "whereRaw"
const not = mode === filters?.notContains ? "NOT " : ""
function stringifyArray(value: Array<any>, quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `${quoteStyle}${value[i]}${quoteStyle}`
}
const contains = (mode: ArrayFilter, any = false) => {
function addModifiers<T extends {}, Q>(q: Knex.QueryBuilder<T, Q>) {
if (shouldOr || mode === filters?.containsAny) {
q = q.or
}
return `[${value.join(",")}]`
if (mode === filters?.notContains) {
q = q.not
}
return q
}
if (this.client === SqlClient.POSTGRES) {
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
const wrap = any ? "" : "'"
const op = any ? "\\?| array" : "@>"
const fieldNames = key.split(/\./g)
const table = fieldNames[0]
const col = fieldNames[1]
return q[rawFnc](
`${not}COALESCE("${table}"."${col}"::jsonb ${op} ${wrap}${stringifyArray(
value,
any ? "'" : '"'
)}${wrap}, FALSE)`
)
q = addModifiers(q)
if (any) {
return q.whereRaw(`COALESCE(??::jsonb \\?| array??, FALSE)`, [
this.rawQuotedIdentifier(key),
this.knex.raw(stringifyArray(value, "'")),
])
} else {
return q.whereRaw(`COALESCE(??::jsonb @> '??', FALSE)`, [
this.rawQuotedIdentifier(key),
this.knex.raw(stringifyArray(value)),
])
}
})
} else if (
this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB
) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
return q[rawFnc](
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray(
value
)}'), FALSE)`
)
return addModifiers(q).whereRaw(`COALESCE(?(??, ?), FALSE)`, [
this.knex.raw(any ? "JSON_OVERLAPS" : "JSON_CONTAINS"),
this.rawQuotedIdentifier(key),
this.knex.raw(wrap(stringifyArray(value))),
])
})
} else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
let statement = ""
const identifier = this.quotedIdentifier(key)
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `%"${value[i].toLowerCase()}"%`
} else {
value[i] = `%${value[i]}%`
}
statement += `${
statement ? andOr : ""
}COALESCE(LOWER(${identifier}), '') LIKE ?`
}
if (statement === "") {
if (value.length === 0) {
return q
}
if (not) {
return q[rawFnc](
`(NOT (${statement}) OR ${identifier} IS NULL)`,
value
)
} else {
return q[rawFnc](statement, value)
}
q = q.where(subQuery => {
if (mode === filters?.notContains) {
subQuery = subQuery.not
}
subQuery = subQuery.where(subSubQuery => {
for (const elem of value) {
if (mode === filters?.containsAny) {
subSubQuery = subSubQuery.or
} else {
subSubQuery = subSubQuery.and
}
const lower =
typeof elem === "string" ? `"${elem.toLowerCase()}"` : elem
subSubQuery = subSubQuery.whereLike(
// @ts-expect-error knex types are wrong, raw is fine here
this.knex.raw(`COALESCE(LOWER(??), '')`, [
this.rawQuotedIdentifier(key),
]),
`%${lower}%`
)
}
})
if (mode === filters?.notContains) {
subQuery = subQuery.or.whereNull(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key)
)
}
return subQuery
})
return q
})
}
}
@ -730,45 +799,46 @@ class InternalBuilder {
}
if (filters.oneOf) {
const fnc = allOr ? "orWhereIn" : "whereIn"
iterate(
filters.oneOf,
ArrayOperator.ONE_OF,
(q, key: string, array) => {
if (this.client === SqlClient.ORACLE) {
key = this.convertClobs(key)
array = Array.isArray(array) ? array : [array]
const binding = new Array(array.length).fill("?").join(",")
return q.whereRaw(`${key} IN (${binding})`, array)
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.convertClobs(key)
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
},
(q, key: string[], array) => {
if (this.client === SqlClient.ORACLE) {
const keyStr = `(${key.map(k => this.convertClobs(k)).join(",")})`
const binding = `(${array
.map((a: any) => `(${new Array(a.length).fill("?").join(",")})`)
.join(",")})`
return q.whereRaw(`${keyStr} IN ${binding}`, array.flat())
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = key.map(k => this.convertClobs(k))
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
}
)
}
if (filters.string) {
iterate(filters.string, BasicOperator.STRING, (q, key, value) => {
const fnc = allOr ? "orWhere" : "where"
// postgres supports ilike, nothing else does
if (this.client === SqlClient.POSTGRES) {
return q[fnc](key, "ilike", `${value}%`)
} else {
const rawFnc = `${fnc}Raw`
// @ts-ignore
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
if (shouldOr) {
q = q.or
}
if (
this.client === SqlClient.ORACLE ||
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`LOWER(??) LIKE ?`, [
this.rawQuotedIdentifier(key),
`${value.toLowerCase()}%`,
])
} else {
return q.whereILike(key, `${value}%`)
}
})
}
@ -795,67 +865,59 @@ class InternalBuilder {
const schema = this.getFieldSchema(key)
let rawKey: string | Knex.Raw = key
let high = value.high
let low = value.low
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.knex.raw(this.convertClobs(key))
rawKey = this.convertClobs(key)
} else if (
this.client === SqlClient.SQL_LITE &&
schema?.type === FieldType.BIGINT
) {
rawKey = this.knex.raw("CAST(?? AS INTEGER)", [
this.rawQuotedIdentifier(key),
])
high = this.knex.raw("CAST(? AS INTEGER)", [value.high])
low = this.knex.raw("CAST(? AS INTEGER)", [value.low])
}
if (shouldOr) {
q = q.or
}
if (lowValid && highValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[value.low, value.high]
)
} else {
const fnc = allOr ? "orWhereBetween" : "whereBetween"
return q[fnc](key, [value.low, value.high])
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.whereBetween(rawKey, [low, high])
} else if (lowValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
value.low,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, ">=", value.low)
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.where(rawKey, ">=", low)
} else if (highValid) {
if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
value.high,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, "<=", value.high)
}
// @ts-expect-error knex types are wrong, raw is fine here
return q.where(rawKey, "<=", high)
}
return q
})
}
if (filters.equal) {
iterate(filters.equal, BasicOperator.EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) {
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`,
[value]
)
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q[fnc](`(${identifier} IS NOT NULL AND ${identifier} = ?)`, [
return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 1`, [
this.rawQuotedIdentifier(key),
value,
])
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q.where(subq =>
// @ts-expect-error knex types are wrong, raw is fine here
subq.whereNotNull(identifier).andWhere(identifier, value)
)
} else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`, [
return q.whereRaw(`COALESCE(?? = ?, FALSE)`, [
this.rawQuotedIdentifier(key),
value,
])
}
@ -863,20 +925,30 @@ class InternalBuilder {
}
if (filters.notEqual) {
iterate(filters.notEqual, BasicOperator.NOT_EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) {
return q[fnc](
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`,
[value]
)
return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 0`, [
this.rawQuotedIdentifier(key),
value,
])
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q[fnc](
`(${identifier} IS NOT NULL AND ${identifier} != ?) OR ${identifier} IS NULL`,
[value]
return (
q
.where(subq =>
subq.not
// @ts-expect-error knex types are wrong, raw is fine here
.whereNull(identifier)
.and.where(identifier, "!=", value)
)
// @ts-expect-error knex types are wrong, raw is fine here
.or.whereNull(identifier)
)
} else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`, [
return q.whereRaw(`COALESCE(?? != ?, TRUE)`, [
this.rawQuotedIdentifier(key),
value,
])
}
@ -884,14 +956,18 @@ class InternalBuilder {
}
if (filters.empty) {
iterate(filters.empty, BasicOperator.EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNull" : "whereNull"
return q[fnc](key)
if (shouldOr) {
q = q.or
}
return q.whereNull(key)
})
}
if (filters.notEmpty) {
iterate(filters.notEmpty, BasicOperator.NOT_EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull"
return q[fnc](key)
if (shouldOr) {
q = q.or
}
return q.whereNotNull(key)
})
}
if (filters.contains) {
@ -976,9 +1052,7 @@ class InternalBuilder {
const selectFields = qualifiedFields.map(field =>
this.convertClobs(field, { forSelect: true })
)
query = query
.groupByRaw(groupByFields.join(", "))
.select(this.knex.raw(selectFields.join(", ")))
query = query.groupBy(groupByFields).select(selectFields)
} else {
query = query.groupBy(qualifiedFields).select(qualifiedFields)
}
@ -990,11 +1064,10 @@ class InternalBuilder {
if (this.client === SqlClient.ORACLE) {
const field = this.convertClobs(`${tableName}.${aggregation.field}`)
query = query.select(
this.knex.raw(
`COUNT(DISTINCT ${field}) as ${this.quotedIdentifier(
aggregation.name
)}`
)
this.knex.raw(`COUNT(DISTINCT ??) as ??`, [
field,
aggregation.name,
])
)
} else {
query = query.countDistinct(
@ -1059,9 +1132,11 @@ class InternalBuilder {
} else {
let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw(
`${this.convertClobs(composite)} ${direction} nulls ${nulls}`
)
query = query.orderByRaw(`?? ?? nulls ??`, [
this.convertClobs(composite),
this.knex.raw(direction),
this.knex.raw(nulls as string),
])
} else {
query = query.orderBy(composite, direction, nulls)
}
@ -1091,17 +1166,22 @@ class InternalBuilder {
private buildJsonField(field: string): string {
const parts = field.split(".")
let tableField: string, unaliased: string
let unaliased: string
let tableField: string
if (parts.length > 1) {
const alias = parts.shift()!
unaliased = parts.join(".")
tableField = `${this.quote(alias)}.${this.quote(unaliased)}`
tableField = `${alias}.${unaliased}`
} else {
unaliased = parts.join(".")
tableField = this.quote(unaliased)
tableField = unaliased
}
const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return `'${unaliased}'${separator}${tableField}`
return this.knex
.raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
.toString()
}
maxFunctionParameters() {
@ -1197,13 +1277,13 @@ class InternalBuilder {
subQuery = subQuery.where(
correlatedTo,
"=",
knex.raw(this.quotedIdentifier(correlatedFrom))
this.rawQuotedIdentifier(correlatedFrom)
)
const standardWrap = (select: string): Knex.QueryBuilder => {
const standardWrap = (select: Knex.Raw): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
// @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({
return knex.select(select).from({
[toAlias]: subQuery,
})
}
@ -1213,12 +1293,12 @@ class InternalBuilder {
// need to check that the junction table document points to the right column; this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap(
`json_group_array(json_object(${fieldList}))`
this.knex.raw(`json_group_array(json_object(${fieldList}))`)
)
break
case SqlClient.POSTGRES:
wrapperQuery = standardWrap(
`json_agg(json_build_object(${fieldList}))`
this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
)
break
case SqlClient.MARIADB:
@ -1232,21 +1312,25 @@ class InternalBuilder {
case SqlClient.MY_SQL:
case SqlClient.ORACLE:
wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))`
this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
)
break
case SqlClient.MS_SQL:
case SqlClient.MS_SQL: {
const comparatorQuery = knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})
wrapperQuery = knex.raw(
`(SELECT ${this.quote(toAlias)} = (${knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})} FOR JSON PATH))`
`(SELECT ?? = (${comparatorQuery} FOR JSON PATH))`,
[this.rawQuotedIdentifier(toAlias)]
)
break
}
default:
throw new Error(`JSON relationships not implemented for ${sqlClient}`)
}

View File

@ -20,19 +20,15 @@ const options = {
{
url: "https://budibase.app/api/public/v1",
description: "Budibase Cloud API",
},
{
url: "{protocol}://{hostname}/api/public/v1",
description: "Budibase self hosted API",
variables: {
protocol: {
default: "http",
description:
"Whether HTTP or HTTPS should be used to communicate with your Budibase instance.",
apiKey: {
default: "<user API key>",
description: "The API key of the user to assume for API call.",
},
hostname: {
default: "localhost:10000",
description: "The URL of your Budibase instance.",
appId: {
default: "<App ID>",
description:
"The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development).",
},
},
},
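
(For context, a minimal sketch of how these variables are typically supplied when calling the public API; the header names match the test utilities later in this commit, while the endpoint, table ID and values are illustrative.)

const response = await fetch(
  "https://budibase.app/api/public/v1/tables/ta_users/rows/search",
  {
    method: "POST",
    headers: {
      "x-budibase-api-key": "<user API key>",
      "x-budibase-app-id": "app_1234567890",
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ query: {} }),
  }
)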

View File

@ -8,19 +8,15 @@
"servers": [
{
"url": "https://budibase.app/api/public/v1",
"description": "Budibase Cloud API"
},
{
"url": "{protocol}://{hostname}/api/public/v1",
"description": "Budibase self hosted API",
"description": "Budibase Cloud API",
"variables": {
"protocol": {
"default": "http",
"description": "Whether HTTP or HTTPS should be used to communicate with your Budibase instance."
"apiKey": {
"default": "<user API key>",
"description": "The API key of the user to assume for API call."
},
"hostname": {
"default": "localhost:10000",
"description": "The URL of your Budibase instance."
"appId": {
"default": "<App ID>",
"description": "The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development)."
}
}
}
@ -51,6 +47,7 @@
"required": true,
"description": "The ID of the app which this request is targeting.",
"schema": {
"default": "{{ appId }}",
"type": "string"
}
},
@ -60,6 +57,7 @@
"required": true,
"description": "The ID of the app which this request is targeting.",
"schema": {
"default": "{{ appId }}",
"type": "string"
}
},
@ -833,7 +831,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}
@ -857,6 +856,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",
@ -1042,7 +1042,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}
@ -1066,6 +1067,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",
@ -1262,7 +1264,8 @@
"type": "string",
"enum": [
"static",
"dynamic"
"dynamic",
"ai"
],
"description": "Defines whether this is a static or dynamic formula."
}
@ -1286,6 +1289,7 @@
"link",
"formula",
"auto",
"ai",
"json",
"internal",
"barcodeqr",

View File

@ -6,16 +6,14 @@ info:
servers:
- url: https://budibase.app/api/public/v1
description: Budibase Cloud API
- url: "{protocol}://{hostname}/api/public/v1"
description: Budibase self hosted API
variables:
protocol:
default: http
description: Whether HTTP or HTTPS should be used to communicate with your
Budibase instance.
hostname:
default: localhost:10000
description: The URL of your Budibase instance.
apiKey:
default: <user API key>
description: The API key of the user to assume for API calls.
appId:
default: <App ID>
description: The ID of the app that the calls will be executed in the context of;
this should start with app_ (production) or app_dev (development).
components:
parameters:
tableId:
@ -38,6 +36,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
type: string
appIdUrl:
in: path
@ -45,6 +44,7 @@ components:
required: true
description: The ID of the app which this request is targeting.
schema:
default: "{{ appId }}"
type: string
queryId:
in: path
@ -761,6 +761,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -779,6 +780,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr
@ -929,6 +931,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -947,6 +950,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr
@ -1104,6 +1108,7 @@ components:
enum:
- static
- dynamic
- ai
description: Defines whether this is a static or dynamic formula.
- type: object
properties:
@ -1122,6 +1127,7 @@ components:
- link
- formula
- auto
- ai
- json
- internal
- barcodeqr

View File

@ -24,6 +24,7 @@ export const appId = {
required: true,
description: "The ID of the app which this request is targeting.",
schema: {
default: "{{ appId }}",
type: "string",
},
}
@ -34,6 +35,7 @@ export const appIdUrl = {
required: true,
description: "The ID of the app which this request is targeting.",
schema: {
default: "{{ appId }}",
type: "string",
},
}

View File

@ -0,0 +1,110 @@
import { User, Table, SearchFilters, Row } from "@budibase/types"
import { HttpMethod, MakeRequestResponse, generateMakeRequest } from "./utils"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Expectations } from "../../../../tests/utilities/api/base"
type RequestOpts = { internal?: boolean; appId?: string }
type PublicAPIExpectations = Omit<Expectations, "headers" | "headersNotPresent">
export class PublicAPIRequest {
private makeRequest: MakeRequestResponse | undefined
private appId: string | undefined
private _tables: PublicTableAPI | undefined
private _rows: PublicRowAPI | undefined
private _apiKey: string | undefined
async init(config: TestConfiguration, user: User, opts?: RequestOpts) {
this._apiKey = await config.generateApiKey(user._id)
this.makeRequest = generateMakeRequest(this.apiKey, opts)
this.appId = opts?.appId
this._tables = new PublicTableAPI(this)
this._rows = new PublicRowAPI(this)
return this
}
opts(opts: RequestOpts) {
if (opts.appId) {
this.appId = opts.appId
}
this.makeRequest = generateMakeRequest(this.apiKey, opts)
}
async send(
method: HttpMethod,
endpoint: string,
body?: any,
expectations?: PublicAPIExpectations
) {
if (!this.makeRequest) {
throw new Error("Init has not been called")
}
const res = await this.makeRequest(method, endpoint, body, this.appId)
if (expectations?.status) {
expect(res.status).toEqual(expectations.status)
}
if (expectations?.body) {
expect(res.body).toEqual(expectations?.body)
}
return res.body
}
get apiKey(): string {
if (!this._apiKey) {
throw new Error("Init has not been called")
}
return this._apiKey
}
get tables(): PublicTableAPI {
if (!this._tables) {
throw new Error("Init has not been called")
}
return this._tables
}
get rows(): PublicRowAPI {
if (!this._rows) {
throw new Error("Init has not been called")
}
return this._rows
}
}
export class PublicTableAPI {
request: PublicAPIRequest
constructor(request: PublicAPIRequest) {
this.request = request
}
async create(
table: Table,
expectations?: PublicAPIExpectations
): Promise<{ data: Table }> {
return this.request.send("post", "/tables", table, expectations)
}
}
export class PublicRowAPI {
request: PublicAPIRequest
constructor(request: PublicAPIRequest) {
this.request = request
}
async search(
tableId: string,
query: SearchFilters,
expectations?: PublicAPIExpectations
): Promise<{ data: Row[] }> {
return this.request.send(
"post",
`/tables/${tableId}/rows/search`,
{
query,
},
expectations
)
}
}

View File

@ -1,4 +1,4 @@
const setup = require("../../tests/utilities")
import * as setup from "../../tests/utilities"
describe("/metrics", () => {
let request = setup.getRequest()

View File

@ -0,0 +1,71 @@
import * as setup from "../../tests/utilities"
import { roles } from "@budibase/backend-core"
import { basicTable } from "../../../../tests/utilities/structures"
import { Table, User } from "@budibase/types"
import { PublicAPIRequest } from "./Request"
describe("check public API security", () => {
const config = setup.getConfig()
let builderRequest: PublicAPIRequest,
appUserRequest: PublicAPIRequest,
table: Table,
appUser: User
beforeAll(async () => {
await config.init()
const builderUser = await config.globalUser()
appUser = await config.globalUser({
builder: { global: false },
roles: {
[config.getProdAppId()]: roles.BUILTIN_ROLE_IDS.BASIC,
},
})
builderRequest = await new PublicAPIRequest().init(config, builderUser)
appUserRequest = await new PublicAPIRequest().init(config, appUser)
table = (await builderRequest.tables.create(basicTable())).data
})
it("should allow with builder API key", async () => {
const res = await builderRequest.rows.search(
table._id!,
{},
{
status: 200,
}
)
expect(res.data.length).toEqual(0)
})
it("should 403 when from browser, but API key", async () => {
await appUserRequest.rows.search(
table._id!,
{},
{
status: 403,
}
)
})
it("should re-direct when using cookie", async () => {
const headers = await config.login({
userId: appUser._id!,
builder: false,
prodApp: false,
})
await config.withHeaders(
{
...headers,
"User-Agent": config.browserUserAgent(),
},
async () => {
await config.api.row.search(
table._id!,
{ query: {} },
{
status: 302,
}
)
}
)
})
})

View File

@ -21,17 +21,19 @@ export type MakeRequestWithFormDataResponse = (
function base(
apiKey: string,
endpoint: string,
intAppId: string | null,
isInternal: boolean
opts?: {
intAppId?: string
internal?: boolean
}
) {
const extraHeaders: any = {
"x-budibase-api-key": apiKey,
}
if (intAppId) {
extraHeaders["x-budibase-app-id"] = intAppId
if (opts?.intAppId) {
extraHeaders["x-budibase-app-id"] = opts.intAppId
}
const url = isInternal
const url = opts?.internal
? endpoint
: checkSlashesInUrl(`/api/public/v1/${endpoint}`)
return { headers: extraHeaders, url }
@ -39,7 +41,7 @@ function base(
export function generateMakeRequest(
apiKey: string,
isInternal = false
opts?: { internal?: boolean }
): MakeRequestResponse {
const request = setup.getRequest()!
const config = setup.getConfig()!
@ -47,9 +49,12 @@ export function generateMakeRequest(
method: HttpMethod,
endpoint: string,
body?: any,
intAppId: string | null = config.getAppId()
intAppId: string | undefined = config.getAppId()
) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal)
const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
if (body && typeof body !== "string") {
headers["Content-Type"] = "application/json"
}
const req = request[method](url).set(config.defaultHeaders(headers))
if (body) {
req.send(body)
@ -62,7 +67,7 @@ export function generateMakeRequest(
export function generateMakeRequestWithFormData(
apiKey: string,
isInternal = false
opts?: { internal?: boolean; browser?: boolean }
): MakeRequestWithFormDataResponse {
const request = setup.getRequest()!
const config = setup.getConfig()!
@ -70,9 +75,9 @@ export function generateMakeRequestWithFormData(
method: HttpMethod,
endpoint: string,
fields: Record<string, string | { path: string }>,
intAppId: string | null = config.getAppId()
intAppId: string | undefined = config.getAppId()
) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal)
const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
const req = request[method](url).set(config.defaultHeaders(headers))
for (let [field, value] of Object.entries(fields)) {
if (typeof value === "string") {

View File

@ -29,6 +29,7 @@ describe.each(
const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER
const isPostgres = dbName === DatabaseName.POSTGRES
const mainTableName = "test_table"
let rawDatasource: Datasource
let datasource: Datasource
@ -71,15 +72,15 @@ describe.each(
client = await knexClient(rawDatasource)
await client.schema.dropTableIfExists("test_table")
await client.schema.createTable("test_table", table => {
await client.schema.dropTableIfExists(mainTableName)
await client.schema.createTable(mainTableName, table => {
table.increments("id").primary()
table.string("name")
table.timestamp("birthday")
table.integer("number")
})
await client("test_table").insert([
await client(mainTableName).insert([
{ name: "one" },
{ name: "two" },
{ name: "three" },
@ -105,7 +106,7 @@ describe.each(
const query = await createQuery({
name: "New Query",
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
})
@ -114,7 +115,7 @@ describe.each(
name: "New Query",
parameters: [],
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
schema: {},
queryVerb: "read",
@ -133,7 +134,7 @@ describe.each(
it("should be able to update a query", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
})
@ -143,7 +144,7 @@ describe.each(
...query,
name: "Updated Query",
fields: {
sql: client("test_table").where({ id: 1 }).toString(),
sql: client(mainTableName).where({ id: 1 }).toString(),
},
})
@ -152,7 +153,7 @@ describe.each(
name: "Updated Query",
parameters: [],
fields: {
sql: client("test_table").where({ id: 1 }).toString(),
sql: client(mainTableName).where({ id: 1 }).toString(),
},
schema: {},
queryVerb: "read",
@ -169,7 +170,7 @@ describe.each(
it("should be able to delete a query", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
})
@ -188,7 +189,7 @@ describe.each(
it("should be able to list queries", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
})
@ -199,7 +200,7 @@ describe.each(
it("should strip sensitive fields for prod apps", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").select("*").toString(),
sql: client(mainTableName).select("*").toString(),
},
})
@ -217,7 +218,7 @@ describe.each(
const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.select([
"*",
client.raw(
@ -245,7 +246,7 @@ describe.each(
datasourceId: datasource._id!,
queryVerb: "read",
fields: {
sql: client("test_table").where({ id: 1 }).toString(),
sql: client(mainTableName).where({ id: 1 }).toString(),
},
parameters: [],
transformer: "return data",
@ -391,7 +392,7 @@ describe.each(
it("should work with dynamic variables", async () => {
const basedOnQuery = await createQuery({
fields: {
sql: client("test_table").select("name").where({ id: 1 }).toString(),
sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
},
})
@ -440,7 +441,7 @@ describe.each(
it("should handle the dynamic base query being deleted", async () => {
const basedOnQuery = await createQuery({
fields: {
sql: client("test_table").select("name").where({ id: 1 }).toString(),
sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
},
})
@ -494,7 +495,7 @@ describe.each(
it("should be able to insert with bindings", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
},
parameters: [
{
@ -517,7 +518,7 @@ describe.each(
},
])
const rows = await client("test_table").where({ name: "baz" }).select()
const rows = await client(mainTableName).where({ name: "baz" }).select()
expect(rows).toHaveLength(1)
for (const row of rows) {
expect(row).toMatchObject({ name: "baz" })
@ -527,7 +528,7 @@ describe.each(
it("should not allow handlebars as parameters", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
},
parameters: [
{
@ -563,7 +564,7 @@ describe.each(
const date = new Date(datetimeStr)
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.insert({
name: "foo",
birthday: client.raw("{{ birthday }}"),
@ -585,7 +586,7 @@ describe.each(
expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table")
const rows = await client(mainTableName)
.where({ birthday: datetimeStr })
.select()
expect(rows).toHaveLength(1)
@ -601,7 +602,7 @@ describe.each(
async notDateStr => {
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.insert({ name: client.raw("{{ name }}") })
.toString(),
},
@ -622,7 +623,7 @@ describe.each(
expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table")
const rows = await client(mainTableName)
.where({ name: notDateStr })
.select()
expect(rows).toHaveLength(1)
@ -634,7 +635,7 @@ describe.each(
it("should execute a query", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").select("*").orderBy("id").toString(),
sql: client(mainTableName).select("*").orderBy("id").toString(),
},
})
@ -677,7 +678,7 @@ describe.each(
it("should be able to transform a query", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").where({ id: 1 }).select("*").toString(),
sql: client(mainTableName).where({ id: 1 }).select("*").toString(),
},
transformer: `
data[0].id = data[0].id + 1;
@ -700,7 +701,7 @@ describe.each(
it("should coerce numeric bindings", async () => {
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.where({ id: client.raw("{{ id }}") })
.select("*")
.toString(),
@ -734,7 +735,7 @@ describe.each(
it("should be able to update rows", async () => {
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.update({ name: client.raw("{{ name }}") })
.where({ id: client.raw("{{ id }}") })
.toString(),
@ -759,7 +760,7 @@ describe.each(
},
})
const rows = await client("test_table").where({ id: 1 }).select()
const rows = await client(mainTableName).where({ id: 1 }).select()
expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null },
])
@ -768,7 +769,7 @@ describe.each(
it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.update({ name: "updated" })
.where({ id: 100 })
.toString(),
@ -778,7 +779,7 @@ describe.each(
await config.api.query.execute(query._id!)
const rows = await client("test_table").select()
const rows = await client(mainTableName).select()
for (const row of rows) {
expect(row.name).not.toEqual("updated")
}
@ -787,14 +788,14 @@ describe.each(
it("should be able to execute a delete that deletes no rows", async () => {
const query = await createQuery({
fields: {
sql: client("test_table").where({ id: 100 }).delete().toString(),
sql: client(mainTableName).where({ id: 100 }).delete().toString(),
},
queryVerb: "delete",
})
await config.api.query.execute(query._id!)
const rows = await client("test_table").select()
const rows = await client(mainTableName).select()
expect(rows).toHaveLength(5)
})
})
@ -803,7 +804,7 @@ describe.each(
it("should be able to delete rows", async () => {
const query = await createQuery({
fields: {
sql: client("test_table")
sql: client(mainTableName)
.where({ id: client.raw("{{ id }}") })
.delete()
.toString(),
@ -823,7 +824,7 @@ describe.each(
},
})
const rows = await client("test_table").where({ id: 1 }).select()
const rows = await client(mainTableName).where({ id: 1 }).select()
expect(rows).toHaveLength(0)
})
})
@ -831,7 +832,7 @@ describe.each(
describe("query through datasource", () => {
it("should be able to query the datasource", async () => {
const entityId = "test_table"
const entityId = mainTableName
await config.api.datasource.update({
...datasource,
entities: {
@ -876,7 +877,7 @@ describe.each(
beforeAll(async () => {
queryParams = {
fields: {
sql: client("test_table")
sql: client(mainTableName)
.insert({
name: client.raw("{{ bindingName }}"),
number: client.raw("{{ bindingNumber }}"),
@ -929,4 +930,34 @@ describe.each(
})
})
})
describe("edge cases", () => {
it("should find rows with a binding containing a slash", async () => {
const slashValue = "1/10"
await client(mainTableName).insert([{ name: slashValue }])
const query = await createQuery({
fields: {
sql: client(mainTableName)
.select("*")
.where("name", "=", client.raw("{{ bindingName }}"))
.toString(),
},
parameters: [
{
name: "bindingName",
default: "",
},
],
queryVerb: "read",
})
const results = await config.api.query.execute(query._id!, {
parameters: {
bindingName: slashValue,
},
})
expect(results).toBeDefined()
expect(results.data.length).toEqual(1)
})
})
})

View File

@ -1,9 +1,10 @@
const setup = require("./utilities")
const { basicScreen, powerScreen } = setup.structures
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions")
const { roles } = require("@budibase/backend-core")
const { BUILTIN_ROLE_IDS } = roles
import * as setup from "./utilities"
import { checkBuilderEndpoint, runInProd } from "./utilities/TestFunctions"
import { roles } from "@budibase/backend-core"
import { Screen } from "@budibase/types"
const { BUILTIN_ROLE_IDS } = roles
const { basicScreen, powerScreen } = setup.structures
const route = "/test"
// there are checks which are disabled in test env,
@ -12,7 +13,7 @@ const route = "/test"
describe("/routing", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let basic, power
let basic: Screen, power: Screen
afterAll(setup.afterAll)
@ -25,26 +26,40 @@ describe("/routing", () => {
describe("fetch", () => {
it("prevents a public user from accessing development app", async () => {
await runInProd(() => {
return request
.get(`/api/routing/client`)
.set(config.publicHeaders({ prodApp: false }))
.expect(302)
})
await config.withHeaders(
{
"User-Agent": config.browserUserAgent(),
},
async () => {
await runInProd(() => {
return request
.get(`/api/routing/client`)
.set(config.publicHeaders({ prodApp: false }))
.expect(302)
})
}
)
})
it("prevents a non builder from accessing development app", async () => {
await runInProd(async () => {
return request
.get(`/api/routing/client`)
.set(
await config.roleHeaders({
roleId: BUILTIN_ROLE_IDS.BASIC,
prodApp: false,
})
)
.expect(302)
})
await config.withHeaders(
{
"User-Agent": config.browserUserAgent(),
},
async () => {
await runInProd(async () => {
return request
.get(`/api/routing/client`)
.set(
await config.roleHeaders({
roleId: BUILTIN_ROLE_IDS.BASIC,
prodApp: false,
})
)
.expect(302)
})
}
)
})
it("returns the correct routing for basic user", async () => {
const res = await request

View File

@ -7,6 +7,7 @@ import {
import {
context,
db as dbCore,
docIds,
features,
MAX_VALID_DATE,
MIN_VALID_DATE,
@ -61,6 +62,7 @@ describe.each([
const isLucene = name === "lucene"
const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory
const isOracle = name === DatabaseName.ORACLE
const isSql = !isInMemory && !isLucene
const config = setup.getConfig()
@ -129,14 +131,14 @@ describe.each([
}
})
async function createTable(schema: TableSchema) {
async function createTable(schema?: TableSchema) {
const table = await config.api.table.save(
tableForDatasource(datasource, { schema })
)
return table._id!
}
async function createView(tableId: string, schema: ViewV2Schema) {
async function createView(tableId: string, schema?: ViewV2Schema) {
const view = await config.api.viewV2.create({
tableId: tableId,
name: generator.guid(),
@ -153,22 +155,51 @@ describe.each([
rows = await config.api.row.fetch(tableOrViewId)
}
async function getTable(tableOrViewId: string): Promise<Table> {
if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
return await config.api.table.get(view.tableId)
} else {
return await config.api.table.get(tableOrViewId)
}
}
async function assertTableExists(nameOrTable: string | Table) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
expect(await client!.schema.hasTable(name)).toBeTrue()
}
async function assertTableNumRows(
nameOrTable: string | Table,
numRows: number
) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
const row = await client!.from(name).count()
const count = parseInt(Object.values(row[0])[0] as string)
expect(count).toEqual(numRows)
}
describe.each([
["table", createTable],
[
"view",
async (schema: TableSchema) => {
async (schema?: TableSchema) => {
const tableId = await createTable(schema)
const viewId = await createView(
tableId,
Object.keys(schema).reduce<ViewV2Schema>((viewSchema, fieldName) => {
const field = schema[fieldName]
viewSchema[fieldName] = {
visible: field.visible ?? true,
readonly: false,
}
return viewSchema
}, {})
Object.keys(schema || {}).reduce<ViewV2Schema>(
(viewSchema, fieldName) => {
const field = schema![fieldName]
viewSchema[fieldName] = {
visible: field.visible ?? true,
readonly: false,
}
return viewSchema
},
{}
)
)
return viewId
},
@ -792,10 +823,11 @@ describe.each([
})
})
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => {
const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const
describe.each(stringTypes)("%s", type => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
name: { name: "name", type: FieldType.STRING },
name: { name: "name", type },
})
await createRows([{ name: "foo" }, { name: "bar" }])
})
@ -1602,7 +1634,7 @@ describe.each([
})
})
describe.each([FieldType.ARRAY, FieldType.OPTIONS])("%s", () => {
describe("arrays", () => {
beforeAll(async () => {
tableOrViewId = await createTableOrView({
numbers: {
@ -3470,5 +3502,105 @@ describe.each([
])
})
})
isSql &&
!isSqs &&
describe("SQL injection", () => {
const badStrings = [
"1; DROP TABLE %table_name%;",
"1; DELETE FROM %table_name%;",
"1; UPDATE %table_name% SET name = 'foo';",
"1; INSERT INTO %table_name% (name) VALUES ('foo');",
"' OR '1'='1' --",
"'; DROP TABLE %table_name%; --",
"' OR 1=1 --",
"' UNION SELECT null, null, null; --",
"' AND (SELECT COUNT(*) FROM %table_name%) > 0 --",
"\"; EXEC xp_cmdshell('dir'); --",
"\"' OR 'a'='a",
"OR 1=1;",
"'; SHUTDOWN --",
]
describe.each(badStrings)("bad string: %s", badStringTemplate => {
// The SQL that knex generates when you try to use a double quote in a
// field name is always invalid and never works, so we skip it for these
// tests.
const skipFieldNameCheck = isOracle && badStringTemplate.includes('"')
!skipFieldNameCheck &&
it("should not allow SQL injection as a field name", async () => {
const tableOrViewId = await createTableOrView()
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)
await config.api.table.save({
...table,
schema: {
...table.schema,
[badString]: { name: badString, type: FieldType.STRING },
},
})
if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
await config.api.viewV2.update({
...view,
schema: {
[badString]: { visible: true },
},
})
}
await config.api.row.save(tableOrViewId, { [badString]: "foo" })
await assertTableExists(table)
await assertTableNumRows(table, 1)
const { rows } = await config.api.row.search(
tableOrViewId,
{ query: {} },
{ status: 200 }
)
expect(rows).toHaveLength(1)
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
it("should not allow SQL injection as a field value", async () => {
const tableOrViewId = await createTableOrView({
foo: {
name: "foo",
type: FieldType.STRING,
},
})
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)
await config.api.row.save(tableOrViewId, { foo: "foo" })
await assertTableExists(table)
await assertTableNumRows(table, 1)
const { rows } = await config.api.row.search(
tableOrViewId,
{ query: { equal: { foo: badString } } },
{ status: 200 }
)
expect(rows).toBeEmpty()
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
})
})
})
})

View File

@ -257,7 +257,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula.
* @enum {string}
*/
formulaType?: "static" | "dynamic";
formulaType?: "static" | "dynamic" | "ai";
}
| {
/**
@ -277,11 +277,14 @@ export interface components {
| "link"
| "formula"
| "auto"
| "ai"
| "json"
| "internal"
| "barcodeqr"
| "signature_single"
| "bigint"
| "bb_reference";
| "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@ -366,7 +369,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula.
* @enum {string}
*/
formulaType?: "static" | "dynamic";
formulaType?: "static" | "dynamic" | "ai";
}
| {
/**
@ -386,11 +389,14 @@ export interface components {
| "link"
| "formula"
| "auto"
| "ai"
| "json"
| "internal"
| "barcodeqr"
| "signature_single"
| "bigint"
| "bb_reference";
| "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */
@ -477,7 +483,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula.
* @enum {string}
*/
formulaType?: "static" | "dynamic";
formulaType?: "static" | "dynamic" | "ai";
}
| {
/**
@ -497,11 +503,14 @@ export interface components {
| "link"
| "formula"
| "auto"
| "ai"
| "json"
| "internal"
| "barcodeqr"
| "signature_single"
| "bigint"
| "bb_reference";
| "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: {
/** @enum {string} */

View File

@ -24,8 +24,7 @@ import {
checkExternalTables,
HOST_ADDRESS,
} from "./utils"
import dayjs from "dayjs"
import { NUMBER_REGEX } from "../utilities"
import { isDate, NUMBER_REGEX } from "../utilities"
import { MySQLColumn } from "./base/types"
import { getReadableErrorMessage } from "./base/errorMapping"
import { sql } from "@budibase/backend-core"
@ -129,11 +128,7 @@ export function bindingTypeCoerce(bindings: SqlQueryBinding) {
}
// if not a number, see if it is a date - important to do in this order as any
// integer will be considered a valid date
else if (
/^\d/.test(binding) &&
dayjs(binding).isValid() &&
!binding.includes(",")
) {
else if (isDate(binding)) {
let value: any
value = new Date(binding)
if (isNaN(value)) {
@ -439,8 +434,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
dumpContent.push(createTableStatement)
}
const schema = dumpContent.join("\n")
return schema
return dumpContent.join("\n")
} finally {
this.disconnect()
}

View File

@ -212,7 +212,7 @@ describe("SQL query builder", () => {
const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`]
expect(query).toEqual({
bindings: [...filterSet, limit],
sql: `select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5`,
sql: `select * from (select * from "test" where ((COALESCE(LOWER("test"."age"), '') like :1 and COALESCE(LOWER("test"."age"), '') like :2)) and ((COALESCE(LOWER("test"."name"), '') like :3 and COALESCE(LOWER("test"."name"), '') like :4)) order by "test"."id" asc) where rownum <= :5`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
@ -244,7 +244,7 @@ describe("SQL query builder", () => {
expect(query).toEqual({
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
})
})
@ -262,7 +262,7 @@ describe("SQL query builder", () => {
expect(query).toEqual({
bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2`,
sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") != :1) or to_char("test"."name") is null order by "test"."id" asc) where rownum <= :2`,
})
})
})

View File

@ -10,7 +10,7 @@ import {
import { generateUserMetadataID, isDevAppID } from "../db/utils"
import { getCachedSelf } from "../utilities/global"
import env from "../environment"
import { isWebhookEndpoint } from "./utils"
import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils"
import { UserCtx, ContextUser } from "@budibase/types"
import tracer from "dd-trace"
@ -27,7 +27,7 @@ export default async (ctx: UserCtx, next: any) => {
}
// deny access to application preview
if (!env.isTest()) {
if (isBrowser(ctx) && !isApiKey(ctx)) {
if (
isDevAppID(requestAppId) &&
!isWebhookEndpoint(ctx) &&

View File

@ -1,4 +1,6 @@
require("../../db").init()
import * as db from "../../db"
db.init()
mockAuthWithNoCookie()
mockWorker()
mockUserGroups()
@ -45,7 +47,7 @@ function mockAuthWithNoCookie() {
},
cache: {
user: {
getUser: async id => {
getUser: async () => {
return {
_id: "us_uuid1",
}
@ -82,7 +84,7 @@ function mockAuthWithCookie() {
},
cache: {
user: {
getUser: async id => {
getUser: async () => {
return {
_id: "us_uuid1",
}
@ -94,6 +96,10 @@ function mockAuthWithCookie() {
}
class TestConfiguration {
next: jest.MockedFunction<any>
throw: jest.MockedFunction<any>
ctx: any
constructor() {
this.next = jest.fn()
this.throw = jest.fn()
@ -130,7 +136,7 @@ class TestConfiguration {
}
describe("Current app middleware", () => {
let config
let config: TestConfiguration
beforeEach(() => {
config = new TestConfiguration()
@ -192,7 +198,7 @@ describe("Current app middleware", () => {
},
cache: {
user: {
getUser: async id => {
getUser: async () => {
return {
_id: "us_uuid1",
}

View File

@ -1,9 +1,18 @@
import { BBContext } from "@budibase/types"
import { LoginMethod, UserCtx } from "@budibase/types"
const WEBHOOK_ENDPOINTS = new RegExp(
["webhooks/trigger", "webhooks/schema"].join("|")
)
export function isWebhookEndpoint(ctx: BBContext) {
export function isWebhookEndpoint(ctx: UserCtx) {
return WEBHOOK_ENDPOINTS.test(ctx.request.url)
}
export function isBrowser(ctx: UserCtx) {
const browser = ctx.userAgent?.browser
return browser && browser !== "unknown"
}
export function isApiKey(ctx: UserCtx) {
return ctx.loginMethod === LoginMethod.API_KEY
}

View File

@ -423,6 +423,7 @@ export default class TestConfiguration {
Accept: "application/json",
Cookie: [`${constants.Cookie.Auth}=${authToken}`],
[constants.Header.APP_ID]: appId,
...this.temporaryHeaders,
}
})
}
@ -527,6 +528,10 @@ export default class TestConfiguration {
return this.login({ userId: email, roleId, builder, prodApp })
}
browserUserAgent() {
return "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
}
// TENANCY
tenantHost() {

View File

@ -3,7 +3,10 @@ import { context } from "@budibase/backend-core"
import { generateMetadataID } from "../db/utils"
import { Document } from "@budibase/types"
import stream from "stream"
import dayjs from "dayjs"
import customParseFormat from "dayjs/plugin/customParseFormat"
dayjs.extend(customParseFormat)
const Readable = stream.Readable
export function wait(ms: number) {
@ -13,6 +16,28 @@ export function wait(ms: number) {
export const isDev = env.isDev
export const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
const ACCEPTED_DATE_FORMATS = [
"MM/DD/YYYY",
"MM/DD/YY",
"DD/MM/YYYY",
"DD/MM/YY",
"YYYY/MM/DD",
"YYYY-MM-DD",
"YYYY-MM-DDTHH:mm",
"YYYY-MM-DDTHH:mm:ss",
"YYYY-MM-DDTHH:mm:ss[Z]",
"YYYY-MM-DDTHH:mm:ss.SSS[Z]",
]
export function isDate(str: string) {
// checks for xx/xx/xx or ISO date timestamp formats
for (const format of ACCEPTED_DATE_FORMATS) {
if (dayjs(str, format, true).isValid()) {
return true
}
}
return false
}
export function removeFromArray(array: any[], element: any) {
const index = array.indexOf(element)

View File

@ -0,0 +1,34 @@
import { isDate } from "../"
describe("isDate", () => {
it("should handle DD/MM/YYYY", () => {
expect(isDate("01/01/2001")).toEqual(true)
})
it("should handle DD/MM/YY", () => {
expect(isDate("01/01/01")).toEqual(true)
})
it("should handle ISO format YYYY-MM-DD", () => {
expect(isDate("2001-01-01")).toEqual(true)
})
it("should handle ISO format with time (YYYY-MM-DDTHH:MM)", () => {
expect(isDate("2001-01-01T12:30")).toEqual(true)
})
it("should handle ISO format with full timestamp (YYYY-MM-DDTHH:MM:SS)", () => {
expect(isDate("2001-01-01T12:30:45")).toEqual(true)
})
it("should handle complete ISO format", () => {
expect(isDate("2001-01-01T12:30:00.000Z")).toEqual(true)
})
it("should return false for invalid formats", () => {
expect(isDate("")).toEqual(false)
expect(isDate("1/10")).toEqual(false)
expect(isDate("random string")).toEqual(false)
expect(isDate("123456")).toEqual(false)
})
})

View File

@ -19,7 +19,8 @@
"@types/koa": "2.13.4",
"@types/redlock": "4.0.7",
"rimraf": "3.0.2",
"typescript": "5.5.2"
"typescript": "5.5.2",
"koa-useragent": "^4.1.0"
},
"dependencies": {
"scim-patch": "^0.8.1"

View File

@ -2,6 +2,12 @@ import { Context, Request } from "koa"
import { User, Role, UserRoles, Account, ConfigType } from "../documents"
import { FeatureFlag, License } from "../sdk"
import { Files } from "formidable"
import { UserAgentContext } from "koa-useragent"
export enum LoginMethod {
API_KEY = "api_key",
COOKIE = "cookie",
}
export interface ContextUser extends Omit<User, "roles"> {
globalId?: string
@ -31,6 +37,7 @@ export interface BBRequest<RequestBody> extends Request {
export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
request: BBRequest<RequestBody>
body: ResponseBody
userAgent: UserAgentContext["userAgent"]
}
/**
@ -40,6 +47,7 @@ export interface UserCtx<RequestBody = any, ResponseBody = any>
extends Ctx<RequestBody, ResponseBody> {
user: ContextUser
roleId?: string
loginMethod?: LoginMethod
}
/**

View File

@ -49,7 +49,7 @@ type BasicFilter<T = any> = Record<string, T> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
}
type ArrayFilter = Record<string, any[]> & {
export type ArrayFilter = Record<string, any[]> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: {
id: string[]
values: string[]