Merge branch 'master' into chore/update-submodule

Michael Drury 2024-10-25 13:14:50 +01:00 committed by GitHub
commit 516ee275ce
26 changed files with 938 additions and 375 deletions

View File

@ -27,7 +27,7 @@ export function doInUserContext(user: User, ctx: Ctx, task: any) {
hostInfo: { hostInfo: {
ipAddress: ctx.request.ip, ipAddress: ctx.request.ip,
// filled in by koa-useragent package // filled in by koa-useragent package
userAgent: ctx.userAgent._agent.source, userAgent: ctx.userAgent.source,
}, },
} }
return doInIdentityContext(userContext, task) return doInIdentityContext(userContext, task)

View File

@ -1,20 +1,26 @@
import { Cookie, Header } from "../constants" import { Cookie, Header } from "../constants"
import { import {
getCookie,
clearCookie, clearCookie,
openJwt, getCookie,
isValidInternalAPIKey, isValidInternalAPIKey,
openJwt,
} from "../utils" } from "../utils"
import { getUser } from "../cache/user" import { getUser } from "../cache/user"
import { getSession, updateSessionTTL } from "../security/sessions" import { getSession, updateSessionTTL } from "../security/sessions"
import { buildMatcherRegex, matches } from "./matchers" import { buildMatcherRegex, matches } from "./matchers"
import { SEPARATOR, queryGlobalView, ViewName } from "../db" import { queryGlobalView, SEPARATOR, ViewName } from "../db"
import { getGlobalDB, doInTenant } from "../context" import { doInTenant, getGlobalDB } from "../context"
import { decrypt } from "../security/encryption" import { decrypt } from "../security/encryption"
import * as identity from "../context/identity" import * as identity from "../context/identity"
import env from "../environment" import env from "../environment"
import { Ctx, EndpointMatcher, SessionCookie, User } from "@budibase/types" import {
import { InvalidAPIKeyError, ErrorCode } from "../errors" Ctx,
EndpointMatcher,
LoginMethod,
SessionCookie,
User,
} from "@budibase/types"
import { ErrorCode, InvalidAPIKeyError } from "../errors"
import tracer from "dd-trace" import tracer from "dd-trace"
const ONE_MINUTE = env.SESSION_UPDATE_PERIOD const ONE_MINUTE = env.SESSION_UPDATE_PERIOD
@ -26,16 +32,18 @@ interface FinaliseOpts {
internal?: boolean internal?: boolean
publicEndpoint?: boolean publicEndpoint?: boolean
version?: string version?: string
user?: any user?: User | { tenantId: string }
loginMethod?: LoginMethod
} }
function timeMinusOneMinute() { function timeMinusOneMinute() {
return new Date(Date.now() - ONE_MINUTE).toISOString() return new Date(Date.now() - ONE_MINUTE).toISOString()
} }
function finalise(ctx: any, opts: FinaliseOpts = {}) { function finalise(ctx: Ctx, opts: FinaliseOpts = {}) {
ctx.publicEndpoint = opts.publicEndpoint || false ctx.publicEndpoint = opts.publicEndpoint || false
ctx.isAuthenticated = opts.authenticated || false ctx.isAuthenticated = opts.authenticated || false
ctx.loginMethod = opts.loginMethod
ctx.user = opts.user ctx.user = opts.user
ctx.internal = opts.internal || false ctx.internal = opts.internal || false
ctx.version = opts.version ctx.version = opts.version
@ -120,9 +128,10 @@ export default function (
} }
const tenantId = ctx.request.headers[Header.TENANT_ID] const tenantId = ctx.request.headers[Header.TENANT_ID]
let authenticated = false, let authenticated: boolean = false,
user = null, user: User | { tenantId: string } | undefined = undefined,
internal = false internal: boolean = false,
loginMethod: LoginMethod | undefined = undefined
if (authCookie && !apiKey) { if (authCookie && !apiKey) {
const sessionId = authCookie.sessionId const sessionId = authCookie.sessionId
const userId = authCookie.userId const userId = authCookie.userId
@ -146,6 +155,7 @@ export default function (
} }
// @ts-ignore // @ts-ignore
user.csrfToken = session.csrfToken user.csrfToken = session.csrfToken
loginMethod = LoginMethod.COOKIE
if (session?.lastAccessedAt < timeMinusOneMinute()) { if (session?.lastAccessedAt < timeMinusOneMinute()) {
// make sure we denote that the session is still in use // make sure we denote that the session is still in use
@ -170,17 +180,16 @@ export default function (
apiKey, apiKey,
populateUser populateUser
) )
if (valid && foundUser) { if (valid) {
authenticated = true authenticated = true
loginMethod = LoginMethod.API_KEY
user = foundUser user = foundUser
} else if (valid) { internal = !foundUser
authenticated = true
internal = true
} }
} }
if (!user && tenantId) { if (!user && tenantId) {
user = { tenantId } user = { tenantId }
} else if (user) { } else if (user && "password" in user) {
delete user.password delete user.password
} }
// be explicit // be explicit
@ -204,7 +213,14 @@ export default function (
} }
// isAuthenticated is a function, so use a variable to be able to check authed state // isAuthenticated is a function, so use a variable to be able to check authed state
finalise(ctx, { authenticated, user, internal, version, publicEndpoint }) finalise(ctx, {
authenticated,
user,
internal,
version,
publicEndpoint,
loginMethod,
})
if (isUser(user)) { if (isUser(user)) {
return identity.doInUserContext(user, ctx, next) return identity.doInUserContext(user, ctx, next)
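
For reference, the shape finalise() leaves on the Koa ctx now looks roughly like this (a hedged sketch: the interface name and grouping are illustrative, while the field names and types are taken from the middleware above):

import { LoginMethod, User } from "@budibase/types"

// Illustrative only: finalise() assigns these fields directly onto ctx rather
// than through a dedicated interface.
interface AuthedCtxFields {
  publicEndpoint: boolean
  isAuthenticated: boolean
  // LoginMethod.COOKIE for a valid session cookie, LoginMethod.API_KEY for a
  // valid API key, otherwise undefined
  loginMethod?: LoginMethod
  user?: User | { tenantId: string }
  internal: boolean
  version?: string
}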

View File

@ -13,6 +13,7 @@ import SqlTableQueryBuilder from "./sqlTable"
import { import {
Aggregation, Aggregation,
AnySearchFilter, AnySearchFilter,
ArrayFilter,
ArrayOperator, ArrayOperator,
BasicOperator, BasicOperator,
BBReferenceFieldMetadata, BBReferenceFieldMetadata,
@ -98,6 +99,23 @@ function isSqs(table: Table): boolean {
) )
} }
function escapeQuotes(value: string, quoteChar = '"'): string {
return value.replace(new RegExp(quoteChar, "g"), `${quoteChar}${quoteChar}`)
}
function wrap(value: string, quoteChar = '"'): string {
return `${quoteChar}${escapeQuotes(value, quoteChar)}${quoteChar}`
}
function stringifyArray(value: any[], quoteStyle = '"'): string {
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = wrap(value[i], quoteStyle)
}
}
return `[${value.join(",")}]`
}
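
For illustration, a quick sketch of what these new helpers produce (the values are hypothetical and assume access to the functions above):

console.log(escapeQuotes('say "hi"'))        // say ""hi""
console.log(wrap("O'Brien", "'"))            // 'O''Brien'
console.log(stringifyArray(["a", "b", 1]))   // ["a","b",1]
console.log(stringifyArray(["a", "b"], "'")) // ['a','b']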
const allowEmptyRelationships: Record<SearchFilterKey, boolean> = { const allowEmptyRelationships: Record<SearchFilterKey, boolean> = {
[BasicOperator.EQUAL]: false, [BasicOperator.EQUAL]: false,
[BasicOperator.NOT_EQUAL]: true, [BasicOperator.NOT_EQUAL]: true,
@ -152,30 +170,30 @@ class InternalBuilder {
return this.query.meta.table return this.query.meta.table
} }
get knexClient(): Knex.Client {
return this.knex.client as Knex.Client
}
getFieldSchema(key: string): FieldSchema | undefined { getFieldSchema(key: string): FieldSchema | undefined {
const { column } = this.splitter.run(key) const { column } = this.splitter.run(key)
return this.table.schema[column] return this.table.schema[column]
} }
private quoteChars(): [string, string] { private supportsILike(): boolean {
switch (this.client) { return !(
case SqlClient.ORACLE: this.client === SqlClient.ORACLE || this.client === SqlClient.SQL_LITE
case SqlClient.POSTGRES: )
return ['"', '"']
case SqlClient.MS_SQL:
return ["[", "]"]
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
case SqlClient.SQL_LITE:
return ["`", "`"]
}
} }
// Takes a string like foo and returns a quoted string like [foo] for SQL Server private quoteChars(): [string, string] {
// and "foo" for Postgres. const wrapped = this.knexClient.wrapIdentifier("foo", {})
return [wrapped[0], wrapped[wrapped.length - 1]]
}
// Takes a string like foo and returns a quoted string like [foo] for SQL
// Server and "foo" for Postgres.
private quote(str: string): string { private quote(str: string): string {
const [start, end] = this.quoteChars() return this.knexClient.wrapIdentifier(str, {})
return `${start}${str}${end}`
} }
private isQuoted(key: string): boolean { private isQuoted(key: string): boolean {
@ -193,6 +211,30 @@ class InternalBuilder {
return key.map(part => this.quote(part)).join(".") return key.map(part => this.quote(part)).join(".")
} }
private quotedValue(value: string): string {
const formatter = this.knexClient.formatter(this.knexClient.queryBuilder())
return formatter.wrap(value, false)
}
private rawQuotedValue(value: string): Knex.Raw {
return this.knex.raw(this.quotedValue(value))
}
// Unfortunately we cannot rely on knex's identifier escaping because it trims
// the identifier string before escaping it, which breaks cases for us where
// columns that start or end with a space aren't referenced correctly anymore.
//
// So whenever you're using an identifier binding in knex, e.g. knex.raw("??
// as ?", ["foo", "bar"]), you need to make sure you call this:
//
// knex.raw("?? as ?", [this.quotedIdentifier("foo"), "bar"])
//
// Issue we filed against knex about this:
// https://github.com/knex/knex/issues/6143
private rawQuotedIdentifier(key: string): Knex.Raw {
return this.knex.raw(this.quotedIdentifier(key))
}
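
A hedged sketch of the problem this works around, written outside the class (the padded column name is hypothetical; the trimming behaviour is as described in the comment above):

import knex from "knex"

const k = knex({ client: "pg" })

// Identifier binding: knex trims the identifier before quoting it, so a column
// whose name starts or ends with a space is no longer referenced correctly,
// e.g. something like `"padded col" as "alias"` with the spaces dropped.
k.raw("?? as ??", [" padded col ", "alias"]).toString()

// Workaround used here: quote the identifier ourselves via the client's
// wrapIdentifier (which keeps the spaces) and splice the pre-quoted string in,
// yielding `" padded col " as "alias"`.
const quoted = (k.client as any).wrapIdentifier(" padded col ", {})
k.raw(`${quoted} as ??`, ["alias"]).toString()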
// Turns an identifier like a.b.c or `a`.`b`.`c` into ["a", "b", "c"] // Turns an identifier like a.b.c or `a`.`b`.`c` into ["a", "b", "c"]
private splitIdentifier(key: string): string[] { private splitIdentifier(key: string): string[] {
const [start, end] = this.quoteChars() const [start, end] = this.quoteChars()
@ -236,7 +278,7 @@ class InternalBuilder {
const alias = this.getTableName(endpoint.entityId) const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) { if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)] return [this.knex.raw("??", [`${alias}.*`])]
} }
// get just the fields for this table // get just the fields for this table
return resource.fields return resource.fields
@ -258,30 +300,40 @@ class InternalBuilder {
const columnSchema = schema[column] const columnSchema = schema[column]
if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) { if (this.SPECIAL_SELECT_CASES.POSTGRES_MONEY(columnSchema)) {
return this.knex.raw( // TODO: figure out how to express this safely without string
`${this.quotedIdentifier( // interpolation.
[table, column].join(".") return this.knex.raw(`??::money::numeric as "${field}"`, [
)}::money::numeric as ${this.quote(field)}` this.rawQuotedIdentifier([table, column].join(".")),
) field,
])
} }
if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) { if (this.SPECIAL_SELECT_CASES.MSSQL_DATES(columnSchema)) {
// Time gets returned as timestamp from mssql, not matching the expected // Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format // HH:mm format
return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
// TODO: figure out how to express this safely without string
// interpolation.
return this.knex.raw(`CONVERT(varchar, ??, 108) as "${field}"`, [
this.rawQuotedIdentifier(field),
])
} }
const quoted = table if (table) {
? `${this.quote(table)}.${this.quote(column)}` return this.rawQuotedIdentifier(`${table}.${column}`)
: this.quote(field) } else {
return this.knex.raw(quoted) return this.rawQuotedIdentifier(field)
}
}) })
} }
// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses, // OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
// so when we use them we need to wrap them in to_char(). This function // so when we use them we need to wrap them in to_char(). This function
// converts a field name to the appropriate identifier. // converts a field name to the appropriate identifier.
private convertClobs(field: string, opts?: { forSelect?: boolean }): string { private convertClobs(
field: string,
opts?: { forSelect?: boolean }
): Knex.Raw {
if (this.client !== SqlClient.ORACLE) { if (this.client !== SqlClient.ORACLE) {
throw new Error( throw new Error(
"you've called convertClobs on a DB that's not Oracle, this is a mistake" "you've called convertClobs on a DB that's not Oracle, this is a mistake"
@ -290,7 +342,7 @@ class InternalBuilder {
const parts = this.splitIdentifier(field) const parts = this.splitIdentifier(field)
const col = parts.pop()! const col = parts.pop()!
const schema = this.table.schema[col] const schema = this.table.schema[col]
let identifier = this.quotedIdentifier(field) let identifier = this.rawQuotedIdentifier(field)
if ( if (
schema.type === FieldType.STRING || schema.type === FieldType.STRING ||
@ -301,9 +353,12 @@ class InternalBuilder {
schema.type === FieldType.BARCODEQR schema.type === FieldType.BARCODEQR
) { ) {
if (opts?.forSelect) { if (opts?.forSelect) {
identifier = `to_char(${identifier}) as ${this.quotedIdentifier(col)}` identifier = this.knex.raw("to_char(??) as ??", [
identifier,
this.rawQuotedIdentifier(col),
])
} else { } else {
identifier = `to_char(${identifier})` identifier = this.knex.raw("to_char(??)", [identifier])
} }
} }
return identifier return identifier
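
Called from inside InternalBuilder, the result is roughly as follows for an Oracle datasource (a hedged illustration; the table and column names are hypothetical):

// Text-like columns (string, long-form, options, barcode/QR, ...) get wrapped:
this.convertClobs("tasks.description")
// -> to_char("tasks"."description")
this.convertClobs("tasks.description", { forSelect: true })
// -> to_char("tasks"."description") as "description"

// Columns of other types fall through as the plain quoted identifier:
this.convertClobs("tasks.count")
// -> "tasks"."count"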
@ -427,7 +482,6 @@ class InternalBuilder {
filterKey: string, filterKey: string,
whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder whereCb: (filterKey: string, query: Knex.QueryBuilder) => Knex.QueryBuilder
): Knex.QueryBuilder { ): Knex.QueryBuilder {
const mainKnex = this.knex
const { relationships, endpoint, tableAliases: aliases } = this.query const { relationships, endpoint, tableAliases: aliases } = this.query
const tableName = endpoint.entityId const tableName = endpoint.entityId
const fromAlias = aliases?.[tableName] || tableName const fromAlias = aliases?.[tableName] || tableName
@ -449,8 +503,8 @@ class InternalBuilder {
relationship.to && relationship.to &&
relationship.tableName relationship.tableName
) { ) {
const joinTable = mainKnex const joinTable = this.knex
.select(mainKnex.raw(1)) .select(this.knex.raw(1))
.from({ [toAlias]: relatedTableName }) .from({ [toAlias]: relatedTableName })
let subQuery = joinTable.clone() let subQuery = joinTable.clone()
const manyToMany = validateManyToMany(relationship) const manyToMany = validateManyToMany(relationship)
@ -485,9 +539,7 @@ class InternalBuilder {
.where( .where(
`${throughAlias}.${manyToMany.from}`, `${throughAlias}.${manyToMany.from}`,
"=", "=",
mainKnex.raw( this.rawQuotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
this.quotedIdentifier(`${fromAlias}.${manyToMany.fromPrimary}`)
)
) )
// in SQS the same junction table is used for different many-to-many relationships between the // in SQS the same junction table is used for different many-to-many relationships between the
// two same tables, this is needed to avoid rows ending up in all columns // two same tables, this is needed to avoid rows ending up in all columns
@ -516,7 +568,7 @@ class InternalBuilder {
subQuery = subQuery.where( subQuery = subQuery.where(
toKey, toKey,
"=", "=",
mainKnex.raw(this.quotedIdentifier(foreignKey)) this.rawQuotedIdentifier(foreignKey)
) )
query = query.where(q => { query = query.where(q => {
@ -546,7 +598,7 @@ class InternalBuilder {
filters = this.parseFilters({ ...filters }) filters = this.parseFilters({ ...filters })
const aliases = this.query.tableAliases const aliases = this.query.tableAliases
// if all or specified in filters, then everything is an or // if all or specified in filters, then everything is an or
const allOr = filters.allOr const shouldOr = filters.allOr
const isSqlite = this.client === SqlClient.SQL_LITE const isSqlite = this.client === SqlClient.SQL_LITE
const tableName = isSqlite ? this.table._id! : this.table.name const tableName = isSqlite ? this.table._id! : this.table.name
@ -610,7 +662,7 @@ class InternalBuilder {
value value
) )
} else if (shouldProcessRelationship) { } else if (shouldProcessRelationship) {
if (allOr) { if (shouldOr) {
query = query.or query = query.or
} }
query = builder.addRelationshipForFilter( query = builder.addRelationshipForFilter(
@ -626,85 +678,102 @@ class InternalBuilder {
} }
const like = (q: Knex.QueryBuilder, key: string, value: any) => { const like = (q: Knex.QueryBuilder, key: string, value: any) => {
const fuzzyOr = filters?.fuzzyOr if (filters?.fuzzyOr || shouldOr) {
const fnc = fuzzyOr || allOr ? "orWhere" : "where" q = q.or
// postgres supports ilike, nothing else does }
if (this.client === SqlClient.POSTGRES) { if (
return q[fnc](key, "ilike", `%${value}%`) this.client === SqlClient.ORACLE ||
} else { this.client === SqlClient.SQL_LITE
const rawFnc = `${fnc}Raw` ) {
// @ts-ignore return q.whereRaw(`LOWER(??) LIKE ?`, [
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [ this.rawQuotedIdentifier(key),
`%${value.toLowerCase()}%`, `%${value.toLowerCase()}%`,
]) ])
} }
return q.whereILike(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key),
this.knex.raw("?", [`%${value}%`])
)
} }
const contains = (mode: AnySearchFilter, any: boolean = false) => { const contains = (mode: ArrayFilter, any = false) => {
const rawFnc = allOr ? "orWhereRaw" : "whereRaw" function addModifiers<T extends {}, Q>(q: Knex.QueryBuilder<T, Q>) {
const not = mode === filters?.notContains ? "NOT " : "" if (shouldOr || mode === filters?.containsAny) {
function stringifyArray(value: Array<any>, quoteStyle = '"'): string { q = q.or
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `${quoteStyle}${value[i]}${quoteStyle}`
}
} }
return `[${value.join(",")}]` if (mode === filters?.notContains) {
q = q.not
}
return q
} }
if (this.client === SqlClient.POSTGRES) { if (this.client === SqlClient.POSTGRES) {
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => { iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
const wrap = any ? "" : "'" q = addModifiers(q)
const op = any ? "\\?| array" : "@>" if (any) {
const fieldNames = key.split(/\./g) return q.whereRaw(`COALESCE(??::jsonb \\?| array??, FALSE)`, [
const table = fieldNames[0] this.rawQuotedIdentifier(key),
const col = fieldNames[1] this.knex.raw(stringifyArray(value, "'")),
return q[rawFnc]( ])
`${not}COALESCE("${table}"."${col}"::jsonb ${op} ${wrap}${stringifyArray( } else {
value, return q.whereRaw(`COALESCE(??::jsonb @> '??', FALSE)`, [
any ? "'" : '"' this.rawQuotedIdentifier(key),
)}${wrap}, FALSE)` this.knex.raw(stringifyArray(value)),
) ])
}
}) })
} else if ( } else if (
this.client === SqlClient.MY_SQL || this.client === SqlClient.MY_SQL ||
this.client === SqlClient.MARIADB this.client === SqlClient.MARIADB
) { ) {
const jsonFnc = any ? "JSON_OVERLAPS" : "JSON_CONTAINS"
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => { iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
return q[rawFnc]( return addModifiers(q).whereRaw(`COALESCE(?(??, ?), FALSE)`, [
`${not}COALESCE(${jsonFnc}(${key}, '${stringifyArray( this.knex.raw(any ? "JSON_OVERLAPS" : "JSON_CONTAINS"),
value this.rawQuotedIdentifier(key),
)}'), FALSE)` this.knex.raw(wrap(stringifyArray(value))),
) ])
}) })
} else { } else {
const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => { iterate(mode, ArrayOperator.CONTAINS, (q, key, value) => {
let statement = "" if (value.length === 0) {
const identifier = this.quotedIdentifier(key)
for (let i in value) {
if (typeof value[i] === "string") {
value[i] = `%"${value[i].toLowerCase()}"%`
} else {
value[i] = `%${value[i]}%`
}
statement += `${
statement ? andOr : ""
}COALESCE(LOWER(${identifier}), '') LIKE ?`
}
if (statement === "") {
return q return q
} }
if (not) { q = q.where(subQuery => {
return q[rawFnc]( if (mode === filters?.notContains) {
`(NOT (${statement}) OR ${identifier} IS NULL)`, subQuery = subQuery.not
value }
)
} else { subQuery = subQuery.where(subSubQuery => {
return q[rawFnc](statement, value) for (const elem of value) {
} if (mode === filters?.containsAny) {
subSubQuery = subSubQuery.or
} else {
subSubQuery = subSubQuery.and
}
const lower =
typeof elem === "string" ? `"${elem.toLowerCase()}"` : elem
subSubQuery = subSubQuery.whereLike(
// @ts-expect-error knex types are wrong, raw is fine here
this.knex.raw(`COALESCE(LOWER(??), '')`, [
this.rawQuotedIdentifier(key),
]),
`%${lower}%`
)
}
})
if (mode === filters?.notContains) {
subQuery = subQuery.or.whereNull(
// @ts-expect-error knex types are wrong, raw is fine here
this.rawQuotedIdentifier(key)
)
}
return subQuery
})
return q
}) })
} }
} }
@ -730,45 +799,46 @@ class InternalBuilder {
} }
if (filters.oneOf) { if (filters.oneOf) {
const fnc = allOr ? "orWhereIn" : "whereIn"
iterate( iterate(
filters.oneOf, filters.oneOf,
ArrayOperator.ONE_OF, ArrayOperator.ONE_OF,
(q, key: string, array) => { (q, key: string, array) => {
if (this.client === SqlClient.ORACLE) { if (shouldOr) {
key = this.convertClobs(key) q = q.or
array = Array.isArray(array) ? array : [array]
const binding = new Array(array.length).fill("?").join(",")
return q.whereRaw(`${key} IN (${binding})`, array)
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
} }
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.convertClobs(key)
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
}, },
(q, key: string[], array) => { (q, key: string[], array) => {
if (this.client === SqlClient.ORACLE) { if (shouldOr) {
const keyStr = `(${key.map(k => this.convertClobs(k)).join(",")})` q = q.or
const binding = `(${array
.map((a: any) => `(${new Array(a.length).fill("?").join(",")})`)
.join(",")})`
return q.whereRaw(`${keyStr} IN ${binding}`, array.flat())
} else {
return q[fnc](key, Array.isArray(array) ? array : [array])
} }
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = key.map(k => this.convertClobs(k))
}
return q.whereIn(key, Array.isArray(array) ? array : [array])
} }
) )
} }
if (filters.string) { if (filters.string) {
iterate(filters.string, BasicOperator.STRING, (q, key, value) => { iterate(filters.string, BasicOperator.STRING, (q, key, value) => {
const fnc = allOr ? "orWhere" : "where" if (shouldOr) {
// postgres supports ilike, nothing else does q = q.or
if (this.client === SqlClient.POSTGRES) { }
return q[fnc](key, "ilike", `${value}%`) if (
} else { this.client === SqlClient.ORACLE ||
const rawFnc = `${fnc}Raw` this.client === SqlClient.SQL_LITE
// @ts-ignore ) {
return q[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [ return q.whereRaw(`LOWER(??) LIKE ?`, [
this.rawQuotedIdentifier(key),
`${value.toLowerCase()}%`, `${value.toLowerCase()}%`,
]) ])
} else {
return q.whereILike(key, `${value}%`)
} }
}) })
} }
@ -795,67 +865,59 @@ class InternalBuilder {
const schema = this.getFieldSchema(key) const schema = this.getFieldSchema(key)
let rawKey: string | Knex.Raw = key
let high = value.high
let low = value.low
if (this.client === SqlClient.ORACLE) { if (this.client === SqlClient.ORACLE) {
// @ts-ignore rawKey = this.convertClobs(key)
key = this.knex.raw(this.convertClobs(key)) } else if (
this.client === SqlClient.SQL_LITE &&
schema?.type === FieldType.BIGINT
) {
rawKey = this.knex.raw("CAST(?? AS INTEGER)", [
this.rawQuotedIdentifier(key),
])
high = this.knex.raw("CAST(? AS INTEGER)", [value.high])
low = this.knex.raw("CAST(? AS INTEGER)", [value.low])
}
if (shouldOr) {
q = q.or
} }
if (lowValid && highValid) { if (lowValid && highValid) {
if ( // @ts-expect-error knex types are wrong, raw is fine here
schema?.type === FieldType.BIGINT && return q.whereBetween(rawKey, [low, high])
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[value.low, value.high]
)
} else {
const fnc = allOr ? "orWhereBetween" : "whereBetween"
return q[fnc](key, [value.low, value.high])
}
} else if (lowValid) { } else if (lowValid) {
if ( // @ts-expect-error knex types are wrong, raw is fine here
schema?.type === FieldType.BIGINT && return q.where(rawKey, ">=", low)
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
value.low,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, ">=", value.low)
}
} else if (highValid) { } else if (highValid) {
if ( // @ts-expect-error knex types are wrong, raw is fine here
schema?.type === FieldType.BIGINT && return q.where(rawKey, "<=", high)
this.client === SqlClient.SQL_LITE
) {
return q.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
value.high,
])
} else {
const fnc = allOr ? "orWhere" : "where"
return q[fnc](key, "<=", value.high)
}
} }
return q return q
}) })
} }
if (filters.equal) { if (filters.equal) {
iterate(filters.equal, BasicOperator.EQUAL, (q, key, value) => { iterate(filters.equal, BasicOperator.EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw" if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) { if (this.client === SqlClient.MS_SQL) {
return q[fnc]( return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 1`, [
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`, this.rawQuotedIdentifier(key),
[value]
)
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q[fnc](`(${identifier} IS NOT NULL AND ${identifier} = ?)`, [
value, value,
]) ])
} else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
return q.where(subq =>
// @ts-expect-error knex types are wrong, raw is fine here
subq.whereNotNull(identifier).andWhere(identifier, value)
)
} else { } else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`, [ return q.whereRaw(`COALESCE(?? = ?, FALSE)`, [
this.rawQuotedIdentifier(key),
value, value,
]) ])
} }
@ -863,20 +925,30 @@ class InternalBuilder {
} }
if (filters.notEqual) { if (filters.notEqual) {
iterate(filters.notEqual, BasicOperator.NOT_EQUAL, (q, key, value) => { iterate(filters.notEqual, BasicOperator.NOT_EQUAL, (q, key, value) => {
const fnc = allOr ? "orWhereRaw" : "whereRaw" if (shouldOr) {
q = q.or
}
if (this.client === SqlClient.MS_SQL) { if (this.client === SqlClient.MS_SQL) {
return q[fnc]( return q.whereRaw(`CASE WHEN ?? = ? THEN 1 ELSE 0 END = 0`, [
`CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`, this.rawQuotedIdentifier(key),
[value] value,
) ])
} else if (this.client === SqlClient.ORACLE) { } else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key) const identifier = this.convertClobs(key)
return q[fnc]( return (
`(${identifier} IS NOT NULL AND ${identifier} != ?) OR ${identifier} IS NULL`, q
[value] .where(subq =>
subq.not
// @ts-expect-error knex types are wrong, raw is fine here
.whereNull(identifier)
.and.where(identifier, "!=", value)
)
// @ts-expect-error knex types are wrong, raw is fine here
.or.whereNull(identifier)
) )
} else { } else {
return q[fnc](`COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`, [ return q.whereRaw(`COALESCE(?? != ?, TRUE)`, [
this.rawQuotedIdentifier(key),
value, value,
]) ])
} }
@ -884,14 +956,18 @@ class InternalBuilder {
} }
if (filters.empty) { if (filters.empty) {
iterate(filters.empty, BasicOperator.EMPTY, (q, key) => { iterate(filters.empty, BasicOperator.EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNull" : "whereNull" if (shouldOr) {
return q[fnc](key) q = q.or
}
return q.whereNull(key)
}) })
} }
if (filters.notEmpty) { if (filters.notEmpty) {
iterate(filters.notEmpty, BasicOperator.NOT_EMPTY, (q, key) => { iterate(filters.notEmpty, BasicOperator.NOT_EMPTY, (q, key) => {
const fnc = allOr ? "orWhereNotNull" : "whereNotNull" if (shouldOr) {
return q[fnc](key) q = q.or
}
return q.whereNotNull(key)
}) })
} }
if (filters.contains) { if (filters.contains) {
@ -976,9 +1052,7 @@ class InternalBuilder {
const selectFields = qualifiedFields.map(field => const selectFields = qualifiedFields.map(field =>
this.convertClobs(field, { forSelect: true }) this.convertClobs(field, { forSelect: true })
) )
query = query query = query.groupBy(groupByFields).select(selectFields)
.groupByRaw(groupByFields.join(", "))
.select(this.knex.raw(selectFields.join(", ")))
} else { } else {
query = query.groupBy(qualifiedFields).select(qualifiedFields) query = query.groupBy(qualifiedFields).select(qualifiedFields)
} }
@ -990,11 +1064,10 @@ class InternalBuilder {
if (this.client === SqlClient.ORACLE) { if (this.client === SqlClient.ORACLE) {
const field = this.convertClobs(`${tableName}.${aggregation.field}`) const field = this.convertClobs(`${tableName}.${aggregation.field}`)
query = query.select( query = query.select(
this.knex.raw( this.knex.raw(`COUNT(DISTINCT ??) as ??`, [
`COUNT(DISTINCT ${field}) as ${this.quotedIdentifier( field,
aggregation.name aggregation.name,
)}` ])
)
) )
} else { } else {
query = query.countDistinct( query = query.countDistinct(
@ -1059,9 +1132,11 @@ class InternalBuilder {
} else { } else {
let composite = `${aliased}.${key}` let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) { if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw( query = query.orderByRaw(`?? ?? nulls ??`, [
`${this.convertClobs(composite)} ${direction} nulls ${nulls}` this.convertClobs(composite),
) this.knex.raw(direction),
this.knex.raw(nulls as string),
])
} else { } else {
query = query.orderBy(composite, direction, nulls) query = query.orderBy(composite, direction, nulls)
} }
@ -1091,17 +1166,22 @@ class InternalBuilder {
private buildJsonField(field: string): string { private buildJsonField(field: string): string {
const parts = field.split(".") const parts = field.split(".")
let tableField: string, unaliased: string let unaliased: string
let tableField: string
if (parts.length > 1) { if (parts.length > 1) {
const alias = parts.shift()! const alias = parts.shift()!
unaliased = parts.join(".") unaliased = parts.join(".")
tableField = `${this.quote(alias)}.${this.quote(unaliased)}` tableField = `${alias}.${unaliased}`
} else { } else {
unaliased = parts.join(".") unaliased = parts.join(".")
tableField = this.quote(unaliased) tableField = unaliased
} }
const separator = this.client === SqlClient.ORACLE ? " VALUE " : "," const separator = this.client === SqlClient.ORACLE ? " VALUE " : ","
return `'${unaliased}'${separator}${tableField}` return this.knex
.raw(`?${separator}??`, [unaliased, this.rawQuotedIdentifier(tableField)])
.toString()
} }
maxFunctionParameters() { maxFunctionParameters() {
@ -1197,13 +1277,13 @@ class InternalBuilder {
subQuery = subQuery.where( subQuery = subQuery.where(
correlatedTo, correlatedTo,
"=", "=",
knex.raw(this.quotedIdentifier(correlatedFrom)) this.rawQuotedIdentifier(correlatedFrom)
) )
const standardWrap = (select: string): Knex.QueryBuilder => { const standardWrap = (select: Knex.Raw): Knex.QueryBuilder => {
subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit()) subQuery = subQuery.select(`${toAlias}.*`).limit(getRelationshipLimit())
// @ts-ignore - the from alias syntax isn't in Knex typing // @ts-ignore - the from alias syntax isn't in Knex typing
return knex.select(knex.raw(select)).from({ return knex.select(select).from({
[toAlias]: subQuery, [toAlias]: subQuery,
}) })
} }
@ -1213,12 +1293,12 @@ class InternalBuilder {
// need to check the junction table document is to the right column, this is just for SQS // need to check the junction table document is to the right column, this is just for SQS
subQuery = this.addJoinFieldCheck(subQuery, relationship) subQuery = this.addJoinFieldCheck(subQuery, relationship)
wrapperQuery = standardWrap( wrapperQuery = standardWrap(
`json_group_array(json_object(${fieldList}))` this.knex.raw(`json_group_array(json_object(${fieldList}))`)
) )
break break
case SqlClient.POSTGRES: case SqlClient.POSTGRES:
wrapperQuery = standardWrap( wrapperQuery = standardWrap(
`json_agg(json_build_object(${fieldList}))` this.knex.raw(`json_agg(json_build_object(${fieldList}))`)
) )
break break
case SqlClient.MARIADB: case SqlClient.MARIADB:
@ -1232,21 +1312,25 @@ class InternalBuilder {
case SqlClient.MY_SQL: case SqlClient.MY_SQL:
case SqlClient.ORACLE: case SqlClient.ORACLE:
wrapperQuery = standardWrap( wrapperQuery = standardWrap(
`json_arrayagg(json_object(${fieldList}))` this.knex.raw(`json_arrayagg(json_object(${fieldList}))`)
) )
break break
case SqlClient.MS_SQL: case SqlClient.MS_SQL: {
const comparatorQuery = knex
.select(`${fromAlias}.*`)
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})
wrapperQuery = knex.raw( wrapperQuery = knex.raw(
`(SELECT ${this.quote(toAlias)} = (${knex `(SELECT ?? = (${comparatorQuery} FOR JSON PATH))`,
.select(`${fromAlias}.*`) [this.rawQuotedIdentifier(toAlias)]
// @ts-ignore - from alias syntax not TS supported
.from({
[fromAlias]: subQuery
.select(`${toAlias}.*`)
.limit(getRelationshipLimit()),
})} FOR JSON PATH))`
) )
break break
}
default: default:
throw new Error(`JSON relationships not implement for ${sqlClient}`) throw new Error(`JSON relationships not implement for ${sqlClient}`)
} }

View File

@ -20,19 +20,15 @@ const options = {
{ {
url: "https://budibase.app/api/public/v1", url: "https://budibase.app/api/public/v1",
description: "Budibase Cloud API", description: "Budibase Cloud API",
},
{
url: "{protocol}://{hostname}/api/public/v1",
description: "Budibase self hosted API",
variables: { variables: {
protocol: { apiKey: {
default: "http", default: "<user API key>",
description: description: "The API key of the user to assume for API call.",
"Whether HTTP or HTTPS should be used to communicate with your Budibase instance.",
}, },
hostname: { appId: {
default: "localhost:10000", default: "<App ID>",
description: "The URL of your Budibase instance.", description:
"The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development).",
}, },
}, },
}, },
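
For context, a minimal sketch of how these variables are used when calling the public API (hedged: the API key, app ID, and table ID below are placeholders; the header names and the rows/search route come from the test utilities later in this diff):

async function searchRows() {
  const base = "https://budibase.app/api/public/v1"
  const res = await fetch(`${base}/tables/ta_example/rows/search`, {
    method: "POST",
    headers: {
      // substitute a real user API key and a production app ID
      "x-budibase-api-key": "<user API key>",
      "x-budibase-app-id": "app_example",
      "Content-Type": "application/json",
    },
    body: JSON.stringify({ query: {} }),
  })
  const { data } = await res.json()
  return data
}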

View File

@ -8,19 +8,15 @@
"servers": [ "servers": [
{ {
"url": "https://budibase.app/api/public/v1", "url": "https://budibase.app/api/public/v1",
"description": "Budibase Cloud API" "description": "Budibase Cloud API",
},
{
"url": "{protocol}://{hostname}/api/public/v1",
"description": "Budibase self hosted API",
"variables": { "variables": {
"protocol": { "apiKey": {
"default": "http", "default": "<user API key>",
"description": "Whether HTTP or HTTPS should be used to communicate with your Budibase instance." "description": "The API key of the user to assume for API call."
}, },
"hostname": { "appId": {
"default": "localhost:10000", "default": "<App ID>",
"description": "The URL of your Budibase instance." "description": "The ID of the app the calls will be executed within the context of, this should start with app_ (production) or app_dev (development)."
} }
} }
} }
@ -51,6 +47,7 @@
"required": true, "required": true,
"description": "The ID of the app which this request is targeting.", "description": "The ID of the app which this request is targeting.",
"schema": { "schema": {
"default": "{{ appId }}",
"type": "string" "type": "string"
} }
}, },
@ -60,6 +57,7 @@
"required": true, "required": true,
"description": "The ID of the app which this request is targeting.", "description": "The ID of the app which this request is targeting.",
"schema": { "schema": {
"default": "{{ appId }}",
"type": "string" "type": "string"
} }
}, },
@ -833,7 +831,8 @@
"type": "string", "type": "string",
"enum": [ "enum": [
"static", "static",
"dynamic" "dynamic",
"ai"
], ],
"description": "Defines whether this is a static or dynamic formula." "description": "Defines whether this is a static or dynamic formula."
} }
@ -857,6 +856,7 @@
"link", "link",
"formula", "formula",
"auto", "auto",
"ai",
"json", "json",
"internal", "internal",
"barcodeqr", "barcodeqr",
@ -1042,7 +1042,8 @@
"type": "string", "type": "string",
"enum": [ "enum": [
"static", "static",
"dynamic" "dynamic",
"ai"
], ],
"description": "Defines whether this is a static or dynamic formula." "description": "Defines whether this is a static or dynamic formula."
} }
@ -1066,6 +1067,7 @@
"link", "link",
"formula", "formula",
"auto", "auto",
"ai",
"json", "json",
"internal", "internal",
"barcodeqr", "barcodeqr",
@ -1262,7 +1264,8 @@
"type": "string", "type": "string",
"enum": [ "enum": [
"static", "static",
"dynamic" "dynamic",
"ai"
], ],
"description": "Defines whether this is a static or dynamic formula." "description": "Defines whether this is a static or dynamic formula."
} }
@ -1286,6 +1289,7 @@
"link", "link",
"formula", "formula",
"auto", "auto",
"ai",
"json", "json",
"internal", "internal",
"barcodeqr", "barcodeqr",

View File

@ -6,16 +6,14 @@ info:
servers: servers:
- url: https://budibase.app/api/public/v1 - url: https://budibase.app/api/public/v1
description: Budibase Cloud API description: Budibase Cloud API
- url: "{protocol}://{hostname}/api/public/v1"
description: Budibase self hosted API
variables: variables:
protocol: apiKey:
default: http default: <user API key>
description: Whether HTTP or HTTPS should be used to communicate with your description: The API key of the user to assume for API call.
Budibase instance. appId:
hostname: default: <App ID>
default: localhost:10000 description: The ID of the app the calls will be executed within the context of,
description: The URL of your Budibase instance. this should start with app_ (production) or app_dev (development).
components: components:
parameters: parameters:
tableId: tableId:
@ -38,6 +36,7 @@ components:
required: true required: true
description: The ID of the app which this request is targeting. description: The ID of the app which this request is targeting.
schema: schema:
default: "{{ appId }}"
type: string type: string
appIdUrl: appIdUrl:
in: path in: path
@ -45,6 +44,7 @@ components:
required: true required: true
description: The ID of the app which this request is targeting. description: The ID of the app which this request is targeting.
schema: schema:
default: "{{ appId }}"
type: string type: string
queryId: queryId:
in: path in: path
@ -761,6 +761,7 @@ components:
enum: enum:
- static - static
- dynamic - dynamic
- ai
description: Defines whether this is a static or dynamic formula. description: Defines whether this is a static or dynamic formula.
- type: object - type: object
properties: properties:
@ -779,6 +780,7 @@ components:
- link - link
- formula - formula
- auto - auto
- ai
- json - json
- internal - internal
- barcodeqr - barcodeqr
@ -929,6 +931,7 @@ components:
enum: enum:
- static - static
- dynamic - dynamic
- ai
description: Defines whether this is a static or dynamic formula. description: Defines whether this is a static or dynamic formula.
- type: object - type: object
properties: properties:
@ -947,6 +950,7 @@ components:
- link - link
- formula - formula
- auto - auto
- ai
- json - json
- internal - internal
- barcodeqr - barcodeqr
@ -1104,6 +1108,7 @@ components:
enum: enum:
- static - static
- dynamic - dynamic
- ai
description: Defines whether this is a static or dynamic formula. description: Defines whether this is a static or dynamic formula.
- type: object - type: object
properties: properties:
@ -1122,6 +1127,7 @@ components:
- link - link
- formula - formula
- auto - auto
- ai
- json - json
- internal - internal
- barcodeqr - barcodeqr

View File

@ -24,6 +24,7 @@ export const appId = {
required: true, required: true,
description: "The ID of the app which this request is targeting.", description: "The ID of the app which this request is targeting.",
schema: { schema: {
default: "{{ appId }}",
type: "string", type: "string",
}, },
} }
@ -34,6 +35,7 @@ export const appIdUrl = {
required: true, required: true,
description: "The ID of the app which this request is targeting.", description: "The ID of the app which this request is targeting.",
schema: { schema: {
default: "{{ appId }}",
type: "string", type: "string",
}, },
} }

View File

@ -0,0 +1,110 @@
import { User, Table, SearchFilters, Row } from "@budibase/types"
import { HttpMethod, MakeRequestResponse, generateMakeRequest } from "./utils"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { Expectations } from "../../../../tests/utilities/api/base"
type RequestOpts = { internal?: boolean; appId?: string }
type PublicAPIExpectations = Omit<Expectations, "headers" | "headersNotPresent">
export class PublicAPIRequest {
private makeRequest: MakeRequestResponse | undefined
private appId: string | undefined
private _tables: PublicTableAPI | undefined
private _rows: PublicRowAPI | undefined
private _apiKey: string | undefined
async init(config: TestConfiguration, user: User, opts?: RequestOpts) {
this._apiKey = await config.generateApiKey(user._id)
this.makeRequest = generateMakeRequest(this.apiKey, opts)
this.appId = opts?.appId
this._tables = new PublicTableAPI(this)
this._rows = new PublicRowAPI(this)
return this
}
opts(opts: RequestOpts) {
if (opts.appId) {
this.appId = opts.appId
}
this.makeRequest = generateMakeRequest(this.apiKey, opts)
}
async send(
method: HttpMethod,
endpoint: string,
body?: any,
expectations?: PublicAPIExpectations
) {
if (!this.makeRequest) {
throw new Error("Init has not been called")
}
const res = await this.makeRequest(method, endpoint, body, this.appId)
if (expectations?.status) {
expect(res.status).toEqual(expectations.status)
}
if (expectations?.body) {
expect(res.body).toEqual(expectations?.body)
}
return res.body
}
get apiKey(): string {
if (!this._apiKey) {
throw new Error("Init has not been called")
}
return this._apiKey
}
get tables(): PublicTableAPI {
if (!this._tables) {
throw new Error("Init has not been called")
}
return this._tables
}
get rows(): PublicRowAPI {
if (!this._rows) {
throw new Error("Init has not been called")
}
return this._rows
}
}
export class PublicTableAPI {
request: PublicAPIRequest
constructor(request: PublicAPIRequest) {
this.request = request
}
async create(
table: Table,
expectations?: PublicAPIExpectations
): Promise<{ data: Table }> {
return this.request.send("post", "/tables", table, expectations)
}
}
export class PublicRowAPI {
request: PublicAPIRequest
constructor(request: PublicAPIRequest) {
this.request = request
}
async search(
tableId: string,
query: SearchFilters,
expectations?: PublicAPIExpectations
): Promise<{ data: Row[] }> {
return this.request.send(
"post",
`/tables/${tableId}/rows/search`,
{
query,
},
expectations
)
}
}

View File

@ -1,4 +1,4 @@
const setup = require("../../tests/utilities") import * as setup from "../../tests/utilities"
describe("/metrics", () => { describe("/metrics", () => {
let request = setup.getRequest() let request = setup.getRequest()

View File

@ -0,0 +1,71 @@
import * as setup from "../../tests/utilities"
import { roles } from "@budibase/backend-core"
import { basicTable } from "../../../../tests/utilities/structures"
import { Table, User } from "@budibase/types"
import { PublicAPIRequest } from "./Request"
describe("check public API security", () => {
const config = setup.getConfig()
let builderRequest: PublicAPIRequest,
appUserRequest: PublicAPIRequest,
table: Table,
appUser: User
beforeAll(async () => {
await config.init()
const builderUser = await config.globalUser()
appUser = await config.globalUser({
builder: { global: false },
roles: {
[config.getProdAppId()]: roles.BUILTIN_ROLE_IDS.BASIC,
},
})
builderRequest = await new PublicAPIRequest().init(config, builderUser)
appUserRequest = await new PublicAPIRequest().init(config, appUser)
table = (await builderRequest.tables.create(basicTable())).data
})
it("should allow with builder API key", async () => {
const res = await builderRequest.rows.search(
table._id!,
{},
{
status: 200,
}
)
expect(res.data.length).toEqual(0)
})
it("should 403 when from browser, but API key", async () => {
await appUserRequest.rows.search(
table._id!,
{},
{
status: 403,
}
)
})
it("should re-direct when using cookie", async () => {
const headers = await config.login({
userId: appUser._id!,
builder: false,
prodApp: false,
})
await config.withHeaders(
{
...headers,
"User-Agent": config.browserUserAgent(),
},
async () => {
await config.api.row.search(
table._id!,
{ query: {} },
{
status: 302,
}
)
}
)
})
})

View File

@ -21,17 +21,19 @@ export type MakeRequestWithFormDataResponse = (
function base( function base(
apiKey: string, apiKey: string,
endpoint: string, endpoint: string,
intAppId: string | null, opts?: {
isInternal: boolean intAppId?: string
internal?: boolean
}
) { ) {
const extraHeaders: any = { const extraHeaders: any = {
"x-budibase-api-key": apiKey, "x-budibase-api-key": apiKey,
} }
if (intAppId) { if (opts?.intAppId) {
extraHeaders["x-budibase-app-id"] = intAppId extraHeaders["x-budibase-app-id"] = opts.intAppId
} }
const url = isInternal const url = opts?.internal
? endpoint ? endpoint
: checkSlashesInUrl(`/api/public/v1/${endpoint}`) : checkSlashesInUrl(`/api/public/v1/${endpoint}`)
return { headers: extraHeaders, url } return { headers: extraHeaders, url }
@ -39,7 +41,7 @@ function base(
export function generateMakeRequest( export function generateMakeRequest(
apiKey: string, apiKey: string,
isInternal = false opts?: { internal?: boolean }
): MakeRequestResponse { ): MakeRequestResponse {
const request = setup.getRequest()! const request = setup.getRequest()!
const config = setup.getConfig()! const config = setup.getConfig()!
@ -47,9 +49,12 @@ export function generateMakeRequest(
method: HttpMethod, method: HttpMethod,
endpoint: string, endpoint: string,
body?: any, body?: any,
intAppId: string | null = config.getAppId() intAppId: string | undefined = config.getAppId()
) => { ) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal) const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
if (body && typeof body !== "string") {
headers["Content-Type"] = "application/json"
}
const req = request[method](url).set(config.defaultHeaders(headers)) const req = request[method](url).set(config.defaultHeaders(headers))
if (body) { if (body) {
req.send(body) req.send(body)
@ -62,7 +67,7 @@ export function generateMakeRequest(
export function generateMakeRequestWithFormData( export function generateMakeRequestWithFormData(
apiKey: string, apiKey: string,
isInternal = false opts?: { internal?: boolean; browser?: boolean }
): MakeRequestWithFormDataResponse { ): MakeRequestWithFormDataResponse {
const request = setup.getRequest()! const request = setup.getRequest()!
const config = setup.getConfig()! const config = setup.getConfig()!
@ -70,9 +75,9 @@ export function generateMakeRequestWithFormData(
method: HttpMethod, method: HttpMethod,
endpoint: string, endpoint: string,
fields: Record<string, string | { path: string }>, fields: Record<string, string | { path: string }>,
intAppId: string | null = config.getAppId() intAppId: string | undefined = config.getAppId()
) => { ) => {
const { headers, url } = base(apiKey, endpoint, intAppId, isInternal) const { headers, url } = base(apiKey, endpoint, { ...opts, intAppId })
const req = request[method](url).set(config.defaultHeaders(headers)) const req = request[method](url).set(config.defaultHeaders(headers))
for (let [field, value] of Object.entries(fields)) { for (let [field, value] of Object.entries(fields)) {
if (typeof value === "string") { if (typeof value === "string") {

View File

@ -29,6 +29,7 @@ describe.each(
const isOracle = dbName === DatabaseName.ORACLE const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER const isMsSQL = dbName === DatabaseName.SQL_SERVER
const isPostgres = dbName === DatabaseName.POSTGRES const isPostgres = dbName === DatabaseName.POSTGRES
const mainTableName = "test_table"
let rawDatasource: Datasource let rawDatasource: Datasource
let datasource: Datasource let datasource: Datasource
@ -71,15 +72,15 @@ describe.each(
client = await knexClient(rawDatasource) client = await knexClient(rawDatasource)
await client.schema.dropTableIfExists("test_table") await client.schema.dropTableIfExists(mainTableName)
await client.schema.createTable("test_table", table => { await client.schema.createTable(mainTableName, table => {
table.increments("id").primary() table.increments("id").primary()
table.string("name") table.string("name")
table.timestamp("birthday") table.timestamp("birthday")
table.integer("number") table.integer("number")
}) })
await client("test_table").insert([ await client(mainTableName).insert([
{ name: "one" }, { name: "one" },
{ name: "two" }, { name: "two" },
{ name: "three" }, { name: "three" },
@ -105,7 +106,7 @@ describe.each(
const query = await createQuery({ const query = await createQuery({
name: "New Query", name: "New Query",
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
}) })
@ -114,7 +115,7 @@ describe.each(
name: "New Query", name: "New Query",
parameters: [], parameters: [],
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -133,7 +134,7 @@ describe.each(
it("should be able to update a query", async () => { it("should be able to update a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
}) })
@ -143,7 +144,7 @@ describe.each(
...query, ...query,
name: "Updated Query", name: "Updated Query",
fields: { fields: {
sql: client("test_table").where({ id: 1 }).toString(), sql: client(mainTableName).where({ id: 1 }).toString(),
}, },
}) })
@ -152,7 +153,7 @@ describe.each(
name: "Updated Query", name: "Updated Query",
parameters: [], parameters: [],
fields: { fields: {
sql: client("test_table").where({ id: 1 }).toString(), sql: client(mainTableName).where({ id: 1 }).toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -169,7 +170,7 @@ describe.each(
it("should be able to delete a query", async () => { it("should be able to delete a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
}) })
@ -188,7 +189,7 @@ describe.each(
it("should be able to list queries", async () => { it("should be able to list queries", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
}) })
@ -199,7 +200,7 @@ describe.each(
it("should strip sensitive fields for prod apps", async () => { it("should strip sensitive fields for prod apps", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").select("*").toString(), sql: client(mainTableName).select("*").toString(),
}, },
}) })
@ -217,7 +218,7 @@ describe.each(
const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)` const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.select([ .select([
"*", "*",
client.raw( client.raw(
@ -245,7 +246,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: client("test_table").where({ id: 1 }).toString(), sql: client(mainTableName).where({ id: 1 }).toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -391,7 +392,7 @@ describe.each(
it("should work with dynamic variables", async () => { it("should work with dynamic variables", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: client("test_table").select("name").where({ id: 1 }).toString(), sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -440,7 +441,7 @@ describe.each(
it("should handle the dynamic base query being deleted", async () => { it("should handle the dynamic base query being deleted", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: client("test_table").select("name").where({ id: 1 }).toString(), sql: client(mainTableName).select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -494,7 +495,7 @@ describe.each(
it("should be able to insert with bindings", async () => { it("should be able to insert with bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(), sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -517,7 +518,7 @@ describe.each(
}, },
]) ])
const rows = await client("test_table").where({ name: "baz" }).select() const rows = await client(mainTableName).where({ name: "baz" }).select()
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
for (const row of rows) { for (const row of rows) {
expect(row).toMatchObject({ name: "baz" }) expect(row).toMatchObject({ name: "baz" })
@ -527,7 +528,7 @@ describe.each(
it("should not allow handlebars as parameters", async () => { it("should not allow handlebars as parameters", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").insert({ name: "{{ foo }}" }).toString(), sql: client(mainTableName).insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -563,7 +564,7 @@ describe.each(
const date = new Date(datetimeStr) const date = new Date(datetimeStr)
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.insert({ .insert({
name: "foo", name: "foo",
birthday: client.raw("{{ birthday }}"), birthday: client.raw("{{ birthday }}"),
@ -585,7 +586,7 @@ describe.each(
expect(result.data).toEqual([{ created: true }]) expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table") const rows = await client(mainTableName)
.where({ birthday: datetimeStr }) .where({ birthday: datetimeStr })
.select() .select()
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
@ -601,7 +602,7 @@ describe.each(
async notDateStr => { async notDateStr => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.insert({ name: client.raw("{{ name }}") }) .insert({ name: client.raw("{{ name }}") })
.toString(), .toString(),
}, },
@ -622,7 +623,7 @@ describe.each(
expect(result.data).toEqual([{ created: true }]) expect(result.data).toEqual([{ created: true }])
const rows = await client("test_table") const rows = await client(mainTableName)
.where({ name: notDateStr }) .where({ name: notDateStr })
.select() .select()
expect(rows).toHaveLength(1) expect(rows).toHaveLength(1)
@ -634,7 +635,7 @@ describe.each(
it("should execute a query", async () => { it("should execute a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").select("*").orderBy("id").toString(), sql: client(mainTableName).select("*").orderBy("id").toString(),
}, },
}) })
@ -677,7 +678,7 @@ describe.each(
it("should be able to transform a query", async () => { it("should be able to transform a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").where({ id: 1 }).select("*").toString(), sql: client(mainTableName).where({ id: 1 }).select("*").toString(),
}, },
transformer: ` transformer: `
data[0].id = data[0].id + 1; data[0].id = data[0].id + 1;
@ -700,7 +701,7 @@ describe.each(
it("should coerce numeric bindings", async () => { it("should coerce numeric bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.where({ id: client.raw("{{ id }}") }) .where({ id: client.raw("{{ id }}") })
.select("*") .select("*")
.toString(), .toString(),
@ -734,7 +735,7 @@ describe.each(
it("should be able to update rows", async () => { it("should be able to update rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.update({ name: client.raw("{{ name }}") }) .update({ name: client.raw("{{ name }}") })
.where({ id: client.raw("{{ id }}") }) .where({ id: client.raw("{{ id }}") })
.toString(), .toString(),
@ -759,7 +760,7 @@ describe.each(
}, },
}) })
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client(mainTableName).where({ id: 1 }).select()
expect(rows).toEqual([ expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null }, { id: 1, name: "foo", birthday: null, number: null },
]) ])
@ -768,7 +769,7 @@ describe.each(
it("should be able to execute an update that updates no rows", async () => { it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.update({ name: "updated" }) .update({ name: "updated" })
.where({ id: 100 }) .where({ id: 100 })
.toString(), .toString(),
@ -778,7 +779,7 @@ describe.each(
await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
const rows = await client("test_table").select() const rows = await client(mainTableName).select()
for (const row of rows) { for (const row of rows) {
expect(row.name).not.toEqual("updated") expect(row.name).not.toEqual("updated")
} }
@ -787,14 +788,14 @@ describe.each(
it("should be able to execute a delete that deletes no rows", async () => { it("should be able to execute a delete that deletes no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table").where({ id: 100 }).delete().toString(), sql: client(mainTableName).where({ id: 100 }).delete().toString(),
}, },
queryVerb: "delete", queryVerb: "delete",
}) })
await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
const rows = await client("test_table").select() const rows = await client(mainTableName).select()
expect(rows).toHaveLength(5) expect(rows).toHaveLength(5)
}) })
}) })
@ -803,7 +804,7 @@ describe.each(
it("should be able to delete rows", async () => { it("should be able to delete rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.where({ id: client.raw("{{ id }}") }) .where({ id: client.raw("{{ id }}") })
.delete() .delete()
.toString(), .toString(),
@ -823,7 +824,7 @@ describe.each(
}, },
}) })
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client(mainTableName).where({ id: 1 }).select()
expect(rows).toHaveLength(0) expect(rows).toHaveLength(0)
}) })
}) })
@ -831,7 +832,7 @@ describe.each(
describe("query through datasource", () => { describe("query through datasource", () => {
it("should be able to query the datasource", async () => { it("should be able to query the datasource", async () => {
const entityId = "test_table" const entityId = mainTableName
await config.api.datasource.update({ await config.api.datasource.update({
...datasource, ...datasource,
entities: { entities: {
@ -876,7 +877,7 @@ describe.each(
beforeAll(async () => { beforeAll(async () => {
queryParams = { queryParams = {
fields: { fields: {
sql: client("test_table") sql: client(mainTableName)
.insert({ .insert({
name: client.raw("{{ bindingName }}"), name: client.raw("{{ bindingName }}"),
number: client.raw("{{ bindingNumber }}"), number: client.raw("{{ bindingNumber }}"),
@ -929,4 +930,34 @@ describe.each(
}) })
}) })
}) })
describe("edge cases", () => {
it("should find rows with a binding containing a slash", async () => {
const slashValue = "1/10"
await client(mainTableName).insert([{ name: slashValue }])
const query = await createQuery({
fields: {
sql: client(mainTableName)
.select("*")
.where("name", "=", client.raw("{{ bindingName }}"))
.toString(),
},
parameters: [
{
name: "bindingName",
default: "",
},
],
queryVerb: "read",
})
const results = await config.api.query.execute(query._id!, {
parameters: {
bindingName: slashValue,
},
})
expect(results).toBeDefined()
expect(results.data.length).toEqual(1)
})
})
}) })

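For orientation, a rough sketch of what the edge-case test above is exercising (hypothetical helper, not the real Budibase query runner): the {{ bindingName }} placeholder should reach the driver as a bound string parameter, so a value like "1/10" is compared literally rather than being coerced into a date first.

// Hypothetical illustration only; table and helper names are made up.
import { Knex } from "knex"

function findByName(client: Knex, name: string) {
  // The binding value is passed as a query parameter, never rewritten,
  // so findByName(client, "1/10") matches exactly the row inserted above.
  return client("main_table").select("*").where("name", "=", name)
}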
View File

@ -1,9 +1,10 @@
const setup = require("./utilities") import * as setup from "./utilities"
const { basicScreen, powerScreen } = setup.structures import { checkBuilderEndpoint, runInProd } from "./utilities/TestFunctions"
const { checkBuilderEndpoint, runInProd } = require("./utilities/TestFunctions") import { roles } from "@budibase/backend-core"
const { roles } = require("@budibase/backend-core") import { Screen } from "@budibase/types"
const { BUILTIN_ROLE_IDS } = roles
const { BUILTIN_ROLE_IDS } = roles
const { basicScreen, powerScreen } = setup.structures
const route = "/test" const route = "/test"
// there are checks which are disabled in test env, // there are checks which are disabled in test env,
@ -12,7 +13,7 @@ const route = "/test"
describe("/routing", () => { describe("/routing", () => {
let request = setup.getRequest() let request = setup.getRequest()
let config = setup.getConfig() let config = setup.getConfig()
let basic, power let basic: Screen, power: Screen
afterAll(setup.afterAll) afterAll(setup.afterAll)
@ -25,26 +26,40 @@ describe("/routing", () => {
describe("fetch", () => { describe("fetch", () => {
it("prevents a public user from accessing development app", async () => { it("prevents a public user from accessing development app", async () => {
await runInProd(() => { await config.withHeaders(
return request {
.get(`/api/routing/client`) "User-Agent": config.browserUserAgent(),
.set(config.publicHeaders({ prodApp: false })) },
.expect(302) async () => {
}) await runInProd(() => {
return request
.get(`/api/routing/client`)
.set(config.publicHeaders({ prodApp: false }))
.expect(302)
})
}
)
}) })
it("prevents a non builder from accessing development app", async () => { it("prevents a non builder from accessing development app", async () => {
await runInProd(async () => { await config.withHeaders(
return request {
.get(`/api/routing/client`) "User-Agent": config.browserUserAgent(),
.set( },
await config.roleHeaders({ async () => {
roleId: BUILTIN_ROLE_IDS.BASIC, await runInProd(async () => {
prodApp: false, return request
}) .get(`/api/routing/client`)
) .set(
.expect(302) await config.roleHeaders({
}) roleId: BUILTIN_ROLE_IDS.BASIC,
prodApp: false,
})
)
.expect(302)
})
}
)
}) })
it("returns the correct routing for basic user", async () => { it("returns the correct routing for basic user", async () => {
const res = await request const res = await request

View File

@ -7,6 +7,7 @@ import {
import { import {
context, context,
db as dbCore, db as dbCore,
docIds,
features, features,
MAX_VALID_DATE, MAX_VALID_DATE,
MIN_VALID_DATE, MIN_VALID_DATE,
@ -61,6 +62,7 @@ describe.each([
const isLucene = name === "lucene" const isLucene = name === "lucene"
const isInMemory = name === "in-memory" const isInMemory = name === "in-memory"
const isInternal = isSqs || isLucene || isInMemory const isInternal = isSqs || isLucene || isInMemory
const isOracle = name === DatabaseName.ORACLE
const isSql = !isInMemory && !isLucene const isSql = !isInMemory && !isLucene
const config = setup.getConfig() const config = setup.getConfig()
@ -129,14 +131,14 @@ describe.each([
} }
}) })
async function createTable(schema: TableSchema) { async function createTable(schema?: TableSchema) {
const table = await config.api.table.save( const table = await config.api.table.save(
tableForDatasource(datasource, { schema }) tableForDatasource(datasource, { schema })
) )
return table._id! return table._id!
} }
async function createView(tableId: string, schema: ViewV2Schema) { async function createView(tableId: string, schema?: ViewV2Schema) {
const view = await config.api.viewV2.create({ const view = await config.api.viewV2.create({
tableId: tableId, tableId: tableId,
name: generator.guid(), name: generator.guid(),
@ -153,22 +155,51 @@ describe.each([
rows = await config.api.row.fetch(tableOrViewId) rows = await config.api.row.fetch(tableOrViewId)
} }
async function getTable(tableOrViewId: string): Promise<Table> {
if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
return await config.api.table.get(view.tableId)
} else {
return await config.api.table.get(tableOrViewId)
}
}
async function assertTableExists(nameOrTable: string | Table) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
expect(await client!.schema.hasTable(name)).toBeTrue()
}
async function assertTableNumRows(
nameOrTable: string | Table,
numRows: number
) {
const name =
typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name
const row = await client!.from(name).count()
const count = parseInt(Object.values(row[0])[0] as string)
expect(count).toEqual(numRows)
}
describe.each([ describe.each([
["table", createTable], ["table", createTable],
[ [
"view", "view",
async (schema: TableSchema) => { async (schema?: TableSchema) => {
const tableId = await createTable(schema) const tableId = await createTable(schema)
const viewId = await createView( const viewId = await createView(
tableId, tableId,
Object.keys(schema).reduce<ViewV2Schema>((viewSchema, fieldName) => { Object.keys(schema || {}).reduce<ViewV2Schema>(
const field = schema[fieldName] (viewSchema, fieldName) => {
viewSchema[fieldName] = { const field = schema![fieldName]
visible: field.visible ?? true, viewSchema[fieldName] = {
readonly: false, visible: field.visible ?? true,
} readonly: false,
return viewSchema }
}, {}) return viewSchema
},
{}
)
) )
return viewId return viewId
}, },
@ -792,10 +823,11 @@ describe.each([
}) })
}) })
describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => { const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const
describe.each(stringTypes)("%s", type => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
name: { name: "name", type: FieldType.STRING }, name: { name: "name", type },
}) })
await createRows([{ name: "foo" }, { name: "bar" }]) await createRows([{ name: "foo" }, { name: "bar" }])
}) })
@ -1602,7 +1634,7 @@ describe.each([
}) })
}) })
describe.each([FieldType.ARRAY, FieldType.OPTIONS])("%s", () => { describe("arrays", () => {
beforeAll(async () => { beforeAll(async () => {
tableOrViewId = await createTableOrView({ tableOrViewId = await createTableOrView({
numbers: { numbers: {
@ -3470,5 +3502,105 @@ describe.each([
]) ])
}) })
}) })
isSql &&
!isSqs &&
describe("SQL injection", () => {
const badStrings = [
"1; DROP TABLE %table_name%;",
"1; DELETE FROM %table_name%;",
"1; UPDATE %table_name% SET name = 'foo';",
"1; INSERT INTO %table_name% (name) VALUES ('foo');",
"' OR '1'='1' --",
"'; DROP TABLE %table_name%; --",
"' OR 1=1 --",
"' UNION SELECT null, null, null; --",
"' AND (SELECT COUNT(*) FROM %table_name%) > 0 --",
"\"; EXEC xp_cmdshell('dir'); --",
"\"' OR 'a'='a",
"OR 1=1;",
"'; SHUTDOWN --",
]
describe.each(badStrings)("bad string: %s", badStringTemplate => {
// The SQL that knex generates when you try to use a double quote in an
// Oracle field name is always invalid and never works, so we skip those
// cases for these tests.
const skipFieldNameCheck = isOracle && badStringTemplate.includes('"')
!skipFieldNameCheck &&
it("should not allow SQL injection as a field name", async () => {
const tableOrViewId = await createTableOrView()
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)
await config.api.table.save({
...table,
schema: {
...table.schema,
[badString]: { name: badString, type: FieldType.STRING },
},
})
if (docIds.isViewId(tableOrViewId)) {
const view = await config.api.viewV2.get(tableOrViewId)
await config.api.viewV2.update({
...view,
schema: {
[badString]: { visible: true },
},
})
}
await config.api.row.save(tableOrViewId, { [badString]: "foo" })
await assertTableExists(table)
await assertTableNumRows(table, 1)
const { rows } = await config.api.row.search(
tableOrViewId,
{ query: {} },
{ status: 200 }
)
expect(rows).toHaveLength(1)
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
it("should not allow SQL injection as a field value", async () => {
const tableOrViewId = await createTableOrView({
foo: {
name: "foo",
type: FieldType.STRING,
},
})
const table = await getTable(tableOrViewId)
const badString = badStringTemplate.replace(
/%table_name%/g,
table.name
)
await config.api.row.save(tableOrViewId, { foo: "foo" })
await assertTableExists(table)
await assertTableNumRows(table, 1)
const { rows } = await config.api.row.search(
tableOrViewId,
{ query: { equal: { foo: badString } } },
{ status: 200 }
)
expect(rows).toBeEmpty()
await assertTableExists(table)
await assertTableNumRows(table, 1)
})
})
})
}) })
}) })

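As background for the SQL injection suite above, a minimal sketch (using knex directly, not the Budibase search layer) of the property being asserted: when a value arrives as a query binding rather than being concatenated into the SQL text, strings like "'; DROP TABLE ...; --" are just data.

import knex from "knex"

// No connection is made; we only build the query to inspect it.
const client = knex({ client: "pg", connection: {} })

const badString = "'; DROP TABLE main_table; --"

// Parameterised: the value is sent separately from the SQL text, so the
// generated statement is `select * from "main_table" where "foo" = ?`
// with the bad string carried only in the bindings array.
const safe = client("main_table").where({ foo: badString }).toSQL()
// safe.sql      -> select * from "main_table" where "foo" = ?
// safe.bindings -> ["'; DROP TABLE main_table; --"]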
View File

@ -257,7 +257,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula. * @description Defines whether this is a static or dynamic formula.
* @enum {string} * @enum {string}
*/ */
formulaType?: "static" | "dynamic"; formulaType?: "static" | "dynamic" | "ai";
} }
| { | {
/** /**
@ -277,11 +277,14 @@ export interface components {
| "link" | "link"
| "formula" | "formula"
| "auto" | "auto"
| "ai"
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "signature_single"
| "bigint" | "bigint"
| "bb_reference"; | "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */
@ -366,7 +369,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula. * @description Defines whether this is a static or dynamic formula.
* @enum {string} * @enum {string}
*/ */
formulaType?: "static" | "dynamic"; formulaType?: "static" | "dynamic" | "ai";
} }
| { | {
/** /**
@ -386,11 +389,14 @@ export interface components {
| "link" | "link"
| "formula" | "formula"
| "auto" | "auto"
| "ai"
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "signature_single"
| "bigint" | "bigint"
| "bb_reference"; | "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */
@ -477,7 +483,7 @@ export interface components {
* @description Defines whether this is a static or dynamic formula. * @description Defines whether this is a static or dynamic formula.
* @enum {string} * @enum {string}
*/ */
formulaType?: "static" | "dynamic"; formulaType?: "static" | "dynamic" | "ai";
} }
| { | {
/** /**
@ -497,11 +503,14 @@ export interface components {
| "link" | "link"
| "formula" | "formula"
| "auto" | "auto"
| "ai"
| "json" | "json"
| "internal" | "internal"
| "barcodeqr" | "barcodeqr"
| "signature_single"
| "bigint" | "bigint"
| "bb_reference"; | "bb_reference"
| "bb_reference_single";
/** @description A constraint can be applied to the column which will be validated against when a row is saved. */ /** @description A constraint can be applied to the column which will be validated against when a row is saved. */
constraints?: { constraints?: {
/** @enum {string} */ /** @enum {string} */

View File

@ -24,8 +24,7 @@ import {
checkExternalTables, checkExternalTables,
HOST_ADDRESS, HOST_ADDRESS,
} from "./utils" } from "./utils"
import dayjs from "dayjs" import { isDate, NUMBER_REGEX } from "../utilities"
import { NUMBER_REGEX } from "../utilities"
import { MySQLColumn } from "./base/types" import { MySQLColumn } from "./base/types"
import { getReadableErrorMessage } from "./base/errorMapping" import { getReadableErrorMessage } from "./base/errorMapping"
import { sql } from "@budibase/backend-core" import { sql } from "@budibase/backend-core"
@ -129,11 +128,7 @@ export function bindingTypeCoerce(bindings: SqlQueryBinding) {
} }
// if not a number, see if it is a date - important to do in this order as any // if not a number, see if it is a date - important to do in this order as any
// integer will be considered a valid date // integer will be considered a valid date
else if ( else if (isDate(binding)) {
/^\d/.test(binding) &&
dayjs(binding).isValid() &&
!binding.includes(",")
) {
let value: any let value: any
value = new Date(binding) value = new Date(binding)
if (isNaN(value)) { if (isNaN(value)) {
@ -439,8 +434,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
dumpContent.push(createTableStatement) dumpContent.push(createTableStatement)
} }
const schema = dumpContent.join("\n") return dumpContent.join("\n")
return schema
} finally { } finally {
this.disconnect() this.disconnect()
} }

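A condensed sketch of the coercion order the hunk above relies on (paraphrasing the surrounding bindingTypeCoerce function; isDate is the utility added later in this diff): numbers are checked first, then strict date formats, and anything else is passed through untouched, which is what keeps values like "1/10" as plain strings.

// Rough shape of the coercion step only; the real function also handles
// arrays and other binding types.
import { isDate } from "../utilities"

function coerceBinding(binding: string): string | number | Date {
  // numeric strings become numbers
  if (/^[+-]?(\d*[.])?\d+$/.test(binding)) {
    return parseFloat(binding)
  }
  // strict-format date check; "1/10" and "random string" fall through
  if (isDate(binding)) {
    return new Date(binding)
  }
  // everything else is left as a string
  return binding
}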
View File

@ -212,7 +212,7 @@ describe("SQL query builder", () => {
const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`] const filterSet = [`%20%`, `%25%`, `%"john"%`, `%"mary"%`]
expect(query).toEqual({ expect(query).toEqual({
bindings: [...filterSet, limit], bindings: [...filterSet, limit],
sql: `select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5`, sql: `select * from (select * from "test" where ((COALESCE(LOWER("test"."age"), '') like :1 and COALESCE(LOWER("test"."age"), '') like :2)) and ((COALESCE(LOWER("test"."name"), '') like :3 and COALESCE(LOWER("test"."name"), '') like :4)) order by "test"."id" asc) where rownum <= :5`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -244,7 +244,7 @@ describe("SQL query builder", () => {
expect(query).toEqual({ expect(query).toEqual({
bindings: ["John", limit], bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`, sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2`,
}) })
}) })
@ -262,7 +262,7 @@ describe("SQL query builder", () => {
expect(query).toEqual({ expect(query).toEqual({
bindings: ["John", limit], bindings: ["John", limit],
sql: `select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2`, sql: `select * from (select * from "test" where (to_char("test"."name") is not null and to_char("test"."name") != :1) or to_char("test"."name") is null order by "test"."id" asc) where rownum <= :2`,
}) })
}) })
}) })

View File

@ -10,7 +10,7 @@ import {
import { generateUserMetadataID, isDevAppID } from "../db/utils" import { generateUserMetadataID, isDevAppID } from "../db/utils"
import { getCachedSelf } from "../utilities/global" import { getCachedSelf } from "../utilities/global"
import env from "../environment" import env from "../environment"
import { isWebhookEndpoint } from "./utils" import { isWebhookEndpoint, isBrowser, isApiKey } from "./utils"
import { UserCtx, ContextUser } from "@budibase/types" import { UserCtx, ContextUser } from "@budibase/types"
import tracer from "dd-trace" import tracer from "dd-trace"
@ -27,7 +27,7 @@ export default async (ctx: UserCtx, next: any) => {
} }
// deny access to application preview // deny access to application preview
if (!env.isTest()) { if (isBrowser(ctx) && !isApiKey(ctx)) {
if ( if (
isDevAppID(requestAppId) && isDevAppID(requestAppId) &&
!isWebhookEndpoint(ctx) && !isWebhookEndpoint(ctx) &&

View File

@ -1,4 +1,6 @@
require("../../db").init() import * as db from "../../db"
db.init()
mockAuthWithNoCookie() mockAuthWithNoCookie()
mockWorker() mockWorker()
mockUserGroups() mockUserGroups()
@ -45,7 +47,7 @@ function mockAuthWithNoCookie() {
}, },
cache: { cache: {
user: { user: {
getUser: async id => { getUser: async () => {
return { return {
_id: "us_uuid1", _id: "us_uuid1",
} }
@ -82,7 +84,7 @@ function mockAuthWithCookie() {
}, },
cache: { cache: {
user: { user: {
getUser: async id => { getUser: async () => {
return { return {
_id: "us_uuid1", _id: "us_uuid1",
} }
@ -94,6 +96,10 @@ function mockAuthWithCookie() {
} }
class TestConfiguration { class TestConfiguration {
next: jest.MockedFunction<any>
throw: jest.MockedFunction<any>
ctx: any
constructor() { constructor() {
this.next = jest.fn() this.next = jest.fn()
this.throw = jest.fn() this.throw = jest.fn()
@ -130,7 +136,7 @@ class TestConfiguration {
} }
describe("Current app middleware", () => { describe("Current app middleware", () => {
let config let config: TestConfiguration
beforeEach(() => { beforeEach(() => {
config = new TestConfiguration() config = new TestConfiguration()
@ -192,7 +198,7 @@ describe("Current app middleware", () => {
}, },
cache: { cache: {
user: { user: {
getUser: async id => { getUser: async () => {
return { return {
_id: "us_uuid1", _id: "us_uuid1",
} }

View File

@ -1,9 +1,18 @@
import { BBContext } from "@budibase/types" import { LoginMethod, UserCtx } from "@budibase/types"
const WEBHOOK_ENDPOINTS = new RegExp( const WEBHOOK_ENDPOINTS = new RegExp(
["webhooks/trigger", "webhooks/schema"].join("|") ["webhooks/trigger", "webhooks/schema"].join("|")
) )
export function isWebhookEndpoint(ctx: BBContext) { export function isWebhookEndpoint(ctx: UserCtx) {
return WEBHOOK_ENDPOINTS.test(ctx.request.url) return WEBHOOK_ENDPOINTS.test(ctx.request.url)
} }
export function isBrowser(ctx: UserCtx) {
const browser = ctx.userAgent?.browser
return browser && browser !== "unknown"
}
export function isApiKey(ctx: UserCtx) {
return ctx.loginMethod === LoginMethod.API_KEY
}

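Read together with the currentapp hunk above, a short sketch of how these helpers are meant to compose (simplified; the real middleware also checks webhook endpoints, dev app IDs and builder access): preview protection should only apply to genuine browser traffic, not to API-key callers or programmatic clients that report an "unknown" user agent.

// Simplified gate, assuming ctx has passed through the auth and
// koa-useragent middleware.
import { UserCtx } from "@budibase/types"
import { isApiKey, isBrowser } from "./utils"

function shouldBlockDevPreview(ctx: UserCtx): boolean {
  // API-key callers (e.g. the public API) are never treated as browsers.
  if (isApiKey(ctx)) {
    return false
  }
  // Test runners and server-to-server calls report an "unknown" browser.
  return Boolean(isBrowser(ctx))
}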
View File

@ -423,6 +423,7 @@ export default class TestConfiguration {
Accept: "application/json", Accept: "application/json",
Cookie: [`${constants.Cookie.Auth}=${authToken}`], Cookie: [`${constants.Cookie.Auth}=${authToken}`],
[constants.Header.APP_ID]: appId, [constants.Header.APP_ID]: appId,
...this.temporaryHeaders,
} }
}) })
} }
@ -527,6 +528,10 @@ export default class TestConfiguration {
return this.login({ userId: email, roleId, builder, prodApp }) return this.login({ userId: email, roleId, builder, prodApp })
} }
browserUserAgent() {
return "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
}
// TENANCY // TENANCY
tenantHost() { tenantHost() {

View File

@ -3,7 +3,10 @@ import { context } from "@budibase/backend-core"
import { generateMetadataID } from "../db/utils" import { generateMetadataID } from "../db/utils"
import { Document } from "@budibase/types" import { Document } from "@budibase/types"
import stream from "stream" import stream from "stream"
import dayjs from "dayjs"
import customParseFormat from "dayjs/plugin/customParseFormat"
dayjs.extend(customParseFormat)
const Readable = stream.Readable const Readable = stream.Readable
export function wait(ms: number) { export function wait(ms: number) {
@ -13,6 +16,28 @@ export function wait(ms: number) {
export const isDev = env.isDev export const isDev = env.isDev
export const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g export const NUMBER_REGEX = /^[+-]?([0-9]*[.])?[0-9]+$/g
const ACCEPTED_DATE_FORMATS = [
"MM/DD/YYYY",
"MM/DD/YY",
"DD/MM/YYYY",
"DD/MM/YY",
"YYYY/MM/DD",
"YYYY-MM-DD",
"YYYY-MM-DDTHH:mm",
"YYYY-MM-DDTHH:mm:ss",
"YYYY-MM-DDTHH:mm:ss[Z]",
"YYYY-MM-DDTHH:mm:ss.SSS[Z]",
]
export function isDate(str: string) {
// checks for xx/xx/xx or ISO date timestamp formats
for (const format of ACCEPTED_DATE_FORMATS) {
if (dayjs(str, format, true).isValid()) {
return true
}
}
return false
}
export function removeFromArray(array: any[], element: any) { export function removeFromArray(array: any[], element: any) {
const index = array.indexOf(element) const index = array.indexOf(element)

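Worth noting for the hunk above: the third argument to dayjs() enables strict parsing against the listed formats, which is what keeps partial inputs out. A quick comparison (dayjs with the customParseFormat plugin, as imported above):

import dayjs from "dayjs"
import customParseFormat from "dayjs/plugin/customParseFormat"
dayjs.extend(customParseFormat)

// Loose parsing happily accepts fragments, which is how "1/10" used to be
// misread as a date by the old check in bindingTypeCoerce.
dayjs("1/10").isValid()                            // true (loose)
dayjs("1/10", "DD/MM/YYYY", true).isValid()        // false (strict)
dayjs("01/10/2024", "DD/MM/YYYY", true).isValid()  // true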
View File

@ -0,0 +1,34 @@
import { isDate } from "../"
describe("isDate", () => {
it("should handle DD/MM/YYYY", () => {
expect(isDate("01/01/2001")).toEqual(true)
})
it("should handle DD/MM/YY", () => {
expect(isDate("01/01/01")).toEqual(true)
})
it("should handle ISO format YYYY-MM-DD", () => {
expect(isDate("2001-01-01")).toEqual(true)
})
it("should handle ISO format with time (YYYY-MM-DDTHH:MM)", () => {
expect(isDate("2001-01-01T12:30")).toEqual(true)
})
it("should handle ISO format with full timestamp (YYYY-MM-DDTHH:MM:SS)", () => {
expect(isDate("2001-01-01T12:30:45")).toEqual(true)
})
it("should handle complete ISO format", () => {
expect(isDate("2001-01-01T12:30:00.000Z")).toEqual(true)
})
it("should return false for invalid formats", () => {
expect(isDate("")).toEqual(false)
expect(isDate("1/10")).toEqual(false)
expect(isDate("random string")).toEqual(false)
expect(isDate("123456")).toEqual(false)
})
})

View File

@ -19,7 +19,8 @@
"@types/koa": "2.13.4", "@types/koa": "2.13.4",
"@types/redlock": "4.0.7", "@types/redlock": "4.0.7",
"rimraf": "3.0.2", "rimraf": "3.0.2",
"typescript": "5.5.2" "typescript": "5.5.2",
"koa-useragent": "^4.1.0"
}, },
"dependencies": { "dependencies": {
"scim-patch": "^0.8.1" "scim-patch": "^0.8.1"

View File

@ -2,6 +2,12 @@ import { Context, Request } from "koa"
import { User, Role, UserRoles, Account, ConfigType } from "../documents" import { User, Role, UserRoles, Account, ConfigType } from "../documents"
import { FeatureFlag, License } from "../sdk" import { FeatureFlag, License } from "../sdk"
import { Files } from "formidable" import { Files } from "formidable"
import { UserAgentContext } from "koa-useragent"
export enum LoginMethod {
API_KEY = "api_key",
COOKIE = "cookie",
}
export interface ContextUser extends Omit<User, "roles"> { export interface ContextUser extends Omit<User, "roles"> {
globalId?: string globalId?: string
@ -31,6 +37,7 @@ export interface BBRequest<RequestBody> extends Request {
export interface Ctx<RequestBody = any, ResponseBody = any> extends Context { export interface Ctx<RequestBody = any, ResponseBody = any> extends Context {
request: BBRequest<RequestBody> request: BBRequest<RequestBody>
body: ResponseBody body: ResponseBody
userAgent: UserAgentContext["userAgent"]
} }
/** /**
@ -40,6 +47,7 @@ export interface UserCtx<RequestBody = any, ResponseBody = any>
extends Ctx<RequestBody, ResponseBody> { extends Ctx<RequestBody, ResponseBody> {
user: ContextUser user: ContextUser
roleId?: string roleId?: string
loginMethod?: LoginMethod
} }
/** /**

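These typings assume the koa-useragent middleware is registered on the Koa app so that ctx.userAgent is populated; a minimal wiring sketch (standard koa-useragent usage, simplified):

import Koa from "koa"
import { userAgent } from "koa-useragent"

const app = new Koa()
// Populates ctx.userAgent (source, browser, os, ...) for every request,
// which is what isBrowser() inspects in the middleware utils above.
app.use(userAgent)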
View File

@ -49,7 +49,7 @@ type BasicFilter<T = any> = Record<string, T> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
} }
type ArrayFilter = Record<string, any[]> & { export type ArrayFilter = Record<string, any[]> & {
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: { [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: {
id: string[] id: string[]
values: string[] values: string[]