Merge branch 'master' into BUDI-8508/sql-support-for-logical-operators

This commit is contained in:
Adria Navarro 2024-08-05 17:28:58 +02:00 committed by GitHub
commit 9743aca715
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
26 changed files with 1017 additions and 733 deletions

View File

@ -175,6 +175,7 @@ jobs:
docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} & docker pull postgres@${{ steps.dotenv.outputs.POSTGRES_SHA }} &
docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} & docker pull mongo@${{ steps.dotenv.outputs.MONGODB_SHA }} &
docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} & docker pull mariadb@${{ steps.dotenv.outputs.MARIADB_SHA }} &
docker pull budibase/oracle-database:23.2-slim-faststart &
docker pull minio/minio & docker pull minio/minio &
docker pull redis & docker pull redis &
docker pull testcontainers/ryuk:0.5.1 & docker pull testcontainers/ryuk:0.5.1 &

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.29.27", "version": "2.29.29",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

View File

@ -3,15 +3,16 @@ import * as dbCore from "../db"
import { import {
getNativeSql, getNativeSql,
isExternalTable, isExternalTable,
isValidISODateString,
isValidFilter,
sqlLog,
isInvalidISODateString, isInvalidISODateString,
isValidFilter,
isValidISODateString,
sqlLog,
} from "./utils" } from "./utils"
import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable" import SqlTableQueryBuilder from "./sqlTable"
import { import {
AnySearchFilter, AnySearchFilter,
ArrayOperator,
BasicOperator,
BBReferenceFieldMetadata, BBReferenceFieldMetadata,
FieldSchema, FieldSchema,
FieldType, FieldType,
@ -23,6 +24,7 @@ import {
prefixed, prefixed,
QueryJson, QueryJson,
QueryOptions, QueryOptions,
RangeOperator,
RelationshipsJson, RelationshipsJson,
SearchFilters, SearchFilters,
SortOrder, SortOrder,
@ -33,7 +35,7 @@ import {
TableSourceType, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import environment from "../environment" import environment from "../environment"
import { helpers } from "@budibase/shared-core" import { dataFilters, helpers } from "@budibase/shared-core"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
@ -44,10 +46,64 @@ function getBaseLimit() {
return envLimit || 5000 return envLimit || 5000
} }
// Takes a string like foo and returns a quoted string like [foo] for SQL Server function getTableName(table?: Table): string | undefined {
// and "foo" for Postgres. // SQS uses the table ID rather than the table name
function quote(client: SqlClient, str: string): string { if (
switch (client) { table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
if (Array.isArray(query)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
} else {
if (query.bindings) {
query.bindings = query.bindings.map(binding => {
if (typeof binding === "boolean") {
return binding ? 1 : 0
}
return binding
})
}
}
return query
}
class InternalBuilder {
private readonly client: SqlClient
private readonly query: QueryJson
private readonly splitter: dataFilters.ColumnSplitter
private readonly knex: Knex
constructor(client: SqlClient, knex: Knex, query: QueryJson) {
this.client = client
this.query = query
this.knex = knex
this.splitter = new dataFilters.ColumnSplitter([this.table], {
aliases: this.query.tableAliases,
columnPrefix: this.query.meta.columnPrefix,
})
}
get table(): Table {
return this.query.meta.table
}
getFieldSchema(key: string): FieldSchema | undefined {
const { column } = this.splitter.run(key)
return this.table.schema[column]
}
// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
private quote(str: string): string {
switch (this.client) {
case SqlClient.SQL_LITE: case SqlClient.SQL_LITE:
case SqlClient.ORACLE: case SqlClient.ORACLE:
case SqlClient.POSTGRES: case SqlClient.POSTGRES:
@ -57,66 +113,19 @@ function quote(client: SqlClient, str: string): string {
case SqlClient.MY_SQL: case SqlClient.MY_SQL:
return `\`${str}\`` return `\`${str}\``
} }
} }
// Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c] // Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c]
// for SQL Server and `a`.`b`.`c` for MySQL. // for SQL Server and `a`.`b`.`c` for MySQL.
function quotedIdentifier(client: SqlClient, key: string): string { private quotedIdentifier(key: string): string {
return key return key
.split(".") .split(".")
.map(part => quote(client, part)) .map(part => this.quote(part))
.join(".") .join(".")
} }
function parse(input: any) { private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
if (Array.isArray(input)) { const { resource, meta } = this.query
return JSON.stringify(input)
}
if (input == undefined) {
return null
}
if (typeof input !== "string") {
return input
}
if (isInvalidISODateString(input)) {
return null
}
if (isValidISODateString(input)) {
return new Date(input.trim())
}
return input
}
function parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
body[key] = parse(value)
}
return body
}
function parseFilters(filters: SearchFilters | undefined): SearchFilters {
if (!filters) {
return {}
}
for (let [key, value] of Object.entries(filters)) {
let parsed
if (typeof value === "object") {
parsed = parseFilters(value)
} else {
parsed = parse(value)
}
// @ts-ignore
filters[key] = parsed
}
return filters
}
function generateSelectStatement(
json: QueryJson,
knex: Knex
): (string | Knex.Raw)[] | "*" {
const { resource, meta } = json
const client = knex.client.config.client as SqlClient
if (!resource || !resource.fields || resource.fields.length === 0) { if (!resource || !resource.fields || resource.fields.length === 0) {
return "*" return "*"
@ -152,25 +161,24 @@ function generateSelectStatement(
const columnSchema = schema[column] const columnSchema = schema[column]
if ( if (
client === SqlClient.POSTGRES && this.client === SqlClient.POSTGRES &&
columnSchema?.externalType?.includes("money") columnSchema?.externalType?.includes("money")
) { ) {
return knex.raw( return this.knex.raw(
`${quotedIdentifier( `${this.quotedIdentifier(
client,
[table, column].join(".") [table, column].join(".")
)}::money::numeric as ${quote(client, field)}` )}::money::numeric as ${this.quote(field)}`
) )
} }
if ( if (
client === SqlClient.MS_SQL && this.client === SqlClient.MS_SQL &&
columnSchema?.type === FieldType.DATETIME && columnSchema?.type === FieldType.DATETIME &&
columnSchema.timeOnly columnSchema.timeOnly
) { ) {
// Time gets returned as timestamp from mssql, not matching the expected // Time gets returned as timestamp from mssql, not matching the expected
// HH:mm format // HH:mm format
return knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`) return this.knex.raw(`CONVERT(varchar, ${field}, 108) as "${field}"`)
} }
// There's at least two edge cases being handled in the expression below. // There's at least two edge cases being handled in the expression below.
@ -182,79 +190,162 @@ function generateSelectStatement(
// aren't actually clear to me, but `table`.`doc1` breaks things with the // aren't actually clear to me, but `table`.`doc1` breaks things with the
// sample data tests. // sample data tests.
if (table) { if (table) {
return knex.raw( return this.knex.raw(
`${quote(client, table)}.${quote(client, column)} as ${quote( `${this.quote(table)}.${this.quote(column)} as ${this.quote(field)}`
client,
field
)}`
) )
} else { } else {
return knex.raw(`${quote(client, field)} as ${quote(client, field)}`) return this.knex.raw(`${this.quote(field)} as ${this.quote(field)}`)
} }
}) })
} }
function getTableName(table?: Table): string | undefined { // OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
// SQS uses the table ID rather than the table name // so when we use them we need to wrap them in to_char(). This function
// converts a field name to the appropriate identifier.
private convertClobs(field: string): string {
const parts = field.split(".")
const col = parts.pop()!
const schema = this.table.schema[col]
let identifier = this.quotedIdentifier(field)
if ( if (
table?.sourceType === TableSourceType.INTERNAL || schema.type === FieldType.STRING ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID schema.type === FieldType.LONGFORM ||
schema.type === FieldType.BB_REFERENCE_SINGLE ||
schema.type === FieldType.BB_REFERENCE ||
schema.type === FieldType.OPTIONS ||
schema.type === FieldType.BARCODEQR
) { ) {
return table?._id identifier = `to_char(${identifier})`
} else { }
return table?.name return identifier
} }
}
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] { private parse(input: any, schema: FieldSchema) {
if (Array.isArray(query)) { if (Array.isArray(input)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery) return JSON.stringify(input)
} else {
if (query.bindings) {
query.bindings = query.bindings.map(binding => {
if (typeof binding === "boolean") {
return binding ? 1 : 0
} }
return binding if (input == undefined) {
}) return null
} }
}
return query
}
class InternalBuilder { if (
private readonly client: SqlClient this.client === SqlClient.ORACLE &&
schema.type === FieldType.DATETIME &&
schema.timeOnly
) {
if (input instanceof Date) {
const hours = input.getHours().toString().padStart(2, "0")
const minutes = input.getMinutes().toString().padStart(2, "0")
const seconds = input.getSeconds().toString().padStart(2, "0")
return `${hours}:${minutes}:${seconds}`
}
if (typeof input === "string") {
return new Date(`1970-01-01T${input}Z`)
}
}
constructor(client: SqlClient) { if (typeof input === "string") {
this.client = client if (isInvalidISODateString(input)) {
return null
}
if (isValidISODateString(input)) {
return new Date(input.trim())
}
}
return input
}
private parseBody(body: any) {
for (let [key, value] of Object.entries(body)) {
const { column } = this.splitter.run(key)
const schema = this.table.schema[column]
if (!schema) {
continue
}
body[key] = this.parse(value, schema)
}
return body
}
private parseFilters(filters: SearchFilters): SearchFilters {
for (const op of Object.values(BasicOperator)) {
const filter = filters[op]
if (!filter) {
continue
}
for (const key of Object.keys(filter)) {
if (Array.isArray(filter[key])) {
filter[key] = JSON.stringify(filter[key])
continue
}
const { column } = this.splitter.run(key)
const schema = this.table.schema[column]
if (!schema) {
continue
}
filter[key] = this.parse(filter[key], schema)
}
}
for (const op of Object.values(ArrayOperator)) {
const filter = filters[op]
if (!filter) {
continue
}
for (const key of Object.keys(filter)) {
const { column } = this.splitter.run(key)
const schema = this.table.schema[column]
if (!schema) {
continue
}
filter[key] = filter[key].map(v => this.parse(v, schema))
}
}
for (const op of Object.values(RangeOperator)) {
const filter = filters[op]
if (!filter) {
continue
}
for (const key of Object.keys(filter)) {
const { column } = this.splitter.run(key)
const schema = this.table.schema[column]
if (!schema) {
continue
}
const value = filter[key]
if ("low" in value) {
value.low = this.parse(value.low, schema)
}
if ("high" in value) {
value.high = this.parse(value.high, schema)
}
}
}
return filters
} }
// right now we only do filters on the specific table being queried // right now we only do filters on the specific table being queried
addFilters( addFilters(
query: Knex.QueryBuilder, query: Knex.QueryBuilder,
filters: SearchFilters | undefined, filters: SearchFilters | undefined,
table: Table, opts?: {
opts: {
aliases?: Record<string, string>
relationship?: boolean relationship?: boolean
columnPrefix?: string
} }
): Knex.QueryBuilder { ): Knex.QueryBuilder {
if (!filters) { if (!filters) {
return query return query
} }
filters = parseFilters(filters) filters = this.parseFilters(filters)
const aliases = this.query.tableAliases
// if all or specified in filters, then everything is an or // if all or specified in filters, then everything is an or
const allOr = filters.allOr const allOr = filters.allOr
const sqlStatements = new SqlStatements(this.client, table, {
allOr,
columnPrefix: opts.columnPrefix,
})
const tableName = const tableName =
this.client === SqlClient.SQL_LITE ? table._id! : table.name this.client === SqlClient.SQL_LITE ? this.table._id! : this.table.name
function getTableAlias(name: string) { function getTableAlias(name: string) {
const alias = opts.aliases?.[name] const alias = aliases?.[name]
return alias || name return alias || name
} }
function iterate( function iterate(
@ -280,10 +371,10 @@ class InternalBuilder {
), ),
castedTypeValue.values castedTypeValue.values
) )
} else if (!opts.relationship && !isRelationshipField) { } else if (!opts?.relationship && !isRelationshipField) {
const alias = getTableAlias(tableName) const alias = getTableAlias(tableName)
fn(alias ? `${alias}.${updatedKey}` : updatedKey, value) fn(alias ? `${alias}.${updatedKey}` : updatedKey, value)
} else if (opts.relationship && isRelationshipField) { } else if (opts?.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".") const [filterTableName, property] = updatedKey.split(".")
const alias = getTableAlias(filterTableName) const alias = getTableAlias(filterTableName)
fn(alias ? `${alias}.${property}` : property, value) fn(alias ? `${alias}.${property}` : property, value)
@ -300,10 +391,9 @@ class InternalBuilder {
} else { } else {
const rawFnc = `${fnc}Raw` const rawFnc = `${fnc}Raw`
// @ts-ignore // @ts-ignore
query = query[rawFnc]( query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
`LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`, `%${value.toLowerCase()}%`,
[`%${value.toLowerCase()}%`] ])
)
} }
} }
@ -345,26 +435,30 @@ class InternalBuilder {
const andOr = mode === filters?.containsAny ? " OR " : " AND " const andOr = mode === filters?.containsAny ? " OR " : " AND "
iterate(mode, (key, value) => { iterate(mode, (key, value) => {
let statement = "" let statement = ""
const identifier = this.quotedIdentifier(key)
for (let i in value) { for (let i in value) {
if (typeof value[i] === "string") { if (typeof value[i] === "string") {
value[i] = `%"${value[i].toLowerCase()}"%` value[i] = `%"${value[i].toLowerCase()}"%`
} else { } else {
value[i] = `%${value[i]}%` value[i] = `%${value[i]}%`
} }
statement += statement += `${
(statement ? andOr : "") + statement ? andOr : ""
`COALESCE(LOWER(${quotedIdentifier( }COALESCE(LOWER(${identifier}), '') LIKE ?`
this.client,
key
)}), '') LIKE ?`
} }
if (statement === "") { if (statement === "") {
return return
} }
// @ts-ignore if (not) {
query = query[rawFnc](`${not}(${statement})`, value) query = query[rawFnc](
`(NOT (${statement}) OR ${identifier} IS NULL)`,
value
)
} else {
query = query[rawFnc](statement, value)
}
}) })
} }
} }
@ -392,11 +486,26 @@ class InternalBuilder {
iterate( iterate(
filters.oneOf, filters.oneOf,
(key: string, array) => { (key: string, array) => {
if (this.client === SqlClient.ORACLE) {
key = this.convertClobs(key)
array = Array.isArray(array) ? array : [array]
const binding = new Array(array.length).fill("?").join(",")
query = query.whereRaw(`${key} IN (${binding})`, array)
} else {
query = query[fnc](key, Array.isArray(array) ? array : [array]) query = query[fnc](key, Array.isArray(array) ? array : [array])
}
}, },
(key: string[], array) => { (key: string[], array) => {
if (this.client === SqlClient.ORACLE) {
const keyStr = `(${key.map(k => this.convertClobs(k)).join(",")})`
const binding = `(${array
.map((a: any) => `(${new Array(a.length).fill("?").join(",")})`)
.join(",")})`
query = query.whereRaw(`${keyStr} IN ${binding}`, array.flat())
} else {
query = query[fnc](key, Array.isArray(array) ? array : [array]) query = query[fnc](key, Array.isArray(array) ? array : [array])
} }
}
) )
} }
if (filters.string) { if (filters.string) {
@ -408,10 +517,9 @@ class InternalBuilder {
} else { } else {
const rawFnc = `${fnc}Raw` const rawFnc = `${fnc}Raw`
// @ts-ignore // @ts-ignore
query = query[rawFnc]( query = query[rawFnc](`LOWER(${this.quotedIdentifier(key)}) LIKE ?`, [
`LOWER(${quotedIdentifier(this.client, key)}) LIKE ?`, `${value.toLowerCase()}%`,
[`${value.toLowerCase()}%`] ])
)
} }
}) })
} }
@ -435,12 +543,53 @@ class InternalBuilder {
} }
const lowValid = isValidFilter(value.low), const lowValid = isValidFilter(value.low),
highValid = isValidFilter(value.high) highValid = isValidFilter(value.high)
const schema = this.getFieldSchema(key)
if (this.client === SqlClient.ORACLE) {
// @ts-ignore
key = this.knex.raw(this.convertClobs(key))
}
if (lowValid && highValid) { if (lowValid && highValid) {
query = sqlStatements.between(query, key, value.low, value.high) if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[value.low, value.high]
)
} else {
const fnc = allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [value.low, value.high])
}
} else if (lowValid) { } else if (lowValid) {
query = sqlStatements.lte(query, key, value.low) if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`,
[value.low]
)
} else {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", value.low)
}
} else if (highValid) { } else if (highValid) {
query = sqlStatements.gte(query, key, value.high) if (
schema?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`,
[value.high]
)
} else {
const fnc = allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", value.high)
}
} }
}) })
} }
@ -449,20 +598,18 @@ class InternalBuilder {
const fnc = allOr ? "orWhereRaw" : "whereRaw" const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) { if (this.client === SqlClient.MS_SQL) {
query = query[fnc]( query = query[fnc](
`CASE WHEN ${quotedIdentifier( `CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 1`,
this.client,
key
)} = ? THEN 1 ELSE 0 END = 1`,
[value] [value]
) )
} else if (this.client === SqlClient.ORACLE) { } else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)}, -1) = ?`, `(${identifier} IS NOT NULL AND ${identifier} = ?)`,
[value] [value]
) )
} else { } else {
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} = ?, FALSE)`, `COALESCE(${this.quotedIdentifier(key)} = ?, FALSE)`,
[value] [value]
) )
} }
@ -473,20 +620,18 @@ class InternalBuilder {
const fnc = allOr ? "orWhereRaw" : "whereRaw" const fnc = allOr ? "orWhereRaw" : "whereRaw"
if (this.client === SqlClient.MS_SQL) { if (this.client === SqlClient.MS_SQL) {
query = query[fnc]( query = query[fnc](
`CASE WHEN ${quotedIdentifier( `CASE WHEN ${this.quotedIdentifier(key)} = ? THEN 1 ELSE 0 END = 0`,
this.client,
key
)} = ? THEN 1 ELSE 0 END = 0`,
[value] [value]
) )
} else if (this.client === SqlClient.ORACLE) { } else if (this.client === SqlClient.ORACLE) {
const identifier = this.convertClobs(key)
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)}, -1) != ?`, `(${identifier} IS NOT NULL AND ${identifier} != ?) OR ${identifier} IS NULL`,
[value] [value]
) )
} else { } else {
query = query[fnc]( query = query[fnc](
`COALESCE(${quotedIdentifier(this.client, key)} != ?, TRUE)`, `COALESCE(${this.quotedIdentifier(key)} != ?, TRUE)`,
[value] [value]
) )
} }
@ -514,9 +659,9 @@ class InternalBuilder {
contains(filters.containsAny, true) contains(filters.containsAny, true)
} }
const tableRef = opts?.aliases?.[table._id!] || table._id const tableRef = aliases?.[this.table._id!] || this.table._id
// when searching internal tables make sure long looking for rows // when searching internal tables make sure long looking for rows
if (filters.documentType && !isExternalTable(table) && tableRef) { if (filters.documentType && !isExternalTable(this.table) && tableRef) {
// has to be its own option, must always be AND onto the search // has to be its own option, must always be AND onto the search
query.andWhereLike( query.andWhereLike(
`${tableRef}._id`, `${tableRef}._id`,
@ -527,29 +672,26 @@ class InternalBuilder {
return query return query
} }
addDistinctCount( addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder {
query: Knex.QueryBuilder, const primary = this.table.primary
json: QueryJson const aliases = this.query.tableAliases
): Knex.QueryBuilder {
const table = json.meta.table
const primary = table.primary
const aliases = json.tableAliases
const aliased = const aliased =
table.name && aliases?.[table.name] ? aliases[table.name] : table.name this.table.name && aliases?.[this.table.name]
? aliases[this.table.name]
: this.table.name
if (!primary) { if (!primary) {
throw new Error("SQL counting requires primary key to be supplied") throw new Error("SQL counting requires primary key to be supplied")
} }
return query.countDistinct(`${aliased}.${primary[0]} as total`) return query.countDistinct(`${aliased}.${primary[0]} as total`)
} }
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
let { sort } = json let { sort } = this.query
const table = json.meta.table const primaryKey = this.table.primary
const primaryKey = table.primary const tableName = getTableName(this.table)
const tableName = getTableName(table) const aliases = this.query.tableAliases
const aliases = json.tableAliases
const aliased = const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : table?.name tableName && aliases?.[tableName] ? aliases[tableName] : this.table?.name
if (!Array.isArray(primaryKey)) { if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table") throw new Error("Sorting requires primary key to be specified for table")
} }
@ -557,13 +699,23 @@ class InternalBuilder {
for (let [key, value] of Object.entries(sort)) { for (let [key, value] of Object.entries(sort)) {
const direction = const direction =
value.direction === SortOrder.ASCENDING ? "asc" : "desc" value.direction === SortOrder.ASCENDING ? "asc" : "desc"
let nulls
if (this.client === SqlClient.POSTGRES) { let nulls: "first" | "last" | undefined = undefined
// All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues if (
this.client === SqlClient.POSTGRES ||
this.client === SqlClient.ORACLE
) {
nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
} }
query = query.orderBy(`${aliased}.${key}`, direction, nulls) let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw(
`${this.convertClobs(composite)} ${direction} nulls ${nulls}`
)
} else {
query = query.orderBy(composite, direction, nulls)
}
} }
} }
@ -664,32 +816,54 @@ class InternalBuilder {
return query return query
} }
knexWithAlias( qualifiedKnex(opts?: { alias?: string | boolean }): Knex.QueryBuilder {
knex: Knex, let alias = this.query.tableAliases?.[this.query.endpoint.entityId]
endpoint: QueryJson["endpoint"], if (opts?.alias === false) {
aliases?: QueryJson["tableAliases"] alias = undefined
): Knex.QueryBuilder { } else if (typeof opts?.alias === "string") {
const tableName = endpoint.entityId alias = opts.alias
const tableAlias = aliases?.[tableName] }
return this.knex(
return knex( this.tableNameWithSchema(this.query.endpoint.entityId, {
this.tableNameWithSchema(tableName, { alias,
alias: tableAlias, schema: this.query.endpoint.schema,
schema: endpoint.schema,
}) })
) )
} }
create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { create(opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body } = json const { body } = this.query
let query = this.knexWithAlias(knex, endpoint) let query = this.qualifiedKnex({ alias: false })
const parsedBody = parseBody(body) const parsedBody = this.parseBody(body)
if (this.client === SqlClient.ORACLE) {
// Oracle doesn't seem to automatically insert nulls
// if we don't specify them, so we need to do that here
for (const [column, schema] of Object.entries(
this.query.meta.table.schema
)) {
if (
schema.constraints?.presence === true ||
schema.type === FieldType.FORMULA ||
schema.type === FieldType.AUTO ||
schema.type === FieldType.LINK
) {
continue
}
const value = parsedBody[column]
if (value == null) {
parsedBody[column] = null
}
}
} else {
// make sure no null values in body for creation // make sure no null values in body for creation
for (let [key, value] of Object.entries(parsedBody)) { for (let [key, value] of Object.entries(parsedBody)) {
if (value == null) { if (value == null) {
delete parsedBody[key] delete parsedBody[key]
} }
} }
}
// mysql can't use returning // mysql can't use returning
if (opts.disableReturning) { if (opts.disableReturning) {
@ -699,36 +873,39 @@ class InternalBuilder {
} }
} }
bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder { bulkCreate(): Knex.QueryBuilder {
const { endpoint, body } = json const { body } = this.query
let query = this.knexWithAlias(knex, endpoint) let query = this.qualifiedKnex({ alias: false })
if (!Array.isArray(body)) { if (!Array.isArray(body)) {
return query return query
} }
const parsedBody = body.map(row => parseBody(row)) const parsedBody = body.map(row => this.parseBody(row))
return query.insert(parsedBody) return query.insert(parsedBody)
} }
bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder { bulkUpsert(): Knex.QueryBuilder {
const { endpoint, body } = json const { body } = this.query
let query = this.knexWithAlias(knex, endpoint) let query = this.qualifiedKnex({ alias: false })
if (!Array.isArray(body)) { if (!Array.isArray(body)) {
return query return query
} }
const parsedBody = body.map(row => parseBody(row)) const parsedBody = body.map(row => this.parseBody(row))
if ( if (
this.client === SqlClient.POSTGRES || this.client === SqlClient.POSTGRES ||
this.client === SqlClient.SQL_LITE || this.client === SqlClient.SQL_LITE ||
this.client === SqlClient.MY_SQL this.client === SqlClient.MY_SQL
) { ) {
const primary = json.meta.table.primary const primary = this.table.primary
if (!primary) { if (!primary) {
throw new Error("Primary key is required for upsert") throw new Error("Primary key is required for upsert")
} }
const ret = query.insert(parsedBody).onConflict(primary).merge() const ret = query.insert(parsedBody).onConflict(primary).merge()
return ret return ret
} else if (this.client === SqlClient.MS_SQL) { } else if (
// No upsert or onConflict support in MSSQL yet, see: this.client === SqlClient.MS_SQL ||
this.client === SqlClient.ORACLE
) {
// No upsert or onConflict support in MSSQL/Oracle yet, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
return query.insert(parsedBody) return query.insert(parsedBody)
} }
@ -736,19 +913,18 @@ class InternalBuilder {
} }
read( read(
knex: Knex,
json: QueryJson,
opts: { opts: {
limits?: { base: number; query: number } limits?: { base: number; query: number }
} = {} } = {}
): Knex.QueryBuilder { ): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships, tableAliases } = json let { endpoint, filters, paginate, relationships, tableAliases } =
this.query
const { limits } = opts const { limits } = opts
const counting = endpoint.operation === Operation.COUNT const counting = endpoint.operation === Operation.COUNT
const tableName = endpoint.entityId const tableName = endpoint.entityId
// start building the query // start building the query
let query = this.knexWithAlias(knex, endpoint, tableAliases) let query = this.qualifiedKnex()
// handle pagination // handle pagination
let foundOffset: number | null = null let foundOffset: number | null = null
let foundLimit = limits?.query || limits?.base let foundLimit = limits?.query || limits?.base
@ -776,16 +952,13 @@ class InternalBuilder {
} }
// add sorting to pre-query // add sorting to pre-query
// no point in sorting when counting // no point in sorting when counting
query = this.addSorting(query, json) query = this.addSorting(query)
} }
// add filters to the query (where) // add filters to the query (where)
query = this.addFilters(query, filters, json.meta.table, { query = this.addFilters(query, filters)
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
const alias = tableAliases?.[tableName] || tableName const alias = tableAliases?.[tableName] || tableName
let preQuery: Knex.QueryBuilder = knex({ let preQuery: Knex.QueryBuilder = this.knex({
// the typescript definition for the knex constructor doesn't support this // the typescript definition for the knex constructor doesn't support this
// syntax, but it is the only way to alias a pre-query result as part of // syntax, but it is the only way to alias a pre-query result as part of
// a query - there is an alias dictionary type, but it assumes it can only // a query - there is an alias dictionary type, but it assumes it can only
@ -794,11 +967,11 @@ class InternalBuilder {
}) })
// if counting, use distinct count, else select // if counting, use distinct count, else select
preQuery = !counting preQuery = !counting
? preQuery.select(generateSelectStatement(json, knex)) ? preQuery.select(this.generateSelectStatement())
: this.addDistinctCount(preQuery, json) : this.addDistinctCount(preQuery)
// have to add after as well (this breaks MS-SQL) // have to add after as well (this breaks MS-SQL)
if (this.client !== SqlClient.MS_SQL && !counting) { if (this.client !== SqlClient.MS_SQL && !counting) {
preQuery = this.addSorting(preQuery, json) preQuery = this.addSorting(preQuery)
} }
// handle joins // handle joins
query = this.addRelationships( query = this.addRelationships(
@ -815,21 +988,14 @@ class InternalBuilder {
query = query.limit(limits.base) query = query.limit(limits.base)
} }
return this.addFilters(query, filters, json.meta.table, { return this.addFilters(query, filters, { relationship: true })
columnPrefix: json.meta.columnPrefix,
relationship: true,
aliases: tableAliases,
})
} }
update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { update(opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, body, filters, tableAliases } = json const { body, filters } = this.query
let query = this.knexWithAlias(knex, endpoint, tableAliases) let query = this.qualifiedKnex()
const parsedBody = parseBody(body) const parsedBody = this.parseBody(body)
query = this.addFilters(query, filters, json.meta.table, { query = this.addFilters(query, filters)
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning // mysql can't use returning
if (opts.disableReturning) { if (opts.disableReturning) {
return query.update(parsedBody) return query.update(parsedBody)
@ -838,18 +1004,15 @@ class InternalBuilder {
} }
} }
delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { delete(opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, filters, tableAliases } = json const { filters } = this.query
let query = this.knexWithAlias(knex, endpoint, tableAliases) let query = this.qualifiedKnex()
query = this.addFilters(query, filters, json.meta.table, { query = this.addFilters(query, filters)
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning // mysql can't use returning
if (opts.disableReturning) { if (opts.disableReturning) {
return query.delete() return query.delete()
} else { } else {
return query.delete().returning(generateSelectStatement(json, knex)) return query.delete().returning(this.generateSelectStatement())
} }
} }
} }
@ -887,19 +1050,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
const config: Knex.Config = { const config: Knex.Config = {
client: sqlClient, client: sqlClient,
} }
if (sqlClient === SqlClient.SQL_LITE) { if (sqlClient === SqlClient.SQL_LITE || sqlClient === SqlClient.ORACLE) {
config.useNullAsDefault = true config.useNullAsDefault = true
} }
const client = knex(config) const client = knex(config)
let query: Knex.QueryBuilder let query: Knex.QueryBuilder
const builder = new InternalBuilder(sqlClient) const builder = new InternalBuilder(sqlClient, client, json)
switch (this._operation(json)) { switch (this._operation(json)) {
case Operation.CREATE: case Operation.CREATE:
query = builder.create(client, json, opts) query = builder.create(opts)
break break
case Operation.READ: case Operation.READ:
query = builder.read(client, json, { query = builder.read({
limits: { limits: {
query: this.limit, query: this.limit,
base: getBaseLimit(), base: getBaseLimit(),
@ -908,19 +1071,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
break break
case Operation.COUNT: case Operation.COUNT:
// read without any limits to count // read without any limits to count
query = builder.read(client, json) query = builder.read()
break break
case Operation.UPDATE: case Operation.UPDATE:
query = builder.update(client, json, opts) query = builder.update(opts)
break break
case Operation.DELETE: case Operation.DELETE:
query = builder.delete(client, json, opts) query = builder.delete(opts)
break break
case Operation.BULK_CREATE: case Operation.BULK_CREATE:
query = builder.bulkCreate(client, json) query = builder.bulkCreate()
break break
case Operation.BULK_UPSERT: case Operation.BULK_UPSERT:
query = builder.bulkUpsert(client, json) query = builder.bulkUpsert()
break break
case Operation.CREATE_TABLE: case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE: case Operation.UPDATE_TABLE:

View File

@ -1,87 +0,0 @@
import { FieldType, Table, FieldSchema, SqlClient } from "@budibase/types"
import { Knex } from "knex"
export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
columnPrefix: string | undefined
constructor(
client: string,
table: Table,
{ allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
this.columnPrefix = columnPrefix
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
let found = this.table.schema[fieldName]
if (!found && this.columnPrefix) {
const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
found = this.table.schema[prefixRemovedFieldName]
}
return found
}
between(
query: Knex.QueryBuilder,
key: string,
low: number | string,
high: number | string
) {
// Use a between operator if we have 2 valid range values
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[low, high]
)
} else {
const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [low, high])
}
return query
}
lte(query: Knex.QueryBuilder, key: string, low: number | string) {
// Use just a single greater than operator if we only have a low
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
low,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", low)
}
return query
}
gte(query: Knex.QueryBuilder, key: string, high: number | string) {
const field = this.getField(key)
// Use just a single less than operator if we only have a high
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
high,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", high)
}
return query
}
}

View File

@ -22,6 +22,7 @@ export function getNativeSql(
query: Knex.SchemaBuilder | Knex.QueryBuilder query: Knex.SchemaBuilder | Knex.QueryBuilder
): SqlQuery | SqlQuery[] { ): SqlQuery | SqlQuery[] {
let sql = query.toSQL() let sql = query.toSQL()
if (Array.isArray(sql)) { if (Array.isArray(sql)) {
return sql as SqlQuery[] return sql as SqlQuery[]
} }

View File

@ -1,21 +0,0 @@
const executeMock = jest.fn(() => ({
rows: [
{
a: "string",
b: 1,
},
],
}))
const closeMock = jest.fn()
class Connection {
execute = executeMock
close = closeMock
}
module.exports = {
getConnection: jest.fn(() => new Connection()),
executeMock,
closeMock,
}

View File

@ -6,9 +6,9 @@ services:
db: db:
restart: unless-stopped restart: unless-stopped
platform: linux/x86_64 platform: linux/x86_64
image: container-registry.oracle.com/database/express:18.4.0-xe image: gvenzl/oracle-free:23.2-slim-faststart
environment: environment:
ORACLE_PWD: oracle ORACLE_PWD: Password1
ports: ports:
- 1521:1521 - 1521:1521
- 5500:5500 - 5500:5500

View File

@ -22,9 +22,13 @@ describe.each(
DatabaseName.MYSQL, DatabaseName.MYSQL,
DatabaseName.SQL_SERVER, DatabaseName.SQL_SERVER,
DatabaseName.MARIADB, DatabaseName.MARIADB,
DatabaseName.ORACLE,
].map(name => [name, getDatasource(name)]) ].map(name => [name, getDatasource(name)])
)("queries (%s)", (dbName, dsProvider) => { )("queries (%s)", (dbName, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER
let rawDatasource: Datasource let rawDatasource: Datasource
let datasource: Datasource let datasource: Datasource
let client: Knex let client: Knex
@ -97,7 +101,7 @@ describe.each(
const query = await createQuery({ const query = await createQuery({
name: "New Query", name: "New Query",
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -106,7 +110,7 @@ describe.each(
name: "New Query", name: "New Query",
parameters: [], parameters: [],
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -125,7 +129,7 @@ describe.each(
it("should be able to update a query", async () => { it("should be able to update a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -135,7 +139,7 @@ describe.each(
...query, ...query,
name: "Updated Query", name: "Updated Query",
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).toString(),
}, },
}) })
@ -144,7 +148,7 @@ describe.each(
name: "Updated Query", name: "Updated Query",
parameters: [], parameters: [],
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).toString(),
}, },
schema: {}, schema: {},
queryVerb: "read", queryVerb: "read",
@ -161,7 +165,7 @@ describe.each(
it("should be able to delete a query", async () => { it("should be able to delete a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -180,7 +184,7 @@ describe.each(
it("should be able to list queries", async () => { it("should be able to list queries", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -191,7 +195,7 @@ describe.each(
it("should strip sensitive fields for prod apps", async () => { it("should strip sensitive fields for prod apps", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table", sql: client("test_table").select("*").toString(),
}, },
}) })
@ -212,7 +216,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM test_table WHERE id = 1`, sql: client("test_table").where({ id: 1 }).toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -270,7 +274,7 @@ describe.each(
name: "Test Query", name: "Test Query",
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM ${tableName}`, sql: client(tableName).select("*").toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -284,11 +288,13 @@ describe.each(
}) })
) )
await client(tableName).delete()
await client.schema.alterTable(tableName, table => { await client.schema.alterTable(tableName, table => {
table.string("data").alter() table.string("data").alter()
}) })
await client(tableName).update({ await client(tableName).insert({
name: "test",
data: "string value", data: "string value",
}) })
@ -297,7 +303,7 @@ describe.each(
name: "Test Query", name: "Test Query",
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT * FROM ${tableName}`, sql: client(tableName).select("*").toString(),
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -311,6 +317,7 @@ describe.each(
}) })
) )
}) })
it("should work with static variables", async () => { it("should work with static variables", async () => {
await config.api.datasource.update({ await config.api.datasource.update({
...datasource, ...datasource,
@ -326,7 +333,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -337,16 +344,17 @@ describe.each(
const response = await config.api.query.preview(request) const response = await config.api.query.preview(request)
let key = isOracle ? "FOO" : "foo"
expect(response.schema).toEqual({ expect(response.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(response.rows).toEqual([ expect(response.rows).toEqual([
{ {
foo: "bar", [key]: "bar",
}, },
]) ])
}) })
@ -354,7 +362,7 @@ describe.each(
it("should work with dynamic variables", async () => { it("should work with dynamic variables", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: "SELECT name FROM test_table WHERE id = 1", sql: client("test_table").select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -376,7 +384,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -385,16 +393,17 @@ describe.each(
readable: true, readable: true,
}) })
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({ expect(preview.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(preview.rows).toEqual([ expect(preview.rows).toEqual([
{ {
foo: "one", [key]: "one",
}, },
]) ])
}) })
@ -402,7 +411,7 @@ describe.each(
it("should handle the dynamic base query being deleted", async () => { it("should handle the dynamic base query being deleted", async () => {
const basedOnQuery = await createQuery({ const basedOnQuery = await createQuery({
fields: { fields: {
sql: "SELECT name FROM test_table WHERE id = 1", sql: client("test_table").select("name").where({ id: 1 }).toString(),
}, },
}) })
@ -426,7 +435,7 @@ describe.each(
datasourceId: datasource._id!, datasourceId: datasource._id!,
queryVerb: "read", queryVerb: "read",
fields: { fields: {
sql: `SELECT '{{ foo }}' as foo`, sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`,
}, },
parameters: [], parameters: [],
transformer: "return data", transformer: "return data",
@ -435,16 +444,17 @@ describe.each(
readable: true, readable: true,
}) })
let key = isOracle ? "FOO" : "foo"
expect(preview.schema).toEqual({ expect(preview.schema).toEqual({
foo: { [key]: {
name: "foo", name: key,
type: "string", type: "string",
}, },
}) })
expect(preview.rows).toEqual([ expect(preview.rows).toEqual([
{ {
foo: datasource.source === SourceName.SQL_SERVER ? "" : null, [key]: datasource.source === SourceName.SQL_SERVER ? "" : null,
}, },
]) ])
}) })
@ -455,7 +465,7 @@ describe.each(
it("should be able to insert with bindings", async () => { it("should be able to insert with bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})", sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -488,7 +498,7 @@ describe.each(
it("should not allow handlebars as parameters", async () => { it("should not allow handlebars as parameters", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})", sql: client("test_table").insert({ name: "{{ foo }}" }).toString(),
}, },
parameters: [ parameters: [
{ {
@ -516,13 +526,20 @@ describe.each(
) )
}) })
// Oracle doesn't automatically coerce strings into dates.
!isOracle &&
it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
"should coerce %s into a date", "should coerce %s into a date",
async datetimeStr => { async datetimeStr => {
const date = new Date(datetimeStr) const date = new Date(datetimeStr)
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`, sql: client("test_table")
.insert({
name: "foo",
birthday: client.raw("{{ birthday }}"),
})
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -555,7 +572,9 @@ describe.each(
async notDateStr => { async notDateStr => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ name }})", sql: client("test_table")
.insert({ name: client.raw("{{ name }}") })
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -586,7 +605,7 @@ describe.each(
it("should execute a query", async () => { it("should execute a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table ORDER BY id", sql: client("test_table").select("*").orderBy("id").toString(),
}, },
}) })
@ -629,7 +648,7 @@ describe.each(
it("should be able to transform a query", async () => { it("should be able to transform a query", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = 1", sql: client("test_table").where({ id: 1 }).select("*").toString(),
}, },
transformer: ` transformer: `
data[0].id = data[0].id + 1; data[0].id = data[0].id + 1;
@ -652,7 +671,10 @@ describe.each(
it("should coerce numeric bindings", async () => { it("should coerce numeric bindings", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "SELECT * FROM test_table WHERE id = {{ id }}", sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.select("*")
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -683,7 +705,10 @@ describe.each(
it("should be able to update rows", async () => { it("should be able to update rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}", sql: client("test_table")
.update({ name: client.raw("{{ name }}") })
.where({ id: client.raw("{{ id }}") })
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -698,19 +723,13 @@ describe.each(
queryVerb: "update", queryVerb: "update",
}) })
const result = await config.api.query.execute(query._id!, { await config.api.query.execute(query._id!, {
parameters: { parameters: {
id: "1", id: "1",
name: "foo", name: "foo",
}, },
}) })
expect(result.data).toEqual([
{
updated: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toEqual([ expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null }, { id: 1, name: "foo", birthday: null, number: null },
@ -720,35 +739,34 @@ describe.each(
it("should be able to execute an update that updates no rows", async () => { it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100", sql: client("test_table")
.update({ name: "updated" })
.where({ id: 100 })
.toString(),
}, },
queryVerb: "update", queryVerb: "update",
}) })
const result = await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
expect(result.data).toEqual([ const rows = await client("test_table").select()
{ for (const row of rows) {
updated: true, expect(row.name).not.toEqual("updated")
}, }
])
}) })
it("should be able to execute a delete that deletes no rows", async () => { it("should be able to execute a delete that deletes no rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "DELETE FROM test_table WHERE id = 100", sql: client("test_table").where({ id: 100 }).delete().toString(),
}, },
queryVerb: "delete", queryVerb: "delete",
}) })
const result = await config.api.query.execute(query._id!) await config.api.query.execute(query._id!)
expect(result.data).toEqual([ const rows = await client("test_table").select()
{ expect(rows).toHaveLength(5)
deleted: true,
},
])
}) })
}) })
@ -756,7 +774,10 @@ describe.each(
it("should be able to delete rows", async () => { it("should be able to delete rows", async () => {
const query = await createQuery({ const query = await createQuery({
fields: { fields: {
sql: "DELETE FROM test_table WHERE id = {{ id }}", sql: client("test_table")
.where({ id: client.raw("{{ id }}") })
.delete()
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -767,18 +788,12 @@ describe.each(
queryVerb: "delete", queryVerb: "delete",
}) })
const result = await config.api.query.execute(query._id!, { await config.api.query.execute(query._id!, {
parameters: { parameters: {
id: "1", id: "1",
}, },
}) })
expect(result.data).toEqual([
{
deleted: true,
},
])
const rows = await client("test_table").where({ id: 1 }).select() const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toHaveLength(0) expect(rows).toHaveLength(0)
}) })
@ -823,29 +838,18 @@ describe.each(
}) })
}) })
it("should be able to execute an update that updates no rows", async () => {
const query = await createQuery({
fields: {
sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
},
queryVerb: "update",
})
const result = await config.api.query.execute(query._id!, {})
expect(result.data).toEqual([
{
updated: true,
},
])
})
})
// this parameter really only impacts SQL queries // this parameter really only impacts SQL queries
describe("confirm nullDefaultSupport", () => { describe("confirm nullDefaultSupport", () => {
const queryParams = { let queryParams: Partial<Query>
beforeAll(async () => {
queryParams = {
fields: { fields: {
sql: "INSERT INTO test_table (name, number) VALUES ({{ bindingName }}, {{ bindingNumber }})", sql: client("test_table")
.insert({
name: client.raw("{{ bindingName }}"),
number: client.raw("{{ bindingNumber }}"),
})
.toString(),
}, },
parameters: [ parameters: [
{ {
@ -859,6 +863,7 @@ describe.each(
], ],
queryVerb: "create", queryVerb: "create",
} }
})
it("should error for old queries", async () => { it("should error for old queries", async () => {
const query = await createQuery(queryParams) const query = await createQuery(queryParams)
@ -873,7 +878,7 @@ describe.each(
} catch (err: any) { } catch (err: any) {
error = err.message error = err.message
} }
if (dbName === "mssql") { if (isMsSQL || isOracle) {
expect(error).toBeUndefined() expect(error).toBeUndefined()
} else { } else {
expect(error).toBeDefined() expect(error).toBeDefined()
@ -891,4 +896,5 @@ describe.each(
expect(results).toEqual({ data: [{ created: true }] }) expect(results).toEqual({ data: [{ created: true }] })
}) })
}) })
})
}) })

View File

@ -72,9 +72,11 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/rows (%s)", (providerType, dsProvider) => { ])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined const isInternal = dsProvider === undefined
const isMSSQL = providerType === DatabaseName.SQL_SERVER const isMSSQL = providerType === DatabaseName.SQL_SERVER
const isOracle = providerType === DatabaseName.ORACLE
const config = setup.getConfig() const config = setup.getConfig()
let table: Table let table: Table
@ -129,7 +131,8 @@ describe.each([
primary: ["id"], primary: ["id"],
schema: defaultSchema, schema: defaultSchema,
} }
return merge(req, ...overrides) const merged = merge(req, ...overrides)
return merged
} }
function defaultTable( function defaultTable(
@ -1406,9 +1409,10 @@ describe.each([
await assertRowUsage(rowUsage + 3) await assertRowUsage(rowUsage + 3)
}) })
// Upserting isn't yet supported in MSSQL, see: // Upserting isn't yet supported in MSSQL / Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
it("should be able to update existing rows with bulkImport", async () => { it("should be able to update existing rows with bulkImport", async () => {
const table = await config.api.table.save( const table = await config.api.table.save(
saveTableRequest({ saveTableRequest({
@ -1478,9 +1482,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description") expect(rows[2].description).toEqual("Row 3 description")
}) })
// Upserting isn't yet supported in MSSQL, see: // Upserting isn't yet supported in MSSQL or Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
!isInternal && !isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => { it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4() const tableName = uuid.v4()
@ -1547,9 +1552,10 @@ describe.each([
expect(rows[2].description).toEqual("Row 3 description") expect(rows[2].description).toEqual("Row 3 description")
}) })
// Upserting isn't yet supported in MSSQL, see: // Upserting isn't yet supported in MSSQL/Oracle, see:
// https://github.com/knex/knex/pull/6050 // https://github.com/knex/knex/pull/6050
!isMSSQL && !isMSSQL &&
!isOracle &&
!isInternal && !isInternal &&
it("should be able to update existing rows an autoID primary key", async () => { it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4() const tableName = uuid.v4()

View File

@ -48,6 +48,7 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("search (%s)", (name, dsProvider) => { ])("search (%s)", (name, dsProvider) => {
const isSqs = name === "sqs" const isSqs = name === "sqs"
const isLucene = name === "lucene" const isLucene = name === "lucene"
@ -1594,7 +1595,10 @@ describe.each([
const MEDIUM = "10000000" const MEDIUM = "10000000"
// Our bigints are int64s in most datasources. // Our bigints are int64s in most datasources.
const BIG = "9223372036854775807" let BIG = "9223372036854775807"
if (name === DatabaseName.ORACLE) {
// BIG = "9223372036854775808"
}
beforeAll(async () => { beforeAll(async () => {
table = await createTable({ table = await createTable({

View File

@ -40,7 +40,8 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => { [DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/tables (%s)", (name, dsProvider) => {
const isInternal: boolean = !dsProvider const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined let datasource: Datasource | undefined
let config = setup.getConfig() let config = setup.getConfig()
@ -59,15 +60,20 @@ describe.each([
jest.clearAllMocks() jest.clearAllMocks()
}) })
it.each([ let names = [
"alphanum", "alphanum",
"with spaces", "with spaces",
"with-dashes", "with-dashes",
"with_underscores", "with_underscores",
'with "double quotes"',
"with 'single quotes'",
"with `backticks`", "with `backticks`",
])("creates a table with name: %s", async name => { ]
if (name !== DatabaseName.ORACLE) {
names.push(`with "double quotes"`)
names.push(`with 'single quotes'`)
}
it.each(names)("creates a table with name: %s", async name => {
const table = await config.api.table.save( const table = await config.api.table.save(
tableForDatasource(datasource, { name }) tableForDatasource(datasource, { name })
) )

View File

@ -33,6 +33,7 @@ describe.each([
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
[DatabaseName.ORACLE, getDatasource(DatabaseName.ORACLE)],
])("/v2/views (%s)", (name, dsProvider) => { ])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isSqs = name === "sqs" const isSqs = name === "sqs"

View File

@ -1,27 +1,20 @@
import { Datasource, Query, SourceName } from "@budibase/types" import { Datasource, Query } from "@budibase/types"
import * as setup from "./utilities" import * as setup from "./utilities"
import { DatabaseName, getDatasource } from "../../integrations/tests/utils" import {
import knex, { Knex } from "knex" DatabaseName,
getDatasource,
knexClient,
} from "../../integrations/tests/utils"
import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
function getKnexClientName(source: SourceName) {
switch (source) {
case SourceName.MYSQL:
return "mysql2"
case SourceName.SQL_SERVER:
return "mssql"
case SourceName.POSTGRES:
return "pg"
}
throw new Error(`Unsupported source: ${source}`)
}
describe.each( describe.each(
[ [
DatabaseName.POSTGRES, DatabaseName.POSTGRES,
DatabaseName.MYSQL, DatabaseName.MYSQL,
DatabaseName.SQL_SERVER, DatabaseName.SQL_SERVER,
DatabaseName.MARIADB, DatabaseName.MARIADB,
DatabaseName.ORACLE,
].map(name => [name, getDatasource(name)]) ].map(name => [name, getDatasource(name)])
)("execute query action (%s)", (_, dsProvider) => { )("execute query action (%s)", (_, dsProvider) => {
let tableName: string let tableName: string
@ -35,10 +28,7 @@ describe.each(
const ds = await dsProvider const ds = await dsProvider
datasource = await config.api.datasource.create(ds) datasource = await config.api.datasource.create(ds)
client = knex({ client = await knexClient(ds)
client: getKnexClientName(ds.source),
connection: ds.config,
})
}) })
beforeEach(async () => { beforeEach(async () => {

View File

@ -104,6 +104,37 @@ export interface OracleColumnsResponse {
SEARCH_CONDITION: null | string SEARCH_CONDITION: null | string
} }
export enum TriggeringEvent {
INSERT = "INSERT",
DELETE = "DELETE",
UPDATE = "UPDATE",
LOGON = "LOGON",
LOGOFF = "LOGOFF",
STARTUP = "STARTUP",
SHUTDOWN = "SHUTDOWN",
SERVERERROR = "SERVERERROR",
SCHEMA = "SCHEMA",
ALTER = "ALTER",
DROP = "DROP",
}
export enum TriggerType {
BEFORE_EACH_ROW = "BEFORE EACH ROW",
AFTER_EACH_ROW = "AFTER EACH ROW",
BEFORE_STATEMENT = "BEFORE STATEMENT",
AFTER_STATEMENT = "AFTER STATEMENT",
INSTEAD_OF = "INSTEAD OF",
COMPOUND = "COMPOUND",
}
export interface OracleTriggersResponse {
TABLE_NAME: string
TRIGGER_NAME: string
TRIGGER_TYPE: TriggerType
TRIGGERING_EVENT: TriggeringEvent
TRIGGER_BODY: string
}
/** /**
* An oracle constraint * An oracle constraint
*/ */

View File

@ -31,7 +31,14 @@ import oracledb, {
ExecuteOptions, ExecuteOptions,
Result, Result,
} from "oracledb" } from "oracledb"
import { OracleTable, OracleColumn, OracleColumnsResponse } from "./base/types" import {
OracleTable,
OracleColumn,
OracleColumnsResponse,
OracleTriggersResponse,
TriggeringEvent,
TriggerType,
} from "./base/types"
import { sql } from "@budibase/backend-core" import { sql } from "@budibase/backend-core"
const Sql = sql.Sql const Sql = sql.Sql
@ -98,7 +105,7 @@ const SCHEMA: Integration = {
}, },
} }
const UNSUPPORTED_TYPES = ["BLOB", "CLOB", "NCLOB"] const UNSUPPORTED_TYPES = ["BLOB", "NCLOB"]
const OracleContraintTypes = { const OracleContraintTypes = {
PRIMARY: "P", PRIMARY: "P",
@ -111,7 +118,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig private readonly config: OracleConfig
private index: number = 1 private index: number = 1
private readonly COLUMNS_SQL = ` private static readonly COLUMNS_SQL = `
SELECT SELECT
tabs.table_name, tabs.table_name,
cols.column_name, cols.column_name,
@ -139,6 +146,19 @@ class OracleIntegration extends Sql implements DatasourcePlus {
(cons.status = 'ENABLED' (cons.status = 'ENABLED'
OR cons.status IS NULL) OR cons.status IS NULL)
` `
private static readonly TRIGGERS_SQL = `
SELECT
table_name,
trigger_name,
trigger_type,
triggering_event,
trigger_body
FROM
all_triggers
WHERE status = 'ENABLED'
`
constructor(config: OracleConfig) { constructor(config: OracleConfig) {
super(SqlClient.ORACLE) super(SqlClient.ORACLE)
this.config = config this.config = config
@ -211,6 +231,75 @@ class OracleIntegration extends Sql implements DatasourcePlus {
return oracleTables return oracleTables
} }
/**
 * Selects the triggers belonging to a given table from the raw
 * ALL_TRIGGERS query result, optionally narrowed by triggering event
 * (e.g. INSERT) and trigger type (e.g. BEFORE EACH ROW).
 */
private getTriggersFor(
  tableName: string,
  triggersResponse: Result<OracleTriggersResponse>,
  opts?: { event?: TriggeringEvent; type?: TriggerType }
): OracleTriggersResponse[] {
  const allTriggers = triggersResponse.rows || []
  return allTriggers.filter(trigger => {
    if (trigger.TABLE_NAME !== tableName) {
      return false
    }
    if (opts?.event && trigger.TRIGGERING_EVENT !== opts.event) {
      return false
    }
    if (opts?.type && trigger.TRIGGER_TYPE !== opts.type) {
      return false
    }
    return true
  })
}
/**
 * Flags columns as `autocolumn` when a BEFORE-EACH-ROW insert trigger
 * appears to populate them from a sequence.
 */
private markAutoIncrementColumns(
  triggersResponse: Result<OracleTriggersResponse>,
  tables: Record<string, Table>
) {
  for (const table of Object.values(tables)) {
    const beforeInsertTriggers = this.getTriggersFor(
      table.name,
      triggersResponse,
      {
        type: TriggerType.BEFORE_EACH_ROW,
        event: TriggeringEvent.INSERT,
      }
    )
    // Knex implements auto increment on Oracle via a BEFORE EACH ROW insert
    // trigger that pulls the next value from a per-table sequence. For an
    // auto increment column "id" on table "foo" the generated body is:
    //
    //   declare checking number := 1;
    //   begin if (:new. "id" is null) then while checking >= 1 loop
    //   select
    //     "foo_seq".nextval into :new. "id"
    //   from
    //     dual;
    //   select
    //     count("id") into checking
    //   from
    //     "foo"
    //   where
    //     "id" = :new. "id";
    //   end loop;
    //   end if;
    //   end;
    //
    // Matching that exact shape is brittle, so we use a heuristic: the
    // trigger body must mention the quoted column name AND contain a
    // `.nextval` sequence call. This could in principle match unrelated
    // triggers, but it's the best we can do with the information we have.
    for (const [columnName, schema] of Object.entries(table.schema)) {
      const looksAutoIncrement = beforeInsertTriggers.some(
        trigger =>
          trigger.TRIGGER_BODY.includes(`"${columnName}"`) &&
          trigger.TRIGGER_BODY.includes(`.nextval`)
      )
      if (looksAutoIncrement) {
        schema.autocolumn = true
      }
    }
  }
}
private static isSupportedColumn(column: OracleColumn) { private static isSupportedColumn(column: OracleColumn) {
return !UNSUPPORTED_TYPES.includes(column.type) return !UNSUPPORTED_TYPES.includes(column.type)
} }
@ -255,7 +344,10 @@ class OracleIntegration extends Sql implements DatasourcePlus {
entities: Record<string, Table> entities: Record<string, Table>
): Promise<Schema> { ): Promise<Schema> {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({ const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL, sql: OracleIntegration.COLUMNS_SQL,
})
const triggersResponse = await this.internalQuery<OracleTriggersResponse>({
sql: OracleIntegration.TRIGGERS_SQL,
}) })
const oracleTables = this.mapColumns(columnsResponse) const oracleTables = this.mapColumns(columnsResponse)
@ -318,6 +410,8 @@ class OracleIntegration extends Sql implements DatasourcePlus {
}) })
}) })
this.markAutoIncrementColumns(triggersResponse, tables)
let externalTables = finaliseExternalTables(tables, entities) let externalTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(externalTables) let errors = checkExternalTables(externalTables)
return { tables: externalTables, errors } return { tables: externalTables, errors }
@ -325,7 +419,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
async getTableNames() { async getTableNames() {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({ const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL, sql: OracleIntegration.COLUMNS_SQL,
}) })
return (columnsResponse.rows || []).map(row => row.TABLE_NAME) return (columnsResponse.rows || []).map(row => row.TABLE_NAME)
} }
@ -360,11 +454,32 @@ class OracleIntegration extends Sql implements DatasourcePlus {
this.index = 1 this.index = 1
connection = await this.getConnection() connection = await this.getConnection()
const options: ExecuteOptions = { autoCommit: true } const options: ExecuteOptions = {
autoCommit: true,
fetchTypeHandler: function (metaData) {
if (metaData.dbType === oracledb.CLOB) {
return { type: oracledb.STRING }
} else if (
// When we create a new table in OracleDB from Budibase, bigints get
// created as NUMBER(20,0). Budibase expects bigints to be returned
// as strings, which is what we're doing here. However, this is
// likely to be brittle if we connect to externally created
// databases that have used different precisions and scales.
// We shold find a way to do better.
metaData.dbType === oracledb.NUMBER &&
metaData.precision === 20 &&
metaData.scale === 0
) {
return { type: oracledb.STRING }
}
return undefined
},
}
const bindings: BindParameters = query.bindings || [] const bindings: BindParameters = query.bindings || []
this.log(query.sql, bindings) this.log(query.sql, bindings)
return await connection.execute<T>(query.sql, bindings, options) const result = await connection.execute(query.sql, bindings, options)
return result as Result<T>
} finally { } finally {
if (connection) { if (connection) {
try { try {
@ -377,7 +492,6 @@ class OracleIntegration extends Sql implements DatasourcePlus {
} }
private getConnection = async (): Promise<Connection> => { private getConnection = async (): Promise<Connection> => {
//connectString : "(DESCRIPTION =(ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1521))(CONNECT_DATA =(SID= ORCL)))"
const connectString = `${this.config.host}:${this.config.port || 1521}/${ const connectString = `${this.config.host}:${this.config.port || 1521}/${
this.config.database this.config.database
}` }`
@ -386,7 +500,10 @@ class OracleIntegration extends Sql implements DatasourcePlus {
password: this.config.password, password: this.config.password,
connectString, connectString,
} }
return oracledb.getConnection(attributes) const tz = Intl.DateTimeFormat().resolvedOptions().timeZone
const connection = await oracledb.getConnection(attributes)
await connection.execute(`ALTER SESSION SET TIME_ZONE = '${tz}'`)
return connection
} }
async create(query: SqlQuery | string): Promise<any[]> { async create(query: SqlQuery | string): Promise<any[]> {

View File

@ -1,100 +0,0 @@
const oracledb = require("oracledb")
import { default as OracleIntegration } from "../oracle"
jest.mock("oracledb")
// Thin wrapper that builds a fresh Oracle integration instance against the
// jest-mocked oracledb driver for each test.
class TestConfiguration {
  integration: any

  constructor(config: any = {}) {
    this.integration = new OracleIntegration.integration(config)
  }
}
// Execute options the integration is expected to pass on every statement.
const options = { autoCommit: true }

describe("Oracle Integration", () => {
  let config: any

  beforeEach(() => {
    // Reset mocked oracledb call counts so per-test assertions are isolated.
    jest.clearAllMocks()
    config = new TestConfiguration()
  })

  it("calls the create method with the correct params", async () => {
    const sql = "insert into users (name, age) values ('Joe', 123);"
    await config.integration.create({
      sql,
    })
    // Raw SQL should be passed straight through with empty bindings.
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    // The connection must be released after the query.
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the read method with the correct params", async () => {
    const sql = "select * from users;"
    await config.integration.read({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the update method with the correct params", async () => {
    const sql = "update table users set name = 'test';"
    await config.integration.update({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  it("calls the delete method with the correct params", async () => {
    const sql = "delete from users where name = 'todelete';"
    await config.integration.delete({
      sql,
    })
    expect(oracledb.executeMock).toHaveBeenCalledWith(sql, [], options)
    expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
    expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
  })

  // When Oracle returns no rows, write operations should report a synthetic
  // success payload rather than an empty array.
  describe("no rows returned", () => {
    beforeEach(() => {
      oracledb.executeMock.mockImplementation(() => ({ rows: [] }))
    })

    it("returns the correct response when the create response has no rows", async () => {
      const sql = "insert into users (name, age) values ('Joe', 123);"
      const response = await config.integration.create({
        sql,
      })
      expect(response).toEqual([{ created: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })

    it("returns the correct response when the update response has no rows", async () => {
      const sql = "update table users set name = 'test';"
      const response = await config.integration.update({
        sql,
      })
      expect(response).toEqual([{ updated: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })

    it("returns the correct response when the delete response has no rows", async () => {
      const sql = "delete from users where name = 'todelete';"
      const response = await config.integration.delete({
        sql,
      })
      expect(response).toEqual([{ deleted: true }])
      expect(oracledb.executeMock).toHaveBeenCalledTimes(1)
      expect(oracledb.closeMock).toHaveBeenCalledTimes(1)
    })
  })
})

View File

@ -1,12 +1,16 @@
import { import {
FieldType, FieldType,
Operation, Operation,
PaginationJson,
QueryJson, QueryJson,
SearchFilters,
SortJson,
SqlClient,
Table, Table,
TableSourceType, TableSourceType,
SqlClient,
} from "@budibase/types" } from "@budibase/types"
import { sql } from "@budibase/backend-core" import { sql } from "@budibase/backend-core"
import { merge } from "lodash"
const Sql = sql.Sql const Sql = sql.Sql
@ -25,7 +29,16 @@ const TABLE: Table = {
primary: ["id"], primary: ["id"],
} }
function endpoint(table: any, operation: any) { const ORACLE_TABLE: Partial<Table> = {
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
}
function endpoint(table: string, operation: Operation) {
return { return {
datasourceId: "Postgres", datasourceId: "Postgres",
operation: operation, operation: operation,
@ -39,19 +52,25 @@ function generateReadJson({
filters, filters,
sort, sort,
paginate, paginate,
}: any = {}): QueryJson { }: {
const tableObj = { ...TABLE } table?: Partial<Table>
fields?: string[]
filters?: SearchFilters
sort?: SortJson
paginate?: PaginationJson
} = {}): QueryJson {
let tableObj: Table = { ...TABLE }
if (table) { if (table) {
tableObj.name = table tableObj = merge(TABLE, table)
} }
return { return {
endpoint: endpoint(table || TABLE_NAME, "READ"), endpoint: endpoint(tableObj.name || TABLE_NAME, Operation.READ),
resource: { resource: {
fields: fields || [], fields: fields || [],
}, },
filters: filters || {}, filters: filters || {},
sort: sort || {}, sort: sort || {},
paginate: paginate || {}, paginate: paginate || undefined,
meta: { meta: {
table: tableObj, table: tableObj,
}, },
@ -191,7 +210,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000], bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`, sql: `select * from (select * from (select * from (select * from "test" where COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2 and COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4 order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -212,6 +231,7 @@ describe("SQL query builder", () => {
it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => { it("should use an oracle compatible coalesce query for oracle when using the equals filter", () => {
let query = new Sql(SqlClient.ORACLE, limit)._query( let query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({ generateReadJson({
table: ORACLE_TABLE,
filters: { filters: {
equal: { equal: {
name: "John", name: "John",
@ -222,13 +242,14 @@ describe("SQL query builder", () => {
expect(query).toEqual({ expect(query).toEqual({
bindings: ["John", limit, 5000], bindings: ["John", limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) = :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") = :1) order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
}) })
it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => { it("should use an oracle compatible coalesce query for oracle when using the not equals filter", () => {
let query = new Sql(SqlClient.ORACLE, limit)._query( let query = new Sql(SqlClient.ORACLE, limit)._query(
generateReadJson({ generateReadJson({
table: ORACLE_TABLE,
filters: { filters: {
notEqual: { notEqual: {
name: "John", name: "John",
@ -239,7 +260,7 @@ describe("SQL query builder", () => {
expect(query).toEqual({ expect(query).toEqual({
bindings: ["John", limit, 5000], bindings: ["John", limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where COALESCE("test"."name", -1) != :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, sql: `select * from (select * from (select * from (select * from "test" where (to_char("test"."name") IS NOT NULL AND to_char("test"."name") != :1) OR to_char("test"."name") IS NULL order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
}) })
}) })

View File

@ -5,6 +5,7 @@ import * as mongodb from "./mongodb"
import * as mysql from "./mysql" import * as mysql from "./mysql"
import * as mssql from "./mssql" import * as mssql from "./mssql"
import * as mariadb from "./mariadb" import * as mariadb from "./mariadb"
import * as oracle from "./oracle"
import { GenericContainer, StartedTestContainer } from "testcontainers" import { GenericContainer, StartedTestContainer } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests" import { testContainerUtils } from "@budibase/backend-core/tests"
import cloneDeep from "lodash/cloneDeep" import cloneDeep from "lodash/cloneDeep"
@ -17,6 +18,7 @@ export enum DatabaseName {
MYSQL = "mysql", MYSQL = "mysql",
SQL_SERVER = "mssql", SQL_SERVER = "mssql",
MARIADB = "mariadb", MARIADB = "mariadb",
ORACLE = "oracle",
} }
const providers: Record<DatabaseName, DatasourceProvider> = { const providers: Record<DatabaseName, DatasourceProvider> = {
@ -25,6 +27,7 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
[DatabaseName.MYSQL]: mysql.getDatasource, [DatabaseName.MYSQL]: mysql.getDatasource,
[DatabaseName.SQL_SERVER]: mssql.getDatasource, [DatabaseName.SQL_SERVER]: mssql.getDatasource,
[DatabaseName.MARIADB]: mariadb.getDatasource, [DatabaseName.MARIADB]: mariadb.getDatasource,
[DatabaseName.ORACLE]: oracle.getDatasource,
} }
export function getDatasourceProviders( export function getDatasourceProviders(
@ -60,6 +63,9 @@ export async function knexClient(ds: Datasource) {
case SourceName.SQL_SERVER: { case SourceName.SQL_SERVER: {
return mssql.knexClient(ds) return mssql.knexClient(ds)
} }
case SourceName.ORACLE: {
return oracle.knexClient(ds)
}
default: { default: {
throw new Error(`Unsupported source: ${ds.source}`) throw new Error(`Unsupported source: ${ds.source}`)
} }

View File

@ -0,0 +1,78 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
let ports: Promise<testContainerUtils.Port[]>
/**
 * Starts (once per process) an Oracle test container and returns a
 * Datasource pointing at it, scoped to a freshly created random user so
 * concurrent tests don't trample each other's schemas.
 */
export async function getDatasource(): Promise<Datasource> {
  // password needs to conform to Oracle standards
  const password = "password"
  if (!ports) {
    // couldn't build 19.3.0 for X64
    let image = "budibase/oracle-database:23.2-slim-faststart"
    if (process.arch.startsWith("arm")) {
      // there isn't an ARM compatible 23.2 build
      image = "budibase/oracle-database:19.3.0-ee-slim-faststart"
    }

    ports = startContainer(
      new GenericContainer(image)
        .withExposedPorts(1521)
        .withEnvironment({
          ORACLE_PASSWORD: password,
        })
        .withWaitStrategy(Wait.forLogMessage("DATABASE IS READY TO USE!"))
    )
  }

  const port = (await ports).find(x => x.container === 1521)?.host
  if (!port) {
    throw new Error("Oracle port not found")
  }

  const host = "127.0.0.1"
  const user = "SYSTEM"

  const datasource: Datasource = {
    type: "datasource_plus",
    source: SourceName.ORACLE,
    plus: true,
    config: { host, port, user, password, database: "FREEPDB1" },
  }

  const newUser = "a" + generator.guid().replaceAll("-", "")
  const client = await knexClient(datasource)
  try {
    await client.raw(`CREATE USER ${newUser} IDENTIFIED BY password`)
    await client.raw(
      `GRANT CONNECT, RESOURCE, CREATE VIEW, CREATE SESSION TO ${newUser}`
    )
    await client.raw(`GRANT UNLIMITED TABLESPACE TO ${newUser}`)
  } finally {
    // Tear down the SYSTEM-user knex pool — previously this leaked a pool
    // (and its Oracle connections) on every call.
    await client.destroy()
  }
  datasource.config!.user = newUser

  return datasource
}
/**
 * Builds a knex client for an Oracle datasource. Throws if the datasource
 * is missing its config or is not an Oracle datasource.
 */
export async function knexClient(ds: Datasource) {
  const { config, source } = ds
  if (!config) {
    throw new Error("Datasource config is missing")
  }
  if (source !== SourceName.ORACLE) {
    throw new Error("Datasource source is not Oracle")
  }

  const database = config.database || "FREEPDB1"
  return knex({
    client: "oracledb",
    connection: {
      connectString: `${config.host}:${config.port}/${database}`,
      user: config.user,
      password: config.password,
    },
  })
}

View File

@ -147,7 +147,8 @@ export async function search(
} catch (err: any) { } catch (err: any) {
if (err.message && err.message.includes("does not exist")) { if (err.message && err.message.includes("does not exist")) {
throw new Error( throw new Error(
`Table updated externally, please re-fetch - ${err.message}` `Table updated externally, please re-fetch - ${err.message}`,
{ cause: err }
) )
} else { } else {
throw err throw err

View File

@ -20,12 +20,11 @@ import {
buildInternalRelationships, buildInternalRelationships,
sqlOutputProcessing, sqlOutputProcessing,
} from "../../../../../api/controllers/row/utils" } from "../../../../../api/controllers/row/utils"
import sdk from "../../../../index"
import { import {
decodeNonAscii,
mapToUserColumn, mapToUserColumn,
USER_COLUMN_PREFIX, USER_COLUMN_PREFIX,
} from "../../../tables/internal/sqs" } from "../../../tables/internal/sqs"
import sdk from "../../../../index"
import { import {
context, context,
sql, sql,
@ -42,7 +41,11 @@ import {
getRelationshipColumns, getRelationshipColumns,
getTableIDList, getTableIDList,
} from "../filters" } from "../filters"
import { dataFilters, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core" import {
dataFilters,
helpers,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { isSearchingByRowID } from "../utils" import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace" import tracer from "dd-trace"
@ -191,7 +194,7 @@ function reverseUserColumnMapping(rows: Row[]) {
if (index !== -1) { if (index !== -1) {
// cut out the prefix // cut out the prefix
const newKey = key.slice(0, index) + key.slice(index + prefixLength) const newKey = key.slice(0, index) + key.slice(index + prefixLength)
const decoded = decodeNonAscii(newKey) const decoded = helpers.schema.decodeNonAscii(newKey)
finalRow[decoded] = row[key] finalRow[decoded] = row[key]
} else { } else {
finalRow[key] = row[key] finalRow[key] = row[key]

View File

@ -13,7 +13,7 @@ import tablesSdk from "../"
import { generateJunctionTableID } from "../../../../db/utils" import { generateJunctionTableID } from "../../../../db/utils"
import { isEqual } from "lodash" import { isEqual } from "lodash"
import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default" import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default"
import { PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core" import { helpers, PROTECTED_INTERNAL_COLUMNS } from "@budibase/shared-core"
const FieldTypeMap: Record<FieldType, SQLiteType> = { const FieldTypeMap: Record<FieldType, SQLiteType> = {
[FieldType.BOOLEAN]: SQLiteType.NUMERIC, [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@ -63,29 +63,10 @@ function buildRelationshipDefinitions(
export const USER_COLUMN_PREFIX = "data_" export const USER_COLUMN_PREFIX = "data_"
// SQS does not support non-ASCII characters in column names, so we need to
// replace them with unicode escape sequences.
function encodeNonAscii(str: string): string {
return str
.split("")
.map(char => {
return char.charCodeAt(0) > 127
? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
: char
})
.join("")
}
export function decodeNonAscii(str: string): string {
return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) =>
String.fromCharCode(parseInt(p1, 16))
)
}
// utility function to denote that columns in SQLite are mapped to avoid overlap issues // utility function to denote that columns in SQLite are mapped to avoid overlap issues
// the overlaps can occur due to case insensitivity and some of the columns which Budibase requires // the overlaps can occur due to case insensitivity and some of the columns which Budibase requires
export function mapToUserColumn(key: string) { export function mapToUserColumn(key: string) {
return `${USER_COLUMN_PREFIX}${encodeNonAscii(key)}` return `${USER_COLUMN_PREFIX}${helpers.schema.encodeNonAscii(key)}`
} }
// this can generate relationship tables as part of the mapping // this can generate relationship tables as part of the mapping

View File

@ -315,6 +315,21 @@ export async function outputProcessing<T extends Row[] | Row>(
column.subtype column.subtype
) )
} }
} else if (column.type === FieldType.DATETIME && column.timeOnly) {
for (let row of enriched) {
if (row[property] instanceof Date) {
const hours = row[property].getUTCHours().toString().padStart(2, "0")
const minutes = row[property]
.getUTCMinutes()
.toString()
.padStart(2, "0")
const seconds = row[property]
.getUTCSeconds()
.toString()
.padStart(2, "0")
row[property] = `${hours}:${minutes}:${seconds}`
}
}
} }
} }

View File

@ -22,6 +22,7 @@ import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { deepGet, schema } from "./helpers" import { deepGet, schema } from "./helpers"
import { isPlainObject, isEmpty } from "lodash" import { isPlainObject, isEmpty } from "lodash"
import { decodeNonAscii } from "./helpers/schema"
const HBS_REGEX = /{{([^{].*?)}}/g const HBS_REGEX = /{{([^{].*?)}}/g
@ -181,8 +182,16 @@ export class ColumnSplitter {
tableIds: string[] tableIds: string[]
relationshipColumnNames: string[] relationshipColumnNames: string[]
relationships: string[] relationships: string[]
aliases?: Record<string, string>
columnPrefix?: string
constructor(tables: Table[]) { constructor(
tables: Table[],
opts?: {
aliases?: Record<string, string>
columnPrefix?: string
}
) {
this.tableNames = tables.map(table => table.name) this.tableNames = tables.map(table => table.name)
this.tableIds = tables.map(table => table._id!) this.tableIds = tables.map(table => table._id!)
this.relationshipColumnNames = tables.flatMap(table => this.relationshipColumnNames = tables.flatMap(table =>
@ -195,16 +204,38 @@ export class ColumnSplitter {
.concat(this.relationshipColumnNames) .concat(this.relationshipColumnNames)
// sort by length - makes sure there's no mis-matches due to similarities (sub column names) // sort by length - makes sure there's no mis-matches due to similarities (sub column names)
.sort((a, b) => b.length - a.length) .sort((a, b) => b.length - a.length)
if (opts?.aliases) {
this.aliases = {}
for (const [key, value] of Object.entries(opts.aliases)) {
this.aliases[value] = key
}
}
this.columnPrefix = opts?.columnPrefix
} }
run(key: string): { run(key: string): {
numberPrefix?: string numberPrefix?: string
relationshipPrefix?: string relationshipPrefix?: string
tableName?: string
column: string column: string
} { } {
let { prefix, key: splitKey } = getKeyNumbering(key) let { prefix, key: splitKey } = getKeyNumbering(key)
let tableName: string | undefined = undefined
if (this.aliases) {
for (const possibleAlias of Object.keys(this.aliases || {})) {
const withDot = `${possibleAlias}.`
if (splitKey.startsWith(withDot)) {
tableName = this.aliases[possibleAlias]!
splitKey = splitKey.slice(withDot.length)
}
}
}
let relationship: string | undefined let relationship: string | undefined
for (let possibleRelationship of this.relationships) { for (const possibleRelationship of this.relationships) {
const withDot = `${possibleRelationship}.` const withDot = `${possibleRelationship}.`
if (splitKey.startsWith(withDot)) { if (splitKey.startsWith(withDot)) {
const finalKeyParts = splitKey.split(withDot) const finalKeyParts = splitKey.split(withDot)
@ -214,7 +245,15 @@ export class ColumnSplitter {
break break
} }
} }
if (this.columnPrefix) {
if (splitKey.startsWith(this.columnPrefix)) {
splitKey = decodeNonAscii(splitKey.slice(this.columnPrefix.length))
}
}
return { return {
tableName,
numberPrefix: prefix, numberPrefix: prefix,
relationshipPrefix: relationship, relationshipPrefix: relationship,
column: splitKey, column: splitKey,

View File

@ -26,3 +26,22 @@ export function isRequired(constraints: FieldConstraints | undefined) {
constraints.presence === true) constraints.presence === true)
return isRequired return isRequired
} }
// SQS does not support non-ASCII characters in column names, so every UTF-16
// code unit above 0x7f is replaced with its `\uXXXX` unicode escape sequence
// (reversed by decodeNonAscii).
export function encodeNonAscii(str: string): string {
  const nonAscii = /[^\x00-\x7f]/g
  return str.replace(nonAscii, char => {
    const hex = char.charCodeAt(0).toString(16).padStart(4, "0")
    return "\\u" + hex
  })
}
// Reverses encodeNonAscii: turns `\uXXXX` escape sequences back into the
// characters they encode, leaving all other text untouched.
export function decodeNonAscii(str: string): string {
  const unicodeEscape = /\\u([0-9a-fA-F]{4})/g
  return str.replace(unicodeEscape, (_match, hexDigits: string) => {
    const codeUnit = parseInt(hexDigits, 16)
    return String.fromCharCode(codeUnit)
  })
}

View File

@ -48,10 +48,13 @@ async function removeTenantUsers(tenantId: string) {
try { try {
const allUsers = await getTenantUsers(tenantId) const allUsers = await getTenantUsers(tenantId)
const allEmails = allUsers.rows.map((row: any) => row.doc.email) const allEmails = allUsers.rows.map((row: any) => row.doc.email)
const allSsoIds = allUsers.rows
.map((row: any) => row.doc.ssoId)
.filter(id => !!id)
// get the id and email doc ids // get the id and email doc ids
let keys = allUsers.rows.map((row: any) => row.id) let keys = allUsers.rows.map((row: any) => row.id)
keys = keys.concat(allEmails) keys = keys.concat(allEmails).concat(allSsoIds)
const platformDb = platform.getPlatformDB() const platformDb = platform.getPlatformDB()