Plumb FieldSchema into parse.
parent
25ab2e2689
commit
5bce8e595d

@@ -12,6 +12,8 @@ import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable"
import {
  AnySearchFilter,
  ArrayOperator,
  BasicOperator,
  BBReferenceFieldMetadata,
  FieldSchema,
  FieldType,
@@ -23,6 +25,7 @@ import {
  prefixed,
  QueryJson,
  QueryOptions,
  RangeOperator,
  RelationshipsJson,
  SearchFilters,
  SortOrder,
@@ -33,9 +36,7 @@ import {
  TableSourceType,
} from "@budibase/types"
import environment from "../environment"
import { helpers } from "@budibase/shared-core"
import { isPlainObject } from "lodash"
import { ColumnSplitter } from "@budibase/shared-core/src/filters"
import { dataFilters, helpers } from "@budibase/shared-core"

type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any

@@ -75,10 +76,16 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
class InternalBuilder {
  private readonly client: SqlClient
  private readonly query: QueryJson
  private readonly splitter: dataFilters.ColumnSplitter

  constructor(client: SqlClient, query: QueryJson) {
    this.client = client
    this.query = query

    this.splitter = new dataFilters.ColumnSplitter([this.table], {
      aliases: this.query.tableAliases,
      columnPrefix: this.query.meta.columnPrefix,
    })
  }

  get table(): Table {
@@ -205,107 +212,95 @@ class InternalBuilder {
    return identifier
  }

  private parse(input: any) {
  private parse(input: any, schema: FieldSchema) {
    if (Array.isArray(input)) {
      return JSON.stringify(input)
    }
    if (input == undefined) {
      return null
    }
    if (typeof input !== "string") {
      return input
    }
    if (isInvalidISODateString(input)) {
      return null
    }
    if (isValidISODateString(input)) {
      return new Date(input.trim())
    if (typeof input === "string") {
      if (isInvalidISODateString(input)) {
        return null
      }
      if (isValidISODateString(input)) {
        return new Date(input.trim())
      }
    }
    return input
  }

  private parseBody(body: any) {
    for (let [key, value] of Object.entries(body)) {
      body[key] = this.parse(value)
      const { column } = this.splitter.run(key)
      const schema = this.table.schema[column]
      if (!schema) {
        continue
      }
      body[key] = this.parse(value, schema)
    }
    return body
  }

  private parseFilters(filters: SearchFilters | undefined): SearchFilters {
    if (!filters) {
      return {}
    }
    for (let [key, value] of Object.entries(filters)) {
      let parsed
      if (typeof value === "object") {
        parsed = this.parseFilters(value)
      } else {
        parsed = this.parse(value)
  private parseFilters(filters: SearchFilters): SearchFilters {
    for (const op of Object.values(BasicOperator)) {
      const filter = filters[op]
      if (!filter) {
        continue
      }
      for (const key of Object.keys(filter)) {
        if (Array.isArray(filter[key])) {
          filter[key] = JSON.stringify(filter[key])
          continue
        }
        const { column } = this.splitter.run(key)
        const schema = this.table.schema[column]
        if (!schema) {
          continue
        }
        filter[key] = this.parse(filter[key], schema)
      }
      // @ts-ignore
      filters[key] = parsed
    }

    for (const op of Object.values(ArrayOperator)) {
      const filter = filters[op]
      if (!filter) {
        continue
      }
      for (const key of Object.keys(filter)) {
        const { column } = this.splitter.run(key)
        const schema = this.table.schema[column]
        if (!schema) {
          continue
        }
        filter[key] = filter[key].map(v => this.parse(v, schema))
      }
    }

    for (const op of Object.values(RangeOperator)) {
      const filter = filters[op]
      if (!filter) {
        continue
      }
      for (const key of Object.keys(filter)) {
        const { column } = this.splitter.run(key)
        const schema = this.table.schema[column]
        if (!schema) {
          continue
        }
        const value = filter[key]
        if ("low" in value) {
          value.low = this.parse(value.low, schema)
        }
        if ("high" in value) {
          value.high = this.parse(value.high, schema)
        }
      }
    }

    return filters
  }

  // private parse(input: any, schema: FieldSchema) {
  //   if (input == undefined) {
  //     return null
  //   }

  //   if (isPlainObject(input)) {
  //     for (const [key, value] of Object.entries(input)) {
  //       input[key] = this.parse(value, schema)
  //     }
  //     return input
  //   }

  //   if (schema.type === FieldType.DATETIME && schema.timeOnly) {
  //     if (this.client === SqlClient.ORACLE) {
  //       return new Date(`1970-01-01 ${input}`)
  //     }
  //   }

  //   if (typeof input === "string") {
  //     if (isInvalidISODateString(input)) {
  //       return null
  //     }
  //     if (isValidISODateString(input)) {
  //       return new Date(input.trim())
  //     }
  //   }

  //   return input
  // }

  // private parseBody(body: any) {
  //   for (let [key, value] of Object.entries(body)) {
  //     body[key] = this.parse(value, this.table.schema[key])
  //   }
  //   return body
  // }

  // private parseFilters(filters: SearchFilters | undefined): SearchFilters {
  //   if (!filters) {
  //     return {}
  //   }

  //   for (const [_, filter] of Object.entries(filters)) {
  //     for (const [key, value] of Object.entries(filter)) {
  //       const { column } = new ColumnSplitter([this.table]).run(key)
  //       const schema = this.table.schema[column]
  //       if (!schema) {
  //         throw new Error(
  //           `Column ${key} does not exist in table ${this.table._id}`
  //         )
  //       }
  //       filter[key] = this.parse(value, schema)
  //     }
  //   }

  //   return filters
  // }

  // right now we only do filters on the specific table being queried
  addFilters(
    query: Knex.QueryBuilder,

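To make the intent of the reworked `parse`/`parseBody` easier to follow outside the diff, here is a small self-contained sketch of the same pattern: arrays are stringified, null-ish values become `null`, and ISO date strings become `Date` objects, with the field schema now passed alongside the value so type-specific handling is possible. The `looksLikeISODate` helper and the simplified `FieldSchemaLike` type are stand-ins for this illustration, not Budibase's actual utilities.

```ts
// Minimal sketch of schema-aware value parsing (illustrative shapes only).
type FieldSchemaLike = { type: string; timeOnly?: boolean }

// Stand-in for the isValidISODateString helper used in the real code.
function looksLikeISODate(value: string): boolean {
  return /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value) && !isNaN(Date.parse(value))
}

function parseValue(input: any, schema: FieldSchemaLike): any {
  if (Array.isArray(input)) {
    return JSON.stringify(input)
  }
  if (input == undefined) {
    return null
  }
  if (typeof input === "string" && looksLikeISODate(input)) {
    // having the schema available here is what enables cases like
    // time-only datetime columns to be handled per SQL client later on
    return new Date(input.trim())
  }
  return input
}

// An ISO string destined for a datetime column becomes a Date:
console.log(parseValue("2024-06-01T12:00:00.000Z", { type: "datetime" }) instanceof Date) // true
```
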
@@ -19,11 +19,7 @@ import {
  buildInternalRelationships,
  sqlOutputProcessing,
} from "../../../../api/controllers/row/utils"
import {
  decodeNonAscii,
  mapToUserColumn,
  USER_COLUMN_PREFIX,
} from "../../tables/internal/sqs"
import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs"
import sdk from "../../../index"
import {
  context,
@@ -44,7 +40,7 @@ import {
  getRelationshipColumns,
  getTableIDList,
} from "./filters"
import { dataFilters } from "@budibase/shared-core"
import { dataFilters, helpers } from "@budibase/shared-core"

const builder = new sql.Sql(SqlClient.SQL_LITE)
const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
@@ -164,7 +160,7 @@ function reverseUserColumnMapping(rows: Row[]) {
    if (index !== -1) {
      // cut out the prefix
      const newKey = key.slice(0, index) + key.slice(index + prefixLength)
      const decoded = decodeNonAscii(newKey)
      const decoded = helpers.schema.decodeNonAscii(newKey)
      finalRow[decoded] = row[key]
    } else {
      finalRow[key] = row[key]

@@ -16,6 +16,7 @@ import {
} from "../../../../db/utils"
import { isEqual } from "lodash"
import { DEFAULT_TABLES } from "../../../../db/defaultData/datasource_bb_default"
import { helpers } from "@budibase/shared-core"

const FieldTypeMap: Record<FieldType, SQLiteType> = {
  [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@@ -65,29 +66,10 @@ function buildRelationshipDefinitions(

export const USER_COLUMN_PREFIX = "data_"

// SQS does not support non-ASCII characters in column names, so we need to
// replace them with unicode escape sequences.
function encodeNonAscii(str: string): string {
  return str
    .split("")
    .map(char => {
      return char.charCodeAt(0) > 127
        ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
        : char
    })
    .join("")
}

export function decodeNonAscii(str: string): string {
  return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) =>
    String.fromCharCode(parseInt(p1, 16))
  )
}

// utility function to denote that columns in SQLite are mapped to avoid overlap issues
// the overlaps can occur due to case insensitivity and some of the columns which Budibase requires
export function mapToUserColumn(key: string) {
  return `${USER_COLUMN_PREFIX}${encodeNonAscii(key)}`
  return `${USER_COLUMN_PREFIX}${helpers.schema.encodeNonAscii(key)}`
}

// this can generate relationship tables as part of the mapping

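For a concrete sense of what this mapping produces, the values below follow from the code above; the import path is only illustrative, since the importing file's location is not shown in this diff.

```ts
// Illustrative import path for the helpers shown above.
import { mapToUserColumn } from "../../tables/internal/sqs"

// ASCII column names only gain the prefix:
mapToUserColumn("firstName") // "data_firstName"

// Non-ASCII characters are escaped so SQS accepts the column name.
// "ï" has char code 0xef, so it becomes the literal text \u00ef:
mapToUserColumn("naïve") // "data_na\u00efve" (a literal backslash-u sequence, not an escape)
```
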
@@ -22,6 +22,7 @@ import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { deepGet, schema } from "./helpers"
import { isPlainObject, isEmpty } from "lodash"
import { decodeNonAscii } from "./helpers/schema"

const HBS_REGEX = /{{([^{].*?)}}/g

@@ -181,8 +182,16 @@ export class ColumnSplitter {
  tableIds: string[]
  relationshipColumnNames: string[]
  relationships: string[]
  aliases?: Record<string, string>
  columnPrefix?: string

  constructor(tables: Table[]) {
  constructor(
    tables: Table[],
    opts?: {
      aliases?: Record<string, string>
      columnPrefix?: string
    }
  ) {
    this.tableNames = tables.map(table => table.name)
    this.tableIds = tables.map(table => table._id!)
    this.relationshipColumnNames = tables.flatMap(table =>
@@ -195,16 +204,38 @@ export class ColumnSplitter {
      .concat(this.relationshipColumnNames)
      // sort by length - makes sure there's no mis-matches due to similarities (sub column names)
      .sort((a, b) => b.length - a.length)

    if (opts?.aliases) {
      this.aliases = {}
      for (const [key, value] of Object.entries(opts.aliases)) {
        this.aliases[value] = key
      }
    }

    this.columnPrefix = opts?.columnPrefix
  }

  run(key: string): {
    numberPrefix?: string
    relationshipPrefix?: string
    tableName?: string
    column: string
  } {
    let { prefix, key: splitKey } = getKeyNumbering(key)

    let tableName: string | undefined = undefined
    if (this.aliases) {
      for (const possibleAlias of Object.keys(this.aliases || {})) {
        const withDot = `${possibleAlias}.`
        if (splitKey.startsWith(withDot)) {
          tableName = this.aliases[possibleAlias]!
          splitKey = splitKey.slice(withDot.length)
        }
      }
    }

    let relationship: string | undefined
    for (let possibleRelationship of this.relationships) {
    for (const possibleRelationship of this.relationships) {
      const withDot = `${possibleRelationship}.`
      if (splitKey.startsWith(withDot)) {
        const finalKeyParts = splitKey.split(withDot)
@@ -214,7 +245,15 @@ export class ColumnSplitter {
        break
      }
    }

    if (this.columnPrefix) {
      if (splitKey.startsWith(this.columnPrefix)) {
        splitKey = decodeNonAscii(splitKey.slice(this.columnPrefix.length))
      }
    }

    return {
      tableName,
      numberPrefix: prefix,
      relationshipPrefix: relationship,
      column: splitKey,

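A rough usage sketch of the extended `ColumnSplitter`, tying the new options back to how `InternalBuilder` constructs it earlier in this commit. The table object and key below are simplified placeholders rather than real Budibase data.

```ts
import { dataFilters } from "@budibase/shared-core"

// Simplified stand-in for a Table; only the fields the splitter reads are set.
const table: any = { _id: "ta_users", name: "users", schema: {} }

const splitter = new dataFilters.ColumnSplitter([table], {
  // tableAliases map table name -> alias; the constructor inverts this so an
  // aliased key such as "a.<column>" resolves back to the table name
  aliases: { users: "a" },
  // internal columns carry this prefix, which run() strips and
  // unicode-unescapes before returning the column name
  columnPrefix: "data_",
})

const { tableName, column } = splitter.run("a.data_firstName")
// tableName === "users", column === "firstName"
```
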
@@ -26,3 +26,22 @@ export function isRequired(constraints: FieldConstraints | undefined) {
    constraints.presence === true)
  return isRequired
}

// SQS does not support non-ASCII characters in column names, so we need to
// replace them with unicode escape sequences.
export function encodeNonAscii(str: string): string {
  return str
    .split("")
    .map(char => {
      return char.charCodeAt(0) > 127
        ? "\\u" + char.charCodeAt(0).toString(16).padStart(4, "0")
        : char
    })
    .join("")
}

export function decodeNonAscii(str: string): string {
  return str.replace(/\\u([0-9a-fA-F]{4})/g, (match, p1) =>
    String.fromCharCode(parseInt(p1, 16))
  )
}

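A quick roundtrip through the relocated helpers; the expected values follow from the implementation above, and the `helpers.schema` access path matches how the server-side files in this commit consume them.

```ts
import { helpers } from "@budibase/shared-core"

// "ï" has char code 0xef (> 127), so it is replaced by the literal
// six-character sequence \u00ef; ASCII characters pass through untouched.
const encoded = helpers.schema.encodeNonAscii("naïve") // "na\u00efve" as literal text
const decoded = helpers.schema.decodeNonAscii(encoded) // back to "naïve"
```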