Merge remote-tracking branch 'origin/master' into feature/automation-row-ux-update

This commit is contained in:
Dean 2024-07-01 16:48:41 +01:00
commit c5b702ab69
11 changed files with 207 additions and 22 deletions

View File

@ -184,7 +184,11 @@ class InternalBuilder {
query: Knex.QueryBuilder,
filters: SearchFilters | undefined,
table: Table,
opts: { aliases?: Record<string, string>; relationship?: boolean }
opts: {
aliases?: Record<string, string>
relationship?: boolean
columnPrefix?: string
}
): Knex.QueryBuilder {
if (!filters) {
return query
@ -192,7 +196,10 @@ class InternalBuilder {
filters = parseFilters(filters)
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
const sqlStatements = new SqlStatements(this.client, table, { allOr })
const sqlStatements = new SqlStatements(this.client, table, {
allOr,
columnPrefix: opts.columnPrefix,
})
const tableName =
this.client === SqlClient.SQL_LITE ? table._id! : table.name
@ -663,6 +670,7 @@ class InternalBuilder {
}
// add filters to the query (where)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
@ -698,6 +706,7 @@ class InternalBuilder {
}
return this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
relationship: true,
aliases: tableAliases,
})
@ -708,6 +717,7 @@ class InternalBuilder {
let query = this.knexWithAlias(knex, endpoint, tableAliases)
const parsedBody = parseBody(body)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning
@ -722,6 +732,7 @@ class InternalBuilder {
const { endpoint, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases)
query = this.addFilters(query, filters, json.meta.table, {
columnPrefix: json.meta.columnPrefix,
aliases: tableAliases,
})
// mysql can't use returning

View File

@ -5,19 +5,27 @@ export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
columnPrefix: string | undefined
constructor(
client: string,
table: Table,
{ allOr }: { allOr?: boolean } = {}
{ allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
this.columnPrefix = columnPrefix
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
return this.table.schema[fieldName]
let found = this.table.schema[fieldName]
if (!found && this.columnPrefix) {
const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "")
found = this.table.schema[prefixRemovedFieldName]
}
return found
}
between(

View File

@ -2119,4 +2119,29 @@ describe.each([
}).toNotHaveProperty(["totalRows"])
})
})
// Regression tests for column names that themselves contain the internal
// "data_" user-column marker in various positions — guards the
// prefix-mapping/stripping logic against mangling legitimate user columns.
describe.each(["data_name_test", "name_data_test", "name_test_data_"])(
"special (%s) case",
column => {
beforeAll(async () => {
// NOTE(review): createTable/createRows appear to be suite-level helpers — verify
table = await createTable({
[column]: {
name: column,
type: FieldType.STRING,
},
})
// two rows so the equality filter must actually discriminate, not just match-all
await createRows([{ [column]: "a" }, { [column]: "b" }])
})
it("should be able to query a column with data_ in it", async () => {
// the `1:` numeric key prefix exercises key-numbering removal in
// combination with the awkward column name
await expectSearch({
query: {
equal: {
[`1:${column}`]: "a",
},
},
}).toContainExactly([{ [column]: "a" }])
})
}
)
})

View File

@ -15,6 +15,7 @@ import {
import { processMigrations } from "../../migrationsProcessor"
import migration from "../20240604153647_initial_sqs"
import { AppMigration } from "src/appMigrations"
import sdk from "../../../sdk"
const MIGRATIONS: AppMigration[] = [
{
@ -27,6 +28,8 @@ const MIGRATIONS: AppMigration[] = [
const config = setup.getConfig()
let tableId: string
const prefix = sdk.tables.sqs.mapToUserColumn
function oldLinkDocInfo() {
const tableId1 = `${DocumentType.TABLE}_a`,
tableId2 = `${DocumentType.TABLE}_b`
@ -102,8 +105,14 @@ describe("SQS migration", () => {
expect(designDoc.sql.tables).toBeDefined()
const mainTableDef = designDoc.sql.tables[tableId]
expect(mainTableDef).toBeDefined()
expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT)
expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT)
expect(mainTableDef.fields[prefix("name")]).toEqual({
field: "name",
type: SQLiteType.TEXT,
})
expect(mainTableDef.fields[prefix("description")]).toEqual({
field: "description",
type: SQLiteType.TEXT,
})
const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
const linkDoc = await db.get<LinkDocument>(oldLinkDocID())

View File

@ -18,6 +18,7 @@ import {
buildInternalRelationships,
sqlOutputProcessing,
} from "../../../../api/controllers/row/utils"
import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs"
import sdk from "../../../index"
import {
context,
@ -35,8 +36,10 @@ import {
getRelationshipColumns,
getTableIDList,
} from "./filters"
import { dataFilters } from "@budibase/shared-core"
const builder = new sql.Sql(SqlClient.SQL_LITE)
const NO_SUCH_COLUMN_REGEX = new RegExp(`no such colum.+${USER_COLUMN_PREFIX}`)
function buildInternalFieldList(
table: Table,
@ -59,7 +62,7 @@ function buildInternalFieldList(
buildInternalFieldList(relatedTable, tables, { relationships: false })
)
} else {
fieldList.push(`${table._id}.${col.name}`)
fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
}
}
return fieldList
@ -90,6 +93,34 @@ function cleanupFilters(
)
)
// generate a map of all possible column names (these can be duplicated across tables
// the map of them will always be the same
const userColumnMap: Record<string, string> = {}
allTables.forEach(table =>
Object.keys(table.schema).forEach(
key => (userColumnMap[key] = mapToUserColumn(key))
)
)
// update the keys of filters to manage user columns
const keyInAnyTable = (key: string): boolean =>
allTables.some(table => table.schema[key])
const splitter = new dataFilters.ColumnSplitter(allTables)
for (const filter of Object.values(filters)) {
for (const key of Object.keys(filter)) {
const { numberPrefix, relationshipPrefix, column } = splitter.run(key)
if (keyInAnyTable(column)) {
filter[
`${numberPrefix || ""}${relationshipPrefix || ""}${mapToUserColumn(
column
)}`
] = filter[key]
delete filter[key]
}
}
}
return filters
}
@ -106,6 +137,25 @@ function buildTableMap(tables: Table[]) {
return tableMap
}
/**
 * Strips the internal user-column prefix (USER_COLUMN_PREFIX) back out of
 * row keys before rows are returned to callers.
 *
 * Only the first occurrence of the prefix in each key is removed, so any
 * leading alias/table qualifier on the key is preserved. Keys without the
 * prefix are copied through untouched.
 */
function reverseUserColumnMapping(rows: Row[]) {
  return rows.map(originalRow => {
    const mapped: Row = {}
    for (const [key, value] of Object.entries(originalRow)) {
      const prefixAt = key.indexOf(USER_COLUMN_PREFIX)
      if (prefixAt === -1) {
        // no internal prefix present — pass the key through unchanged
        mapped[key] = value
      } else {
        // splice the prefix out, keeping anything before and after it
        const stripped =
          key.slice(0, prefixAt) +
          key.slice(prefixAt + USER_COLUMN_PREFIX.length)
        mapped[stripped] = value
      }
    }
    return mapped
  })
}
function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
function runSqlQuery(
json: QueryJson,
@ -147,9 +197,10 @@ async function runSqlQuery(
const response = await alias.queryWithAliasing(json, processSQLQuery)
if (opts?.countTotalRows) {
return processRowCountResponse(response)
} else {
return response
} else if (Array.isArray(response)) {
return reverseUserColumnMapping(response)
}
return response
}
export async function search(
@ -185,6 +236,7 @@ export async function search(
meta: {
table,
tables: allTablesMap,
columnPrefix: USER_COLUMN_PREFIX,
},
resource: {
fields: buildInternalFieldList(table, allTables),
@ -197,7 +249,7 @@ export async function search(
const sortType =
sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
request.sort = {
[sortField.name]: {
[mapToUserColumn(sortField.name)]: {
direction: params.sortOrder || SortOrder.ASCENDING,
type: sortType as SortType,
},
@ -278,7 +330,10 @@ export async function search(
return response
} catch (err: any) {
const msg = typeof err === "string" ? err : err.message
if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {
const syncAndRepeat =
(err.status === 400 && msg?.match(NO_SUCH_COLUMN_REGEX)) ||
(err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID))
if (syncAndRepeat) {
await sdk.tables.sqs.syncDefinition()
return search(options, table)
}

View File

@ -62,10 +62,18 @@ function buildRelationshipDefinitions(
}
}
// Marker prepended to every user-defined column when stored in SQLite.
export const USER_COLUMN_PREFIX = "data_"

/**
 * Maps a user column name to its prefixed SQLite form. The prefix keeps
 * user-defined columns from colliding with Budibase's own required columns
 * (collisions can otherwise occur due to case insensitivity).
 */
export function mapToUserColumn(key: string) {
  return USER_COLUMN_PREFIX + key
}
// this can generate relationship tables as part of the mapping
function mapTable(table: Table): SQLiteTables {
const tables: SQLiteTables = {}
const fields: Record<string, SQLiteType> = {}
const fields: Record<string, { field: string; type: SQLiteType }> = {}
for (let [key, column] of Object.entries(table.schema)) {
// relationships should be handled differently
if (column.type === FieldType.LINK) {
@ -78,7 +86,10 @@ function mapTable(table: Table): SQLiteTables {
if (!FieldTypeMap[column.type]) {
throw new Error(`Unable to map type "${column.type}" to SQLite type`)
}
fields[key] = FieldTypeMap[column.type]
fields[mapToUserColumn(key)] = {
field: key,
type: FieldTypeMap[column.type],
}
}
// there are some extra columns to map - add these in
const constantMap: Record<string, SQLiteType> = {}

View File

@ -164,14 +164,17 @@ export const InvalidFileExtensions = [
// Message correlation keys used to match BPM messages to a workflow.
// NOTE(review): presumably Zeebe/BPMN correlation variables (a
// verify_sso_login.bpmn resource is deployed elsewhere in this change) — confirm
export enum BpmCorrelationKey {
ONBOARDING = "budibase:onboarding:correlationkey",
VERIFY_SSO_LOGIN = "budibase:verify_sso_login:correlationkey",
}

// Keys identifying a specific running process instance per workflow.
export enum BpmInstanceKey {
ONBOARDING = "budibase:onboarding:instancekey",
VERIFY_SSO_LOGIN = "budibase:verify_sso_login:instancekey",
}

// Keys under which each workflow reports its current status.
export enum BpmStatusKey {
ONBOARDING = "budibase:onboarding:status",
VERIFY_SSO_LOGIN = "budibase:verify_sso_login:status",
}
export enum BpmStatusValue {

View File

@ -13,6 +13,7 @@ import {
RowSearchParams,
EmptyFilterOption,
SearchResponse,
Table,
} from "@budibase/types"
import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@ -131,13 +132,72 @@ const cleanupQuery = (query: SearchFilters) => {
* Removes a numeric prefix on field names designed to give fields uniqueness
*/
export const removeKeyNumbering = (key: string): string => {
return getKeyNumbering(key).key
}
/**
* Gets the part of the keys, returning the numeric prefix and the field name
*/
export const getKeyNumbering = (
key: string
): { prefix?: string; key: string } => {
if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
const parts = key.split(":")
// remove the number
parts.shift()
return parts.join(":")
const number = parts.shift()
return { prefix: `${number}:`, key: parts.join(":") }
} else {
return key
return { key }
}
}
/**
* Generates a splitter which can be used to split columns from a context into
* their components (number prefix, relationship column/table, column name)
*/
/**
 * Splits a filter key from a search context into its components: an optional
 * numeric prefix (e.g. "1:"), an optional relationship prefix (a table name,
 * table ID, or relationship column name followed by "."), and the bare
 * column name.
 */
export class ColumnSplitter {
  tableNames: string[]
  tableIds: string[]
  relationshipColumnNames: string[]
  relationships: string[]

  constructor(tables: Table[]) {
    this.tableNames = tables.map(tbl => tbl.name)
    this.tableIds = tables.map(tbl => tbl._id!)
    // relationship columns are any LINK-typed columns across all tables
    this.relationshipColumnNames = tables.flatMap(tbl =>
      Object.keys(tbl.schema).filter(
        name => tbl.schema[name].type === FieldType.LINK
      )
    )
    // longest candidates first so a shorter name never shadows a longer one
    // that it happens to be a prefix of
    this.relationships = [
      ...this.tableNames,
      ...this.tableIds,
      ...this.relationshipColumnNames,
    ].sort((a, b) => b.length - a.length)
  }

  /**
   * Breaks `key` apart, returning the numeric prefix (if any), the
   * relationship prefix including its trailing dot (if any), and the
   * remaining column name.
   */
  run(key: string): {
    numberPrefix?: string
    relationshipPrefix?: string
    column: string
  } {
    const { prefix, key: remainder } = getKeyNumbering(key)
    let column = remainder
    let relationshipPrefix: string | undefined
    // first (longest) relationship whose dotted form leads the key wins
    const hit = this.relationships.find(rel => column.startsWith(`${rel}.`))
    if (hit) {
      relationshipPrefix = `${hit}.`
      const pieces = column.split(relationshipPrefix)
      pieces.shift()
      column = pieces.join(".")
    }
    return {
      numberPrefix: prefix,
      relationshipPrefix,
      column,
    }
  }
}

View File

@ -122,6 +122,8 @@ export interface QueryJson {
table: Table
tables?: Record<string, Table>
renamed?: RenameColumn
// can specify something that columns could be prefixed with
columnPrefix?: string
}
extra?: {
idFilter?: SearchFilters

View File

@ -23,6 +23,7 @@ echo "deploy processes..."
zbctl deploy resource offboarding.bpmn --insecure
zbctl deploy resource onboarding.bpmn --insecure
zbctl deploy resource free_trial.bpmn --insecure
zbctl deploy resource verify_sso_login.bpmn --insecure
cd ../../../../../budibase/packages/account-portal/packages/server

View File

@ -16917,7 +16917,7 @@ open@^8.0.0, open@^8.4.0, open@~8.4.0:
is-docker "^2.1.1"
is-wsl "^2.2.0"
openai@4.52.1:
openai@^4.52.1:
version "4.52.1"
resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.1.tgz#44acc362a844fa2927b0cfa1fb70fb51e388af65"
integrity sha512-kv2hevAWZZ3I/vd2t8znGO2rd8wkowncsfcYpo8i+wU9ML+JEcdqiViANXXjWWGjIhajFNixE6gOY1fEgqILAg==
@ -21786,16 +21786,16 @@ typescript-eslint@^7.3.1:
"@typescript-eslint/eslint-plugin" "7.3.1"
"@typescript-eslint/parser" "7.3.1"
typescript@5.2.2, "typescript@>=3 < 6":
version "5.2.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
typescript@5.5.2:
version "5.5.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507"
integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew==
"typescript@>=3 < 6":
version "5.2.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
typescript@^3.9.10, typescript@^3.9.5, typescript@^3.9.7:
version "3.9.10"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.10.tgz#70f3910ac7a51ed6bef79da7800690b19bf778b8"