Merge pull request #14628 from Budibase/view-calculation-sql
Initial passing test for view calculations.
commit 3c56fdc4c1

@@ -1 +1 @@
Subproject commit 558a32dfd1f55bd894804a503e7e1090937df88c
Subproject commit 3e24f6293ff5ee5f9b42822e001504e3bbf19cc0
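For orientation, this is the shape of the feature the diff below implements: a view can declare calculation (aggregation) fields, and searching that view returns aggregated rows. This sketch mirrors the test added at the end of this PR; the table, the "quantity" column, and the `config.api` test harness are assumptions taken from that test, not a general API guarantee.

```ts
import { CalculationType } from "@budibase/types"

// Sketch based on the new test in this PR — assumes the usual test harness
// (`config`) and a saved table with a numeric "quantity" column.
const view = await config.api.viewV2.create({
  tableId: table._id!,
  name: "Quantity totals",
  schema: {
    "Quantity Sum": {
      visible: true,
      calculationType: CalculationType.SUM,
      field: "quantity",
    },
  },
})

// Searching a calculation view returns aggregated rows rather than raw rows,
// so the result carries the calculation field but no _id linking back to the table.
const { rows } = await config.api.viewV2.search(view.id, { query: {} })
// rows[0]["Quantity Sum"] === sum of "quantity" across the table
```

Internally this flows through the new `addAggregations` method on the SQL builder and the `Aggregation[]` resource assembled in `ExternalRequest.run`, both visible in the hunks below.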
@@ -10,7 +10,7 @@ import {
  StaticDatabases,
  DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext, Snippet, App } from "@budibase/types"
import { Database, IdentityContext, Snippet, App, Table } from "@budibase/types"
import { ContextMap } from "./types"

let TEST_APP_ID: string | null = null

@@ -394,3 +394,20 @@ export function setFeatureFlags(key: string, value: Record<string, any>) {
  context.featureFlagCache ??= {}
  context.featureFlagCache[key] = value
}

export function getTableForView(viewId: string): Table | undefined {
  const context = getCurrentContext()
  if (!context) {
    return
  }
  return context.viewToTableCache?.[viewId]
}

export function setTableForView(viewId: string, table: Table) {
  const context = getCurrentContext()
  if (!context) {
    return
  }
  context.viewToTableCache ??= {}
  context.viewToTableCache[viewId] = table
}
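A minimal sketch of how the new per-request cache above is intended to be used. This is a hypothetical caller: it assumes the helpers are re-exported on the `context` namespace and that a request context is active; the real lookup site in this PR is `sdk.views.getTable` on the server side.

```ts
import { context } from "@budibase/backend-core"
import { Table } from "@budibase/types"

// Hypothetical resolver: load the view's underlying table once per request,
// then serve later calls for the same view from the context cache.
async function tableForView(viewId: string, load: () => Promise<Table>) {
  const cached = context.getTableForView(viewId)
  if (cached) {
    return cached
  }
  const table = await load()
  context.setTableForView(viewId, table)
  return table
}
```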
@@ -1,4 +1,4 @@
import { IdentityContext, Snippet, VM } from "@budibase/types"
import { IdentityContext, Snippet, Table, VM } from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { GoogleSpreadsheet } from "google-spreadsheet"

@@ -21,4 +21,5 @@ export type ContextMap = {
  featureFlagCache?: {
    [key: string]: Record<string, any>
  }
  viewToTableCache?: Record<string, Table>
}
@@ -612,7 +612,6 @@ async function runQuery<T>(
 * limit {number} The number of results to fetch
 * bookmark {string|null} Current bookmark in the recursive search
 * rows {array|null} Current results in the recursive search
 * @returns {Promise<*[]|*>}
 */
async function recursiveSearch<T>(
  dbName: string,
@@ -6,7 +6,7 @@ import {
  ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"

/**
 * If creating DB allDocs/query params with only a single top level ID this can be used, this
@@ -66,9 +66,8 @@ export function getQueryIndex(viewName: ViewName) {

/**
 * Check if a given ID is that of a table.
 * @returns {boolean}
 */
export const isTableId = (id: string) => {
export const isTableId = (id: string): boolean => {
  // this includes datasource plus tables
  return (
    !!id &&
@@ -77,13 +76,16 @@ export const isTableId = (id: string) => {
  )
}

export function isViewId(id: string): boolean {
  return !!id && id.startsWith(`${VirtualDocumentType.VIEW}${SEPARATOR}`)
}

/**
 * Check if a given ID is that of a datasource or datasource plus.
 * @returns {boolean}
 */
export const isDatasourceId = (id: string) => {
export const isDatasourceId = (id: string): boolean => {
  // this covers both datasources and datasource plus
  return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
  return !!id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}

/**
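To make the ID predicates concrete, here is a quick sketch of how they behave. The document prefixes come from `DocumentType`/`VirtualDocumentType` and `SEPARATOR`; the literal IDs below are illustrative placeholders, not real generated IDs.

```ts
import { docIds } from "@budibase/backend-core"

// Illustrative IDs only — real IDs carry generated suffixes.
docIds.isTableId("ta_6b9f3c")                   // true: table document prefix
docIds.isViewId("view_ta_6b9f3c_1a2b3c")        // true: new "view" + separator prefix
docIds.isViewId("ta_6b9f3c")                    // false: not a view ID
docIds.isDatasourceId("datasource_plus_9d8e7f") // true: covers datasource plus as well
```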
@@ -11,10 +11,12 @@ import {
} from "./utils"
import SqlTableQueryBuilder from "./sqlTable"
import {
  Aggregation,
  AnySearchFilter,
  ArrayOperator,
  BasicOperator,
  BBReferenceFieldMetadata,
  CalculationType,
  FieldSchema,
  FieldType,
  INTERNAL_TABLE_SOURCE_ID,

@@ -824,8 +826,40 @@ class InternalBuilder {
    return query.countDistinct(`${aliased}.${primary[0]} as total`)
  }

  addAggregations(
    query: Knex.QueryBuilder,
    aggregations: Aggregation[]
  ): Knex.QueryBuilder {
    const fields = this.query.resource?.fields || []
    if (fields.length > 0) {
      query = query.groupBy(fields.map(field => `${this.table.name}.${field}`))
    }
    for (const aggregation of aggregations) {
      const op = aggregation.calculationType
      const field = `${this.table.name}.${aggregation.field} as ${aggregation.name}`
      switch (op) {
        case CalculationType.COUNT:
          query = query.count(field)
          break
        case CalculationType.SUM:
          query = query.sum(field)
          break
        case CalculationType.AVG:
          query = query.avg(field)
          break
        case CalculationType.MIN:
          query = query.min(field)
          break
        case CalculationType.MAX:
          query = query.max(field)
          break
      }
    }
    return query
  }

  addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
    let { sort } = this.query
    let { sort, resource } = this.query
    const primaryKey = this.table.primary
    const tableName = getTableName(this.table)
    const aliases = this.query.tableAliases

@@ -862,7 +896,8 @@ class InternalBuilder {

    // add sorting by the primary key if the result isn't already sorted by it,
    // to make sure result is deterministic
    if (!sort || sort[primaryKey[0]] === undefined) {
    const hasAggregations = (resource?.aggregations?.length ?? 0) > 0
    if (!hasAggregations && (!sort || sort[primaryKey[0]] === undefined)) {
      query = query.orderBy(`${aliased}.${primaryKey[0]}`)
    }
    return query

@@ -1246,10 +1281,15 @@ class InternalBuilder {
      }
    }

    // if counting, use distinct count, else select
    query = !counting
      ? query.select(this.generateSelectStatement())
      : this.addDistinctCount(query)
    const aggregations = this.query.resource?.aggregations || []
    if (counting) {
      query = this.addDistinctCount(query)
    } else if (aggregations.length > 0) {
      query = this.addAggregations(query, aggregations)
    } else {
      query = query.select(this.generateSelectStatement())
    }

    // have to add after as well (this breaks MS-SQL)
    if (!counting) {
      query = this.addSorting(query)
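A standalone sketch of the kind of query `addAggregations` builds, written directly against Knex. The "orders" table and its "status"/"quantity" columns are illustrative assumptions; in the real builder the grouped columns come from the query's `resource.fields` and the aggregates from the `Aggregation[]` list.

```ts
import knex from "knex"

// Assumed example schema: an "orders" table with a grouped "status" column
// and a numeric "quantity" column that a calculation view sums.
const client = knex({ client: "pg", connection: process.env.DATABASE_URL })

const query = client("orders")
  // plain (non-calculation) view fields become GROUP BY columns
  .groupBy(["orders.status"])
  // CalculationType.SUM maps onto Knex's sum(), aliased to the view field name
  .sum("orders.quantity as Quantity Sum")

// Roughly:
//   select sum("orders"."quantity") as "Quantity Sum"
//   from "orders" group by "orders"."status"
console.log(query.toString())
```

Note how the sorting hunk above also skips the default primary-key ORDER BY when aggregations are present: the primary key is not part of the grouped result, so ordering by it would be invalid SQL.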
@ -1,5 +1,6 @@
|
|||
import dayjs from "dayjs"
|
||||
import {
|
||||
Aggregation,
|
||||
AutoFieldSubType,
|
||||
AutoReason,
|
||||
Datasource,
|
||||
|
@ -19,6 +20,7 @@ import {
|
|||
SortJson,
|
||||
SortType,
|
||||
Table,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
breakExternalTableId,
|
||||
|
@ -46,7 +48,7 @@ import { db as dbCore } from "@budibase/backend-core"
|
|||
import sdk from "../../../sdk"
|
||||
import env from "../../../environment"
|
||||
import { makeExternalQuery } from "../../../integrations/base/query"
|
||||
import { dataFilters } from "@budibase/shared-core"
|
||||
import { dataFilters, helpers } from "@budibase/shared-core"
|
||||
|
||||
export interface ManyRelationship {
|
||||
tableId?: string
|
||||
|
@ -159,19 +161,43 @@ function isEditableColumn(column: FieldSchema) {
|
|||
|
||||
export class ExternalRequest<T extends Operation> {
|
||||
private readonly operation: T
|
||||
private readonly tableId: string
|
||||
private datasource?: Datasource
|
||||
private tables: { [key: string]: Table } = {}
|
||||
private readonly source: Table | ViewV2
|
||||
private datasource: Datasource
|
||||
|
||||
constructor(operation: T, tableId: string, datasource?: Datasource) {
|
||||
this.operation = operation
|
||||
this.tableId = tableId
|
||||
this.datasource = datasource
|
||||
if (datasource && datasource.entities) {
|
||||
this.tables = datasource.entities
|
||||
public static async for<T extends Operation>(
|
||||
operation: T,
|
||||
source: Table | ViewV2,
|
||||
opts: { datasource?: Datasource } = {}
|
||||
) {
|
||||
if (!opts.datasource) {
|
||||
if (sdk.views.isView(source)) {
|
||||
const table = await sdk.views.getTable(source.id)
|
||||
opts.datasource = await sdk.datasources.get(table.sourceId!)
|
||||
} else {
|
||||
opts.datasource = await sdk.datasources.get(source.sourceId!)
|
||||
}
|
||||
}
|
||||
|
||||
return new ExternalRequest(operation, source, opts.datasource)
|
||||
}
|
||||
|
||||
private get tables(): { [key: string]: Table } {
|
||||
if (!this.datasource.entities) {
|
||||
throw new Error("Datasource does not have entities")
|
||||
}
|
||||
return this.datasource.entities
|
||||
}
|
||||
|
||||
private constructor(
|
||||
operation: T,
|
||||
source: Table | ViewV2,
|
||||
datasource: Datasource
|
||||
) {
|
||||
this.operation = operation
|
||||
this.source = source
|
||||
this.datasource = datasource
|
||||
}
|
||||
|
||||
private prepareFilters(
|
||||
id: string | undefined | string[],
|
||||
filters: SearchFilters,
|
||||
|
@ -290,20 +316,6 @@ export class ExternalRequest<T extends Operation> {
|
|||
return this.tables[tableName]
|
||||
}
|
||||
|
||||
// seeds the object with table and datasource information
|
||||
async retrieveMetadata(
|
||||
datasourceId: string
|
||||
): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
|
||||
if (!this.datasource) {
|
||||
this.datasource = await sdk.datasources.get(datasourceId)
|
||||
if (!this.datasource || !this.datasource.entities) {
|
||||
throw "No tables found, fetch tables before query."
|
||||
}
|
||||
this.tables = this.datasource.entities
|
||||
}
|
||||
return { tables: this.tables, datasource: this.datasource }
|
||||
}
|
||||
|
||||
async getRow(table: Table, rowId: string): Promise<Row> {
|
||||
const response = await getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(table._id!, Operation.READ),
|
||||
|
@ -619,24 +631,16 @@ export class ExternalRequest<T extends Operation> {
|
|||
}
|
||||
|
||||
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
|
||||
const { operation, tableId } = this
|
||||
if (!tableId) {
|
||||
throw new Error("Unable to run without a table ID")
|
||||
}
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
let datasource = this.datasource
|
||||
if (!datasource) {
|
||||
const { datasource: ds } = await this.retrieveMetadata(datasourceId)
|
||||
datasource = ds
|
||||
}
|
||||
const tables = this.tables
|
||||
const table = tables[tableName]
|
||||
let isSql = isSQL(datasource)
|
||||
if (!table) {
|
||||
throw new Error(
|
||||
`Unable to process query, table "${tableName}" not defined.`
|
||||
)
|
||||
const { operation } = this
|
||||
let table: Table
|
||||
if (sdk.views.isView(this.source)) {
|
||||
table = await sdk.views.getTable(this.source.id)
|
||||
} else {
|
||||
table = this.source
|
||||
}
|
||||
|
||||
let isSql = isSQL(this.datasource)
|
||||
|
||||
// look for specific components of config which may not be considered acceptable
|
||||
let { id, row, filters, sort, paginate, rows } = cleanupConfig(
|
||||
config,
|
||||
|
@ -679,25 +683,40 @@ export class ExternalRequest<T extends Operation> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
operation === Operation.DELETE &&
|
||||
(filters == null || Object.keys(filters).length === 0)
|
||||
) {
|
||||
throw "Deletion must be filtered"
|
||||
}
|
||||
|
||||
let aggregations: Aggregation[] = []
|
||||
if (sdk.views.isView(this.source)) {
|
||||
const calculationFields = helpers.views.calculationFields(this.source)
|
||||
for (const [key, field] of Object.entries(calculationFields)) {
|
||||
aggregations.push({
|
||||
name: key,
|
||||
field: field.field,
|
||||
calculationType: field.calculationType,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
let json: QueryJson = {
|
||||
endpoint: {
|
||||
datasourceId: datasourceId!,
|
||||
entityId: tableName,
|
||||
datasourceId: this.datasource._id!,
|
||||
entityId: table.name,
|
||||
operation,
|
||||
},
|
||||
resource: {
|
||||
// have to specify the fields to avoid column overlap (for SQL)
|
||||
fields: isSql
|
||||
? buildSqlFieldList(table, this.tables, {
|
||||
? await buildSqlFieldList(this.source, this.tables, {
|
||||
relationships: incRelationships,
|
||||
})
|
||||
: [],
|
||||
aggregations,
|
||||
},
|
||||
filters,
|
||||
sort,
|
||||
|
@ -714,7 +733,7 @@ export class ExternalRequest<T extends Operation> {
|
|||
},
|
||||
meta: {
|
||||
table,
|
||||
tables: tables,
|
||||
tables: this.tables,
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -745,7 +764,7 @@ export class ExternalRequest<T extends Operation> {
|
|||
}
|
||||
const output = await sqlOutputProcessing(
|
||||
response,
|
||||
table,
|
||||
this.source,
|
||||
this.tables,
|
||||
relationships
|
||||
)
|
||||
|
|
|
@ -17,6 +17,7 @@ import {
|
|||
Row,
|
||||
Table,
|
||||
UserCtx,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
import * as utils from "./utils"
|
||||
|
@ -29,39 +30,40 @@ import { generateIdForRow } from "./utils"
|
|||
|
||||
export async function handleRequest<T extends Operation>(
|
||||
operation: T,
|
||||
tableId: string,
|
||||
source: Table | ViewV2,
|
||||
opts?: RunConfig
|
||||
): Promise<ExternalRequestReturnType<T>> {
|
||||
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
|
||||
opts || {}
|
||||
)
|
||||
return (
|
||||
await ExternalRequest.for<T>(operation, source, {
|
||||
datasource: opts?.datasource,
|
||||
})
|
||||
).run(opts || {})
|
||||
}
|
||||
|
||||
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
||||
const { tableId, viewId } = utils.getSourceId(ctx)
|
||||
|
||||
const source = await utils.getSource(ctx)
|
||||
const table = await utils.getTableFromSource(source)
|
||||
const { _id, ...rowData } = ctx.request.body
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
|
||||
const { row: dataToUpdate } = await inputProcessing(
|
||||
const dataToUpdate = await inputProcessing(
|
||||
ctx.user?._id,
|
||||
cloneDeep(table),
|
||||
cloneDeep(source),
|
||||
rowData
|
||||
)
|
||||
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row: dataToUpdate,
|
||||
tableId,
|
||||
source,
|
||||
})
|
||||
if (!validateResult.valid) {
|
||||
throw { validation: validateResult.errors }
|
||||
}
|
||||
|
||||
const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
|
||||
const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
|
||||
relationships: true,
|
||||
})
|
||||
|
||||
const response = await handleRequest(Operation.UPDATE, tableId, {
|
||||
const response = await handleRequest(Operation.UPDATE, source, {
|
||||
id: breakRowIdField(_id),
|
||||
row: dataToUpdate,
|
||||
})
|
||||
|
@ -69,17 +71,16 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
// The id might have been changed, so the refetching would fail. Recalculating the id just in case
|
||||
const updatedId =
|
||||
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
|
||||
const row = await sdk.rows.external.getRow(tableId, updatedId, {
|
||||
const row = await sdk.rows.external.getRow(table._id!, updatedId, {
|
||||
relationships: true,
|
||||
})
|
||||
|
||||
const [enrichedRow, oldRow] = await Promise.all([
|
||||
outputProcessing(table, row, {
|
||||
outputProcessing(source, row, {
|
||||
squash: true,
|
||||
preserveLinks: true,
|
||||
fromViewId: viewId,
|
||||
}),
|
||||
outputProcessing(table, beforeRow, {
|
||||
outputProcessing(source, beforeRow, {
|
||||
squash: true,
|
||||
preserveLinks: true,
|
||||
}),
|
||||
|
@ -94,9 +95,9 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
}
|
||||
|
||||
export async function destroy(ctx: UserCtx) {
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const source = await utils.getSource(ctx)
|
||||
const _id = ctx.request.body._id
|
||||
const { row } = await handleRequest(Operation.DELETE, tableId, {
|
||||
const { row } = await handleRequest(Operation.DELETE, source, {
|
||||
id: breakRowIdField(_id),
|
||||
includeSqlRelationships: IncludeRelationship.EXCLUDE,
|
||||
})
|
||||
|
@ -105,11 +106,11 @@ export async function destroy(ctx: UserCtx) {
|
|||
|
||||
export async function bulkDestroy(ctx: UserCtx) {
|
||||
const { rows } = ctx.request.body
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const source = await utils.getSource(ctx)
|
||||
let promises: Promise<{ row: Row; table: Table }>[] = []
|
||||
for (let row of rows) {
|
||||
promises.push(
|
||||
handleRequest(Operation.DELETE, tableId, {
|
||||
handleRequest(Operation.DELETE, source, {
|
||||
id: breakRowIdField(row._id),
|
||||
includeSqlRelationships: IncludeRelationship.EXCLUDE,
|
||||
})
|
||||
|
@ -124,6 +125,7 @@ export async function bulkDestroy(ctx: UserCtx) {
|
|||
|
||||
export async function fetchEnrichedRow(ctx: UserCtx) {
|
||||
const id = ctx.params.rowId
|
||||
const source = await utils.getSource(ctx)
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const datasource: Datasource = await sdk.datasources.get(datasourceId)
|
||||
|
@ -131,7 +133,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
|
|||
ctx.throw(400, "Datasource has not been configured for plus API.")
|
||||
}
|
||||
const tables = datasource.entities
|
||||
const response = await handleRequest(Operation.READ, tableId, {
|
||||
const response = await handleRequest(Operation.READ, source, {
|
||||
id,
|
||||
datasource,
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
|
@ -155,7 +157,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
|
|||
// don't support composite keys right now
|
||||
const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
|
||||
const primaryLink = linkedTable.primary?.[0] as string
|
||||
const relatedRows = await handleRequest(Operation.READ, linkedTableId!, {
|
||||
const relatedRows = await handleRequest(Operation.READ, linkedTable, {
|
||||
tables,
|
||||
filters: {
|
||||
oneOf: {
|
||||
|
|
|
@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
|
|||
}
|
||||
|
||||
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const { tableId, viewId } = utils.getSourceId(ctx)
|
||||
|
||||
await context.ensureSnippetContext(true)
|
||||
|
||||
|
@ -222,6 +222,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
|||
...ctx.request.body,
|
||||
query: enrichedQuery,
|
||||
tableId,
|
||||
viewId,
|
||||
}
|
||||
|
||||
ctx.status = 200
|
||||
|
@ -229,14 +230,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
|
|||
}
|
||||
|
||||
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const source = await utils.getSource(ctx)
|
||||
const table = await utils.getTableFromSource(source)
|
||||
// external tables are hard to validate currently
|
||||
if (isExternalTableID(tableId)) {
|
||||
if (isExternalTableID(table._id!)) {
|
||||
ctx.body = { valid: true, errors: {} }
|
||||
} else {
|
||||
ctx.body = await sdk.rows.utils.validate({
|
||||
row: ctx.request.body,
|
||||
tableId,
|
||||
source,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -21,18 +21,19 @@ import {
|
|||
import sdk from "../../../sdk"
|
||||
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
|
||||
import { flatten } from "lodash"
|
||||
import { findRow } from "../../../sdk/app/rows/internal"
|
||||
|
||||
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
||||
const { tableId, viewId } = utils.getSourceId(ctx)
|
||||
const { tableId } = utils.getSourceId(ctx)
|
||||
const source = await utils.getSource(ctx)
|
||||
const table = sdk.views.isView(source)
|
||||
? await sdk.views.getTable(source.id)
|
||||
: source
|
||||
const inputs = ctx.request.body
|
||||
const isUserTable = tableId === InternalTables.USER_METADATA
|
||||
let oldRow
|
||||
const dbTable = await sdk.tables.getTable(tableId)
|
||||
try {
|
||||
oldRow = await outputProcessing(
|
||||
dbTable,
|
||||
await utils.findRow(tableId, inputs._id!)
|
||||
)
|
||||
oldRow = await outputProcessing(source, await findRow(tableId, inputs._id!))
|
||||
} catch (err) {
|
||||
if (isUserTable) {
|
||||
// don't include the rev, it'll be the global rev
|
||||
|
@ -48,22 +49,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
// need to build up full patch fields before coerce
|
||||
let combinedRow: any = cloneDeep(oldRow)
|
||||
for (let key of Object.keys(inputs)) {
|
||||
if (!dbTable.schema[key]) continue
|
||||
if (!table.schema[key]) continue
|
||||
combinedRow[key] = inputs[key]
|
||||
}
|
||||
|
||||
// need to copy the table so it can be differenced on way out
|
||||
const tableClone = cloneDeep(dbTable)
|
||||
|
||||
// this returns the table and row incase they have been updated
|
||||
let { table, row } = await inputProcessing(
|
||||
ctx.user?._id,
|
||||
tableClone,
|
||||
combinedRow
|
||||
)
|
||||
let row = await inputProcessing(ctx.user?._id, source, combinedRow)
|
||||
const validateResult = await sdk.rows.utils.validate({
|
||||
row,
|
||||
table,
|
||||
source,
|
||||
})
|
||||
|
||||
if (!validateResult.valid) {
|
||||
|
@ -87,10 +81,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
|||
return { row: ctx.body as Row, table, oldRow }
|
||||
}
|
||||
|
||||
const result = await finaliseRow(table, row, {
|
||||
oldTable: dbTable,
|
||||
const result = await finaliseRow(source, row, {
|
||||
updateFormula: true,
|
||||
fromViewId: viewId,
|
||||
})
|
||||
|
||||
return { ...result, oldRow }
|
||||
|
@ -186,7 +178,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
|
|||
sdk.tables.getTable(tableId),
|
||||
linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
|
||||
])
|
||||
let row = await utils.findRow(tableId, rowId)
|
||||
let row = await findRow(tableId, rowId)
|
||||
row = await outputProcessing(table, row)
|
||||
const linkVals = links as LinkDocumentValue[]
|
||||
|
||||
|
|
|
@ -4,10 +4,11 @@ import {
|
|||
processFormulas,
|
||||
} from "../../../utilities/rowProcessor"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { Table, Row, FormulaType, FieldType } from "@budibase/types"
|
||||
import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
|
||||
import * as linkRows from "../../../db/linkedRows"
|
||||
import isEqual from "lodash/isEqual"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
/**
|
||||
* This function runs through a list of enriched rows, looks at the rows which
|
||||
|
@ -121,33 +122,26 @@ export async function updateAllFormulasInTable(table: Table) {
|
|||
* expects the row to be totally enriched/contain all relationships.
|
||||
*/
|
||||
export async function finaliseRow(
|
||||
table: Table,
|
||||
source: Table | ViewV2,
|
||||
row: Row,
|
||||
{
|
||||
oldTable,
|
||||
updateFormula,
|
||||
fromViewId,
|
||||
}: { oldTable?: Table; updateFormula: boolean; fromViewId?: string } = {
|
||||
updateFormula: true,
|
||||
}
|
||||
opts?: { updateFormula: boolean }
|
||||
) {
|
||||
const db = context.getAppDB()
|
||||
const { updateFormula = true } = opts || {}
|
||||
const table = sdk.views.isView(source)
|
||||
? await sdk.views.getTable(source.id)
|
||||
: source
|
||||
|
||||
row.type = "row"
|
||||
// process the row before return, to include relationships
|
||||
let enrichedRow = (await outputProcessing(table, cloneDeep(row), {
|
||||
let enrichedRow = await outputProcessing(source, cloneDeep(row), {
|
||||
squash: false,
|
||||
})) as Row
|
||||
})
|
||||
// use enriched row to generate formulas for saving, specifically only use as context
|
||||
row = await processFormulas(table, row, {
|
||||
dynamic: false,
|
||||
contextRows: [enrichedRow],
|
||||
})
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
// if another row has been written since processing this will
|
||||
// handle the auto ID clash
|
||||
if (oldTable && !isEqual(oldTable, table)) {
|
||||
await db.put(table)
|
||||
}
|
||||
const response = await db.put(row)
|
||||
// for response, calculate the formulas for the enriched row
|
||||
enrichedRow._rev = response.rev
|
||||
|
@ -158,8 +152,6 @@ export async function finaliseRow(
|
|||
if (updateFormula) {
|
||||
await updateRelatedFormula(table, enrichedRow)
|
||||
}
|
||||
const squashed = await linkRows.squashLinks(table, enrichedRow, {
|
||||
fromViewId,
|
||||
})
|
||||
const squashed = await linkRows.squashLinks(source, enrichedRow)
|
||||
return { row: enrichedRow, squashed, table }
|
||||
}
|
||||
|
|
|
@ -1,11 +1,19 @@
|
|||
// need to handle table name + field or just field, depending on if relationships used
|
||||
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types"
|
||||
import {
|
||||
FieldSchema,
|
||||
FieldType,
|
||||
Row,
|
||||
Table,
|
||||
JsonTypes,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
helpers,
|
||||
PROTECTED_EXTERNAL_COLUMNS,
|
||||
PROTECTED_INTERNAL_COLUMNS,
|
||||
} from "@budibase/shared-core"
|
||||
import { generateRowIdField } from "../../../../integrations/utils"
|
||||
import sdk from "../../../../sdk"
|
||||
|
||||
function extractFieldValue({
|
||||
row,
|
||||
|
@ -78,20 +86,30 @@ function fixJsonTypes(row: Row, table: Table) {
|
|||
return row
|
||||
}
|
||||
|
||||
export function basicProcessing({
|
||||
export async function basicProcessing({
|
||||
row,
|
||||
table,
|
||||
source,
|
||||
tables,
|
||||
isLinked,
|
||||
sqs,
|
||||
}: {
|
||||
row: Row
|
||||
table: Table
|
||||
source: Table | ViewV2
|
||||
tables: Table[]
|
||||
isLinked: boolean
|
||||
sqs?: boolean
|
||||
}): Row {
|
||||
}): Promise<Row> {
|
||||
let table: Table
|
||||
let isCalculationView = false
|
||||
if (sdk.views.isView(source)) {
|
||||
table = await sdk.views.getTable(source.id)
|
||||
isCalculationView = helpers.views.isCalculationView(source)
|
||||
} else {
|
||||
table = source
|
||||
}
|
||||
|
||||
const thisRow: Row = {}
|
||||
|
||||
// filter the row down to what is actually the row (not joined)
|
||||
for (let fieldName of Object.keys(table.schema)) {
|
||||
let value = extractFieldValue({
|
||||
|
@ -108,13 +126,20 @@ export function basicProcessing({
|
|||
thisRow[fieldName] = value
|
||||
}
|
||||
}
|
||||
|
||||
if (sdk.views.isView(source)) {
|
||||
for (const key of Object.keys(helpers.views.calculationFields(source))) {
|
||||
thisRow[key] = row[key]
|
||||
}
|
||||
}
|
||||
|
||||
let columns: string[] = Object.keys(table.schema)
|
||||
if (!sqs) {
|
||||
if (!sqs && !isCalculationView) {
|
||||
thisRow._id = generateIdForRow(row, table, isLinked)
|
||||
thisRow.tableId = table._id
|
||||
thisRow._rev = "rev"
|
||||
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
|
||||
} else {
|
||||
} else if (!isCalculationView) {
|
||||
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
|
||||
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
|
||||
thisRow[internalColumn] = extractFieldValue({
|
||||
|
@ -149,17 +174,19 @@ export function basicProcessing({
|
|||
thisRow[col] = array
|
||||
// make sure all of them have an _id
|
||||
const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]!
|
||||
thisRow[col] = (thisRow[col] as Row[])
|
||||
.map(relatedRow =>
|
||||
thisRow[col] = (
|
||||
await Promise.all(
|
||||
(thisRow[col] as Row[]).map(relatedRow =>
|
||||
basicProcessing({
|
||||
row: relatedRow,
|
||||
table: relatedTable,
|
||||
source: relatedTable,
|
||||
tables,
|
||||
isLinked: false,
|
||||
sqs,
|
||||
})
|
||||
)
|
||||
.sort((a, b) => {
|
||||
)
|
||||
).sort((a, b) => {
|
||||
const aField = a?.[sortField],
|
||||
bField = b?.[sortField]
|
||||
if (!aField) {
|
||||
|
|
|
@ -7,10 +7,14 @@ import {
|
|||
ManyToManyRelationshipFieldMetadata,
|
||||
RelationshipFieldMetadata,
|
||||
RelationshipsJson,
|
||||
Row,
|
||||
Table,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import { breakExternalTableId } from "../../../../integrations/utils"
|
||||
import { generateJunctionTableID } from "../../../../db/utils"
|
||||
import sdk from "../../../../sdk"
|
||||
import { helpers } from "@budibase/shared-core"
|
||||
|
||||
type TableMap = Record<string, Table>
|
||||
|
||||
|
@ -108,11 +112,12 @@ export function buildInternalRelationships(
|
|||
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us
|
||||
* is more performant and has the added benefit of protecting against this scenario.
|
||||
*/
|
||||
export function buildSqlFieldList(
|
||||
table: Table,
|
||||
export async function buildSqlFieldList(
|
||||
source: Table | ViewV2,
|
||||
tables: TableMap,
|
||||
opts?: { relationships: boolean }
|
||||
) {
|
||||
const { relationships } = opts || {}
|
||||
function extractRealFields(table: Table, existing: string[] = []) {
|
||||
return Object.entries(table.schema)
|
||||
.filter(
|
||||
|
@ -123,22 +128,33 @@ export function buildSqlFieldList(
|
|||
)
|
||||
.map(column => `${table.name}.${column[0]}`)
|
||||
}
|
||||
let fields = extractRealFields(table)
|
||||
|
||||
let fields: string[] = []
|
||||
if (sdk.views.isView(source)) {
|
||||
fields = Object.keys(helpers.views.basicFields(source)).filter(
|
||||
key => source.schema?.[key]?.visible !== false
|
||||
)
|
||||
} else {
|
||||
fields = extractRealFields(source)
|
||||
}
|
||||
|
||||
let table: Table
|
||||
if (sdk.views.isView(source)) {
|
||||
table = await sdk.views.getTable(source.id)
|
||||
} else {
|
||||
table = source
|
||||
}
|
||||
|
||||
for (let field of Object.values(table.schema)) {
|
||||
if (
|
||||
field.type !== FieldType.LINK ||
|
||||
!opts?.relationships ||
|
||||
!field.tableId
|
||||
) {
|
||||
if (field.type !== FieldType.LINK || !relationships || !field.tableId) {
|
||||
continue
|
||||
}
|
||||
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
|
||||
const linkTable = tables[linkTableName]
|
||||
if (linkTable) {
|
||||
const linkedFields = extractRealFields(linkTable, fields)
|
||||
fields = fields.concat(linkedFields)
|
||||
const { tableName } = breakExternalTableId(field.tableId)
|
||||
if (tables[tableName]) {
|
||||
fields = fields.concat(extractRealFields(tables[tableName], fields))
|
||||
}
|
||||
}
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
|
@ -149,3 +165,7 @@ export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
|
|||
(DSPlusOperation.READ in resp[0] && resp[0].read === true)
|
||||
)
|
||||
}
|
||||
|
||||
export function isKnexRows(resp: DatasourcePlusQueryResponse): resp is Row[] {
|
||||
return !isKnexEmptyReadResponse(resp)
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import * as utils from "../../../../db/utils"
|
||||
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { docIds } from "@budibase/backend-core"
|
||||
import {
|
||||
Ctx,
|
||||
DatasourcePlusQueryResponse,
|
||||
|
@ -8,17 +8,18 @@ import {
|
|||
RelationshipsJson,
|
||||
Row,
|
||||
Table,
|
||||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import {
|
||||
processDates,
|
||||
processFormulas,
|
||||
} from "../../../../utilities/rowProcessor"
|
||||
import { isKnexEmptyReadResponse } from "./sqlUtils"
|
||||
import { isKnexRows } from "./sqlUtils"
|
||||
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
|
||||
import sdk from "../../../../sdk"
|
||||
import { processStringSync } from "@budibase/string-templates"
|
||||
import validateJs from "validate.js"
|
||||
import { getFullUser } from "../../../../utilities/users"
|
||||
import { helpers } from "@budibase/shared-core"
|
||||
|
||||
validateJs.extend(validateJs.validators.datetime, {
|
||||
parse: function (value: string) {
|
||||
|
@ -58,26 +59,11 @@ export async function processRelationshipFields(
|
|||
return row
|
||||
}
|
||||
|
||||
export async function findRow(tableId: string, rowId: string) {
|
||||
const db = context.getAppDB()
|
||||
let row: Row
|
||||
// TODO remove special user case in future
|
||||
if (tableId === utils.InternalTables.USER_METADATA) {
|
||||
row = await getFullUser(rowId)
|
||||
} else {
|
||||
row = await db.get(rowId)
|
||||
}
|
||||
if (row.tableId !== tableId) {
|
||||
throw "Supplied tableId does not match the rows tableId"
|
||||
}
|
||||
return row
|
||||
}
|
||||
|
||||
export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
|
||||
// top priority, use the URL first
|
||||
if (ctx.params?.sourceId) {
|
||||
const { sourceId } = ctx.params
|
||||
if (utils.isViewID(sourceId)) {
|
||||
if (docIds.isViewId(sourceId)) {
|
||||
return {
|
||||
tableId: utils.extractViewInfoFromID(sourceId).tableId,
|
||||
viewId: sourceId,
|
||||
|
@ -96,22 +82,22 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
|
|||
throw new Error("Unable to find table ID in request")
|
||||
}
|
||||
|
||||
export async function validate(
|
||||
opts: { row: Row } & ({ tableId: string } | { table: Table })
|
||||
) {
|
||||
let fetchedTable: Table
|
||||
if ("tableId" in opts) {
|
||||
fetchedTable = await sdk.tables.getTable(opts.tableId)
|
||||
} else {
|
||||
fetchedTable = opts.table
|
||||
export async function getSource(ctx: Ctx): Promise<Table | ViewV2> {
|
||||
const { tableId, viewId } = getSourceId(ctx)
|
||||
if (viewId) {
|
||||
return sdk.views.get(viewId)
|
||||
}
|
||||
return sdk.rows.utils.validate({
|
||||
...opts,
|
||||
table: fetchedTable,
|
||||
})
|
||||
return sdk.tables.getTable(tableId)
|
||||
}
|
||||
|
||||
function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
|
||||
export async function getTableFromSource(source: Table | ViewV2) {
|
||||
if (sdk.views.isView(source)) {
|
||||
return await sdk.views.getTable(source.id)
|
||||
}
|
||||
return source
|
||||
}
|
||||
|
||||
function fixBooleanFields(row: Row, table: Table) {
|
||||
for (let col of Object.values(table.schema)) {
|
||||
if (col.type === FieldType.BOOLEAN) {
|
||||
if (row[col.name] === 1) {
|
||||
|
@ -126,49 +112,45 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
|
|||
|
||||
export async function sqlOutputProcessing(
|
||||
rows: DatasourcePlusQueryResponse,
|
||||
table: Table,
|
||||
source: Table | ViewV2,
|
||||
tables: Record<string, Table>,
|
||||
relationships: RelationshipsJson[],
|
||||
opts?: { sqs?: boolean }
|
||||
): Promise<Row[]> {
|
||||
if (isKnexEmptyReadResponse(rows)) {
|
||||
if (!isKnexRows(rows)) {
|
||||
return []
|
||||
}
|
||||
let finalRows: { [key: string]: Row } = {}
|
||||
for (let row of rows as Row[]) {
|
||||
let rowId = row._id
|
||||
if (opts?.sqs) {
|
||||
rowId = getInternalRowId(row, table)
|
||||
row._id = rowId
|
||||
} else if (!rowId) {
|
||||
rowId = generateIdForRow(row, table)
|
||||
row._id = rowId
|
||||
|
||||
let table: Table
|
||||
let isCalculationView = false
|
||||
if (sdk.views.isView(source)) {
|
||||
table = await sdk.views.getTable(source.id)
|
||||
isCalculationView = helpers.views.isCalculationView(source)
|
||||
} else {
|
||||
table = source
|
||||
}
|
||||
const thisRow = basicProcessing({
|
||||
|
||||
let processedRows: Row[] = []
|
||||
for (let row of rows) {
|
||||
if (opts?.sqs) {
|
||||
row._id = getInternalRowId(row, table)
|
||||
} else if (row._id == null && !isCalculationView) {
|
||||
row._id = generateIdForRow(row, table)
|
||||
}
|
||||
|
||||
row = await basicProcessing({
|
||||
row,
|
||||
table,
|
||||
source,
|
||||
tables: Object.values(tables),
|
||||
isLinked: false,
|
||||
sqs: opts?.sqs,
|
||||
})
|
||||
if (thisRow._id == null) {
|
||||
throw new Error("Unable to generate row ID for SQL rows")
|
||||
row = fixBooleanFields(row, table)
|
||||
row = await processRelationshipFields(table, tables, row, relationships)
|
||||
processedRows.push(row)
|
||||
}
|
||||
|
||||
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
|
||||
}
|
||||
|
||||
// make sure all related rows are correct
|
||||
let finalRowArray = []
|
||||
for (let row of Object.values(finalRows)) {
|
||||
finalRowArray.push(
|
||||
await processRelationshipFields(table, tables, row, relationships)
|
||||
)
|
||||
}
|
||||
|
||||
// process some additional types
|
||||
finalRowArray = processDates(table, finalRowArray)
|
||||
return finalRowArray
|
||||
return processDates(table, processedRows)
|
||||
}
|
||||
|
||||
export function isUserMetadataTable(tableId: string) {
|
||||
|
|
|
@ -3,8 +3,6 @@ import {
|
|||
ViewV2,
|
||||
SearchRowResponse,
|
||||
SearchViewRowRequest,
|
||||
RequiredKeys,
|
||||
RowSearchParams,
|
||||
SearchFilterKey,
|
||||
LogicalOperator,
|
||||
} from "@budibase/types"
|
||||
|
@ -27,9 +25,6 @@ export async function searchView(
|
|||
ctx.throw(400, `This method only supports viewsV2`)
|
||||
}
|
||||
|
||||
const viewFields = Object.entries(view.schema || {})
|
||||
.filter(([_, value]) => value.visible)
|
||||
.map(([key]) => key)
|
||||
const { body } = ctx.request
|
||||
|
||||
// Enrich saved query with ephemeral query params.
|
||||
|
@ -74,22 +69,17 @@ export async function searchView(
|
|||
user: sdk.users.getUserContextBindings(ctx.user),
|
||||
})
|
||||
|
||||
const searchOptions: RequiredKeys<SearchViewRowRequest> &
|
||||
RequiredKeys<
|
||||
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
|
||||
> = {
|
||||
tableId: view.tableId,
|
||||
const result = await sdk.rows.search({
|
||||
viewId: view.id,
|
||||
tableId: view.tableId,
|
||||
query: enrichedQuery,
|
||||
fields: viewFields,
|
||||
...getSortOptions(body, view),
|
||||
limit: body.limit,
|
||||
bookmark: body.bookmark,
|
||||
paginate: body.paginate,
|
||||
countRows: body.countRows,
|
||||
}
|
||||
})
|
||||
|
||||
const result = await sdk.rows.search(searchOptions)
|
||||
result.rows.forEach(r => (r._viewId = view.id))
|
||||
ctx.body = result
|
||||
}
|
||||
|
|
|
@ -113,11 +113,10 @@ export async function bulkImport(
|
|||
const processed = await inputProcessing(ctx.user?._id, table, row, {
|
||||
noAutoRelationships: true,
|
||||
})
|
||||
parsedRows.push(processed.row)
|
||||
table = processed.table
|
||||
parsedRows.push(processed)
|
||||
}
|
||||
|
||||
await handleRequest(Operation.BULK_UPSERT, table._id!, {
|
||||
await handleRequest(Operation.BULK_UPSERT, table, {
|
||||
rows: parsedRows,
|
||||
})
|
||||
await events.rows.imported(table, parsedRows.length)
|
||||
|
|
|
@ -33,7 +33,7 @@ import {
|
|||
import sdk from "../../../sdk"
|
||||
import { jsonFromCsvString } from "../../../utilities/csv"
|
||||
import { builderSocket } from "../../../websockets"
|
||||
import { cloneDeep, isEqual } from "lodash"
|
||||
import { cloneDeep } from "lodash"
|
||||
import {
|
||||
helpers,
|
||||
PROTECTED_EXTERNAL_COLUMNS,
|
||||
|
@ -149,12 +149,7 @@ export async function bulkImport(
|
|||
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
|
||||
) {
|
||||
const tableId = ctx.params.tableId
|
||||
let tableBefore = await sdk.tables.getTable(tableId)
|
||||
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
|
||||
|
||||
if (!isEqual(tableBefore, tableAfter)) {
|
||||
await sdk.tables.saveTable(tableAfter)
|
||||
}
|
||||
await pickApi({ tableId }).bulkImport(ctx)
|
||||
|
||||
// right now we don't trigger anything for bulk import because it
|
||||
// can only be done in the builder, but in the future we may need to
|
||||
|
|
|
@ -3,7 +3,6 @@ import { handleDataImport } from "./utils"
|
|||
import {
|
||||
BulkImportRequest,
|
||||
BulkImportResponse,
|
||||
FieldType,
|
||||
RenameColumn,
|
||||
SaveTableRequest,
|
||||
SaveTableResponse,
|
||||
|
@ -70,22 +69,10 @@ export async function bulkImport(
|
|||
) {
|
||||
const table = await sdk.tables.getTable(ctx.params.tableId)
|
||||
const { rows, identifierFields } = ctx.request.body
|
||||
await handleDataImport(
|
||||
{
|
||||
...table,
|
||||
schema: {
|
||||
_id: {
|
||||
name: "_id",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
...table.schema,
|
||||
},
|
||||
},
|
||||
{
|
||||
await handleDataImport(table, {
|
||||
importRows: rows,
|
||||
identifierFields,
|
||||
user: ctx.user,
|
||||
}
|
||||
)
|
||||
})
|
||||
return table
|
||||
}
|
||||
|
|
|
@ -139,8 +139,7 @@ export async function importToRows(
|
|||
const processed = await inputProcessing(user?._id, table, row, {
|
||||
noAutoRelationships: true,
|
||||
})
|
||||
row = processed.row
|
||||
table = processed.table
|
||||
row = processed
|
||||
|
||||
// However here we must reference the original table, as we want to mutate
|
||||
// the real schema of the table passed in, not the clone used for
|
||||
|
|
|
@ -7,10 +7,49 @@ import {
|
|||
ViewResponse,
|
||||
ViewResponseEnriched,
|
||||
ViewV2,
|
||||
ViewFieldMetadata,
|
||||
BasicViewFieldMetadata,
|
||||
ViewCalculationFieldMetadata,
|
||||
RelationSchemaField,
|
||||
ViewFieldMetadata,
|
||||
} from "@budibase/types"
|
||||
import { builderSocket, gridSocket } from "../../../websockets"
|
||||
import { helpers } from "@budibase/shared-core"
|
||||
|
||||
function stripUnknownFields(
|
||||
field: BasicViewFieldMetadata
|
||||
): RequiredKeys<BasicViewFieldMetadata> {
|
||||
if (helpers.views.isCalculationField(field)) {
|
||||
const strippedField: RequiredKeys<ViewCalculationFieldMetadata> = {
|
||||
order: field.order,
|
||||
width: field.width,
|
||||
visible: field.visible,
|
||||
readonly: field.readonly,
|
||||
icon: field.icon,
|
||||
calculationType: field.calculationType,
|
||||
field: field.field,
|
||||
columns: field.columns,
|
||||
}
|
||||
return strippedField
|
||||
} else {
|
||||
const strippedField: RequiredKeys<BasicViewFieldMetadata> = {
|
||||
order: field.order,
|
||||
width: field.width,
|
||||
visible: field.visible,
|
||||
readonly: field.readonly,
|
||||
icon: field.icon,
|
||||
columns: field.columns,
|
||||
}
|
||||
return strippedField
|
||||
}
|
||||
}
|
||||
|
||||
function stripUndefinedFields(obj: Record<string, any>): void {
|
||||
Object.keys(obj)
|
||||
.filter(key => obj[key] === undefined)
|
||||
.forEach(key => {
|
||||
delete obj[key]
|
||||
})
|
||||
}
|
||||
|
||||
async function parseSchema(view: CreateViewRequest) {
|
||||
if (!view.schema) {
|
||||
|
@ -22,6 +61,7 @@ async function parseSchema(view: CreateViewRequest) {
|
|||
let fieldRelatedSchema:
|
||||
| Record<string, RequiredKeys<RelationSchemaField>>
|
||||
| undefined
|
||||
|
||||
if (schemaValue.columns) {
|
||||
fieldRelatedSchema = Object.entries(schemaValue.columns).reduce<
|
||||
NonNullable<typeof fieldRelatedSchema>
|
||||
|
@ -35,25 +75,12 @@ async function parseSchema(view: CreateViewRequest) {
|
|||
}
|
||||
return acc
|
||||
}, {})
|
||||
schemaValue.columns = fieldRelatedSchema
|
||||
}
|
||||
|
||||
const fieldSchema: RequiredKeys<
|
||||
ViewFieldMetadata & {
|
||||
columns: typeof fieldRelatedSchema
|
||||
}
|
||||
> = {
|
||||
order: schemaValue.order,
|
||||
width: schemaValue.width,
|
||||
visible: schemaValue.visible,
|
||||
readonly: schemaValue.readonly,
|
||||
icon: schemaValue.icon,
|
||||
columns: fieldRelatedSchema,
|
||||
}
|
||||
Object.entries(fieldSchema)
|
||||
.filter(([, val]) => val === undefined)
|
||||
.forEach(([key]) => {
|
||||
delete fieldSchema[key as keyof ViewFieldMetadata]
|
||||
})
|
||||
const fieldSchema = stripUnknownFields(schemaValue)
|
||||
stripUndefinedFields(fieldSchema)
|
||||
|
||||
p[fieldName] = fieldSchema
|
||||
return p
|
||||
}, {} as Record<string, RequiredKeys<ViewFieldMetadata>>)
|
||||
|
|
|
@ -76,7 +76,7 @@ async function waitForEvent(
|
|||
}
|
||||
|
||||
describe.each([
|
||||
["internal", undefined],
|
||||
["lucene", undefined],
|
||||
["sqs", undefined],
|
||||
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
|
||||
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
|
||||
|
@ -2453,9 +2453,15 @@ describe.each([
|
|||
let flagCleanup: (() => void) | undefined
|
||||
|
||||
beforeAll(async () => {
|
||||
flagCleanup = setCoreEnv({
|
||||
const env = {
|
||||
TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
|
||||
})
|
||||
}
|
||||
if (isSqs) {
|
||||
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
|
||||
} else {
|
||||
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
|
||||
}
|
||||
flagCleanup = setCoreEnv(env)
|
||||
|
||||
const aux2Table = await config.api.table.save(saveTableRequest())
|
||||
const aux2Data = await config.api.row.save(aux2Table._id!, {})
|
||||
|
|
|
@ -157,7 +157,11 @@ describe.each([
|
|||
if (isInMemory) {
|
||||
return dataFilters.search(_.cloneDeep(rows), this.query)
|
||||
} else {
|
||||
return config.api.row.search(this.query.tableId, this.query)
|
||||
const sourceId = this.query.viewId || this.query.tableId
|
||||
if (!sourceId) {
|
||||
throw new Error("No source ID provided")
|
||||
}
|
||||
return config.api.row.search(sourceId, this.query)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -18,10 +18,11 @@ import {
|
|||
ViewV2,
|
||||
SearchResponse,
|
||||
BasicOperator,
|
||||
CalculationType,
|
||||
RelationshipType,
|
||||
TableSchema,
|
||||
ViewFieldMetadata,
|
||||
RenameColumn,
|
||||
ViewFieldMetadata,
|
||||
FeatureFlag,
|
||||
BBReferenceFieldSubType,
|
||||
} from "@budibase/types"
|
||||
|
@ -36,7 +37,6 @@ import {
|
|||
setEnv as setCoreEnv,
|
||||
env,
|
||||
} from "@budibase/backend-core"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
describe.each([
|
||||
["lucene", undefined],
|
||||
|
@ -2196,28 +2196,6 @@ describe.each([
|
|||
expect(response.rows).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("queries the row api passing the view fields only", async () => {
|
||||
const searchSpy = jest.spyOn(sdk.rows, "search")
|
||||
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
id: { visible: true },
|
||||
one: { visible: false },
|
||||
},
|
||||
})
|
||||
|
||||
await config.api.viewV2.search(view.id, { query: {} })
|
||||
expect(searchSpy).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(searchSpy).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
fields: ["id"],
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
describe("foreign relationship columns", () => {
|
||||
let envCleanup: () => void
|
||||
beforeAll(() => {
|
||||
|
@ -2382,6 +2360,71 @@ describe.each([
|
|||
])
|
||||
})
|
||||
})
|
||||
|
||||
!isLucene &&
|
||||
describe("calculations", () => {
|
||||
let table: Table
|
||||
let rows: Row[]
|
||||
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
saveTableRequest({
|
||||
schema: {
|
||||
quantity: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "quantity",
|
||||
},
|
||||
price: {
|
||||
type: FieldType.NUMBER,
|
||||
name: "price",
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
rows = await Promise.all(
|
||||
Array.from({ length: 10 }, () =>
|
||||
config.api.row.save(table._id!, {
|
||||
quantity: generator.natural({ min: 1, max: 10 }),
|
||||
price: generator.natural({ min: 1, max: 10 }),
|
||||
})
|
||||
)
|
||||
)
|
||||
})
|
||||
|
||||
it("should be able to search by calculations", async () => {
|
||||
const view = await config.api.viewV2.create({
|
||||
tableId: table._id!,
|
||||
name: generator.guid(),
|
||||
schema: {
|
||||
"Quantity Sum": {
|
||||
visible: true,
|
||||
calculationType: CalculationType.SUM,
|
||||
field: "quantity",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
const response = await config.api.viewV2.search(view.id, {
|
||||
query: {},
|
||||
})
|
||||
|
||||
expect(response.rows).toHaveLength(1)
|
||||
expect(response.rows).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
|
||||
}),
|
||||
])
|
||||
)
|
||||
|
||||
// Calculation views do not return rows that can be linked back to
|
||||
// the source table, and so should not have an _id field.
|
||||
for (const row of response.rows) {
|
||||
expect("_id" in row).toBe(false)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("permissions", () => {
|
||||
|
|
|
@ -27,6 +27,7 @@ import {
|
|||
ViewV2,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
import { helpers } from "@budibase/shared-core"
|
||||
|
||||
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
|
||||
|
||||
|
@ -247,26 +248,36 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
|
|||
export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
|
||||
|
||||
/**
|
||||
* This function will take the given enriched rows and squash the links to only contain the primary display field.
|
||||
* @returns The rows after having their links squashed to only contain the ID and primary display.
|
||||
* This function will take the given enriched rows and squash the links to only
|
||||
* contain the primary display field.
|
||||
*
|
||||
* @returns The rows after having their links squashed to only contain the ID
|
||||
* and primary display.
|
||||
*/
|
||||
export async function squashLinks<T = Row[] | Row>(
|
||||
table: Table,
|
||||
enriched: T,
|
||||
options?: {
|
||||
fromViewId?: string
|
||||
}
|
||||
source: Table | ViewV2,
|
||||
enriched: T
|
||||
): Promise<T> {
|
||||
const allowRelationshipSchemas = await features.flags.isEnabled(
|
||||
FeatureFlag.ENRICHED_RELATIONSHIPS
|
||||
)
|
||||
|
||||
let viewSchema: Record<string, ViewFieldMetadata> = {}
|
||||
if (options?.fromViewId && allowRelationshipSchemas) {
|
||||
const view = Object.values(table.views || {}).find(
|
||||
(v): v is ViewV2 => sdk.views.isV2(v) && v.id === options?.fromViewId
|
||||
)
|
||||
viewSchema = view?.schema || {}
|
||||
if (sdk.views.isView(source)) {
|
||||
if (helpers.views.isCalculationView(source)) {
|
||||
return enriched
|
||||
}
|
||||
|
||||
if (allowRelationshipSchemas) {
|
||||
viewSchema = source.schema || {}
|
||||
}
|
||||
}
|
||||
|
||||
let table: Table
|
||||
if (sdk.views.isView(source)) {
|
||||
table = await sdk.views.getTable(source.id)
|
||||
} else {
|
||||
table = source
|
||||
}
|
||||
|
||||
// will populate this as we find them
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { context, db as dbCore, utils } from "@budibase/backend-core"
|
||||
import { context, db as dbCore, docIds, utils } from "@budibase/backend-core"
|
||||
import {
|
||||
DatabaseQueryOpts,
|
||||
Datasource,
|
||||
|
@ -318,12 +318,8 @@ export function generateViewID(tableId: string) {
|
|||
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
|
||||
}
|
||||
|
||||
export function isViewID(viewId: string) {
|
||||
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
|
||||
}
|
||||
|
||||
export function extractViewInfoFromID(viewId: string) {
|
||||
if (!isViewID(viewId)) {
|
||||
if (!docIds.isViewId(viewId)) {
|
||||
throw new Error("Unable to extract table ID, is not a view ID")
|
||||
}
|
||||
const split = viewId.split(SEPARATOR)
|
||||
|
|
|
@ -15,7 +15,7 @@ export function triggerRowActionAuthorised(
|
|||
const rowActionId: string = ctx.params[actionPath]
|
||||
|
||||
const isTableId = docIds.isTableId(sourceId)
|
||||
const isViewId = utils.isViewID(sourceId)
|
||||
const isViewId = docIds.isViewId(sourceId)
|
||||
if (!isTableId && !isViewId) {
|
||||
ctx.throw(400, `'${sourceId}' is not a valid source id`)
|
||||
}
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
import { db, roles } from "@budibase/backend-core"
|
||||
import { db, docIds, roles } from "@budibase/backend-core"
|
||||
import {
|
||||
PermissionLevel,
|
||||
PermissionSource,
|
||||
VirtualDocumentType,
|
||||
} from "@budibase/types"
|
||||
import { extractViewInfoFromID, isViewID } from "../../../db/utils"
|
||||
import { extractViewInfoFromID } from "../../../db/utils"
|
||||
import {
|
||||
CURRENTLY_SUPPORTED_LEVELS,
|
||||
getBasePermissions,
|
||||
|
@ -20,7 +20,7 @@ type ResourcePermissions = Record<
|
|||
export async function getInheritablePermissions(
|
||||
resourceId: string
|
||||
): Promise<ResourcePermissions | undefined> {
|
||||
if (isViewID(resourceId)) {
|
||||
if (docIds.isViewId(resourceId)) {
|
||||
return await getResourcePerms(extractViewInfoFromID(resourceId).tableId)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
import { context, HTTPError, utils } from "@budibase/backend-core"
|
||||
import { context, docIds, HTTPError, utils } from "@budibase/backend-core"
|
||||
import {
|
||||
AutomationTriggerStepId,
|
||||
SEPARATOR,
|
||||
TableRowActions,
|
||||
VirtualDocumentType,
|
||||
} from "@budibase/types"
|
||||
import { generateRowActionsID, isViewID } from "../../db/utils"
|
||||
import { generateRowActionsID } from "../../db/utils"
|
||||
import automations from "./automations"
|
||||
import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo"
|
||||
import * as triggers from "../../automations/triggers"
|
||||
|
@ -155,7 +155,7 @@ export async function update(
|
|||
|
||||
async function guardView(tableId: string, viewId: string) {
|
||||
let view
|
||||
if (isViewID(viewId)) {
|
||||
if (docIds.isViewId(viewId)) {
|
||||
view = await sdk.views.get(viewId)
|
||||
}
|
||||
if (!view || view.tableId !== tableId) {
|
||||
|
|
|
@@ -1,5 +1,11 @@
-import { IncludeRelationship, Operation, Row } from "@budibase/types"
-import { HTTPError } from "@budibase/backend-core"
+import {
+  IncludeRelationship,
+  Operation,
+  Row,
+  Table,
+  ViewV2,
+} from "@budibase/types"
+import { docIds, HTTPError } from "@budibase/backend-core"
 import { handleRequest } from "../../../api/controllers/row/external"
 import { breakRowIdField } from "../../../integrations/utils"
 import sdk from "../../../sdk"
@@ -8,15 +14,24 @@ import {
   outputProcessing,
 } from "../../../utilities/rowProcessor"
 import cloneDeep from "lodash/fp/cloneDeep"
-import isEqual from "lodash/fp/isEqual"
 import { tryExtractingTableAndViewId } from "./utils"

 export async function getRow(
-  tableId: string,
+  sourceId: string | Table | ViewV2,
   rowId: string,
   opts?: { relationships?: boolean }
 ) {
-  const response = await handleRequest(Operation.READ, tableId, {
+  let source: Table | ViewV2
+  if (typeof sourceId === "string") {
+    if (docIds.isViewId(sourceId)) {
+      source = await sdk.views.get(sourceId)
+    } else {
+      source = await sdk.tables.getTable(sourceId)
+    }
+  } else {
+    source = sourceId
+  }
+  const response = await handleRequest(Operation.READ, source, {
     id: breakRowIdField(rowId),
     includeSqlRelationships: opts?.relationships
      ? IncludeRelationship.INCLUDE
@@ -27,45 +42,42 @@ export async function getRow(
 }

 export async function save(
-  tableOrViewId: string,
+  sourceId: string,
   inputs: Row,
   userId: string | undefined
 ) {
-  const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
-  const table = await sdk.tables.getTable(tableId)
-  const { table: updatedTable, row } = await inputProcessing(
-    userId,
-    cloneDeep(table),
-    inputs
-  )
+  const { tableId, viewId } = tryExtractingTableAndViewId(sourceId)
+  let source: Table | ViewV2
+  if (viewId) {
+    source = await sdk.views.get(viewId)
+  } else {
+    source = await sdk.tables.getTable(tableId)
+  }
+
+  const row = await inputProcessing(userId, cloneDeep(source), inputs)

   const validateResult = await sdk.rows.utils.validate({
     row,
-    tableId,
+    source,
   })
   if (!validateResult.valid) {
     throw { validation: validateResult.errors }
   }

-  const response = await handleRequest(Operation.CREATE, tableId, {
+  const response = await handleRequest(Operation.CREATE, source, {
     row,
   })

-  if (!isEqual(table, updatedTable)) {
-    await sdk.tables.saveTable(updatedTable)
-  }
-
   const rowId = response.row._id
   if (rowId) {
-    const row = await getRow(tableId, rowId, {
+    const row = await getRow(source, rowId, {
       relationships: true,
     })
     return {
       ...response,
-      row: await outputProcessing(table, row, {
+      row: await outputProcessing(source, row, {
         preserveLinks: true,
         squash: true,
-        fromViewId: viewId,
       }),
     }
   } else {
@@ -76,7 +88,14 @@ export async function save(
 export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
   const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)

-  const row = await getRow(tableId, rowId, {
+  let source: Table | ViewV2
+  if (viewId) {
+    source = await sdk.views.get(viewId)
+  } else {
+    source = await sdk.tables.getTable(tableId)
+  }
+
+  const row = await getRow(source, rowId, {
     relationships: true,
   })

@@ -84,11 +103,10 @@ export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
     throw new HTTPError("Row not found", 404)
   }

-  const table = await sdk.tables.getTable(tableId)
-  // Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case
-  return await outputProcessing(table, row, {
+  // Preserving links, as the outputProcessing does not support external rows
+  // yet and we don't need it in this use case
+  return await outputProcessing(source, row, {
     squash: true,
     preserveLinks: true,
-    fromViewId: viewId,
   })
 }
@@ -1,7 +1,6 @@
 import { context, db } from "@budibase/backend-core"
-import { Row } from "@budibase/types"
+import { Row, Table, ViewV2 } from "@budibase/types"
 import sdk from "../../../sdk"
-import cloneDeep from "lodash/fp/cloneDeep"
 import { finaliseRow } from "../../../api/controllers/row/staticFormula"
 import {
   inputProcessing,
@@ -10,7 +9,7 @@ import {
 import * as linkRows from "../../../db/linkedRows"
 import { InternalTables } from "../../../db/utils"
 import { getFullUser } from "../../../utilities/users"
-import { tryExtractingTableAndViewId } from "./utils"
+import { getSource, tryExtractingTableAndViewId } from "./utils"

 export async function save(
   tableOrViewId: string,
@@ -20,21 +19,25 @@ export async function save(
   const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
   inputs.tableId = tableId

+  let source: Table | ViewV2
+  let table: Table
+  if (viewId) {
+    source = await sdk.views.get(viewId)
+    table = await sdk.views.getTable(viewId)
+  } else {
+    source = await sdk.tables.getTable(tableId)
+    table = source
+  }
+
   if (!inputs._rev && !inputs._id) {
     inputs._id = db.generateRowID(inputs.tableId)
   }

-  // this returns the table and row incase they have been updated
-  const dbTable = await sdk.tables.getTable(inputs.tableId)
-
-  // need to copy the table so it can be differenced on way out
-  const tableClone = cloneDeep(dbTable)
-
-  let { table, row } = await inputProcessing(userId, tableClone, inputs)
+  let row = await inputProcessing(userId, source, inputs)

   const validateResult = await sdk.rows.utils.validate({
     row,
-    table,
+    source,
   })

   if (!validateResult.valid) {
@@ -49,24 +52,18 @@ export async function save(
     table,
   })) as Row

-  return finaliseRow(table, row, {
-    oldTable: dbTable,
-    updateFormula: true,
-    fromViewId: viewId,
-  })
+  return finaliseRow(source, row, { updateFormula: true })
 }

-export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
-  const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
-
-  const table = await sdk.tables.getTable(tableId)
-  let row = await findRow(tableId, rowId)
-
-  row = await outputProcessing(table, row, { squash: true, fromViewId: viewId })
-  return row
-}
+export async function find(sourceId: string, rowId: string): Promise<Row> {
+  const source = await getSource(sourceId)
+  return await outputProcessing(source, await findRow(sourceId, rowId), {
+    squash: true,
+  })
+}

-async function findRow(tableId: string, rowId: string) {
+export async function findRow(sourceId: string, rowId: string) {
+  const { tableId } = tryExtractingTableAndViewId(sourceId)
   const db = context.getAppDB()
   let row: Row
   // TODO remove special user case in future
@@ -53,8 +53,8 @@ export const removeInvalidFilters = (
 }

 export const getQueryableFields = async (
-  fields: string[],
-  table: Table
+  table: Table,
+  fields?: string[]
 ): Promise<string[]> => {
   const extractTableFields = async (
     table: Table,
@@ -110,6 +110,9 @@ export const getQueryableFields = async (
     "_id", // Querying by _id is always allowed, even if it's never part of the schema
   ]

+  if (fields == null) {
+    fields = Object.keys(table.schema)
+  }
   result.push(...(await extractTableFields(table, fields, [table._id!])))

   return result
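The change above reverses the `getQueryableFields` arguments: the table now comes first and the explicit field list is optional, defaulting to every key in the table schema. A minimal usage sketch (not part of this diff; it assumes `getQueryableFields` as declared above and a `Table` from "@budibase/types"):

```ts
// Default: consider every column in the table schema.
const allQueryable = await getQueryableFields(table)

// Restrict the check to an explicit subset of columns.
const someQueryable = await getQueryableFields(table, ["name", "age"])
```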
@@ -1,10 +1,6 @@
-import { db as dbCore, context } from "@budibase/backend-core"
+import { db as dbCore, context, docIds } from "@budibase/backend-core"
 import { Database, Row } from "@budibase/types"
-import {
-  extractViewInfoFromID,
-  getRowParams,
-  isViewID,
-} from "../../../db/utils"
+import { extractViewInfoFromID, getRowParams } from "../../../db/utils"
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./internal"
 import * as external from "./external"
@@ -26,7 +22,7 @@ export async function getAllInternalRows(appId?: string) {

 function pickApi(tableOrViewId: string) {
   let tableId = tableOrViewId
-  if (isViewID(tableOrViewId)) {
+  if (docIds.isViewId(tableOrViewId)) {
     tableId = extractViewInfoFromID(tableOrViewId).tableId
   }

@@ -37,13 +33,13 @@ function pickApi(tableOrViewId: string) {
 }

 export async function save(
-  tableOrViewId: string,
+  sourceId: string,
   row: Row,
   userId: string | undefined
 ) {
-  return pickApi(tableOrViewId).save(tableOrViewId, row, userId)
+  return pickApi(sourceId).save(sourceId, row, userId)
 }

-export async function find(tableOrViewId: string, rowId: string) {
-  return pickApi(tableOrViewId).find(tableOrViewId, rowId)
+export async function find(sourceId: string, rowId: string) {
+  return pickApi(sourceId).find(sourceId, rowId)
 }
@@ -4,6 +4,8 @@ import {
   RowSearchParams,
   SearchResponse,
   SortOrder,
+  Table,
+  ViewV2,
 } from "@budibase/types"
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
@@ -37,6 +39,7 @@ export async function search(
   return await tracer.trace("search", async span => {
     span?.addTags({
       tableId: options.tableId,
+      viewId: options.viewId,
       query: options.query,
       sort: options.sort,
       sortOrder: options.sortOrder,
@@ -48,20 +51,18 @@ export async function search(
       countRows: options.countRows,
     })

-    const isExternalTable = isExternalTableID(options.tableId)
     options.query = dataFilters.cleanupQuery(options.query || {})
     options.query = dataFilters.fixupFilterArrays(options.query)

-    span?.addTags({
+    span.addTags({
       cleanedQuery: options.query,
-      isExternalTable,
     })

     if (
       !dataFilters.hasFilters(options.query) &&
       options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
     ) {
-      span?.addTags({ emptyQuery: true })
+      span.addTags({ emptyQuery: true })
       return {
         rows: [],
       }
@@ -71,34 +72,43 @@ export async function search(
       options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
     }

-    const table = await sdk.tables.getTable(options.tableId)
-    options = searchInputMapping(table, options)
-
-    if (options.query) {
-      const tableFields = Object.keys(table.schema).filter(
-        f => table.schema[f].visible !== false
-      )
-
-      const queriableFields = await getQueryableFields(
-        options.fields?.filter(f => tableFields.includes(f)) ?? tableFields,
-        table
-      )
-      options.query = removeInvalidFilters(options.query, queriableFields)
-    }
+    let source: Table | ViewV2
+    let table: Table
+    if (options.viewId) {
+      source = await sdk.views.get(options.viewId)
+      table = await sdk.views.getTable(source)
+      options = searchInputMapping(table, options)
+    } else if (options.tableId) {
+      source = await sdk.tables.getTable(options.tableId)
+      table = source
+      options = searchInputMapping(table, options)
+    } else {
+      throw new Error(`Must supply either a view ID or a table ID`)
+    }
+
+    if (options.query) {
+      const visibleFields = (
+        options.fields || Object.keys(table.schema)
+      ).filter(field => table.schema[field].visible !== false)
+
+      const queryableFields = await getQueryableFields(table, visibleFields)
+      options.query = removeInvalidFilters(options.query, queryableFields)
+    }

+    const isExternalTable = isExternalTableID(table._id!)
     let result: SearchResponse<Row>
     if (isExternalTable) {
       span?.addTags({ searchType: "external" })
-      result = await external.search(options, table)
+      result = await external.search(options, source)
     } else if (await features.flags.isEnabled("SQS")) {
       span?.addTags({ searchType: "sqs" })
-      result = await internal.sqs.search(options, table)
+      result = await internal.sqs.search(options, source)
     } else {
       span?.addTags({ searchType: "lucene" })
-      result = await internal.lucene.search(options, table)
+      result = await internal.lucene.search(options, source)
     }

-    span?.addTags({
+    span.addTags({
       foundRows: result.rows.length,
       totalRows: result.totalRows,
     })
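With the change above, `search` resolves its source before dispatching: a `viewId` takes precedence, a bare `tableId` falls back to the table, and supplying neither throws. A hypothetical call for each path, with the options object trimmed for illustration (real callers pass a full `RowSearchParams`):

```ts
// Search a table directly.
await search({ tableId: table._id!, query: {} })

// Search through a view; its underlying table is looked up via sdk.views.getTable.
await search({ tableId: table._id!, viewId: view.id, query: {} })
```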
@@ -9,6 +9,7 @@ import {
   SortJson,
   SortOrder,
   Table,
+  ViewV2,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -60,9 +61,8 @@ function getPaginationAndLimitParameters(

 export async function search(
   options: RowSearchParams,
-  table: Table
+  source: Table | ViewV2
 ): Promise<SearchResponse<Row>> {
-  const { tableId } = options
   const { countRows, paginate, query, ...params } = options
   const { limit } = params
   let bookmark =
@@ -106,16 +106,15 @@ export async function search(
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   }
   const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
-    handleRequest(Operation.READ, tableId, parameters),
+    handleRequest(Operation.READ, source, parameters),
     countRows
-      ? handleRequest(Operation.COUNT, tableId, parameters)
+      ? handleRequest(Operation.COUNT, source, parameters)
       : Promise.resolve(undefined),
   ])

-  let processed = await outputProcessing(table, rows, {
+  let processed = await outputProcessing(source, rows, {
     preserveLinks: true,
     squash: true,
-    fromViewId: options.viewId,
   })

   let hasNextPage = false
@@ -128,10 +127,13 @@ export async function search(
     }
   }

-  if (options.fields) {
-    const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
-    processed = processed.map((r: any) => pick(r, fields))
-  }
+  const visibleFields =
+    options.fields ||
+    Object.keys(source.schema || {}).filter(
+      key => source.schema?.[key].visible !== false
+    )
+  const allowedFields = [...visibleFields, ...PROTECTED_EXTERNAL_COLUMNS]
+  processed = processed.map((r: any) => pick(r, allowedFields))

   // need wrapper object for bookmarks etc when paginating
   const response: SearchResponse<Row> = { rows: processed, hasNextPage }
@@ -201,7 +203,7 @@ export async function exportRows(
   }

   let result = await search(
-    { tableId, query: requestQuery, sort, sortOrder },
+    { tableId: table._id!, query: requestQuery, sort, sortOrder },
     table
   )
   let rows: Row[] = []
@@ -257,10 +259,10 @@ export async function exportRows(
 }

 export async function fetch(tableId: string): Promise<Row[]> {
-  const response = await handleRequest(Operation.READ, tableId, {
+  const table = await sdk.tables.getTable(tableId)
+  const response = await handleRequest(Operation.READ, table, {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
-  const table = await sdk.tables.getTable(tableId)
   return await outputProcessing(table, response.rows, {
     preserveLinks: true,
     squash: true,
@@ -268,7 +270,8 @@ export async function fetch(tableId: string): Promise<Row[]> {
 }

 export async function fetchRaw(tableId: string): Promise<Row[]> {
-  const response = await handleRequest(Operation.READ, tableId, {
+  const table = await sdk.tables.getTable(tableId)
+  const response = await handleRequest(Operation.READ, table, {
     includeSqlRelationships: IncludeRelationship.INCLUDE,
   })
   return response.rows
@@ -8,21 +8,29 @@ import {
   SortType,
   Table,
   User,
+  ViewV2,
 } from "@budibase/types"
 import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
 import { outputProcessing } from "../../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
+import sdk from "../../../../"

 export async function search(
   options: RowSearchParams,
-  table: Table
+  source: Table | ViewV2
 ): Promise<SearchResponse<Row>> {
-  const { tableId } = options
+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }
+
   const { paginate, query } = options

   const params: RowSearchParams = {
     tableId: options.tableId,
+    viewId: options.viewId,
     sort: options.sort,
     sortOrder: options.sortOrder,
     sortType: options.sortType,
@@ -50,18 +58,20 @@ export async function search(
   // Enrich search results with relationships
   if (response.rows && response.rows.length) {
     // enrich with global users if from users table
-    if (tableId === InternalTables.USER_METADATA) {
+    if (table._id === InternalTables.USER_METADATA) {
       response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
     }

-    if (options.fields) {
-      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
-      response.rows = response.rows.map((r: any) => pick(r, fields))
-    }
+    const visibleFields =
+      options.fields ||
+      Object.keys(source.schema || {}).filter(
+        key => source.schema?.[key].visible !== false
+      )
+    const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
+    response.rows = response.rows.map((r: any) => pick(r, allowedFields))

-    response.rows = await outputProcessing(table, response.rows, {
+    response.rows = await outputProcessing(source, response.rows, {
       squash: true,
-      fromViewId: options.viewId,
     })
   }
@@ -1,4 +1,5 @@
 import {
+  Aggregation,
   Datasource,
   DocumentType,
   FieldType,
@@ -15,6 +16,7 @@ import {
   SortType,
   SqlClient,
   Table,
+  ViewV2,
 } from "@budibase/types"
 import {
   buildInternalRelationships,
@@ -44,10 +46,12 @@ import {
 import {
   dataFilters,
   helpers,
+  isInternalColumnName,
   PROTECTED_INTERNAL_COLUMNS,
 } from "@budibase/shared-core"
 import { isSearchingByRowID } from "../utils"
 import tracer from "dd-trace"
+import { cloneDeep } from "lodash"

 const builder = new sql.Sql(SqlClient.SQL_LITE)
 const SQLITE_COLUMN_LIMIT = 2000
@@ -55,11 +59,34 @@ const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
 const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
 const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)

-function buildInternalFieldList(
-  table: Table,
+async function buildInternalFieldList(
+  source: Table | ViewV2,
   tables: Table[],
-  opts?: { relationships?: RelationshipsJson[] }
+  opts?: { relationships?: RelationshipsJson[]; allowedFields?: string[] }
 ) {
+  const { relationships, allowedFields } = opts || {}
+  let schemaFields: string[] = []
+  if (sdk.views.isView(source)) {
+    schemaFields = Object.keys(helpers.views.basicFields(source)).filter(
+      key => source.schema?.[key]?.visible !== false
+    )
+  } else {
+    schemaFields = Object.keys(source.schema).filter(
+      key => source.schema[key].visible !== false
+    )
+  }
+
+  if (allowedFields) {
+    schemaFields = schemaFields.filter(field => allowedFields.includes(field))
+  }
+
+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }
+
   let fieldList: string[] = []
   const getJunctionFields = (relatedTable: Table, fields: string[]) => {
     const junctionFields: string[] = []
@@ -70,13 +97,18 @@ function buildInternalFieldList(
     })
     return junctionFields
   }
-  fieldList = fieldList.concat(
-    PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
-  )
-  for (let key of Object.keys(table.schema)) {
+  if (sdk.tables.isTable(source)) {
+    for (const key of PROTECTED_INTERNAL_COLUMNS) {
+      if (allowedFields && !allowedFields.includes(key)) {
+        continue
+      }
+      fieldList.push(`${table._id}.${key}`)
+    }
+  }
+  for (let key of schemaFields) {
     const col = table.schema[key]
     const isRelationship = col.type === FieldType.LINK
-    if (!opts?.relationships && isRelationship) {
+    if (!relationships && isRelationship) {
       continue
     }
     if (!isRelationship) {
@@ -87,7 +119,9 @@ function buildInternalFieldList(
     if (!relatedTable) {
       continue
     }
-    const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
+    const relatedFields = (
+      await buildInternalFieldList(relatedTable, tables)
+    ).concat(
       getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
     )
     // break out of the loop if we have reached the max number of columns
@@ -128,15 +162,22 @@ function cleanupFilters(
   // generate a map of all possible column names (these can be duplicated across tables
   // the map of them will always be the same
   const userColumnMap: Record<string, string> = {}
-  allTables.forEach(table =>
-    Object.keys(table.schema).forEach(
-      key => (userColumnMap[key] = mapToUserColumn(key))
-    )
-  )
+  for (const table of allTables) {
+    for (const key of Object.keys(table.schema)) {
+      if (isInternalColumnName(key)) {
+        continue
+      }
+      userColumnMap[key] = mapToUserColumn(key)
+    }
+  }

   // update the keys of filters to manage user columns
-  const keyInAnyTable = (key: string): boolean =>
-    allTables.some(table => table.schema[key])
+  const keyInAnyTable = (key: string): boolean => {
+    if (isInternalColumnName(key)) {
+      return false
+    }
+    return allTables.some(table => table.schema[key])
+  }

   const splitter = new dataFilters.ColumnSplitter(allTables)

@@ -291,16 +332,23 @@ function resyncDefinitionsRequired(status: number, message: string) {

 export async function search(
   options: RowSearchParams,
-  table: Table,
+  source: Table | ViewV2,
   opts?: { retrying?: boolean }
 ): Promise<SearchResponse<Row>> {
-  let { paginate, query, ...params } = options
+  let { paginate, query, ...params } = cloneDeep(options)
+
+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }

   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
   // make sure we have the mapped/latest table
-  if (table?._id) {
-    table = allTablesMap[table?._id]
+  if (table._id) {
+    table = allTablesMap[table._id]
   }
   if (!table) {
     throw new Error("Unable to find table")
@@ -312,6 +360,23 @@ export async function search(
     ...cleanupFilters(query, table, allTables),
     documentType: DocumentType.ROW,
   }

+  let aggregations: Aggregation[] = []
+  if (sdk.views.isView(source)) {
+    const calculationFields = helpers.views.calculationFields(source)
+    for (const [key, field] of Object.entries(calculationFields)) {
+      if (options.fields && !options.fields.includes(key)) {
+        continue
+      }
+
+      aggregations.push({
+        name: key,
+        field: mapToUserColumn(field.field),
+        calculationType: field.calculationType,
+      })
+    }
+  }
+
   const request: QueryJson = {
     endpoint: {
       // not important, we query ourselves
@@ -327,7 +392,11 @@ export async function search(
       columnPrefix: USER_COLUMN_PREFIX,
     },
     resource: {
-      fields: buildInternalFieldList(table, allTables, { relationships }),
+      fields: await buildInternalFieldList(source, allTables, {
+        relationships,
+        allowedFields: options.fields,
+      }),
+      aggregations,
     },
     relationships,
   }
@@ -372,7 +441,7 @@ export async function search(
     // make sure JSON columns corrected
     const processed = builder.convertJsonStringColumns<Row>(
       table,
-      await sqlOutputProcessing(rows, table!, allTablesMap, relationships, {
+      await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
        sqs: true,
      })
    )
@@ -388,17 +457,18 @@ export async function search(
     }

     // get the rows
-    let finalRows = await outputProcessing(table, processed, {
+    let finalRows = await outputProcessing(source, processed, {
       preserveLinks: true,
       squash: true,
-      fromViewId: options.viewId,
     })

-    // check if we need to pick specific rows out
-    if (options.fields) {
-      const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
-      finalRows = finalRows.map((r: any) => pick(r, fields))
-    }
+    const visibleFields =
+      options.fields ||
+      Object.keys(source.schema || {}).filter(
+        key => source.schema?.[key].visible !== false
+      )
+    const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
+    finalRows = finalRows.map((r: any) => pick(r, allowedFields))

     const response: SearchResponse<Row> = {
       rows: finalRows,
@@ -419,7 +489,7 @@ export async function search(
     const msg = typeof err === "string" ? err : err.message
     if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) {
       await sdk.tables.sqs.syncDefinition()
-      return search(options, table, { retrying: true })
+      return search(options, source, { retrying: true })
     }
     // previously the internal table didn't error when a column didn't exist in search
     if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) {
@@ -90,10 +90,8 @@ describe.each([tableWithUserCol, tableWithUsersCol])(
     })

     it("shouldn't error if no query supplied", () => {
-      const params: any = {
-        tableId,
-      }
-      const output = searchInputMapping(col, params)
+      // @ts-expect-error - intentionally passing in a bad type
+      const output = searchInputMapping(col, { tableId })
       expect(output.query).toBeUndefined()
     })
   }
@@ -83,10 +83,7 @@ function userColumnMapping(column: string, options: RowSearchParams) {
 // maps through the search parameters to check if any of the inputs are invalid
 // based on the table schema, converts them to something that is valid.
 export function searchInputMapping(table: Table, options: RowSearchParams) {
-  if (!table?.schema) {
-    return options
-  }
-  for (let [key, column] of Object.entries(table.schema)) {
+  for (let [key, column] of Object.entries(table.schema || {})) {
     switch (column.type) {
       case FieldType.BB_REFERENCE_SINGLE: {
         const subtype = column.subtype
@@ -203,7 +203,7 @@ describe("query utils", () => {
       },
     })

-    const result = await getQueryableFields(Object.keys(table.schema), table)
+    const result = await getQueryableFields(table)
     expect(result).toEqual(["_id", "name", "age"])
   })

@@ -216,7 +216,7 @@ describe("query utils", () => {
       },
     })

-    const result = await getQueryableFields(Object.keys(table.schema), table)
+    const result = await getQueryableFields(table)
     expect(result).toEqual(["_id", "name"])
   })

@@ -245,7 +245,7 @@ describe("query utils", () => {
     })

     const result = await config.doInContext(config.appId, () => {
-      return getQueryableFields(Object.keys(table.schema), table)
+      return getQueryableFields(table)
     })
     expect(result).toEqual([
       "_id",
@@ -282,7 +282,7 @@ describe("query utils", () => {
     })

     const result = await config.doInContext(config.appId, () => {
-      return getQueryableFields(Object.keys(table.schema), table)
+      return getQueryableFields(table)
     })
     expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
   })
@@ -313,7 +313,7 @@ describe("query utils", () => {
     })

     const result = await config.doInContext(config.appId, () => {
-      return getQueryableFields(Object.keys(table.schema), table)
+      return getQueryableFields(table)
     })
     expect(result).toEqual(["_id", "name"])
   })
@@ -381,7 +381,7 @@ describe("query utils", () => {

     it("includes nested relationship fields from main table", async () => {
       const result = await config.doInContext(config.appId, () => {
-        return getQueryableFields(Object.keys(table.schema), table)
+        return getQueryableFields(table)
       })
       expect(result).toEqual([
         "_id",
@@ -398,7 +398,7 @@ describe("query utils", () => {

     it("includes nested relationship fields from aux 1 table", async () => {
       const result = await config.doInContext(config.appId, () => {
-        return getQueryableFields(Object.keys(aux1.schema), aux1)
+        return getQueryableFields(aux1)
       })
       expect(result).toEqual([
         "_id",
@@ -420,7 +420,7 @@ describe("query utils", () => {

     it("includes nested relationship fields from aux 2 table", async () => {
       const result = await config.doInContext(config.appId, () => {
-        return getQueryableFields(Object.keys(aux2.schema), aux2)
+        return getQueryableFields(aux2)
       })
       expect(result).toEqual([
         "_id",
@@ -474,7 +474,7 @@ describe("query utils", () => {

     it("includes nested relationship fields from main table", async () => {
       const result = await config.doInContext(config.appId, () => {
-        return getQueryableFields(Object.keys(table.schema), table)
+        return getQueryableFields(table)
       })
       expect(result).toEqual([
         "_id",
@@ -488,7 +488,7 @@ describe("query utils", () => {

     it("includes nested relationship fields from aux table", async () => {
       const result = await config.doInContext(config.appId, () => {
-        return getQueryableFields(Object.keys(aux.schema), aux)
+        return getQueryableFields(aux)
       })
       expect(result).toEqual([
         "_id",
@@ -33,7 +33,7 @@ describe("validate", () => {
   it("should accept empty values", async () => {
     const row = {}
     const table = getTable()
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(true)
     expect(output.errors).toEqual({})
   })
@@ -43,7 +43,7 @@ describe("validate", () => {
      time: `${hour()}:${minute()}`,
     }
     const table = getTable()
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(true)
   })

@@ -52,7 +52,7 @@ describe("validate", () => {
      time: `${hour()}:${minute()}:${second()}`,
     }
     const table = getTable()
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(true)
   })

@@ -67,7 +67,7 @@ describe("validate", () => {
     table.schema.time.constraints = {
       presence: true,
     }
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(false)
     expect(output.errors).toEqual({ time: ['"time" is not a valid time'] })
   })
@@ -91,7 +91,7 @@ describe("validate", () => {
      `${generator.integer({ min: 11, max: 23 })}:${minute()}`,
    ])("should accept values after config value (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(true)
    })

@@ -100,7 +100,7 @@ describe("validate", () => {
      `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
    ])("should reject values before config value (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no earlier than 10:00"],
@@ -125,7 +125,7 @@ describe("validate", () => {
      `${generator.integer({ min: 0, max: 12 })}:${minute()}`,
    ])("should accept values before config value (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(true)
    })

@@ -134,7 +134,7 @@ describe("validate", () => {
      `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
    ])("should reject values after config value (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no later than 15:16:17"],
@@ -156,7 +156,7 @@ describe("validate", () => {
      "should accept values in range (%s)",
      async time => {
        const row = { time }
-        const output = await validate({ table, tableId: table._id!, row })
+        const output = await validate({ source: table, row })
        expect(output.valid).toBe(true)
      }
    )
@@ -166,7 +166,7 @@ describe("validate", () => {
      `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
    ])("should reject values before range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no earlier than 10:00"],
@@ -178,7 +178,7 @@ describe("validate", () => {
      `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
    ])("should reject values after range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no later than 15:00"],
@@ -199,7 +199,7 @@ describe("validate", () => {
      "should accept values in range (%s)",
      async time => {
        const row = { time }
-        const output = await validate({ table, tableId: table._id!, row })
+        const output = await validate({ source: table, row })
        expect(output.valid).toBe(true)
      }
    )
@@ -208,7 +208,7 @@ describe("validate", () => {
      "should reject values out range (%s)",
      async time => {
        const row = { time }
-        const output = await validate({ table, tableId: table._id!, row })
+        const output = await validate({ source: table, row })
        expect(output.valid).toBe(false)
        expect(output.errors).toEqual({
          time: ["must be no later than 10:00"],
@@ -226,7 +226,7 @@ describe("validate", () => {
     table.schema.time.constraints = {
       presence: true,
     }
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(false)
     expect(output.errors).toEqual({ time: ["can't be blank"] })
   })
@@ -237,7 +237,7 @@ describe("validate", () => {
     table.schema.time.constraints = {
       presence: true,
     }
-    const output = await validate({ table, tableId: table._id!, row })
+    const output = await validate({ source: table, row })
     expect(output.valid).toBe(false)
     expect(output.errors).toEqual({ time: ["can't be blank"] })
   })
@@ -257,7 +257,7 @@ describe("validate", () => {
      "should accept values in range (%s)",
      async time => {
        const row = { time }
-        const output = await validate({ table, tableId: table._id!, row })
+        const output = await validate({ source: table, row })
        expect(output.valid).toBe(true)
      }
    )
@@ -267,7 +267,7 @@ describe("validate", () => {
      `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
    ])("should reject values before range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no earlier than 10:00"],
@@ -279,7 +279,7 @@ describe("validate", () => {
      `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
    ])("should reject values after range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no later than 15:00"],
@@ -301,7 +301,7 @@ describe("validate", () => {
      "should accept values in range (%s)",
      async time => {
        const row = { time }
-        const output = await validate({ table, tableId: table._id!, row })
+        const output = await validate({ source: table, row })
        expect(output.valid).toBe(true)
      }
    )
@@ -311,7 +311,7 @@ describe("validate", () => {
      `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
    ])("should reject values before range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no earlier than 10:00"],
@@ -323,7 +323,7 @@ describe("validate", () => {
      `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
    ])("should reject values after range (%s)", async time => {
      const row = { time }
-      const output = await validate({ table, tableId: table._id!, row })
+      const output = await validate({ source: table, row })
      expect(output.valid).toBe(false)
      expect(output.errors).toEqual({
        time: ["must be no later than 15:00"],
@@ -13,16 +13,15 @@ import {
   TableSchema,
   SqlClient,
   ArrayOperator,
+  ViewV2,
 } from "@budibase/types"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { Format } from "../../../api/controllers/view/exporters"
 import sdk from "../.."
-import {
-  extractViewInfoFromID,
-  isRelationshipColumn,
-  isViewID,
-} from "../../../db/utils"
+import { extractViewInfoFromID, isRelationshipColumn } from "../../../db/utils"
 import { isSQL } from "../../../integrations/utils"
+import { docIds } from "@budibase/backend-core"
+import { getTableFromSource } from "../../../api/controllers/row/utils"

 const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = {
   [SourceName.POSTGRES]: SqlClient.POSTGRES,
@@ -142,37 +141,27 @@ function isForeignKey(key: string, table: Table) {
 }

 export async function validate({
-  tableId,
+  source,
   row,
-  table,
 }: {
-  tableId?: string
+  source: Table | ViewV2
   row: Row
-  table?: Table
 }): Promise<{
   valid: boolean
   errors: Record<string, any>
 }> {
-  let fetchedTable: Table | undefined
-  if (!table && tableId) {
-    fetchedTable = await sdk.tables.getTable(tableId)
-  } else if (table) {
-    fetchedTable = table
-  }
-  if (fetchedTable === undefined) {
-    throw new Error("Unable to fetch table for validation")
-  }
+  const table = await getTableFromSource(source)
   const errors: Record<string, any> = {}
   const disallowArrayTypes = [
     FieldType.ATTACHMENT_SINGLE,
     FieldType.BB_REFERENCE_SINGLE,
   ]
-  for (let fieldName of Object.keys(fetchedTable.schema)) {
-    const column = fetchedTable.schema[fieldName]
+  for (let fieldName of Object.keys(table.schema)) {
+    const column = table.schema[fieldName]
     const constraints = cloneDeep(column.constraints)
     const type = column.type
     // foreign keys are likely to be enriched
-    if (isForeignKey(fieldName, fetchedTable)) {
+    if (isForeignKey(fieldName, table)) {
       continue
     }
     // formulas shouldn't validated, data will be deleted anyway
@@ -323,7 +312,7 @@ export function isArrayFilter(operator: any): operator is ArrayOperator {
 }

 export function tryExtractingTableAndViewId(tableOrViewId: string) {
-  if (isViewID(tableOrViewId)) {
+  if (docIds.isViewId(tableOrViewId)) {
     return {
       tableId: extractViewInfoFromID(tableOrViewId).tableId,
       viewId: tableOrViewId,
@@ -332,3 +321,10 @@ export function tryExtractingTableAndViewId(tableOrViewId: string) {

   return { tableId: tableOrViewId }
 }
+
+export function getSource(tableOrViewId: string) {
+  if (docIds.isViewId(tableOrViewId)) {
+    return sdk.views.get(tableOrViewId)
+  }
+  return sdk.tables.getTable(tableOrViewId)
+}
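A short sketch of how the new helpers above fit together: `getSource` resolves a combined table-or-view ID to its document, and `validate` now takes that source directly instead of a `tableId`/`table` pair (not part of this diff; surrounding imports and the `sdk` reference are assumed from the same file):

```ts
import { Row } from "@budibase/types"

// Resolve the source once, then validate a row against it.
async function validateRow(tableOrViewId: string, row: Row) {
  const source = await getSource(tableOrViewId)
  return sdk.rows.utils.validate({ source, row })
}
```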
@@ -1,5 +1,6 @@
 import { Table, TableSourceType } from "@budibase/types"
 import { isExternalTableID } from "../../../integrations/utils"
+import { docIds } from "@budibase/backend-core"

 export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
   if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
@@ -9,3 +10,7 @@ export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
   }
   return false
 }
+
+export function isTable(table: any): table is Table {
+  return table._id && docIds.isTableId(table._id)
+}
@@ -9,7 +9,7 @@ import {
   ViewV2ColumnEnriched,
   ViewV2Enriched,
 } from "@budibase/types"
-import { HTTPError } from "@budibase/backend-core"
+import { context, docIds, HTTPError } from "@budibase/backend-core"
 import {
   helpers,
   PROTECTED_EXTERNAL_COLUMNS,
@@ -40,16 +40,85 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
   return pickApi(tableId).getEnriched(viewId)
 }

+export async function getTable(view: string | ViewV2): Promise<Table> {
+  const viewId = typeof view === "string" ? view : view.id
+  const cached = context.getTableForView(viewId)
+  if (cached) {
+    return cached
+  }
+  const { tableId } = utils.extractViewInfoFromID(viewId)
+  const table = await sdk.tables.getTable(tableId)
+  context.setTableForView(viewId, table)
+  return table
+}
+
+export function isView(view: any): view is ViewV2 {
+  return view.id && docIds.isViewId(view.id) && view.version === 2
+}
+
+async function guardCalculationViewSchema(
+  table: Table,
+  view: Omit<ViewV2, "id" | "version">
+) {
+  const calculationFields = helpers.views.calculationFields(view)
+  for (const calculationFieldName of Object.keys(calculationFields)) {
+    const schema = calculationFields[calculationFieldName]
+    const targetSchema = table.schema[schema.field]
+    if (!targetSchema) {
+      throw new HTTPError(
+        `Calculation field "${calculationFieldName}" references field "${schema.field}" which does not exist in the table schema`,
+        400
+      )
+    }
+
+    if (!helpers.schema.isNumeric(targetSchema)) {
+      throw new HTTPError(
+        `Calculation field "${calculationFieldName}" references field "${schema.field}" which is not a numeric field`,
+        400
+      )
+    }
+  }
+
+  const groupByFields = helpers.views.basicFields(view)
+  for (const groupByFieldName of Object.keys(groupByFields)) {
+    const targetSchema = table.schema[groupByFieldName]
+    if (!targetSchema) {
+      throw new HTTPError(
+        `Group by field "${groupByFieldName}" does not exist in the table schema`,
+        400
+      )
+    }
+  }
+}
+
 async function guardViewSchema(
   tableId: string,
   view: Omit<ViewV2, "id" | "version">
 ) {
-  const viewSchema = view.schema || {}
   const table = await sdk.tables.getTable(tableId)

+  if (helpers.views.isCalculationView(view)) {
+    await guardCalculationViewSchema(table, view)
+  }
+
+  await checkReadonlyFields(table, view)
+  checkRequiredFields(table, view)
+  checkDisplayField(view)
+}
+
+async function checkReadonlyFields(
+  table: Table,
+  view: Omit<ViewV2, "id" | "version">
+) {
+  const viewSchema = view.schema || {}
   for (const field of Object.keys(viewSchema)) {
-    const tableSchemaField = table.schema[field]
-    if (!tableSchemaField) {
+    const viewFieldSchema = viewSchema[field]
+    if (helpers.views.isCalculationField(viewFieldSchema)) {
+      continue
+    }
+
+    const tableFieldSchema = table.schema[field]
+    if (!tableFieldSchema) {
       throw new HTTPError(
         `Field "${field}" is not valid for the requested table`,
         400
@@ -65,18 +134,33 @@ async function guardViewSchema(
       }
     }
   }
+}

-  const existingView =
-    table?.views && (table.views[view.name] as ViewV2 | undefined)
+function checkDisplayField(view: Omit<ViewV2, "id" | "version">) {
+  if (view.primaryDisplay) {
+    const viewSchemaField = view.schema?.[view.primaryDisplay]
+
+    if (!viewSchemaField?.visible) {
+      throw new HTTPError(
+        `You can't hide "${view.primaryDisplay}" because it is the display column.`,
+        400
+      )
+    }
+  }
+}
+
+function checkRequiredFields(
+  table: Table,
+  view: Omit<ViewV2, "id" | "version">
+) {
+  const existingView = table.views?.[view.name] as ViewV2 | undefined
   for (const field of Object.values(table.schema)) {
     if (!helpers.schema.isRequired(field.constraints)) {
       continue
     }

-    const viewSchemaField = viewSchema[field.name]
-    const existingViewSchema =
-      existingView?.schema && existingView.schema[field.name]
+    const viewSchemaField = view.schema?.[field.name]
+    const existingViewSchema = existingView?.schema?.[field.name]
     if (!viewSchemaField && !existingViewSchema?.visible) {
       // Supporting existing configs with required columns but hidden in views
       continue
@@ -89,24 +173,16 @@ async function guardViewSchema(
       )
     }

-    if (viewSchemaField.readonly) {
+    if (
+      helpers.views.isBasicViewField(viewSchemaField) &&
+      viewSchemaField.readonly
+    ) {
       throw new HTTPError(
         `You can't make "${field.name}" readonly because it is a required field.`,
         400
       )
     }
   }
-
-  if (view.primaryDisplay) {
-    const viewSchemaField = viewSchema[view.primaryDisplay]
-
-    if (!viewSchemaField?.visible) {
-      throw new HTTPError(
-        `You can't hide "${view.primaryDisplay}" because it is the display column.`,
-        400
-      )
-    }
-  }
 }

 export async function create(
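For context on the guard added above: a calculation field must point at an existing numeric column, and plain (group-by) fields must exist on the table. A sketch of a schema that passes versus one the guard rejects with a 400 (not part of this diff; the column names are invented for illustration):

```ts
// Passes: "amount" is assumed to be a numeric column on the table,
// and "region" is assumed to exist as a plain group-by column.
const okSchema = {
  total: { visible: true, calculationType: CalculationType.SUM, field: "amount" },
  region: { visible: true },
}

// Rejected: "notes" is assumed to be a text column, so it is not a valid
// calculation target and guardCalculationViewSchema throws an HTTPError(400).
const badSchema = {
  total: { visible: true, calculationType: CalculationType.SUM, field: "notes" },
}
```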
@@ -18,6 +18,7 @@ import {
   RowAttachment,
   Table,
   User,
+  ViewV2,
 } from "@budibase/types"
 import { cloneDeep } from "lodash/fp"
 import {
@@ -33,7 +34,11 @@ import {
   PROTECTED_INTERNAL_COLUMNS,
 } from "@budibase/shared-core"
 import { processString } from "@budibase/string-templates"
-import { isUserMetadataTable } from "../../api/controllers/row/utils"
+import {
+  getTableFromSource,
+  isUserMetadataTable,
+} from "../../api/controllers/row/utils"
+import sdk from "../../sdk"

 export * from "./utils"
 export * from "./attachments"
@@ -67,6 +72,7 @@ export async function processAutoColumn(
   // check its not user table, or whether any of the processing options have been disabled
   const shouldUpdateUserFields =
     !isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser
+  let tableMutated = false
   for (let [key, schema] of Object.entries(table.schema)) {
     if (!schema.autocolumn) {
       continue
@@ -99,10 +105,17 @@ export async function processAutoColumn(
         row[key] = schema.lastID + 1
         schema.lastID++
+        table.schema[key] = schema
+        tableMutated = true
       }
       break
     }
   }

+  if (tableMutated) {
+    const db = context.getAppDB()
+    const resp = await db.put(table)
+    table._rev = resp.rev
+  }
 }

 async function processDefaultValues(table: Table, row: Row) {
@@ -169,11 +182,12 @@ export function coerce(row: any, type: string) {
  */
 export async function inputProcessing(
   userId: string | null | undefined,
-  table: Table,
+  source: Table | ViewV2,
   row: Row,
   opts?: AutoColumnProcessingOpts
 ) {
   const clonedRow = cloneDeep(row)
+  const table = await getTableFromSource(source)

   const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
   for (const [key, value] of Object.entries(clonedRow)) {
@@ -228,8 +242,7 @@ export async function inputProcessing(

   await processAutoColumn(userId, table, clonedRow, opts)
   await processDefaultValues(table, clonedRow)

-  return { table, row: clonedRow }
+  return clonedRow
 }

 /**
@@ -242,14 +255,13 @@ export async function inputProcessing(
  * @returns the enriched rows will be returned.
  */
 export async function outputProcessing<T extends Row[] | Row>(
-  table: Table,
+  source: Table | ViewV2,
   rows: T,
   opts: {
     squash?: boolean
     preserveLinks?: boolean
     fromRow?: Row
     skipBBReferences?: boolean
-    fromViewId?: string
   } = {
     squash: true,
     preserveLinks: false,
@@ -264,6 +276,14 @@ export async function outputProcessing<T extends Row[] | Row>(
   } else {
     safeRows = rows
   }

+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }
+
+  // SQS returns the rows with full relationship contents
   // attach any linked row information
   let enriched = !opts.preserveLinks
@@ -276,25 +296,25 @@ export async function outputProcessing<T extends Row[] | Row>(
     opts.squash = true
   }

-  enriched = await coreOutputProcessing(table, enriched, opts)
+  enriched = await coreOutputProcessing(source, enriched, opts)

   if (opts.squash) {
-    enriched = await linkRows.squashLinks(table, enriched, {
-      fromViewId: opts?.fromViewId,
-    })
+    enriched = await linkRows.squashLinks(source, enriched)
   }

   return (wasArray ? enriched : enriched[0]) as T
 }

 /**
- * This function is similar to the outputProcessing function above, it makes sure that all the provided
- * rows are ready for output, but does not have enrichment for squash capabilities which can cause performance issues.
- * outputProcessing should be used when responding from the API, while this should be used when internally processing
- * rows for any reason (like part of view operations).
+ * This function is similar to the outputProcessing function above, it makes
+ * sure that all the provided rows are ready for output, but does not have
+ * enrichment for squash capabilities which can cause performance issues.
+ * outputProcessing should be used when responding from the API, while this
+ * should be used when internally processing rows for any reason (like part of
+ * view operations).
  */
 export async function coreOutputProcessing(
-  table: Table,
+  source: Table | ViewV2,
   rows: Row[],
   opts: {
     preserveLinks?: boolean
@@ -305,6 +325,13 @@ export async function coreOutputProcessing(
     skipBBReferences: false,
   }
 ): Promise<Row[]> {
+  let table: Table
+  if (sdk.views.isView(source)) {
+    table = await sdk.views.getTable(source.id)
+  } else {
+    table = source
+  }
+
   // process complex types: attachments, bb references...
   for (const [property, column] of Object.entries(table.schema)) {
     if (
@@ -409,9 +436,18 @@ export async function coreOutputProcessing(
     const tableFields = Object.keys(table.schema).filter(
       f => table.schema[f].visible !== false
     )

     const fields = [...tableFields, ...protectedColumns].map(f =>
       f.toLowerCase()
     )

+    if (sdk.views.isView(source)) {
+      const aggregations = helpers.views.calculationFields(source)
+      for (const key of Object.keys(aggregations)) {
+        fields.push(key.toLowerCase())
+      }
+    }
+
     for (const row of rows) {
       for (const key of Object.keys(row)) {
         if (!fields.includes(key.toLowerCase())) {
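As the reworded comment above says, `outputProcessing` is the API-response path (it can squash relationship links), while `coreOutputProcessing` skips that enrichment for internal work such as view operations. A rough sketch of the two call shapes (not part of this diff; it only uses the signatures shown above):

```ts
// API response: enrich rows and squash relationship links.
const apiRows = await outputProcessing(source, rows, { squash: true })

// Internal processing (e.g. view operations): no squash enrichment.
const internalRows = await coreOutputProcessing(source, rows, { preserveLinks: true })
```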
@@ -65,7 +65,7 @@ describe("rowProcessor - inputProcessing", () => {

     processInputBBReferenceMock.mockResolvedValue(user)

-    const { row } = await inputProcessing(userId, table, newRow)
+    const row = await inputProcessing(userId, table, newRow)

     expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes(
       1
@@ -117,7 +117,7 @@ describe("rowProcessor - inputProcessing", () => {

     processInputBBReferencesMock.mockResolvedValue(user)

-    const { row } = await inputProcessing(userId, table, newRow)
+    const row = await inputProcessing(userId, table, newRow)

     expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes(
       1
@@ -164,7 +164,7 @@ describe("rowProcessor - inputProcessing", () => {
       name: "Jack",
     }

-    const { row } = await inputProcessing(userId, table, newRow)
+    const row = await inputProcessing(userId, table, newRow)

     expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
     expect(row).toEqual({ ...newRow, user: undefined })
@@ -207,7 +207,7 @@ describe("rowProcessor - inputProcessing", () => {
       user: userValue,
     }

-    const { row } = await inputProcessing(userId, table, newRow)
+    const row = await inputProcessing(userId, table, newRow)

     if (userValue === undefined) {
       // The 'user' field is omitted
@@ -262,7 +262,7 @@ describe("rowProcessor - inputProcessing", () => {
       user: "123",
     }

-    const { row } = await inputProcessing(userId, table, newRow)
+    const row = await inputProcessing(userId, table, newRow)

     expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
     expect(row).toEqual({
@@ -148,9 +148,16 @@ export function parse(rows: Rows, table: Table): Rows {

     Object.keys(row).forEach(columnName => {
       const columnData = row[columnName]

+      if (columnName === "_id") {
+        parsedRow[columnName] = columnData
+        return
+      }
+
       const schema = table.schema
       if (!(columnName in schema)) {
-        // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
+        // Objects can be present in the row data but not in the schema, so make
+        // sure we don't proceed in such a case
         return
       }

@@ -2,3 +2,4 @@ export * from "./helpers"
 export * from "./integrations"
 export * as cron from "./cron"
 export * as schema from "./schema"
+export * as views from "./views"
@@ -45,3 +45,7 @@ export function decodeNonAscii(str: string): string {
     String.fromCharCode(parseInt(p1, 16))
   )
 }
+
+export function isNumeric(field: FieldSchema) {
+  return field.type === FieldType.NUMBER || field.type === FieldType.BIGINT
+}
@@ -0,0 +1,33 @@
+import {
+  BasicViewFieldMetadata,
+  ViewCalculationFieldMetadata,
+  ViewFieldMetadata,
+  ViewV2,
+} from "@budibase/types"
+import { pickBy } from "lodash"
+
+export function isCalculationField(
+  field: ViewFieldMetadata
+): field is ViewCalculationFieldMetadata {
+  return "calculationType" in field
+}
+
+export function isBasicViewField(
+  field: ViewFieldMetadata
+): field is BasicViewFieldMetadata {
+  return !isCalculationField(field)
+}
+
+type UnsavedViewV2 = Omit<ViewV2, "id" | "version">
+
+export function isCalculationView(view: UnsavedViewV2) {
+  return Object.values(view.schema || {}).some(isCalculationField)
+}
+
+export function calculationFields(view: UnsavedViewV2) {
+  return pickBy(view.schema || {}, isCalculationField)
+}
+
+export function basicFields(view: UnsavedViewV2) {
+  return pickBy(view.schema || {}, field => !isCalculationField(field))
+}
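A hedged usage sketch for the helpers in the new module above; the relative import path and the exact `ViewV2` fixture fields are assumptions.

```ts
import { CalculationType, ViewV2 } from "@budibase/types"
import { calculationFields, isCalculationField, isCalculationView } from "./views" // assumed path

const view: Omit<ViewV2, "id" | "version"> = {
  name: "sales summary",
  tableId: "ta_sales",
  schema: {
    // a plain (basic) view field
    region: { visible: true },
    // a calculation field, recognised by the presence of `calculationType`
    total: { calculationType: CalculationType.SUM, field: "amount" },
  },
}

isCalculationField(view.schema!.total) // true
isCalculationView(view)                // true: at least one calculation field
calculationFields(view)                // { total: { calculationType: "sum", field: "amount" } }
```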
@@ -33,15 +33,24 @@ export interface View {
   groupBy?: string
 }

-export type ViewFieldMetadata = UIFieldMetadata & {
+export interface BasicViewFieldMetadata extends UIFieldMetadata {
   readonly?: boolean
   columns?: Record<string, RelationSchemaField>
 }

-export type RelationSchemaField = UIFieldMetadata & {
+export interface RelationSchemaField extends UIFieldMetadata {
   readonly?: boolean
 }

+export interface ViewCalculationFieldMetadata extends BasicViewFieldMetadata {
+  calculationType: CalculationType
+  field: string
+}
+
+export type ViewFieldMetadata =
+  | BasicViewFieldMetadata
+  | ViewCalculationFieldMetadata
+
 export enum CalculationType {
   SUM = "sum",
   AVG = "avg",
@@ -50,11 +59,6 @@ export enum CalculationType {
   MAX = "max",
 }

-export type ViewCalculationFieldMetadata = ViewFieldMetadata & {
-  calculationType: CalculationType
-  field: string
-}
-
 export interface ViewV2 {
   version: 2
   id: string
@@ -67,7 +71,7 @@ export interface ViewV2 {
     order?: SortOrder
     type?: SortType
   }
-  schema?: Record<string, ViewFieldMetadata | ViewCalculationFieldMetadata>
+  schema?: Record<string, ViewFieldMetadata>
 }

 export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema
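A small sketch of how the reworked types fit together: `ViewFieldMetadata` is now a union, and the presence of `calculationType` is what separates a calculation field from a basic one.

```ts
import {
  BasicViewFieldMetadata,
  CalculationType,
  ViewCalculationFieldMetadata,
  ViewFieldMetadata,
} from "@budibase/types"

const basic: BasicViewFieldMetadata = { visible: true, readonly: true }

const average: ViewCalculationFieldMetadata = {
  calculationType: CalculationType.AVG,
  field: "price",
}

// Narrowing the union only needs the `calculationType` key.
function describeField(field: ViewFieldMetadata): string {
  return "calculationType" in field
    ? `${field.calculationType}(${field.field})`
    : "basic column"
}

describeField(basic)   // "basic column"
describeField(average) // "avg(price)"
```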
@@ -1,8 +1,14 @@
 import { SortOrder, SortType } from "../api"
 import { SearchFilters } from "./search"
-import { Row } from "../documents"
+import { CalculationType, Row } from "../documents"
 import { WithRequired } from "../shared"

+export interface Aggregation {
+  name: string
+  calculationType: CalculationType
+  field: string
+}
+
 export interface SearchParams {
   tableId?: string
   viewId?: string
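A minimal sketch of the new `Aggregation` shape, assuming it is re-exported from `@budibase/types` alongside the other search types.

```ts
import { Aggregation, CalculationType } from "@budibase/types"

// An aggregation named "total": the sum of the "amount" column.
const totalOfAmount: Aggregation = {
  name: "total",
  calculationType: CalculationType.SUM,
  field: "amount",
}
```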
@@ -2,6 +2,7 @@ import { Operation } from "./datasources"
 import { Row, Table, DocumentType } from "../documents"
 import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"
+import { Aggregation } from "./row"

 export enum BasicOperator {
   EQUAL = "equal",
@@ -154,6 +155,7 @@ export interface QueryJson {
   }
   resource?: {
     fields: string[]
+    aggregations?: Aggregation[]
   }
   filters?: SearchFilters
   sort?: SortJson
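And a hedged sketch of where aggregations land in a query: `QueryJson.resource` can now carry them next to the plain field list. The assumption here is that `QueryJson` and `Aggregation` are both exported from `@budibase/types`.

```ts
import { Aggregation, CalculationType, QueryJson } from "@budibase/types"

const aggregations: Aggregation[] = [
  { name: "total", calculationType: CalculationType.SUM, field: "amount" },
]

// Only the resource portion is shown here; the rest of QueryJson is unchanged.
const resource: QueryJson["resource"] = {
  fields: ["region"],
  aggregations,
}
```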
@@ -4,6 +4,29 @@ export type DeepPartial<T> = {

 export type ISO8601 = string

+/**
+ * RequiredKeys make it such that you _must_ assign a value to every key in the
+ * type. It differs subtly from Required<T> in that it doesn't change the type
+ * of the fields, you can specify undefined as a value and that's fine.
+ *
+ * Example:
+ *
+ * ```ts
+ * interface Foo {
+ *   bar: string
+ *   baz?: string
+ * }
+ *
+ * type FooRequiredKeys = RequiredKeys<Foo>
+ * type FooRequired = Required<Foo>
+ *
+ * const a: FooRequiredKeys = { bar: "hello", baz: undefined }
+ * const b: FooRequired = { bar: "hello", baz: undefined }
+ * ```
+ *
+ * In this code, a passes type checking whereas b does not. This is because
+ * Required<Foo> makes baz non-optional.
+ */
 export type RequiredKeys<T> = {
   [K in keyof Required<T>]: T[K]
 }
33 yarn.lock
@@ -17751,21 +17751,11 @@ periscopic@^3.1.0:
     estree-walker "^3.0.0"
     is-reference "^3.0.0"

-pg-cloudflare@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz#e6d5833015b170e23ae819e8c5d7eaedb472ca98"
-  integrity sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==
-
 pg-connection-string@2.5.0, pg-connection-string@^2.5.0:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
   integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==

-pg-connection-string@^2.6.4:
-  version "2.6.4"
-  resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.6.4.tgz#f543862adfa49fa4e14bc8a8892d2a84d754246d"
-  integrity sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==
-
 pg-int8@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
@@ -17776,21 +17766,11 @@ pg-pool@^3.6.0:
   resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e"
   integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ==

-pg-pool@^3.6.2:
-  version "3.6.2"
-  resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.2.tgz#3a592370b8ae3f02a7c8130d245bc02fa2c5f3f2"
-  integrity sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==
-
 pg-protocol@*, pg-protocol@^1.6.0:
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833"
   integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==

-pg-protocol@^1.6.1:
-  version "1.6.1"
-  resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.1.tgz#21333e6d83b01faaebfe7a33a7ad6bfd9ed38cb3"
-  integrity sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==
-
 pg-types@^2.1.0, pg-types@^2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
@@ -17815,19 +17795,6 @@ pg@8.10.0:
     pg-types "^2.1.0"
     pgpass "1.x"

-pg@^8.12.0:
-  version "8.12.0"
-  resolved "https://registry.yarnpkg.com/pg/-/pg-8.12.0.tgz#9341724db571022490b657908f65aee8db91df79"
-  integrity sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==
-  dependencies:
-    pg-connection-string "^2.6.4"
-    pg-pool "^3.6.2"
-    pg-protocol "^1.6.1"
-    pg-types "^2.1.0"
-    pgpass "1.x"
-  optionalDependencies:
-    pg-cloudflare "^1.1.1"
-
 pgpass@1.x:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"