Merge pull request #14628 from Budibase/view-calculation-sql

Initial passing test for view calculations.
This commit is contained in:
Sam Rose 2024-09-30 12:00:32 +01:00 committed by GitHub
commit 3c56fdc4c1
54 changed files with 1042 additions and 628 deletions
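
For context, this change begins wiring view "calculations" (aggregations such as SUM, COUNT, AVG, MIN and MAX on a ViewV2) through the SQL search path. A rough sketch of the flow the new test exercises, simplified from the view.spec.ts changes below; "Quantity Sum" and "quantity" are the names used in that test, and the rest of the setup is illustrative:

const view = await config.api.viewV2.create({
  tableId: table._id!,
  name: "order totals", // illustrative name
  schema: {
    "Quantity Sum": {
      visible: true,
      calculationType: CalculationType.SUM,
      field: "quantity",
    },
  },
})
const response = await config.api.viewV2.search(view.id, { query: {} })
// Aggregated rows come back without an _id, since they no longer map to a
// single row in the source table, e.g. [{ "Quantity Sum": 42 }].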

@@ -1 +1 @@
-Subproject commit 558a32dfd1f55bd894804a503e7e1090937df88c
+Subproject commit 3e24f6293ff5ee5f9b42822e001504e3bbf19cc0

View File

@@ -10,7 +10,7 @@ import {
  StaticDatabases,
  DEFAULT_TENANT_ID,
} from "../constants"
-import { Database, IdentityContext, Snippet, App } from "@budibase/types"
+import { Database, IdentityContext, Snippet, App, Table } from "@budibase/types"
import { ContextMap } from "./types"

let TEST_APP_ID: string | null = null
@@ -394,3 +394,20 @@ export function setFeatureFlags(key: string, value: Record<string, any>) {
  context.featureFlagCache ??= {}
  context.featureFlagCache[key] = value
}
+
+export function getTableForView(viewId: string): Table | undefined {
+  const context = getCurrentContext()
+  if (!context) {
+    return
+  }
+  return context.viewToTableCache?.[viewId]
+}
+
+export function setTableForView(viewId: string, table: Table) {
+  const context = getCurrentContext()
+  if (!context) {
+    return
+  }
+  context.viewToTableCache ??= {}
+  context.viewToTableCache[viewId] = table
+}
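
The two helpers above add a per-request cache from view ID to backing table on the context map. A minimal sketch of the intended usage pattern; fetchView and fetchTable are placeholder loaders supplied by the caller, not functions from this commit:

import { Table } from "@budibase/types"

async function resolveTableForView(
  viewId: string,
  fetchView: (id: string) => Promise<{ tableId: string }>,
  fetchTable: (id: string) => Promise<Table>
): Promise<Table> {
  // reuse the request-scoped cache if this view was already resolved
  const cached = getTableForView(viewId)
  if (cached) {
    return cached
  }
  const view = await fetchView(viewId)
  const table = await fetchTable(view.tableId)
  setTableForView(viewId, table)
  return table
}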

View File

@@ -1,4 +1,4 @@
-import { IdentityContext, Snippet, VM } from "@budibase/types"
+import { IdentityContext, Snippet, Table, VM } from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { GoogleSpreadsheet } from "google-spreadsheet"
@@ -21,4 +21,5 @@ export type ContextMap = {
  featureFlagCache?: {
    [key: string]: Record<string, any>
  }
+  viewToTableCache?: Record<string, Table>
}

View File

@@ -612,7 +612,6 @@ async function runQuery<T>(
 * limit {number} The number of results to fetch
 * bookmark {string|null} Current bookmark in the recursive search
 * rows {array|null} Current results in the recursive search
- * @returns {Promise<*[]|*>}
 */
async function recursiveSearch<T>(
  dbName: string,

View File

@@ -6,7 +6,7 @@ import {
  ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
-import { DatabaseQueryOpts } from "@budibase/types"
+import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"

/**
 * If creating DB allDocs/query params with only a single top level ID this can be used, this
@@ -66,9 +66,8 @@ export function getQueryIndex(viewName: ViewName) {
/**
 * Check if a given ID is that of a table.
- * @returns {boolean}
 */
-export const isTableId = (id: string) => {
+export const isTableId = (id: string): boolean => {
  // this includes datasource plus tables
  return (
    !!id &&
@@ -77,13 +76,16 @@ export const isTableId = (id: string) => {
  )
}

+export function isViewId(id: string): boolean {
+  return !!id && id.startsWith(`${VirtualDocumentType.VIEW}${SEPARATOR}`)
+}
+
/**
 * Check if a given ID is that of a datasource or datasource plus.
- * @returns {boolean}
 */
-export const isDatasourceId = (id: string) => {
+export const isDatasourceId = (id: string): boolean => {
  // this covers both datasources and datasource plus
-  return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
+  return !!id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}

/**
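
The ID helpers in this file all test a type prefix joined to the rest of the document ID by SEPARATOR. An illustration of the convention; the prefix and separator values below are assumed for the example rather than taken from the constants file:

const SEPARATOR = "_"      // assumed value, for illustration only
const VIEW_PREFIX = "view" // assumed value of VirtualDocumentType.VIEW

function looksLikeViewId(id: string): boolean {
  return !!id && id.startsWith(`${VIEW_PREFIX}${SEPARATOR}`)
}

// looksLikeViewId("view_ta_abc123_def456") === true
// looksLikeViewId("ta_abc123") === false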

View File

@ -11,10 +11,12 @@ import {
} from "./utils" } from "./utils"
import SqlTableQueryBuilder from "./sqlTable" import SqlTableQueryBuilder from "./sqlTable"
import { import {
Aggregation,
AnySearchFilter, AnySearchFilter,
ArrayOperator, ArrayOperator,
BasicOperator, BasicOperator,
BBReferenceFieldMetadata, BBReferenceFieldMetadata,
CalculationType,
FieldSchema, FieldSchema,
FieldType, FieldType,
INTERNAL_TABLE_SOURCE_ID, INTERNAL_TABLE_SOURCE_ID,
@ -824,8 +826,40 @@ class InternalBuilder {
return query.countDistinct(`${aliased}.${primary[0]} as total`) return query.countDistinct(`${aliased}.${primary[0]} as total`)
} }
addAggregations(
query: Knex.QueryBuilder,
aggregations: Aggregation[]
): Knex.QueryBuilder {
const fields = this.query.resource?.fields || []
if (fields.length > 0) {
query = query.groupBy(fields.map(field => `${this.table.name}.${field}`))
}
for (const aggregation of aggregations) {
const op = aggregation.calculationType
const field = `${this.table.name}.${aggregation.field} as ${aggregation.name}`
switch (op) {
case CalculationType.COUNT:
query = query.count(field)
break
case CalculationType.SUM:
query = query.sum(field)
break
case CalculationType.AVG:
query = query.avg(field)
break
case CalculationType.MIN:
query = query.min(field)
break
case CalculationType.MAX:
query = query.max(field)
break
}
}
return query
}
  addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
-   let { sort } = this.query
+   let { sort, resource } = this.query
    const primaryKey = this.table.primary
    const tableName = getTableName(this.table)
    const aliases = this.query.tableAliases
@@ -862,7 +896,8 @@ class InternalBuilder {
    // add sorting by the primary key if the result isn't already sorted by it,
    // to make sure result is deterministic
-   if (!sort || sort[primaryKey[0]] === undefined) {
+   const hasAggregations = (resource?.aggregations?.length ?? 0) > 0
+   if (!hasAggregations && (!sort || sort[primaryKey[0]] === undefined)) {
      query = query.orderBy(`${aliased}.${primaryKey[0]}`)
    }
    return query
@@ -1246,10 +1281,15 @@ class InternalBuilder {
      }
    }
-   // if counting, use distinct count, else select
-   query = !counting
-     ? query.select(this.generateSelectStatement())
-     : this.addDistinctCount(query)
+   const aggregations = this.query.resource?.aggregations || []
+   if (counting) {
+     query = this.addDistinctCount(query)
+   } else if (aggregations.length > 0) {
+     query = this.addAggregations(query, aggregations)
+   } else {
+     query = query.select(this.generateSelectStatement())
+   }
    // have to add after as well (this breaks MS-SQL)
    if (!counting) {
      query = this.addSorting(query)
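
Roughly, addAggregations turns a calculation view into a grouped aggregate select. A sketch of the equivalent hand-written Knex calls, with the table and column names invented for illustration:

import knex from "knex"

const db = knex({ client: "pg" })

// One aggregation ("Quantity Sum" = SUM(quantity)) grouped by the view's
// visible plain fields (here just "category"); all names are illustrative.
const query = db("orders")
  .groupBy(["orders.category"])
  .sum("orders.quantity as Quantity Sum")

// query.toString() is roughly:
//   select sum("orders"."quantity") as "Quantity Sum"
//   from "orders" group by "orders"."category"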

View File

@ -1,5 +1,6 @@
import dayjs from "dayjs" import dayjs from "dayjs"
import { import {
Aggregation,
AutoFieldSubType, AutoFieldSubType,
AutoReason, AutoReason,
Datasource, Datasource,
@ -19,6 +20,7 @@ import {
SortJson, SortJson,
SortType, SortType,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
breakExternalTableId, breakExternalTableId,
@ -46,7 +48,7 @@ import { db as dbCore } from "@budibase/backend-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import env from "../../../environment" import env from "../../../environment"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import { dataFilters } from "@budibase/shared-core" import { dataFilters, helpers } from "@budibase/shared-core"
export interface ManyRelationship { export interface ManyRelationship {
tableId?: string tableId?: string
@ -159,17 +161,41 @@ function isEditableColumn(column: FieldSchema) {
export class ExternalRequest<T extends Operation> { export class ExternalRequest<T extends Operation> {
private readonly operation: T private readonly operation: T
private readonly tableId: string private readonly source: Table | ViewV2
private datasource?: Datasource private datasource: Datasource
private tables: { [key: string]: Table } = {}
constructor(operation: T, tableId: string, datasource?: Datasource) { public static async for<T extends Operation>(
this.operation = operation operation: T,
this.tableId = tableId source: Table | ViewV2,
this.datasource = datasource opts: { datasource?: Datasource } = {}
if (datasource && datasource.entities) { ) {
this.tables = datasource.entities if (!opts.datasource) {
if (sdk.views.isView(source)) {
const table = await sdk.views.getTable(source.id)
opts.datasource = await sdk.datasources.get(table.sourceId!)
} else {
opts.datasource = await sdk.datasources.get(source.sourceId!)
}
} }
return new ExternalRequest(operation, source, opts.datasource)
}
private get tables(): { [key: string]: Table } {
if (!this.datasource.entities) {
throw new Error("Datasource does not have entities")
}
return this.datasource.entities
}
private constructor(
operation: T,
source: Table | ViewV2,
datasource: Datasource
) {
this.operation = operation
this.source = source
this.datasource = datasource
} }
private prepareFilters( private prepareFilters(
@ -290,20 +316,6 @@ export class ExternalRequest<T extends Operation> {
return this.tables[tableName] return this.tables[tableName]
} }
// seeds the object with table and datasource information
async retrieveMetadata(
datasourceId: string
): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
if (!this.datasource) {
this.datasource = await sdk.datasources.get(datasourceId)
if (!this.datasource || !this.datasource.entities) {
throw "No tables found, fetch tables before query."
}
this.tables = this.datasource.entities
}
return { tables: this.tables, datasource: this.datasource }
}
async getRow(table: Table, rowId: string): Promise<Row> { async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ), endpoint: getEndpoint(table._id!, Operation.READ),
@ -619,24 +631,16 @@ export class ExternalRequest<T extends Operation> {
} }
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> { async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
const { operation, tableId } = this const { operation } = this
if (!tableId) { let table: Table
throw new Error("Unable to run without a table ID") if (sdk.views.isView(this.source)) {
} table = await sdk.views.getTable(this.source.id)
let { datasourceId, tableName } = breakExternalTableId(tableId) } else {
let datasource = this.datasource table = this.source
if (!datasource) {
const { datasource: ds } = await this.retrieveMetadata(datasourceId)
datasource = ds
}
const tables = this.tables
const table = tables[tableName]
let isSql = isSQL(datasource)
if (!table) {
throw new Error(
`Unable to process query, table "${tableName}" not defined.`
)
} }
let isSql = isSQL(this.datasource)
// look for specific components of config which may not be considered acceptable // look for specific components of config which may not be considered acceptable
let { id, row, filters, sort, paginate, rows } = cleanupConfig( let { id, row, filters, sort, paginate, rows } = cleanupConfig(
config, config,
@ -679,25 +683,40 @@ export class ExternalRequest<T extends Operation> {
} }
} }
} }
if ( if (
operation === Operation.DELETE && operation === Operation.DELETE &&
(filters == null || Object.keys(filters).length === 0) (filters == null || Object.keys(filters).length === 0)
) { ) {
throw "Deletion must be filtered" throw "Deletion must be filtered"
} }
let aggregations: Aggregation[] = []
if (sdk.views.isView(this.source)) {
const calculationFields = helpers.views.calculationFields(this.source)
for (const [key, field] of Object.entries(calculationFields)) {
aggregations.push({
name: key,
field: field.field,
calculationType: field.calculationType,
})
}
}
let json: QueryJson = { let json: QueryJson = {
endpoint: { endpoint: {
datasourceId: datasourceId!, datasourceId: this.datasource._id!,
entityId: tableName, entityId: table.name,
operation, operation,
}, },
resource: { resource: {
// have to specify the fields to avoid column overlap (for SQL) // have to specify the fields to avoid column overlap (for SQL)
fields: isSql fields: isSql
? buildSqlFieldList(table, this.tables, { ? await buildSqlFieldList(this.source, this.tables, {
relationships: incRelationships, relationships: incRelationships,
}) })
: [], : [],
aggregations,
}, },
filters, filters,
sort, sort,
@ -714,7 +733,7 @@ export class ExternalRequest<T extends Operation> {
}, },
meta: { meta: {
table, table,
tables: tables, tables: this.tables,
}, },
} }
@ -745,7 +764,7 @@ export class ExternalRequest<T extends Operation> {
} }
const output = await sqlOutputProcessing( const output = await sqlOutputProcessing(
response, response,
table, this.source,
this.tables, this.tables,
relationships relationships
) )
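
With this refactor, callers no longer construct ExternalRequest with new; they go through the static for() factory, which resolves the datasource from the table or view when one isn't supplied. A hedged sketch of the calling pattern; the filter contents are illustrative:

// source may be a Table or a ViewV2; the datasource is looked up if omitted.
const request = await ExternalRequest.for(Operation.READ, source)
const result = await request.run({
  filters: { equal: { status: "open" } }, // illustrative filter
})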

View File

@ -17,6 +17,7 @@ import {
Row, Row,
Table, Table,
UserCtx, UserCtx,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as utils from "./utils" import * as utils from "./utils"
@ -29,39 +30,40 @@ import { generateIdForRow } from "./utils"
export async function handleRequest<T extends Operation>( export async function handleRequest<T extends Operation>(
operation: T, operation: T,
tableId: string, source: Table | ViewV2,
opts?: RunConfig opts?: RunConfig
): Promise<ExternalRequestReturnType<T>> { ): Promise<ExternalRequestReturnType<T>> {
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run( return (
opts || {} await ExternalRequest.for<T>(operation, source, {
) datasource: opts?.datasource,
})
).run(opts || {})
} }
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
const { _id, ...rowData } = ctx.request.body const { _id, ...rowData } = ctx.request.body
const table = await sdk.tables.getTable(tableId)
const { row: dataToUpdate } = await inputProcessing( const dataToUpdate = await inputProcessing(
ctx.user?._id, ctx.user?._id,
cloneDeep(table), cloneDeep(source),
rowData rowData
) )
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row: dataToUpdate, row: dataToUpdate,
tableId, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
throw { validation: validateResult.errors } throw { validation: validateResult.errors }
} }
const beforeRow = await sdk.rows.external.getRow(tableId, _id, { const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
relationships: true, relationships: true,
}) })
const response = await handleRequest(Operation.UPDATE, tableId, { const response = await handleRequest(Operation.UPDATE, source, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
row: dataToUpdate, row: dataToUpdate,
}) })
@ -69,17 +71,16 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// The id might have been changed, so the refetching would fail. Recalculating the id just in case // The id might have been changed, so the refetching would fail. Recalculating the id just in case
const updatedId = const updatedId =
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
const row = await sdk.rows.external.getRow(tableId, updatedId, { const row = await sdk.rows.external.getRow(table._id!, updatedId, {
relationships: true, relationships: true,
}) })
const [enrichedRow, oldRow] = await Promise.all([ const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, { outputProcessing(source, row, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
fromViewId: viewId,
}), }),
outputProcessing(table, beforeRow, { outputProcessing(source, beforeRow, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
}), }),
@ -94,9 +95,9 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const _id = ctx.request.body._id const _id = ctx.request.body._id
const { row } = await handleRequest(Operation.DELETE, tableId, { const { row } = await handleRequest(Operation.DELETE, source, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE, includeSqlRelationships: IncludeRelationship.EXCLUDE,
}) })
@ -105,11 +106,11 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) { export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body const { rows } = ctx.request.body
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
let promises: Promise<{ row: Row; table: Table }>[] = [] let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) { for (let row of rows) {
promises.push( promises.push(
handleRequest(Operation.DELETE, tableId, { handleRequest(Operation.DELETE, source, {
id: breakRowIdField(row._id), id: breakRowIdField(row._id),
includeSqlRelationships: IncludeRelationship.EXCLUDE, includeSqlRelationships: IncludeRelationship.EXCLUDE,
}) })
@ -124,6 +125,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) { export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId const id = ctx.params.rowId
const source = await utils.getSource(ctx)
const { tableId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId) const datasource: Datasource = await sdk.datasources.get(datasourceId)
@ -131,7 +133,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.") ctx.throw(400, "Datasource has not been configured for plus API.")
} }
const tables = datasource.entities const tables = datasource.entities
const response = await handleRequest(Operation.READ, tableId, { const response = await handleRequest(Operation.READ, source, {
id, id,
datasource, datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
@ -155,7 +157,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
// don't support composite keys right now // don't support composite keys right now
const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0]) const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
const primaryLink = linkedTable.primary?.[0] as string const primaryLink = linkedTable.primary?.[0] as string
const relatedRows = await handleRequest(Operation.READ, linkedTableId!, { const relatedRows = await handleRequest(Operation.READ, linkedTable, {
tables, tables,
filters: { filters: {
oneOf: { oneOf: {

View File

@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
} }
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) { export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const { tableId } = utils.getSourceId(ctx) const { tableId, viewId } = utils.getSourceId(ctx)
await context.ensureSnippetContext(true) await context.ensureSnippetContext(true)
@ -222,6 +222,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
...ctx.request.body, ...ctx.request.body,
query: enrichedQuery, query: enrichedQuery,
tableId, tableId,
viewId,
} }
ctx.status = 200 ctx.status = 200
@ -229,14 +230,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
} }
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently // external tables are hard to validate currently
if (isExternalTableID(tableId)) { if (isExternalTableID(table._id!)) {
ctx.body = { valid: true, errors: {} } ctx.body = { valid: true, errors: {} }
} else { } else {
ctx.body = await sdk.rows.utils.validate({ ctx.body = await sdk.rows.utils.validate({
row: ctx.request.body, row: ctx.request.body,
tableId, source,
}) })
} }
} }

View File

@ -21,18 +21,19 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils" import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
import { flatten } from "lodash" import { flatten } from "lodash"
import { findRow } from "../../../sdk/app/rows/internal"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
const inputs = ctx.request.body const inputs = ctx.request.body
const isUserTable = tableId === InternalTables.USER_METADATA const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow let oldRow
const dbTable = await sdk.tables.getTable(tableId)
try { try {
oldRow = await outputProcessing( oldRow = await outputProcessing(source, await findRow(tableId, inputs._id!))
dbTable,
await utils.findRow(tableId, inputs._id!)
)
} catch (err) { } catch (err) {
if (isUserTable) { if (isUserTable) {
// don't include the rev, it'll be the global rev // don't include the rev, it'll be the global rev
@ -48,22 +49,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// need to build up full patch fields before coerce // need to build up full patch fields before coerce
let combinedRow: any = cloneDeep(oldRow) let combinedRow: any = cloneDeep(oldRow)
for (let key of Object.keys(inputs)) { for (let key of Object.keys(inputs)) {
if (!dbTable.schema[key]) continue if (!table.schema[key]) continue
combinedRow[key] = inputs[key] combinedRow[key] = inputs[key]
} }
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
// this returns the table and row incase they have been updated // this returns the table and row incase they have been updated
let { table, row } = await inputProcessing( let row = await inputProcessing(ctx.user?._id, source, combinedRow)
ctx.user?._id,
tableClone,
combinedRow
)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
table, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
@ -87,10 +81,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
return { row: ctx.body as Row, table, oldRow } return { row: ctx.body as Row, table, oldRow }
} }
const result = await finaliseRow(table, row, { const result = await finaliseRow(source, row, {
oldTable: dbTable,
updateFormula: true, updateFormula: true,
fromViewId: viewId,
}) })
return { ...result, oldRow } return { ...result, oldRow }
@ -186,7 +178,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
sdk.tables.getTable(tableId), sdk.tables.getTable(tableId),
linkRows.getLinkDocuments({ tableId, rowId, fieldName }), linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
]) ])
let row = await utils.findRow(tableId, rowId) let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row) row = await outputProcessing(table, row)
const linkVals = links as LinkDocumentValue[] const linkVals = links as LinkDocumentValue[]

View File

@ -4,10 +4,11 @@ import {
processFormulas, processFormulas,
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { Table, Row, FormulaType, FieldType } from "@budibase/types" import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
import * as linkRows from "../../../db/linkedRows" import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
/** /**
* This function runs through a list of enriched rows, looks at the rows which * This function runs through a list of enriched rows, looks at the rows which
@ -121,33 +122,26 @@ export async function updateAllFormulasInTable(table: Table) {
* expects the row to be totally enriched/contain all relationships. * expects the row to be totally enriched/contain all relationships.
*/ */
export async function finaliseRow( export async function finaliseRow(
table: Table, source: Table | ViewV2,
row: Row, row: Row,
{ opts?: { updateFormula: boolean }
oldTable,
updateFormula,
fromViewId,
}: { oldTable?: Table; updateFormula: boolean; fromViewId?: string } = {
updateFormula: true,
}
) { ) {
const db = context.getAppDB() const db = context.getAppDB()
const { updateFormula = true } = opts || {}
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
row.type = "row" row.type = "row"
// process the row before return, to include relationships // process the row before return, to include relationships
let enrichedRow = (await outputProcessing(table, cloneDeep(row), { let enrichedRow = await outputProcessing(source, cloneDeep(row), {
squash: false, squash: false,
})) as Row })
// use enriched row to generate formulas for saving, specifically only use as context // use enriched row to generate formulas for saving, specifically only use as context
row = await processFormulas(table, row, { row = await processFormulas(table, row, {
dynamic: false, dynamic: false,
contextRows: [enrichedRow], contextRows: [enrichedRow],
}) })
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (oldTable && !isEqual(oldTable, table)) {
await db.put(table)
}
const response = await db.put(row) const response = await db.put(row)
// for response, calculate the formulas for the enriched row // for response, calculate the formulas for the enriched row
enrichedRow._rev = response.rev enrichedRow._rev = response.rev
@ -158,8 +152,6 @@ export async function finaliseRow(
if (updateFormula) { if (updateFormula) {
await updateRelatedFormula(table, enrichedRow) await updateRelatedFormula(table, enrichedRow)
} }
const squashed = await linkRows.squashLinks(table, enrichedRow, { const squashed = await linkRows.squashLinks(source, enrichedRow)
fromViewId,
})
return { row: enrichedRow, squashed, table } return { row: enrichedRow, squashed, table }
} }
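
finaliseRow now takes the row's source (table or view) plus a single options object, with updateFormula defaulting to true. A sketch of the new call shapes, assuming table, view and row are already in scope:

await finaliseRow(table, row)                          // updateFormula defaults to true
await finaliseRow(view, row, { updateFormula: false }) // skip related formula updates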

View File

@ -1,11 +1,19 @@
// need to handle table name + field or just field, depending on if relationships used // need to handle table name + field or just field, depending on if relationships used
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types" import {
FieldSchema,
FieldType,
Row,
Table,
JsonTypes,
ViewV2,
} from "@budibase/types"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils" import { generateRowIdField } from "../../../../integrations/utils"
import sdk from "../../../../sdk"
function extractFieldValue({ function extractFieldValue({
row, row,
@ -78,20 +86,30 @@ function fixJsonTypes(row: Row, table: Table) {
return row return row
} }
export function basicProcessing({ export async function basicProcessing({
row, row,
table, source,
tables, tables,
isLinked, isLinked,
sqs, sqs,
}: { }: {
row: Row row: Row
table: Table source: Table | ViewV2
tables: Table[] tables: Table[]
isLinked: boolean isLinked: boolean
sqs?: boolean sqs?: boolean
}): Row { }): Promise<Row> {
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
const thisRow: Row = {} const thisRow: Row = {}
// filter the row down to what is actually the row (not joined) // filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) { for (let fieldName of Object.keys(table.schema)) {
let value = extractFieldValue({ let value = extractFieldValue({
@ -108,13 +126,20 @@ export function basicProcessing({
thisRow[fieldName] = value thisRow[fieldName] = value
} }
} }
if (sdk.views.isView(source)) {
for (const key of Object.keys(helpers.views.calculationFields(source))) {
thisRow[key] = row[key]
}
}
let columns: string[] = Object.keys(table.schema) let columns: string[] = Object.keys(table.schema)
if (!sqs) { if (!sqs && !isCalculationView) {
thisRow._id = generateIdForRow(row, table, isLinked) thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id thisRow.tableId = table._id
thisRow._rev = "rev" thisRow._rev = "rev"
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS) columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
} else { } else if (!isCalculationView) {
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS) columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) { for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({ thisRow[internalColumn] = extractFieldValue({
@ -149,28 +174,30 @@ export function basicProcessing({
thisRow[col] = array thisRow[col] = array
// make sure all of them have an _id // make sure all of them have an _id
const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]! const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]!
thisRow[col] = (thisRow[col] as Row[]) thisRow[col] = (
.map(relatedRow => await Promise.all(
basicProcessing({ (thisRow[col] as Row[]).map(relatedRow =>
row: relatedRow, basicProcessing({
table: relatedTable, row: relatedRow,
tables, source: relatedTable,
isLinked: false, tables,
sqs, isLinked: false,
}) sqs,
})
)
) )
.sort((a, b) => { ).sort((a, b) => {
const aField = a?.[sortField], const aField = a?.[sortField],
bField = b?.[sortField] bField = b?.[sortField]
if (!aField) { if (!aField) {
return 1 return 1
} else if (!bField) { } else if (!bField) {
return -1 return -1
} }
return aField.localeCompare return aField.localeCompare
? aField.localeCompare(bField) ? aField.localeCompare(bField)
: aField - bField : aField - bField
}) })
} }
} }
return fixJsonTypes(thisRow, table) return fixJsonTypes(thisRow, table)

View File

@ -7,10 +7,14 @@ import {
ManyToManyRelationshipFieldMetadata, ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata, RelationshipFieldMetadata,
RelationshipsJson, RelationshipsJson,
Row,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { breakExternalTableId } from "../../../../integrations/utils" import { breakExternalTableId } from "../../../../integrations/utils"
import { generateJunctionTableID } from "../../../../db/utils" import { generateJunctionTableID } from "../../../../db/utils"
import sdk from "../../../../sdk"
import { helpers } from "@budibase/shared-core"
type TableMap = Record<string, Table> type TableMap = Record<string, Table>
@ -108,11 +112,12 @@ export function buildInternalRelationships(
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario. * is more performant and has the added benefit of protecting against this scenario.
*/ */
export function buildSqlFieldList( export async function buildSqlFieldList(
table: Table, source: Table | ViewV2,
tables: TableMap, tables: TableMap,
opts?: { relationships: boolean } opts?: { relationships: boolean }
) { ) {
const { relationships } = opts || {}
function extractRealFields(table: Table, existing: string[] = []) { function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema) return Object.entries(table.schema)
.filter( .filter(
@ -123,22 +128,33 @@ export function buildSqlFieldList(
) )
.map(column => `${table.name}.${column[0]}`) .map(column => `${table.name}.${column[0]}`)
} }
let fields = extractRealFields(table)
let fields: string[] = []
if (sdk.views.isView(source)) {
fields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
fields = extractRealFields(source)
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
for (let field of Object.values(table.schema)) { for (let field of Object.values(table.schema)) {
if ( if (field.type !== FieldType.LINK || !relationships || !field.tableId) {
field.type !== FieldType.LINK ||
!opts?.relationships ||
!field.tableId
) {
continue continue
} }
const { tableName: linkTableName } = breakExternalTableId(field.tableId) const { tableName } = breakExternalTableId(field.tableId)
const linkTable = tables[linkTableName] if (tables[tableName]) {
if (linkTable) { fields = fields.concat(extractRealFields(tables[tableName], fields))
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
} }
} }
return fields return fields
} }
@ -149,3 +165,7 @@ export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
(DSPlusOperation.READ in resp[0] && resp[0].read === true) (DSPlusOperation.READ in resp[0] && resp[0].read === true)
) )
} }
export function isKnexRows(resp: DatasourcePlusQueryResponse): resp is Row[] {
return !isKnexEmptyReadResponse(resp)
}
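
isKnexRows is a TypeScript type guard over the union that datasource queries return, so callers can narrow to Row[] without casting. The same pattern in isolation, with a simplified Row type and marker shape standing in for the real ones:

interface Row { _id?: string; [key: string]: any } // simplified stand-in
type EmptyReadMarker = { read: true }
type QueryResponse = Row[] | EmptyReadMarker[]

function isRows(resp: QueryResponse): resp is Row[] {
  // treat an empty result or a single { read: true } marker as "no rows"
  return !(
    resp.length === 0 ||
    (resp.length === 1 && "read" in resp[0] && resp[0].read === true)
  )
}

function firstRowId(resp: QueryResponse): string | undefined {
  return isRows(resp) ? resp[0]?._id : undefined // narrowed to Row[] here
}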

View File

@ -1,6 +1,6 @@
import * as utils from "../../../../db/utils" import * as utils from "../../../../db/utils"
import { context } from "@budibase/backend-core" import { docIds } from "@budibase/backend-core"
import { import {
Ctx, Ctx,
DatasourcePlusQueryResponse, DatasourcePlusQueryResponse,
@ -8,17 +8,18 @@ import {
RelationshipsJson, RelationshipsJson,
Row, Row,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
processDates, processDates,
processFormulas, processFormulas,
} from "../../../../utilities/rowProcessor" } from "../../../../utilities/rowProcessor"
import { isKnexEmptyReadResponse } from "./sqlUtils" import { isKnexRows } from "./sqlUtils"
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic" import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
import sdk from "../../../../sdk" import sdk from "../../../../sdk"
import { processStringSync } from "@budibase/string-templates" import { processStringSync } from "@budibase/string-templates"
import validateJs from "validate.js" import validateJs from "validate.js"
import { getFullUser } from "../../../../utilities/users" import { helpers } from "@budibase/shared-core"
validateJs.extend(validateJs.validators.datetime, { validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) { parse: function (value: string) {
@ -58,26 +59,11 @@ export async function processRelationshipFields(
return row return row
} }
export async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB()
let row: Row
// TODO remove special user case in future
if (tableId === utils.InternalTables.USER_METADATA) {
row = await getFullUser(rowId)
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } { export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
// top priority, use the URL first // top priority, use the URL first
if (ctx.params?.sourceId) { if (ctx.params?.sourceId) {
const { sourceId } = ctx.params const { sourceId } = ctx.params
if (utils.isViewID(sourceId)) { if (docIds.isViewId(sourceId)) {
return { return {
tableId: utils.extractViewInfoFromID(sourceId).tableId, tableId: utils.extractViewInfoFromID(sourceId).tableId,
viewId: sourceId, viewId: sourceId,
@ -96,22 +82,22 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
throw new Error("Unable to find table ID in request") throw new Error("Unable to find table ID in request")
} }
export async function validate( export async function getSource(ctx: Ctx): Promise<Table | ViewV2> {
opts: { row: Row } & ({ tableId: string } | { table: Table }) const { tableId, viewId } = getSourceId(ctx)
) { if (viewId) {
let fetchedTable: Table return sdk.views.get(viewId)
if ("tableId" in opts) {
fetchedTable = await sdk.tables.getTable(opts.tableId)
} else {
fetchedTable = opts.table
} }
return sdk.rows.utils.validate({ return sdk.tables.getTable(tableId)
...opts,
table: fetchedTable,
})
} }
function fixBooleanFields({ row, table }: { row: Row; table: Table }) { export async function getTableFromSource(source: Table | ViewV2) {
if (sdk.views.isView(source)) {
return await sdk.views.getTable(source.id)
}
return source
}
function fixBooleanFields(row: Row, table: Table) {
for (let col of Object.values(table.schema)) { for (let col of Object.values(table.schema)) {
if (col.type === FieldType.BOOLEAN) { if (col.type === FieldType.BOOLEAN) {
if (row[col.name] === 1) { if (row[col.name] === 1) {
@ -126,49 +112,45 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function sqlOutputProcessing( export async function sqlOutputProcessing(
rows: DatasourcePlusQueryResponse, rows: DatasourcePlusQueryResponse,
table: Table, source: Table | ViewV2,
tables: Record<string, Table>, tables: Record<string, Table>,
relationships: RelationshipsJson[], relationships: RelationshipsJson[],
opts?: { sqs?: boolean } opts?: { sqs?: boolean }
): Promise<Row[]> { ): Promise<Row[]> {
if (isKnexEmptyReadResponse(rows)) { if (!isKnexRows(rows)) {
return [] return []
} }
let finalRows: { [key: string]: Row } = {}
for (let row of rows as Row[]) { let table: Table
let rowId = row._id let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
let processedRows: Row[] = []
for (let row of rows) {
if (opts?.sqs) { if (opts?.sqs) {
rowId = getInternalRowId(row, table) row._id = getInternalRowId(row, table)
row._id = rowId } else if (row._id == null && !isCalculationView) {
} else if (!rowId) { row._id = generateIdForRow(row, table)
rowId = generateIdForRow(row, table)
row._id = rowId
} }
const thisRow = basicProcessing({
row = await basicProcessing({
row, row,
table, source,
tables: Object.values(tables), tables: Object.values(tables),
isLinked: false, isLinked: false,
sqs: opts?.sqs, sqs: opts?.sqs,
}) })
if (thisRow._id == null) { row = fixBooleanFields(row, table)
throw new Error("Unable to generate row ID for SQL rows") row = await processRelationshipFields(table, tables, row, relationships)
} processedRows.push(row)
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
} }
// make sure all related rows are correct return processDates(table, processedRows)
let finalRowArray = []
for (let row of Object.values(finalRows)) {
finalRowArray.push(
await processRelationshipFields(table, tables, row, relationships)
)
}
// process some additional types
finalRowArray = processDates(table, finalRowArray)
return finalRowArray
} }
export function isUserMetadataTable(tableId: string) { export function isUserMetadataTable(tableId: string) {

View File

@ -3,8 +3,6 @@ import {
ViewV2, ViewV2,
SearchRowResponse, SearchRowResponse,
SearchViewRowRequest, SearchViewRowRequest,
RequiredKeys,
RowSearchParams,
SearchFilterKey, SearchFilterKey,
LogicalOperator, LogicalOperator,
} from "@budibase/types" } from "@budibase/types"
@ -27,9 +25,6 @@ export async function searchView(
ctx.throw(400, `This method only supports viewsV2`) ctx.throw(400, `This method only supports viewsV2`)
} }
const viewFields = Object.entries(view.schema || {})
.filter(([_, value]) => value.visible)
.map(([key]) => key)
const { body } = ctx.request const { body } = ctx.request
// Enrich saved query with ephemeral query params. // Enrich saved query with ephemeral query params.
@ -74,22 +69,17 @@ export async function searchView(
user: sdk.users.getUserContextBindings(ctx.user), user: sdk.users.getUserContextBindings(ctx.user),
}) })
const searchOptions: RequiredKeys<SearchViewRowRequest> & const result = await sdk.rows.search({
RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = {
tableId: view.tableId,
viewId: view.id, viewId: view.id,
tableId: view.tableId,
query: enrichedQuery, query: enrichedQuery,
fields: viewFields,
...getSortOptions(body, view), ...getSortOptions(body, view),
limit: body.limit, limit: body.limit,
bookmark: body.bookmark, bookmark: body.bookmark,
paginate: body.paginate, paginate: body.paginate,
countRows: body.countRows, countRows: body.countRows,
} })
const result = await sdk.rows.search(searchOptions)
result.rows.forEach(r => (r._viewId = view.id)) result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result ctx.body = result
} }

View File

@@ -113,11 +113,10 @@ export async function bulkImport(
    const processed = await inputProcessing(ctx.user?._id, table, row, {
      noAutoRelationships: true,
    })
-   parsedRows.push(processed.row)
-   table = processed.table
+   parsedRows.push(processed)
  }

- await handleRequest(Operation.BULK_UPSERT, table._id!, {
+ await handleRequest(Operation.BULK_UPSERT, table, {
    rows: parsedRows,
  })
  await events.rows.imported(table, parsedRows.length)

View File

@ -33,7 +33,7 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash" import { cloneDeep } from "lodash"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
@ -149,12 +149,7 @@ export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse> ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) { ) {
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
let tableBefore = await sdk.tables.getTable(tableId) await pickApi({ tableId }).bulkImport(ctx)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
// right now we don't trigger anything for bulk import because it // right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to // can only be done in the builder, but in the future we may need to

View File

@ -3,7 +3,6 @@ import { handleDataImport } from "./utils"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
FieldType,
RenameColumn, RenameColumn,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
@ -70,22 +69,10 @@ export async function bulkImport(
) { ) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body const { rows, identifierFields } = ctx.request.body
await handleDataImport( await handleDataImport(table, {
{ importRows: rows,
...table, identifierFields,
schema: { user: ctx.user,
_id: { })
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
return table return table
} }

View File

@ -139,8 +139,7 @@ export async function importToRows(
const processed = await inputProcessing(user?._id, table, row, { const processed = await inputProcessing(user?._id, table, row, {
noAutoRelationships: true, noAutoRelationships: true,
}) })
row = processed.row row = processed
table = processed.table
// However here we must reference the original table, as we want to mutate // However here we must reference the original table, as we want to mutate
// the real schema of the table passed in, not the clone used for // the real schema of the table passed in, not the clone used for

View File

@ -7,10 +7,49 @@ import {
ViewResponse, ViewResponse,
ViewResponseEnriched, ViewResponseEnriched,
ViewV2, ViewV2,
ViewFieldMetadata, BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
RelationSchemaField, RelationSchemaField,
ViewFieldMetadata,
} from "@budibase/types" } from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets" import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core"
function stripUnknownFields(
field: BasicViewFieldMetadata
): RequiredKeys<BasicViewFieldMetadata> {
if (helpers.views.isCalculationField(field)) {
const strippedField: RequiredKeys<ViewCalculationFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
calculationType: field.calculationType,
field: field.field,
columns: field.columns,
}
return strippedField
} else {
const strippedField: RequiredKeys<BasicViewFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
columns: field.columns,
}
return strippedField
}
}
function stripUndefinedFields(obj: Record<string, any>): void {
Object.keys(obj)
.filter(key => obj[key] === undefined)
.forEach(key => {
delete obj[key]
})
}
async function parseSchema(view: CreateViewRequest) { async function parseSchema(view: CreateViewRequest) {
if (!view.schema) { if (!view.schema) {
@ -22,6 +61,7 @@ async function parseSchema(view: CreateViewRequest) {
let fieldRelatedSchema: let fieldRelatedSchema:
| Record<string, RequiredKeys<RelationSchemaField>> | Record<string, RequiredKeys<RelationSchemaField>>
| undefined | undefined
if (schemaValue.columns) { if (schemaValue.columns) {
fieldRelatedSchema = Object.entries(schemaValue.columns).reduce< fieldRelatedSchema = Object.entries(schemaValue.columns).reduce<
NonNullable<typeof fieldRelatedSchema> NonNullable<typeof fieldRelatedSchema>
@ -35,25 +75,12 @@ async function parseSchema(view: CreateViewRequest) {
} }
return acc return acc
}, {}) }, {})
schemaValue.columns = fieldRelatedSchema
} }
const fieldSchema: RequiredKeys< const fieldSchema = stripUnknownFields(schemaValue)
ViewFieldMetadata & { stripUndefinedFields(fieldSchema)
columns: typeof fieldRelatedSchema
}
> = {
order: schemaValue.order,
width: schemaValue.width,
visible: schemaValue.visible,
readonly: schemaValue.readonly,
icon: schemaValue.icon,
columns: fieldRelatedSchema,
}
Object.entries(fieldSchema)
.filter(([, val]) => val === undefined)
.forEach(([key]) => {
delete fieldSchema[key as keyof ViewFieldMetadata]
})
p[fieldName] = fieldSchema p[fieldName] = fieldSchema
return p return p
}, {} as Record<string, RequiredKeys<ViewFieldMetadata>>) }, {} as Record<string, RequiredKeys<ViewFieldMetadata>>)

View File

@ -76,7 +76,7 @@ async function waitForEvent(
} }
describe.each([ describe.each([
["internal", undefined], ["lucene", undefined],
["sqs", undefined], ["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -2453,9 +2453,15 @@ describe.each([
let flagCleanup: (() => void) | undefined let flagCleanup: (() => void) | undefined
beforeAll(async () => { beforeAll(async () => {
flagCleanup = setCoreEnv({ const env = {
TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`, TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
}) }
if (isSqs) {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
} else {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
}
flagCleanup = setCoreEnv(env)
const aux2Table = await config.api.table.save(saveTableRequest()) const aux2Table = await config.api.table.save(saveTableRequest())
const aux2Data = await config.api.row.save(aux2Table._id!, {}) const aux2Data = await config.api.row.save(aux2Table._id!, {})

View File

@ -157,7 +157,11 @@ describe.each([
if (isInMemory) { if (isInMemory) {
return dataFilters.search(_.cloneDeep(rows), this.query) return dataFilters.search(_.cloneDeep(rows), this.query)
} else { } else {
return config.api.row.search(this.query.tableId, this.query) const sourceId = this.query.viewId || this.query.tableId
if (!sourceId) {
throw new Error("No source ID provided")
}
return config.api.row.search(sourceId, this.query)
} }
} }

View File

@ -18,10 +18,11 @@ import {
ViewV2, ViewV2,
SearchResponse, SearchResponse,
BasicOperator, BasicOperator,
CalculationType,
RelationshipType, RelationshipType,
TableSchema, TableSchema,
ViewFieldMetadata,
RenameColumn, RenameColumn,
ViewFieldMetadata,
FeatureFlag, FeatureFlag,
BBReferenceFieldSubType, BBReferenceFieldSubType,
} from "@budibase/types" } from "@budibase/types"
@ -36,7 +37,6 @@ import {
setEnv as setCoreEnv, setEnv as setCoreEnv,
env, env,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import sdk from "../../../sdk"
describe.each([ describe.each([
["lucene", undefined], ["lucene", undefined],
@ -2196,28 +2196,6 @@ describe.each([
expect(response.rows).toHaveLength(0) expect(response.rows).toHaveLength(0)
}) })
it("queries the row api passing the view fields only", async () => {
const searchSpy = jest.spyOn(sdk.rows, "search")
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
id: { visible: true },
one: { visible: false },
},
})
await config.api.viewV2.search(view.id, { query: {} })
expect(searchSpy).toHaveBeenCalledTimes(1)
expect(searchSpy).toHaveBeenCalledWith(
expect.objectContaining({
fields: ["id"],
})
)
})
describe("foreign relationship columns", () => { describe("foreign relationship columns", () => {
let envCleanup: () => void let envCleanup: () => void
beforeAll(() => { beforeAll(() => {
@ -2382,6 +2360,71 @@ describe.each([
]) ])
}) })
}) })
!isLucene &&
describe("calculations", () => {
let table: Table
let rows: Row[]
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
quantity: {
type: FieldType.NUMBER,
name: "quantity",
},
price: {
type: FieldType.NUMBER,
name: "price",
},
},
})
)
rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {
quantity: generator.natural({ min: 1, max: 10 }),
price: generator.natural({ min: 1, max: 10 }),
})
)
)
})
it("should be able to search by calculations", async () => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
"Quantity Sum": {
visible: true,
calculationType: CalculationType.SUM,
field: "quantity",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
}),
])
)
// Calculation views do not return rows that can be linked back to
// the source table, and so should not have an _id field.
for (const row of response.rows) {
expect("_id" in row).toBe(false)
}
})
})
}) })
describe("permissions", () => { describe("permissions", () => {

View File

@ -27,6 +27,7 @@ import {
ViewV2, ViewV2,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import { helpers } from "@budibase/shared-core"
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils" export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
@ -247,26 +248,36 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
export type SquashTableFields = Record<string, { visibleFieldNames: string[] }> export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
/** /**
* This function will take the given enriched rows and squash the links to only contain the primary display field. * This function will take the given enriched rows and squash the links to only
* @returns The rows after having their links squashed to only contain the ID and primary display. * contain the primary display field.
*
* @returns The rows after having their links squashed to only contain the ID
* and primary display.
*/ */
export async function squashLinks<T = Row[] | Row>( export async function squashLinks<T = Row[] | Row>(
table: Table, source: Table | ViewV2,
enriched: T, enriched: T
options?: {
fromViewId?: string
}
): Promise<T> { ): Promise<T> {
const allowRelationshipSchemas = await features.flags.isEnabled( const allowRelationshipSchemas = await features.flags.isEnabled(
FeatureFlag.ENRICHED_RELATIONSHIPS FeatureFlag.ENRICHED_RELATIONSHIPS
) )
let viewSchema: Record<string, ViewFieldMetadata> = {} let viewSchema: Record<string, ViewFieldMetadata> = {}
if (options?.fromViewId && allowRelationshipSchemas) { if (sdk.views.isView(source)) {
const view = Object.values(table.views || {}).find( if (helpers.views.isCalculationView(source)) {
(v): v is ViewV2 => sdk.views.isV2(v) && v.id === options?.fromViewId return enriched
) }
viewSchema = view?.schema || {}
if (allowRelationshipSchemas) {
viewSchema = source.schema || {}
}
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
} }
// will populate this as we find them // will populate this as we find them

View File

@ -1,4 +1,4 @@
import { context, db as dbCore, utils } from "@budibase/backend-core" import { context, db as dbCore, docIds, utils } from "@budibase/backend-core"
import { import {
DatabaseQueryOpts, DatabaseQueryOpts,
Datasource, Datasource,
@ -318,12 +318,8 @@ export function generateViewID(tableId: string) {
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}` }${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
} }
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
}
export function extractViewInfoFromID(viewId: string) { export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) { if (!docIds.isViewId(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID") throw new Error("Unable to extract table ID, is not a view ID")
} }
const split = viewId.split(SEPARATOR) const split = viewId.split(SEPARATOR)

View File

@ -15,7 +15,7 @@ export function triggerRowActionAuthorised(
const rowActionId: string = ctx.params[actionPath] const rowActionId: string = ctx.params[actionPath]
const isTableId = docIds.isTableId(sourceId) const isTableId = docIds.isTableId(sourceId)
const isViewId = utils.isViewID(sourceId) const isViewId = docIds.isViewId(sourceId)
if (!isTableId && !isViewId) { if (!isTableId && !isViewId) {
ctx.throw(400, `'${sourceId}' is not a valid source id`) ctx.throw(400, `'${sourceId}' is not a valid source id`)
} }

View File

@ -1,10 +1,10 @@
import { db, roles } from "@budibase/backend-core" import { db, docIds, roles } from "@budibase/backend-core"
import { import {
PermissionLevel, PermissionLevel,
PermissionSource, PermissionSource,
VirtualDocumentType, VirtualDocumentType,
} from "@budibase/types" } from "@budibase/types"
import { extractViewInfoFromID, isViewID } from "../../../db/utils" import { extractViewInfoFromID } from "../../../db/utils"
import { import {
CURRENTLY_SUPPORTED_LEVELS, CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions, getBasePermissions,
@ -20,7 +20,7 @@ type ResourcePermissions = Record<
export async function getInheritablePermissions( export async function getInheritablePermissions(
resourceId: string resourceId: string
): Promise<ResourcePermissions | undefined> { ): Promise<ResourcePermissions | undefined> {
if (isViewID(resourceId)) { if (docIds.isViewId(resourceId)) {
return await getResourcePerms(extractViewInfoFromID(resourceId).tableId) return await getResourcePerms(extractViewInfoFromID(resourceId).tableId)
} }
} }

View File

@ -1,11 +1,11 @@
import { context, HTTPError, utils } from "@budibase/backend-core" import { context, docIds, HTTPError, utils } from "@budibase/backend-core"
import { import {
AutomationTriggerStepId, AutomationTriggerStepId,
SEPARATOR, SEPARATOR,
TableRowActions, TableRowActions,
VirtualDocumentType, VirtualDocumentType,
} from "@budibase/types" } from "@budibase/types"
import { generateRowActionsID, isViewID } from "../../db/utils" import { generateRowActionsID } from "../../db/utils"
import automations from "./automations" import automations from "./automations"
import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo" import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo"
import * as triggers from "../../automations/triggers" import * as triggers from "../../automations/triggers"
@ -155,7 +155,7 @@ export async function update(
async function guardView(tableId: string, viewId: string) { async function guardView(tableId: string, viewId: string) {
let view let view
if (isViewID(viewId)) { if (docIds.isViewId(viewId)) {
view = await sdk.views.get(viewId) view = await sdk.views.get(viewId)
} }
if (!view || view.tableId !== tableId) { if (!view || view.tableId !== tableId) {

View File

@ -1,5 +1,11 @@
import { IncludeRelationship, Operation, Row } from "@budibase/types" import {
import { HTTPError } from "@budibase/backend-core" IncludeRelationship,
Operation,
Row,
Table,
ViewV2,
} from "@budibase/types"
import { docIds, HTTPError } from "@budibase/backend-core"
import { handleRequest } from "../../../api/controllers/row/external" import { handleRequest } from "../../../api/controllers/row/external"
import { breakRowIdField } from "../../../integrations/utils" import { breakRowIdField } from "../../../integrations/utils"
import sdk from "../../../sdk" import sdk from "../../../sdk"
@ -8,15 +14,24 @@ import {
outputProcessing, outputProcessing,
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import cloneDeep from "lodash/fp/cloneDeep" import cloneDeep from "lodash/fp/cloneDeep"
import isEqual from "lodash/fp/isEqual"
import { tryExtractingTableAndViewId } from "./utils" import { tryExtractingTableAndViewId } from "./utils"
export async function getRow( export async function getRow(
tableId: string, sourceId: string | Table | ViewV2,
rowId: string, rowId: string,
opts?: { relationships?: boolean } opts?: { relationships?: boolean }
) { ) {
const response = await handleRequest(Operation.READ, tableId, { let source: Table | ViewV2
if (typeof sourceId === "string") {
if (docIds.isViewId(sourceId)) {
source = await sdk.views.get(sourceId)
} else {
source = await sdk.tables.getTable(sourceId)
}
} else {
source = sourceId
}
const response = await handleRequest(Operation.READ, source, {
id: breakRowIdField(rowId), id: breakRowIdField(rowId),
includeSqlRelationships: opts?.relationships includeSqlRelationships: opts?.relationships
? IncludeRelationship.INCLUDE ? IncludeRelationship.INCLUDE
@ -27,45 +42,42 @@ export async function getRow(
} }
export async function save( export async function save(
tableOrViewId: string, sourceId: string,
inputs: Row, inputs: Row,
userId: string | undefined userId: string | undefined
) { ) {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(sourceId)
const table = await sdk.tables.getTable(tableId) let source: Table | ViewV2
const { table: updatedTable, row } = await inputProcessing( if (viewId) {
userId, source = await sdk.views.get(viewId)
cloneDeep(table), } else {
inputs source = await sdk.tables.getTable(tableId)
) }
const row = await inputProcessing(userId, cloneDeep(source), inputs)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
tableId, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
throw { validation: validateResult.errors } throw { validation: validateResult.errors }
} }
const response = await handleRequest(Operation.CREATE, tableId, { const response = await handleRequest(Operation.CREATE, source, {
row, row,
}) })
if (!isEqual(table, updatedTable)) {
await sdk.tables.saveTable(updatedTable)
}
const rowId = response.row._id const rowId = response.row._id
if (rowId) { if (rowId) {
const row = await getRow(tableId, rowId, { const row = await getRow(source, rowId, {
relationships: true, relationships: true,
}) })
return { return {
...response, ...response,
row: await outputProcessing(table, row, { row: await outputProcessing(source, row, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: viewId,
}), }),
} }
} else { } else {
@ -76,7 +88,14 @@ export async function save(
export async function find(tableOrViewId: string, rowId: string): Promise<Row> { export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
const row = await getRow(tableId, rowId, { let source: Table | ViewV2
if (viewId) {
source = await sdk.views.get(viewId)
} else {
source = await sdk.tables.getTable(tableId)
}
const row = await getRow(source, rowId, {
relationships: true, relationships: true,
}) })
@ -84,11 +103,10 @@ export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
throw new HTTPError("Row not found", 404) throw new HTTPError("Row not found", 404)
} }
const table = await sdk.tables.getTable(tableId) // Preserving links, as the outputProcessing does not support external rows
// Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case // yet and we don't need it in this use case
return await outputProcessing(table, row, { return await outputProcessing(source, row, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
fromViewId: viewId,
}) })
} }
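A hedged usage sketch of the reworked external row helpers, with viewId and rowId as placeholders; getRow accepts either an ID string or an already-resolved Table/ViewV2:

// Resolve the source once and reuse it, as save() and find() now do.
const view = await sdk.views.get(viewId)
const row = await getRow(view, rowId, { relationships: true })

// Passing an ID string still works; getRow resolves it to a table or view itself.
const sameRow = await getRow(viewId, rowId, { relationships: true })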

View File

@ -1,7 +1,6 @@
import { context, db } from "@budibase/backend-core" import { context, db } from "@budibase/backend-core"
import { Row } from "@budibase/types" import { Row, Table, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import cloneDeep from "lodash/fp/cloneDeep"
import { finaliseRow } from "../../../api/controllers/row/staticFormula" import { finaliseRow } from "../../../api/controllers/row/staticFormula"
import { import {
inputProcessing, inputProcessing,
@ -10,7 +9,7 @@ import {
import * as linkRows from "../../../db/linkedRows" import * as linkRows from "../../../db/linkedRows"
import { InternalTables } from "../../../db/utils" import { InternalTables } from "../../../db/utils"
import { getFullUser } from "../../../utilities/users" import { getFullUser } from "../../../utilities/users"
import { tryExtractingTableAndViewId } from "./utils" import { getSource, tryExtractingTableAndViewId } from "./utils"
export async function save( export async function save(
tableOrViewId: string, tableOrViewId: string,
@ -20,21 +19,25 @@ export async function save(
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
inputs.tableId = tableId inputs.tableId = tableId
let source: Table | ViewV2
let table: Table
if (viewId) {
source = await sdk.views.get(viewId)
table = await sdk.views.getTable(viewId)
} else {
source = await sdk.tables.getTable(tableId)
table = source
}
if (!inputs._rev && !inputs._id) { if (!inputs._rev && !inputs._id) {
inputs._id = db.generateRowID(inputs.tableId) inputs._id = db.generateRowID(inputs.tableId)
} }
// this returns the table and row incase they have been updated let row = await inputProcessing(userId, source, inputs)
const dbTable = await sdk.tables.getTable(inputs.tableId)
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
let { table, row } = await inputProcessing(userId, tableClone, inputs)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
table, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
@ -49,24 +52,18 @@ export async function save(
table, table,
})) as Row })) as Row
return finaliseRow(table, row, { return finaliseRow(source, row, { updateFormula: true })
oldTable: dbTable, }
updateFormula: true,
fromViewId: viewId, export async function find(sourceId: string, rowId: string): Promise<Row> {
const source = await getSource(sourceId)
return await outputProcessing(source, await findRow(sourceId, rowId), {
squash: true,
}) })
} }
export async function find(tableOrViewId: string, rowId: string): Promise<Row> { export async function findRow(sourceId: string, rowId: string) {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId } = tryExtractingTableAndViewId(sourceId)
const table = await sdk.tables.getTable(tableId)
let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row, { squash: true, fromViewId: viewId })
return row
}
async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB() const db = context.getAppDB()
let row: Row let row: Row
// TODO remove special user case in future // TODO remove special user case in future

View File

@ -53,8 +53,8 @@ export const removeInvalidFilters = (
} }
export const getQueryableFields = async ( export const getQueryableFields = async (
fields: string[], table: Table,
table: Table fields?: string[]
): Promise<string[]> => { ): Promise<string[]> => {
const extractTableFields = async ( const extractTableFields = async (
table: Table, table: Table,
@ -110,6 +110,9 @@ export const getQueryableFields = async (
"_id", // Querying by _id is always allowed, even if it's never part of the schema "_id", // Querying by _id is always allowed, even if it's never part of the schema
] ]
if (fields == null) {
fields = Object.keys(table.schema)
}
result.push(...(await extractTableFields(table, fields, [table._id!]))) result.push(...(await extractTableFields(table, fields, [table._id!])))
return result return result
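A short sketch of the new getQueryableFields call shape, assuming table is an in-context Table and the column names are illustrative:

// The fields argument is now optional; omitting it queries every column in the schema.
const allQueryable = await getQueryableFields(table)

// Restricting to an explicit subset still works, with the table passed first.
const someQueryable = await getQueryableFields(table, ["name", "age"])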

View File

@ -1,10 +1,6 @@
import { db as dbCore, context } from "@budibase/backend-core" import { db as dbCore, context, docIds } from "@budibase/backend-core"
import { Database, Row } from "@budibase/types" import { Database, Row } from "@budibase/types"
import { import { extractViewInfoFromID, getRowParams } from "../../../db/utils"
extractViewInfoFromID,
getRowParams,
isViewID,
} from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
@ -26,7 +22,7 @@ export async function getAllInternalRows(appId?: string) {
function pickApi(tableOrViewId: string) { function pickApi(tableOrViewId: string) {
let tableId = tableOrViewId let tableId = tableOrViewId
if (isViewID(tableOrViewId)) { if (docIds.isViewId(tableOrViewId)) {
tableId = extractViewInfoFromID(tableOrViewId).tableId tableId = extractViewInfoFromID(tableOrViewId).tableId
} }
@ -37,13 +33,13 @@ function pickApi(tableOrViewId: string) {
} }
export async function save( export async function save(
tableOrViewId: string, sourceId: string,
row: Row, row: Row,
userId: string | undefined userId: string | undefined
) { ) {
return pickApi(tableOrViewId).save(tableOrViewId, row, userId) return pickApi(sourceId).save(sourceId, row, userId)
} }
export async function find(tableOrViewId: string, rowId: string) { export async function find(sourceId: string, rowId: string) {
return pickApi(tableOrViewId).find(tableOrViewId, rowId) return pickApi(sourceId).find(sourceId, rowId)
} }

View File

@ -4,6 +4,8 @@ import {
RowSearchParams, RowSearchParams,
SearchResponse, SearchResponse,
SortOrder, SortOrder,
Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal" import * as internal from "./search/internal"
@ -37,6 +39,7 @@ export async function search(
return await tracer.trace("search", async span => { return await tracer.trace("search", async span => {
span?.addTags({ span?.addTags({
tableId: options.tableId, tableId: options.tableId,
viewId: options.viewId,
query: options.query, query: options.query,
sort: options.sort, sort: options.sort,
sortOrder: options.sortOrder, sortOrder: options.sortOrder,
@ -48,20 +51,18 @@ export async function search(
countRows: options.countRows, countRows: options.countRows,
}) })
const isExternalTable = isExternalTableID(options.tableId)
options.query = dataFilters.cleanupQuery(options.query || {}) options.query = dataFilters.cleanupQuery(options.query || {})
options.query = dataFilters.fixupFilterArrays(options.query) options.query = dataFilters.fixupFilterArrays(options.query)
span?.addTags({ span.addTags({
cleanedQuery: options.query, cleanedQuery: options.query,
isExternalTable,
}) })
if ( if (
!dataFilters.hasFilters(options.query) && !dataFilters.hasFilters(options.query) &&
options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
) { ) {
span?.addTags({ emptyQuery: true }) span.addTags({ emptyQuery: true })
return { return {
rows: [], rows: [],
} }
@ -71,34 +72,43 @@ export async function search(
options.sortOrder = options.sortOrder.toLowerCase() as SortOrder options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
} }
const table = await sdk.tables.getTable(options.tableId) let source: Table | ViewV2
options = searchInputMapping(table, options) let table: Table
if (options.viewId) {
if (options.query) { source = await sdk.views.get(options.viewId)
const tableFields = Object.keys(table.schema).filter( table = await sdk.views.getTable(source)
f => table.schema[f].visible !== false options = searchInputMapping(table, options)
) } else if (options.tableId) {
source = await sdk.tables.getTable(options.tableId)
const queriableFields = await getQueryableFields( table = source
options.fields?.filter(f => tableFields.includes(f)) ?? tableFields, options = searchInputMapping(table, options)
table } else {
) throw new Error(`Must supply either a view ID or a table ID`)
options.query = removeInvalidFilters(options.query, queriableFields)
} }
if (options.query) {
const visibleFields = (
options.fields || Object.keys(table.schema)
).filter(field => table.schema[field].visible !== false)
const queryableFields = await getQueryableFields(table, visibleFields)
options.query = removeInvalidFilters(options.query, queryableFields)
}
const isExternalTable = isExternalTableID(table._id!)
let result: SearchResponse<Row> let result: SearchResponse<Row>
if (isExternalTable) { if (isExternalTable) {
span?.addTags({ searchType: "external" }) span?.addTags({ searchType: "external" })
result = await external.search(options, table) result = await external.search(options, source)
} else if (await features.flags.isEnabled("SQS")) { } else if (await features.flags.isEnabled("SQS")) {
span?.addTags({ searchType: "sqs" }) span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, table) result = await internal.sqs.search(options, source)
} else { } else {
span?.addTags({ searchType: "lucene" }) span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, table) result = await internal.lucene.search(options, source)
} }
span?.addTags({ span.addTags({
foundRows: result.rows.length, foundRows: result.rows.length,
totalRows: result.totalRows, totalRows: result.totalRows,
}) })
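A sketch of calling the search entry point against a view, assuming it is exposed as sdk.rows.search and that RowSearchParams accepts viewId without tableId (the function throws if neither is supplied); the query values are placeholders:

const { rows } = await sdk.rows.search({
  viewId: view.id,                        // view resolved via sdk.views.get elsewhere
  query: { equal: { status: "open" } },   // "status"/"open" are illustrative
  limit: 50,
})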

View File

@ -9,6 +9,7 @@ import {
SortJson, SortJson,
SortOrder, SortOrder,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters" import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external" import { handleRequest } from "../../../../api/controllers/row/external"
@ -60,9 +61,8 @@ function getPaginationAndLimitParameters(
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table source: Table | ViewV2
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { tableId } = options
const { countRows, paginate, query, ...params } = options const { countRows, paginate, query, ...params } = options
const { limit } = params const { limit } = params
let bookmark = let bookmark =
@ -106,16 +106,15 @@ export async function search(
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
} }
const [{ rows, rawResponseSize }, totalRows] = await Promise.all([ const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
handleRequest(Operation.READ, tableId, parameters), handleRequest(Operation.READ, source, parameters),
countRows countRows
? handleRequest(Operation.COUNT, tableId, parameters) ? handleRequest(Operation.COUNT, source, parameters)
: Promise.resolve(undefined), : Promise.resolve(undefined),
]) ])
let processed = await outputProcessing(table, rows, { let processed = await outputProcessing(source, rows, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: options.viewId,
}) })
let hasNextPage = false let hasNextPage = false
@ -128,10 +127,13 @@ export async function search(
} }
} }
if (options.fields) { const visibleFields =
const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] options.fields ||
processed = processed.map((r: any) => pick(r, fields)) Object.keys(source.schema || {}).filter(
} key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_EXTERNAL_COLUMNS]
processed = processed.map((r: any) => pick(r, allowedFields))
// need wrapper object for bookmarks etc when paginating // need wrapper object for bookmarks etc when paginating
const response: SearchResponse<Row> = { rows: processed, hasNextPage } const response: SearchResponse<Row> = { rows: processed, hasNextPage }
@ -201,7 +203,7 @@ export async function exportRows(
} }
let result = await search( let result = await search(
{ tableId, query: requestQuery, sort, sortOrder }, { tableId: table._id!, query: requestQuery, sort, sortOrder },
table table
) )
let rows: Row[] = [] let rows: Row[] = []
@ -257,10 +259,10 @@ export async function exportRows(
} }
export async function fetch(tableId: string): Promise<Row[]> { export async function fetch(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, { const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
const table = await sdk.tables.getTable(tableId)
return await outputProcessing(table, response.rows, { return await outputProcessing(table, response.rows, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
@ -268,7 +270,8 @@ export async function fetch(tableId: string): Promise<Row[]> {
} }
export async function fetchRaw(tableId: string): Promise<Row[]> { export async function fetchRaw(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, { const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
return response.rows return response.rows

View File

@ -8,21 +8,29 @@ import {
SortType, SortType,
Table, Table,
User, User,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global" import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick" import pick from "lodash/pick"
import sdk from "../../../../"
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table source: Table | ViewV2
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { tableId } = options let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { paginate, query } = options const { paginate, query } = options
const params: RowSearchParams = { const params: RowSearchParams = {
tableId: options.tableId, tableId: options.tableId,
viewId: options.viewId,
sort: options.sort, sort: options.sort,
sortOrder: options.sortOrder, sortOrder: options.sortOrder,
sortType: options.sortType, sortType: options.sortType,
@ -50,18 +58,20 @@ export async function search(
// Enrich search results with relationships // Enrich search results with relationships
if (response.rows && response.rows.length) { if (response.rows && response.rows.length) {
// enrich with global users if from users table // enrich with global users if from users table
if (tableId === InternalTables.USER_METADATA) { if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[]) response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
} }
if (options.fields) { const visibleFields =
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS] options.fields ||
response.rows = response.rows.map((r: any) => pick(r, fields)) Object.keys(source.schema || {}).filter(
} key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))
response.rows = await outputProcessing(table, response.rows, { response.rows = await outputProcessing(source, response.rows, {
squash: true, squash: true,
fromViewId: options.viewId,
}) })
} }

View File

@ -1,4 +1,5 @@
import { import {
Aggregation,
Datasource, Datasource,
DocumentType, DocumentType,
FieldType, FieldType,
@ -15,6 +16,7 @@ import {
SortType, SortType,
SqlClient, SqlClient,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
buildInternalRelationships, buildInternalRelationships,
@ -44,10 +46,12 @@ import {
import { import {
dataFilters, dataFilters,
helpers, helpers,
isInternalColumnName,
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { isSearchingByRowID } from "../utils" import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace" import tracer from "dd-trace"
import { cloneDeep } from "lodash"
const builder = new sql.Sql(SqlClient.SQL_LITE) const builder = new sql.Sql(SqlClient.SQL_LITE)
const SQLITE_COLUMN_LIMIT = 2000 const SQLITE_COLUMN_LIMIT = 2000
@ -55,11 +59,34 @@ const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
const MISSING_TABLE_REGX = new RegExp(`no such table: .+`) const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`) const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
function buildInternalFieldList( async function buildInternalFieldList(
table: Table, source: Table | ViewV2,
tables: Table[], tables: Table[],
opts?: { relationships?: RelationshipsJson[] } opts?: { relationships?: RelationshipsJson[]; allowedFields?: string[] }
) { ) {
const { relationships, allowedFields } = opts || {}
let schemaFields: string[] = []
if (sdk.views.isView(source)) {
schemaFields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
schemaFields = Object.keys(source.schema).filter(
key => source.schema[key].visible !== false
)
}
if (allowedFields) {
schemaFields = schemaFields.filter(field => allowedFields.includes(field))
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
let fieldList: string[] = [] let fieldList: string[] = []
const getJunctionFields = (relatedTable: Table, fields: string[]) => { const getJunctionFields = (relatedTable: Table, fields: string[]) => {
const junctionFields: string[] = [] const junctionFields: string[] = []
@ -70,13 +97,18 @@ function buildInternalFieldList(
}) })
return junctionFields return junctionFields
} }
fieldList = fieldList.concat( if (sdk.tables.isTable(source)) {
PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`) for (const key of PROTECTED_INTERNAL_COLUMNS) {
) if (allowedFields && !allowedFields.includes(key)) {
for (let key of Object.keys(table.schema)) { continue
}
fieldList.push(`${table._id}.${key}`)
}
}
for (let key of schemaFields) {
const col = table.schema[key] const col = table.schema[key]
const isRelationship = col.type === FieldType.LINK const isRelationship = col.type === FieldType.LINK
if (!opts?.relationships && isRelationship) { if (!relationships && isRelationship) {
continue continue
} }
if (!isRelationship) { if (!isRelationship) {
@ -87,7 +119,9 @@ function buildInternalFieldList(
if (!relatedTable) { if (!relatedTable) {
continue continue
} }
const relatedFields = buildInternalFieldList(relatedTable, tables).concat( const relatedFields = (
await buildInternalFieldList(relatedTable, tables)
).concat(
getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
) )
// break out of the loop if we have reached the max number of columns // break out of the loop if we have reached the max number of columns
@ -128,15 +162,22 @@ function cleanupFilters(
// generate a map of all possible column names (these can be duplicated across tables // generate a map of all possible column names (these can be duplicated across tables
// the map of them will always be the same // the map of them will always be the same
const userColumnMap: Record<string, string> = {} const userColumnMap: Record<string, string> = {}
allTables.forEach(table => for (const table of allTables) {
Object.keys(table.schema).forEach( for (const key of Object.keys(table.schema)) {
key => (userColumnMap[key] = mapToUserColumn(key)) if (isInternalColumnName(key)) {
) continue
) }
userColumnMap[key] = mapToUserColumn(key)
}
}
// update the keys of filters to manage user columns // update the keys of filters to manage user columns
const keyInAnyTable = (key: string): boolean => const keyInAnyTable = (key: string): boolean => {
allTables.some(table => table.schema[key]) if (isInternalColumnName(key)) {
return false
}
return allTables.some(table => table.schema[key])
}
const splitter = new dataFilters.ColumnSplitter(allTables) const splitter = new dataFilters.ColumnSplitter(allTables)
@ -291,16 +332,23 @@ function resyncDefinitionsRequired(status: number, message: string) {
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table, source: Table | ViewV2,
opts?: { retrying?: boolean } opts?: { retrying?: boolean }
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
let { paginate, query, ...params } = options let { paginate, query, ...params } = cloneDeep(options)
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const allTables = await sdk.tables.getAllInternalTables() const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables) const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table // make sure we have the mapped/latest table
if (table?._id) { if (table._id) {
table = allTablesMap[table?._id] table = allTablesMap[table._id]
} }
if (!table) { if (!table) {
throw new Error("Unable to find table") throw new Error("Unable to find table")
@ -312,6 +360,23 @@ export async function search(
...cleanupFilters(query, table, allTables), ...cleanupFilters(query, table, allTables),
documentType: DocumentType.ROW, documentType: DocumentType.ROW,
} }
let aggregations: Aggregation[] = []
if (sdk.views.isView(source)) {
const calculationFields = helpers.views.calculationFields(source)
for (const [key, field] of Object.entries(calculationFields)) {
if (options.fields && !options.fields.includes(key)) {
continue
}
aggregations.push({
name: key,
field: mapToUserColumn(field.field),
calculationType: field.calculationType,
})
}
}
const request: QueryJson = { const request: QueryJson = {
endpoint: { endpoint: {
// not important, we query ourselves // not important, we query ourselves
@ -327,7 +392,11 @@ export async function search(
columnPrefix: USER_COLUMN_PREFIX, columnPrefix: USER_COLUMN_PREFIX,
}, },
resource: { resource: {
fields: buildInternalFieldList(table, allTables, { relationships }), fields: await buildInternalFieldList(source, allTables, {
relationships,
allowedFields: options.fields,
}),
aggregations,
}, },
relationships, relationships,
} }
@ -372,7 +441,7 @@ export async function search(
// make sure JSON columns corrected // make sure JSON columns corrected
const processed = builder.convertJsonStringColumns<Row>( const processed = builder.convertJsonStringColumns<Row>(
table, table,
await sqlOutputProcessing(rows, table!, allTablesMap, relationships, { await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
sqs: true, sqs: true,
}) })
) )
@ -388,17 +457,18 @@ export async function search(
} }
// get the rows // get the rows
let finalRows = await outputProcessing(table, processed, { let finalRows = await outputProcessing(source, processed, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: options.viewId,
}) })
// check if we need to pick specific rows out const visibleFields =
if (options.fields) { options.fields ||
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS] Object.keys(source.schema || {}).filter(
finalRows = finalRows.map((r: any) => pick(r, fields)) key => source.schema?.[key].visible !== false
} )
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
finalRows = finalRows.map((r: any) => pick(r, allowedFields))
const response: SearchResponse<Row> = { const response: SearchResponse<Row> = {
rows: finalRows, rows: finalRows,
@ -419,7 +489,7 @@ export async function search(
const msg = typeof err === "string" ? err : err.message const msg = typeof err === "string" ? err : err.message
if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) { if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) {
await sdk.tables.sqs.syncDefinition() await sdk.tables.sqs.syncDefinition()
return search(options, table, { retrying: true }) return search(options, source, { retrying: true })
} }
// previously the internal table didn't error when a column didn't exist in search // previously the internal table didn't error when a column didn't exist in search
if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) { if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) {
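The core of this change is that calculation-view columns become SQL aggregations; a condensed sketch of that mapping, mirroring the loop added above, with viewId as a placeholder:

const view = await sdk.views.get(viewId)
const aggregations: Aggregation[] = []
for (const [name, field] of Object.entries(helpers.views.calculationFields(view))) {
  aggregations.push({
    name,                                   // e.g. a "total" column on the view
    field: mapToUserColumn(field.field),    // the underlying numeric table column
    calculationType: field.calculationType, // sum, count, etc.
  })
}
// aggregations is then attached to the QueryJson resource alongside the field list.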

View File

@ -90,10 +90,8 @@ describe.each([tableWithUserCol, tableWithUsersCol])(
}) })
it("shouldn't error if no query supplied", () => { it("shouldn't error if no query supplied", () => {
const params: any = { // @ts-expect-error - intentionally passing in a bad type
tableId, const output = searchInputMapping(col, { tableId })
}
const output = searchInputMapping(col, params)
expect(output.query).toBeUndefined() expect(output.query).toBeUndefined()
}) })
} }

View File

@ -83,10 +83,7 @@ function userColumnMapping(column: string, options: RowSearchParams) {
// maps through the search parameters to check if any of the inputs are invalid // maps through the search parameters to check if any of the inputs are invalid
// based on the table schema, converts them to something that is valid. // based on the table schema, converts them to something that is valid.
export function searchInputMapping(table: Table, options: RowSearchParams) { export function searchInputMapping(table: Table, options: RowSearchParams) {
if (!table?.schema) { for (let [key, column] of Object.entries(table.schema || {})) {
return options
}
for (let [key, column] of Object.entries(table.schema)) {
switch (column.type) { switch (column.type) {
case FieldType.BB_REFERENCE_SINGLE: { case FieldType.BB_REFERENCE_SINGLE: {
const subtype = column.subtype const subtype = column.subtype

View File

@ -203,7 +203,7 @@ describe("query utils", () => {
}, },
}) })
const result = await getQueryableFields(Object.keys(table.schema), table) const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name", "age"]) expect(result).toEqual(["_id", "name", "age"])
}) })
@ -216,7 +216,7 @@ describe("query utils", () => {
}, },
}) })
const result = await getQueryableFields(Object.keys(table.schema), table) const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name"]) expect(result).toEqual(["_id", "name"])
}) })
@ -245,7 +245,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -282,7 +282,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"]) expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
}) })
@ -313,7 +313,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual(["_id", "name"]) expect(result).toEqual(["_id", "name"])
}) })
@ -381,7 +381,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => { it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -398,7 +398,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 1 table", async () => { it("includes nested relationship fields from aux 1 table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux1.schema), aux1) return getQueryableFields(aux1)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -420,7 +420,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 2 table", async () => { it("includes nested relationship fields from aux 2 table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux2.schema), aux2) return getQueryableFields(aux2)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -474,7 +474,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => { it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -488,7 +488,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux table", async () => { it("includes nested relationship fields from aux table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux.schema), aux) return getQueryableFields(aux)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",

View File

@ -33,7 +33,7 @@ describe("validate", () => {
it("should accept empty values", async () => { it("should accept empty values", async () => {
const row = {} const row = {}
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
expect(output.errors).toEqual({}) expect(output.errors).toEqual({})
}) })
@ -43,7 +43,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}`, time: `${hour()}:${minute()}`,
} }
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -52,7 +52,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}:${second()}`, time: `${hour()}:${minute()}:${second()}`,
} }
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -67,7 +67,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ['"time" is not a valid time'] }) expect(output.errors).toEqual({ time: ['"time" is not a valid time'] })
}) })
@ -91,7 +91,7 @@ describe("validate", () => {
`${generator.integer({ min: 11, max: 23 })}:${minute()}`, `${generator.integer({ min: 11, max: 23 })}:${minute()}`,
])("should accept values after config value (%s)", async time => { ])("should accept values after config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -100,7 +100,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before config value (%s)", async time => { ])("should reject values before config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -125,7 +125,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 12 })}:${minute()}`, `${generator.integer({ min: 0, max: 12 })}:${minute()}`,
])("should accept values before config value (%s)", async time => { ])("should accept values before config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -134,7 +134,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after config value (%s)", async time => { ])("should reject values after config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:16:17"], time: ["must be no later than 15:16:17"],
@ -156,7 +156,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -166,7 +166,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -178,7 +178,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],
@ -199,7 +199,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -208,7 +208,7 @@ describe("validate", () => {
"should reject values out range (%s)", "should reject values out range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 10:00"], time: ["must be no later than 10:00"],
@ -226,7 +226,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] }) expect(output.errors).toEqual({ time: ["can't be blank"] })
}) })
@ -237,7 +237,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] }) expect(output.errors).toEqual({ time: ["can't be blank"] })
}) })
@ -257,7 +257,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -267,7 +267,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -279,7 +279,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],
@ -301,7 +301,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -311,7 +311,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -323,7 +323,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],

View File

@ -13,16 +13,15 @@ import {
TableSchema, TableSchema,
SqlClient, SqlClient,
ArrayOperator, ArrayOperator,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters" import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.." import sdk from "../.."
import { import { extractViewInfoFromID, isRelationshipColumn } from "../../../db/utils"
extractViewInfoFromID,
isRelationshipColumn,
isViewID,
} from "../../../db/utils"
import { isSQL } from "../../../integrations/utils" import { isSQL } from "../../../integrations/utils"
import { docIds } from "@budibase/backend-core"
import { getTableFromSource } from "../../../api/controllers/row/utils"
const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = { const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = {
[SourceName.POSTGRES]: SqlClient.POSTGRES, [SourceName.POSTGRES]: SqlClient.POSTGRES,
@ -142,37 +141,27 @@ function isForeignKey(key: string, table: Table) {
} }
export async function validate({ export async function validate({
tableId, source,
row, row,
table,
}: { }: {
tableId?: string source: Table | ViewV2
row: Row row: Row
table?: Table
}): Promise<{ }): Promise<{
valid: boolean valid: boolean
errors: Record<string, any> errors: Record<string, any>
}> { }> {
let fetchedTable: Table | undefined const table = await getTableFromSource(source)
if (!table && tableId) {
fetchedTable = await sdk.tables.getTable(tableId)
} else if (table) {
fetchedTable = table
}
if (fetchedTable === undefined) {
throw new Error("Unable to fetch table for validation")
}
const errors: Record<string, any> = {} const errors: Record<string, any> = {}
const disallowArrayTypes = [ const disallowArrayTypes = [
FieldType.ATTACHMENT_SINGLE, FieldType.ATTACHMENT_SINGLE,
FieldType.BB_REFERENCE_SINGLE, FieldType.BB_REFERENCE_SINGLE,
] ]
for (let fieldName of Object.keys(fetchedTable.schema)) { for (let fieldName of Object.keys(table.schema)) {
const column = fetchedTable.schema[fieldName] const column = table.schema[fieldName]
const constraints = cloneDeep(column.constraints) const constraints = cloneDeep(column.constraints)
const type = column.type const type = column.type
// foreign keys are likely to be enriched // foreign keys are likely to be enriched
if (isForeignKey(fieldName, fetchedTable)) { if (isForeignKey(fieldName, table)) {
continue continue
} }
// formulas shouldn't validated, data will be deleted anyway // formulas shouldn't validated, data will be deleted anyway
@ -323,7 +312,7 @@ export function isArrayFilter(operator: any): operator is ArrayOperator {
} }
export function tryExtractingTableAndViewId(tableOrViewId: string) { export function tryExtractingTableAndViewId(tableOrViewId: string) {
if (isViewID(tableOrViewId)) { if (docIds.isViewId(tableOrViewId)) {
return { return {
tableId: extractViewInfoFromID(tableOrViewId).tableId, tableId: extractViewInfoFromID(tableOrViewId).tableId,
viewId: tableOrViewId, viewId: tableOrViewId,
@ -332,3 +321,10 @@ export function tryExtractingTableAndViewId(tableOrViewId: string) {
return { tableId: tableOrViewId } return { tableId: tableOrViewId }
} }
export function getSource(tableOrViewId: string) {
if (docIds.isViewId(tableOrViewId)) {
return sdk.views.get(tableOrViewId)
}
return sdk.tables.getTable(tableOrViewId)
}
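A minimal sketch of the new validation flow, assuming tableOrViewId and row are already in scope:

// getSource resolves either kind of ID; validate now works off the source directly.
const source = await getSource(tableOrViewId)
const { valid, errors } = await validate({ source, row })
if (!valid) {
  throw { validation: errors }
}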

View File

@ -1,5 +1,6 @@
import { Table, TableSourceType } from "@budibase/types" import { Table, TableSourceType } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { docIds } from "@budibase/backend-core"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean { export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) { if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
@ -9,3 +10,7 @@ export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
} }
return false return false
} }
export function isTable(table: any): table is Table {
return table._id && docIds.isTableId(table._id)
}

View File

@ -9,7 +9,7 @@ import {
ViewV2ColumnEnriched, ViewV2ColumnEnriched,
ViewV2Enriched, ViewV2Enriched,
} from "@budibase/types" } from "@budibase/types"
import { HTTPError } from "@budibase/backend-core" import { context, docIds, HTTPError } from "@budibase/backend-core"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
@ -40,16 +40,85 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
return pickApi(tableId).getEnriched(viewId) return pickApi(tableId).getEnriched(viewId)
} }
export async function getTable(view: string | ViewV2): Promise<Table> {
const viewId = typeof view === "string" ? view : view.id
const cached = context.getTableForView(viewId)
if (cached) {
return cached
}
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
context.setTableForView(viewId, table)
return table
}
export function isView(view: any): view is ViewV2 {
return view.id && docIds.isViewId(view.id) && view.version === 2
}
async function guardCalculationViewSchema(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const calculationFields = helpers.views.calculationFields(view)
for (const calculationFieldName of Object.keys(calculationFields)) {
const schema = calculationFields[calculationFieldName]
const targetSchema = table.schema[schema.field]
if (!targetSchema) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which does not exist in the table schema`,
400
)
}
if (!helpers.schema.isNumeric(targetSchema)) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which is not a numeric field`,
400
)
}
}
const groupByFields = helpers.views.basicFields(view)
for (const groupByFieldName of Object.keys(groupByFields)) {
const targetSchema = table.schema[groupByFieldName]
if (!targetSchema) {
throw new HTTPError(
`Group by field "${groupByFieldName}" does not exist in the table schema`,
400
)
}
}
}
async function guardViewSchema( async function guardViewSchema(
tableId: string, tableId: string,
view: Omit<ViewV2, "id" | "version"> view: Omit<ViewV2, "id" | "version">
) { ) {
const viewSchema = view.schema || {}
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
if (helpers.views.isCalculationView(view)) {
await guardCalculationViewSchema(table, view)
}
await checkReadonlyFields(table, view)
checkRequiredFields(table, view)
checkDisplayField(view)
}
async function checkReadonlyFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const viewSchema = view.schema || {}
for (const field of Object.keys(viewSchema)) { for (const field of Object.keys(viewSchema)) {
const tableSchemaField = table.schema[field] const viewFieldSchema = viewSchema[field]
if (!tableSchemaField) { if (helpers.views.isCalculationField(viewFieldSchema)) {
continue
}
const tableFieldSchema = table.schema[field]
if (!tableFieldSchema) {
throw new HTTPError( throw new HTTPError(
`Field "${field}" is not valid for the requested table`, `Field "${field}" is not valid for the requested table`,
400 400
@ -65,18 +134,33 @@ async function guardViewSchema(
} }
} }
} }
}
const existingView = function checkDisplayField(view: Omit<ViewV2, "id" | "version">) {
table?.views && (table.views[view.name] as ViewV2 | undefined) if (view.primaryDisplay) {
const viewSchemaField = view.schema?.[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
}
function checkRequiredFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const existingView = table.views?.[view.name] as ViewV2 | undefined
for (const field of Object.values(table.schema)) { for (const field of Object.values(table.schema)) {
if (!helpers.schema.isRequired(field.constraints)) { if (!helpers.schema.isRequired(field.constraints)) {
continue continue
} }
const viewSchemaField = viewSchema[field.name] const viewSchemaField = view.schema?.[field.name]
const existingViewSchema = const existingViewSchema = existingView?.schema?.[field.name]
existingView?.schema && existingView.schema[field.name]
if (!viewSchemaField && !existingViewSchema?.visible) { if (!viewSchemaField && !existingViewSchema?.visible) {
// Supporting existing configs with required columns that are hidden in views // Supporting existing configs with required columns that are hidden in views
continue continue
@ -89,24 +173,16 @@ async function guardViewSchema(
) )
} }
if (viewSchemaField.readonly) { if (
helpers.views.isBasicViewField(viewSchemaField) &&
viewSchemaField.readonly
) {
throw new HTTPError( throw new HTTPError(
`You can't make "${field.name}" readonly because it is a required field.`, `You can't make "${field.name}" readonly because it is a required field.`,
400 400
) )
} }
} }
if (view.primaryDisplay) {
const viewSchemaField = viewSchema[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
} }
export async function create( export async function create(

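The net effect of the guards above: a calculation view may only aggregate columns that exist on the table and are numeric, and any plain (group-by) field must also exist on the table. A sketch of a payload that would satisfy `guardCalculationViewSchema`, assuming a hypothetical `ta_sales` table with a numeric `quantity` column and a text `category` column:

```ts
import { CalculationType, ViewV2 } from "@budibase/types"

const saleSummary: Omit<ViewV2, "id" | "version"> = {
  name: "saleSummary",
  tableId: "ta_sales", // hypothetical table id
  schema: {
    // aggregates the numeric "quantity" column – accepted
    total: {
      visible: true,
      calculationType: CalculationType.SUM,
      field: "quantity",
    },
    // plain group-by field that exists on the table – accepted
    category: { visible: true },
    // a calculation over a text column, e.g.
    //   { calculationType: CalculationType.AVG, field: "category" },
    // would be rejected with a 400 because the numeric check fails for it.
  },
}
```
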
View File

@ -18,6 +18,7 @@ import {
RowAttachment, RowAttachment,
Table, Table,
User, User,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { import {
@ -33,7 +34,11 @@ import {
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { processString } from "@budibase/string-templates" import { processString } from "@budibase/string-templates"
import { isUserMetadataTable } from "../../api/controllers/row/utils" import {
getTableFromSource,
isUserMetadataTable,
} from "../../api/controllers/row/utils"
import sdk from "../../sdk"
export * from "./utils" export * from "./utils"
export * from "./attachments" export * from "./attachments"
@ -67,6 +72,7 @@ export async function processAutoColumn(
// check it's not the user table, or whether any of the processing options have been disabled // check it's not the user table, or whether any of the processing options have been disabled
const shouldUpdateUserFields = const shouldUpdateUserFields =
!isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser !isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser
let tableMutated = false
for (let [key, schema] of Object.entries(table.schema)) { for (let [key, schema] of Object.entries(table.schema)) {
if (!schema.autocolumn) { if (!schema.autocolumn) {
continue continue
@ -99,10 +105,17 @@ export async function processAutoColumn(
row[key] = schema.lastID + 1 row[key] = schema.lastID + 1
schema.lastID++ schema.lastID++
table.schema[key] = schema table.schema[key] = schema
tableMutated = true
} }
break break
} }
} }
if (tableMutated) {
const db = context.getAppDB()
const resp = await db.put(table)
table._rev = resp.rev
}
} }
async function processDefaultValues(table: Table, row: Row) { async function processDefaultValues(table: Table, row: Row) {
@ -169,11 +182,12 @@ export function coerce(row: any, type: string) {
*/ */
export async function inputProcessing( export async function inputProcessing(
userId: string | null | undefined, userId: string | null | undefined,
table: Table, source: Table | ViewV2,
row: Row, row: Row,
opts?: AutoColumnProcessingOpts opts?: AutoColumnProcessingOpts
) { ) {
const clonedRow = cloneDeep(row) const clonedRow = cloneDeep(row)
const table = await getTableFromSource(source)
const dontCleanseKeys = ["type", "_id", "_rev", "tableId"] const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
for (const [key, value] of Object.entries(clonedRow)) { for (const [key, value] of Object.entries(clonedRow)) {
@ -228,8 +242,7 @@ export async function inputProcessing(
await processAutoColumn(userId, table, clonedRow, opts) await processAutoColumn(userId, table, clonedRow, opts)
await processDefaultValues(table, clonedRow) await processDefaultValues(table, clonedRow)
return clonedRow
return { table, row: clonedRow }
} }
/** /**
@ -242,14 +255,13 @@ export async function inputProcessing(
* @returns the enriched rows. * @returns the enriched rows.
*/ */
export async function outputProcessing<T extends Row[] | Row>( export async function outputProcessing<T extends Row[] | Row>(
table: Table, source: Table | ViewV2,
rows: T, rows: T,
opts: { opts: {
squash?: boolean squash?: boolean
preserveLinks?: boolean preserveLinks?: boolean
fromRow?: Row fromRow?: Row
skipBBReferences?: boolean skipBBReferences?: boolean
fromViewId?: string
} = { } = {
squash: true, squash: true,
preserveLinks: false, preserveLinks: false,
@ -264,6 +276,14 @@ export async function outputProcessing<T extends Row[] | Row>(
} else { } else {
safeRows = rows safeRows = rows
} }
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// SQS returns the rows with full relationship contents // SQS returns the rows with full relationship contents
// attach any linked row information // attach any linked row information
let enriched = !opts.preserveLinks let enriched = !opts.preserveLinks
@ -276,25 +296,25 @@ export async function outputProcessing<T extends Row[] | Row>(
opts.squash = true opts.squash = true
} }
enriched = await coreOutputProcessing(table, enriched, opts) enriched = await coreOutputProcessing(source, enriched, opts)
if (opts.squash) { if (opts.squash) {
enriched = await linkRows.squashLinks(table, enriched, { enriched = await linkRows.squashLinks(source, enriched)
fromViewId: opts?.fromViewId,
})
} }
return (wasArray ? enriched : enriched[0]) as T return (wasArray ? enriched : enriched[0]) as T
} }
/** /**
* This function is similar to the outputProcessing function above; it makes sure that all the provided * This function is similar to the outputProcessing function above; it makes
* rows are ready for output, but does not have enrichment for squash capabilities which can cause performance issues. * sure that all the provided rows are ready for output, but does not have
* outputProcessing should be used when responding from the API, while this should be used when internally processing * enrichment for squash capabilities which can cause performance issues.
* rows for any reason (like part of view operations). * outputProcessing should be used when responding from the API, while this
* should be used when internally processing rows for any reason (like part of
* view operations).
*/ */
export async function coreOutputProcessing( export async function coreOutputProcessing(
table: Table, source: Table | ViewV2,
rows: Row[], rows: Row[],
opts: { opts: {
preserveLinks?: boolean preserveLinks?: boolean
@ -305,6 +325,13 @@ export async function coreOutputProcessing(
skipBBReferences: false, skipBBReferences: false,
} }
): Promise<Row[]> { ): Promise<Row[]> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// process complex types: attachments, bb references... // process complex types: attachments, bb references...
for (const [property, column] of Object.entries(table.schema)) { for (const [property, column] of Object.entries(table.schema)) {
if ( if (
@ -409,9 +436,18 @@ export async function coreOutputProcessing(
const tableFields = Object.keys(table.schema).filter( const tableFields = Object.keys(table.schema).filter(
f => table.schema[f].visible !== false f => table.schema[f].visible !== false
) )
const fields = [...tableFields, ...protectedColumns].map(f => const fields = [...tableFields, ...protectedColumns].map(f =>
f.toLowerCase() f.toLowerCase()
) )
if (sdk.views.isView(source)) {
const aggregations = helpers.views.calculationFields(source)
for (const key of Object.keys(aggregations)) {
fields.push(key.toLowerCase())
}
}
for (const row of rows) { for (const row of rows) {
for (const key of Object.keys(row)) { for (const key of Object.keys(row)) {
if (!fields.includes(key.toLowerCase())) { if (!fields.includes(key.toLowerCase())) {

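`inputProcessing` and `outputProcessing` now take a `Table | ViewV2` source; when a view is supplied, the backing table is resolved through `sdk.views.getTable` (cached on the request context), and for calculation views the aggregation keys are allowed through the visible-field filter in `coreOutputProcessing`. A minimal sketch of the new call shape; the import path and the surrounding app context are assumptions:

```ts
import { Row, ViewV2 } from "@budibase/types"
// Path is illustrative – wherever rowProcessor is exported from in the server.
import { outputProcessing } from "../../utilities/rowProcessor"

// Sketch: format raw search results for a (possibly calculation) view.
// outputProcessing resolves the underlying table itself, so callers no
// longer pass both a table and the removed fromViewId option.
async function formatSearchResponse(view: ViewV2, rows: Row[]): Promise<Row[]> {
  return outputProcessing(view, rows)
}
```
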
View File

@ -65,7 +65,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferenceMock.mockResolvedValue(user) processInputBBReferenceMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes( expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes(
1 1
@ -117,7 +117,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferencesMock.mockResolvedValue(user) processInputBBReferencesMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes( expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes(
1 1
@ -164,7 +164,7 @@ describe("rowProcessor - inputProcessing", () => {
name: "Jack", name: "Jack",
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled() expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({ ...newRow, user: undefined }) expect(row).toEqual({ ...newRow, user: undefined })
@ -207,7 +207,7 @@ describe("rowProcessor - inputProcessing", () => {
user: userValue, user: userValue,
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
if (userValue === undefined) { if (userValue === undefined) {
// The 'user' field is omitted // The 'user' field is omitted
@ -262,7 +262,7 @@ describe("rowProcessor - inputProcessing", () => {
user: "123", user: "123",
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled() expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({ expect(row).toEqual({

View File

@ -148,9 +148,16 @@ export function parse(rows: Rows, table: Table): Rows {
Object.keys(row).forEach(columnName => { Object.keys(row).forEach(columnName => {
const columnData = row[columnName] const columnData = row[columnName]
if (columnName === "_id") {
parsedRow[columnName] = columnData
return
}
const schema = table.schema const schema = table.schema
if (!(columnName in schema)) { if (!(columnName in schema)) {
// Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case // Objects can be present in the row data but not in the schema, so make
// sure we don't proceed in such a case
return return
} }

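The early return above means an `_id` supplied with imported row data is passed through untouched instead of being dropped because it is not a schema column. A small illustration, with `parse` assumed in scope and a made-up products table:

```ts
import { FieldType, Row, Table } from "@budibase/types"

// Minimal made-up table; only what parse() needs is filled in.
const table = {
  name: "products",
  schema: {
    name: { name: "name", type: FieldType.STRING },
    price: { name: "price", type: FieldType.NUMBER },
  },
} as Table

const rows: Row[] = [{ _id: "ro_ta_existing_row", name: "Widget", price: "4.99" }]

// With the change above, the returned row keeps its _id while the
// schema-backed columns are still parsed as before.
const parsed = parse(rows, table)
```
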
View File

@ -2,3 +2,4 @@ export * from "./helpers"
export * from "./integrations" export * from "./integrations"
export * as cron from "./cron" export * as cron from "./cron"
export * as schema from "./schema" export * as schema from "./schema"
export * as views from "./views"

View File

@ -45,3 +45,7 @@ export function decodeNonAscii(str: string): string {
String.fromCharCode(parseInt(p1, 16)) String.fromCharCode(parseInt(p1, 16))
) )
} }
export function isNumeric(field: FieldSchema) {
return field.type === FieldType.NUMBER || field.type === FieldType.BIGINT
}

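`isNumeric` is the check the calculation-view guard leans on: only NUMBER and BIGINT columns can back an aggregation. A quick sketch, with the field literals cast for brevity and `isNumeric` assumed to be in scope:

```ts
import { FieldSchema, FieldType } from "@budibase/types"

const quantity = { name: "quantity", type: FieldType.NUMBER } as FieldSchema
const label = { name: "label", type: FieldType.STRING } as FieldSchema

isNumeric(quantity) // true  – can back sum/avg/min/max calculations
isNumeric(label)    // false – guardCalculationViewSchema rejects it with a 400
```
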
View File

@ -0,0 +1,33 @@
import {
BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
ViewFieldMetadata,
ViewV2,
} from "@budibase/types"
import { pickBy } from "lodash"
export function isCalculationField(
field: ViewFieldMetadata
): field is ViewCalculationFieldMetadata {
return "calculationType" in field
}
export function isBasicViewField(
field: ViewFieldMetadata
): field is BasicViewFieldMetadata {
return !isCalculationField(field)
}
type UnsavedViewV2 = Omit<ViewV2, "id" | "version">
export function isCalculationView(view: UnsavedViewV2) {
return Object.values(view.schema || {}).some(isCalculationField)
}
export function calculationFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, isCalculationField)
}
export function basicFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, field => !isCalculationField(field))
}

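These helpers split a view schema on the presence of `calculationType`: anything carrying it is a calculation field, everything else is a plain field. A usage sketch against an illustrative calculation view, accessed through the shared-core `helpers` export:

```ts
import { helpers } from "@budibase/shared-core"
import { CalculationType, ViewV2 } from "@budibase/types"

const view = {
  name: "saleSummary",
  tableId: "ta_sales",
  schema: {
    total: { visible: true, calculationType: CalculationType.SUM, field: "quantity" },
    category: { visible: true },
  },
} as Omit<ViewV2, "id" | "version">

helpers.views.isCalculationView(view) // true – at least one calculation field
Object.keys(helpers.views.calculationFields(view)) // ["total"]
Object.keys(helpers.views.basicFields(view)) // ["category"]
```
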
View File

@ -33,15 +33,24 @@ export interface View {
groupBy?: string groupBy?: string
} }
export type ViewFieldMetadata = UIFieldMetadata & { export interface BasicViewFieldMetadata extends UIFieldMetadata {
readonly?: boolean readonly?: boolean
columns?: Record<string, RelationSchemaField> columns?: Record<string, RelationSchemaField>
} }
export type RelationSchemaField = UIFieldMetadata & { export interface RelationSchemaField extends UIFieldMetadata {
readonly?: boolean readonly?: boolean
} }
export interface ViewCalculationFieldMetadata extends BasicViewFieldMetadata {
calculationType: CalculationType
field: string
}
export type ViewFieldMetadata =
| BasicViewFieldMetadata
| ViewCalculationFieldMetadata
export enum CalculationType { export enum CalculationType {
SUM = "sum", SUM = "sum",
AVG = "avg", AVG = "avg",
@ -50,11 +59,6 @@ export enum CalculationType {
MAX = "max", MAX = "max",
} }
export type ViewCalculationFieldMetadata = ViewFieldMetadata & {
calculationType: CalculationType
field: string
}
export interface ViewV2 { export interface ViewV2 {
version: 2 version: 2
id: string id: string
@ -67,7 +71,7 @@ export interface ViewV2 {
order?: SortOrder order?: SortOrder
type?: SortType type?: SortType
} }
schema?: Record<string, ViewFieldMetadata | ViewCalculationFieldMetadata> schema?: Record<string, ViewFieldMetadata>
} }
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema

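With `ViewFieldMetadata` now a union, `calculationType` acts as the discriminator between plain and calculation columns, mirroring the `isCalculationField` helper. A short sketch:

```ts
import {
  BasicViewFieldMetadata,
  CalculationType,
  ViewCalculationFieldMetadata,
  ViewFieldMetadata,
} from "@budibase/types"

const avgPrice: ViewCalculationFieldMetadata = {
  visible: true,
  calculationType: CalculationType.AVG,
  field: "price",
}
const notes: BasicViewFieldMetadata = { visible: true, readonly: true }

function describeField(field: ViewFieldMetadata): string {
  // "calculationType" only exists on the calculation variant.
  return "calculationType" in field
    ? `${field.calculationType}(${field.field})`
    : "plain column"
}

describeField(avgPrice) // "avg(price)"
describeField(notes) // "plain column"
```
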
View File

@ -1,8 +1,14 @@
import { SortOrder, SortType } from "../api" import { SortOrder, SortType } from "../api"
import { SearchFilters } from "./search" import { SearchFilters } from "./search"
import { Row } from "../documents" import { CalculationType, Row } from "../documents"
import { WithRequired } from "../shared" import { WithRequired } from "../shared"
export interface Aggregation {
name: string
calculationType: CalculationType
field: string
}
export interface SearchParams { export interface SearchParams {
tableId?: string tableId?: string
viewId?: string viewId?: string

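`Aggregation` is the wire shape handed to the search layer: the output key (`name`), the calculation type, and the table column it runs over. One plausible way to derive these entries from a view's schema; the mapping function itself is an assumption, only the `Aggregation` interface comes from this change:

```ts
import { helpers } from "@budibase/shared-core"
import { Aggregation, ViewV2 } from "@budibase/types"

// Sketch: one Aggregation entry per calculation field on the view.
function toAggregations(view: ViewV2): Aggregation[] {
  const calculationFields = helpers.views.calculationFields(view)
  return Object.entries(calculationFields).map(([name, field]) => ({
    name,
    calculationType: field.calculationType,
    field: field.field,
  }))
}
```
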
View File

@ -2,6 +2,7 @@ import { Operation } from "./datasources"
import { Row, Table, DocumentType } from "../documents" import { Row, Table, DocumentType } from "../documents"
import { SortOrder, SortType } from "../api" import { SortOrder, SortType } from "../api"
import { Knex } from "knex" import { Knex } from "knex"
import { Aggregation } from "./row"
export enum BasicOperator { export enum BasicOperator {
EQUAL = "equal", EQUAL = "equal",
@ -154,6 +155,7 @@ export interface QueryJson {
} }
resource?: { resource?: {
fields: string[] fields: string[]
aggregations?: Aggregation[]
} }
filters?: SearchFilters filters?: SearchFilters
sort?: SortJson sort?: SortJson

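Those aggregations travel on `QueryJson.resource` next to the plain field list, so the SQL builder knows which aggregate expressions to emit alongside the group-by columns. Continuing the sketch above, a resource fragment for a calculation view might look like this (values are illustrative):

```ts
import { Aggregation, CalculationType } from "@budibase/types"

const aggregations: Aggregation[] = [
  { name: "total", calculationType: CalculationType.SUM, field: "quantity" },
]

// Illustrative QueryJson.resource fragment: plain columns stay in `fields`,
// calculated columns arrive via the new `aggregations` array.
const resource = {
  fields: ["category"],
  aggregations,
}
```
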
View File

@ -4,6 +4,29 @@ export type DeepPartial<T> = {
export type ISO8601 = string export type ISO8601 = string
/**
* RequiredKeys makes it such that you _must_ assign a value to every key in the
* type. It differs subtly from Required<T> in that it doesn't change the type
* of the fields; you can specify undefined as a value and that's fine.
*
* Example:
*
* ```ts
* interface Foo {
* bar: string
* baz?: string
* }
*
* type FooRequiredKeys = RequiredKeys<Foo>
* type FooRequired = Required<Foo>
*
* const a: FooRequiredKeys = { bar: "hello", baz: undefined }
* const b: FooRequired = { bar: "hello", baz: undefined }
* ```
*
* In this code, a passes type checking whereas b does not. This is because
* Required<Foo> makes baz non-optional.
*/
export type RequiredKeys<T> = { export type RequiredKeys<T> = {
[K in keyof Required<T>]: T[K] [K in keyof Required<T>]: T[K]
} }

View File

@ -17751,21 +17751,11 @@ periscopic@^3.1.0:
estree-walker "^3.0.0" estree-walker "^3.0.0"
is-reference "^3.0.0" is-reference "^3.0.0"
pg-cloudflare@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/pg-cloudflare/-/pg-cloudflare-1.1.1.tgz#e6d5833015b170e23ae819e8c5d7eaedb472ca98"
integrity sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==
pg-connection-string@2.5.0, pg-connection-string@^2.5.0: pg-connection-string@2.5.0, pg-connection-string@^2.5.0:
version "2.5.0" version "2.5.0"
resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==
pg-connection-string@^2.6.4:
version "2.6.4"
resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.6.4.tgz#f543862adfa49fa4e14bc8a8892d2a84d754246d"
integrity sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==
pg-int8@1.0.1: pg-int8@1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c"
@ -17776,21 +17766,11 @@ pg-pool@^3.6.0:
resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e" resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e"
integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ== integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ==
pg-pool@^3.6.2:
version "3.6.2"
resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.2.tgz#3a592370b8ae3f02a7c8130d245bc02fa2c5f3f2"
integrity sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==
pg-protocol@*, pg-protocol@^1.6.0: pg-protocol@*, pg-protocol@^1.6.0:
version "1.6.0" version "1.6.0"
resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833" resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833"
integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q== integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==
pg-protocol@^1.6.1:
version "1.6.1"
resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.1.tgz#21333e6d83b01faaebfe7a33a7ad6bfd9ed38cb3"
integrity sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==
pg-types@^2.1.0, pg-types@^2.2.0: pg-types@^2.1.0, pg-types@^2.2.0:
version "2.2.0" version "2.2.0"
resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3"
@ -17815,19 +17795,6 @@ pg@8.10.0:
pg-types "^2.1.0" pg-types "^2.1.0"
pgpass "1.x" pgpass "1.x"
pg@^8.12.0:
version "8.12.0"
resolved "https://registry.yarnpkg.com/pg/-/pg-8.12.0.tgz#9341724db571022490b657908f65aee8db91df79"
integrity sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==
dependencies:
pg-connection-string "^2.6.4"
pg-pool "^3.6.2"
pg-protocol "^1.6.1"
pg-types "^2.1.0"
pgpass "1.x"
optionalDependencies:
pg-cloudflare "^1.1.1"
pgpass@1.x: pgpass@1.x:
version "1.0.5" version "1.0.5"
resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d" resolved "https://registry.yarnpkg.com/pgpass/-/pgpass-1.0.5.tgz#9b873e4a564bb10fa7a7dbd55312728d422a223d"