Merge branch 'master' of github.com:Budibase/budibase into feature/role-multi-inheritance

mike12345567 2024-10-03 17:22:09 +01:00
commit bfbc576ed1
86 changed files with 5315 additions and 4156 deletions

View File

@ -23,6 +23,7 @@ jobs:
PAYLOAD_BRANCH: ${{ github.head_ref }} PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }} PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
PAYLOAD_LICENSE_TYPE: "free" PAYLOAD_LICENSE_TYPE: "free"
PAYLOAD_DEPLOY: true
with: with:
repository: budibase/budibase-deploys repository: budibase/budibase-deploys
event: featurebranch-qa-deploy event: featurebranch-qa-deploy

View File

@ -1,3 +1,3 @@
nodejs 20.10.0 nodejs 20.10.0
python 3.10.0 python 3.10.0
yarn 1.22.19 yarn 1.22.22

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.8", "version": "2.32.11",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

@ -1 +1 @@
Subproject commit 558a32dfd1f55bd894804a503e7e1090937df88c Subproject commit 3e24f6293ff5ee5f9b42822e001504e3bbf19cc0

View File

@ -10,7 +10,7 @@ import {
StaticDatabases, StaticDatabases,
DEFAULT_TENANT_ID, DEFAULT_TENANT_ID,
} from "../constants" } from "../constants"
import { Database, IdentityContext, Snippet, App } from "@budibase/types" import { Database, IdentityContext, Snippet, App, Table } from "@budibase/types"
import { ContextMap } from "./types" import { ContextMap } from "./types"
let TEST_APP_ID: string | null = null let TEST_APP_ID: string | null = null
@ -394,3 +394,20 @@ export function setFeatureFlags(key: string, value: Record<string, any>) {
context.featureFlagCache ??= {} context.featureFlagCache ??= {}
context.featureFlagCache[key] = value context.featureFlagCache[key] = value
} }
export function getTableForView(viewId: string): Table | undefined {
const context = getCurrentContext()
if (!context) {
return
}
return context.viewToTableCache?.[viewId]
}
export function setTableForView(viewId: string, table: Table) {
const context = getCurrentContext()
if (!context) {
return
}
context.viewToTableCache ??= {}
context.viewToTableCache[viewId] = table
}
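The two new helpers give each request context a small view-to-table cache, so the table backing a view only has to be resolved once per request. A minimal sketch of how a caller could combine them; the resolveViewTable name, the import paths, and the sdk.views.getTable lookup are assumptions for illustration, not part of this change:

import { context } from "@budibase/backend-core"
import { Table } from "@budibase/types"
import sdk from "../sdk"

// hypothetical caller: resolve a view's backing table once per request context
async function resolveViewTable(viewId: string): Promise<Table> {
  const cached = context.getTableForView(viewId)
  if (cached) {
    return cached
  }
  const table = await sdk.views.getTable(viewId) // assumed lookup in the server sdk
  context.setTableForView(viewId, table)
  return table
}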

View File

@ -1,4 +1,4 @@
import { IdentityContext, Snippet, VM } from "@budibase/types" import { IdentityContext, Snippet, Table, VM } from "@budibase/types"
import { OAuth2Client } from "google-auth-library" import { OAuth2Client } from "google-auth-library"
import { GoogleSpreadsheet } from "google-spreadsheet" import { GoogleSpreadsheet } from "google-spreadsheet"
@ -21,4 +21,5 @@ export type ContextMap = {
featureFlagCache?: { featureFlagCache?: {
[key: string]: Record<string, any> [key: string]: Record<string, any>
} }
viewToTableCache?: Record<string, Table>
} }

View File

@ -612,7 +612,6 @@ async function runQuery<T>(
* limit {number} The number of results to fetch * limit {number} The number of results to fetch
* bookmark {string|null} Current bookmark in the recursive search * bookmark {string|null} Current bookmark in the recursive search
* rows {array|null} Current results in the recursive search * rows {array|null} Current results in the recursive search
* @returns {Promise<*[]|*>}
*/ */
async function recursiveSearch<T>( async function recursiveSearch<T>(
dbName: string, dbName: string,

View File

@ -6,7 +6,7 @@ import {
ViewName, ViewName,
} from "../constants" } from "../constants"
import { getProdAppID } from "./conversions" import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types" import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"
/** /**
* If creating DB allDocs/query params with only a single top level ID this can be used, this * If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -66,9 +66,8 @@ export function getQueryIndex(viewName: ViewName) {
/** /**
* Check if a given ID is that of a table. * Check if a given ID is that of a table.
* @returns {boolean}
*/ */
export const isTableId = (id: string) => { export const isTableId = (id: string): boolean => {
// this includes datasource plus tables // this includes datasource plus tables
return ( return (
!!id && !!id &&
@ -77,13 +76,16 @@ export const isTableId = (id: string) => {
) )
} }
export function isViewId(id: string): boolean {
return !!id && id.startsWith(`${VirtualDocumentType.VIEW}${SEPARATOR}`)
}
/** /**
* Check if a given ID is that of a datasource or datasource plus. * Check if a given ID is that of a datasource or datasource plus.
* @returns {boolean}
*/ */
export const isDatasourceId = (id: string) => { export const isDatasourceId = (id: string): boolean => {
// this covers both datasources and datasource plus // this covers both datasources and datasource plus
return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`) return !!id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
} }
/** /**

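isViewId joins the existing prefix-based ID checks, using the VirtualDocumentType.VIEW prefix in the same way isTableId and isDatasourceId use theirs. A quick illustration of the three checks together; the import path and the example IDs are made up for the sketch:

import { isTableId, isViewId, isDatasourceId } from "./docIds" // assumed module path

isTableId("ta_employees")           // true  - table IDs use the table prefix
isViewId("view_ta_employees_x")     // true  - ViewV2 IDs use the view prefix
isDatasourceId("datasource_plus_1") // true  - covers datasource and datasource plus
isViewId("ta_employees")            // false - wrong prefix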
View File

@ -1,5 +1,5 @@
export * as utils from "./utils" export * as utils from "./utils"
export { default as Sql } from "./sql" export { default as Sql, COUNT_FIELD_NAME } from "./sql"
export { default as SqlTable } from "./sqlTable" export { default as SqlTable } from "./sqlTable"
export * as designDoc from "./designDoc" export * as designDoc from "./designDoc"

View File

@ -11,10 +11,12 @@ import {
} from "./utils" } from "./utils"
import SqlTableQueryBuilder from "./sqlTable" import SqlTableQueryBuilder from "./sqlTable"
import { import {
Aggregation,
AnySearchFilter, AnySearchFilter,
ArrayOperator, ArrayOperator,
BasicOperator, BasicOperator,
BBReferenceFieldMetadata, BBReferenceFieldMetadata,
CalculationType,
FieldSchema, FieldSchema,
FieldType, FieldType,
INTERNAL_TABLE_SOURCE_ID, INTERNAL_TABLE_SOURCE_ID,
@ -41,6 +43,8 @@ import { cloneDeep } from "lodash"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
export const COUNT_FIELD_NAME = "__bb_total"
function getBaseLimit() { function getBaseLimit() {
const envLimit = environment.SQL_MAX_ROWS const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS) ? parseInt(environment.SQL_MAX_ROWS)
@ -69,18 +73,6 @@ function prioritisedArraySort(toSort: string[], priorities: string[]) {
}) })
} }
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] { function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
if (Array.isArray(query)) { if (Array.isArray(query)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery) return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
@ -97,6 +89,13 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
return query return query
} }
function isSqs(table: Table): boolean {
return (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
)
}
class InternalBuilder { class InternalBuilder {
private readonly client: SqlClient private readonly client: SqlClient
private readonly query: QueryJson private readonly query: QueryJson
@ -178,15 +177,13 @@ class InternalBuilder {
} }
private generateSelectStatement(): (string | Knex.Raw)[] | "*" { private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { meta, endpoint, resource, tableAliases } = this.query const { meta, endpoint, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) { if (!resource || !resource.fields || resource.fields.length === 0) {
return "*" return "*"
} }
const alias = tableAliases?.[endpoint.entityId] const alias = this.getTableName(endpoint.entityId)
? tableAliases?.[endpoint.entityId]
: endpoint.entityId
const schema = meta.table.schema const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) { if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)] return [this.knex.raw(`${this.quote(alias)}.*`)]
@ -811,26 +808,88 @@ class InternalBuilder {
return query return query
} }
isSqs(): boolean {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.meta.table?.name === name) {
table = this.query.meta.table
} else if (!this.query.meta.tables?.[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.meta.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
name = table._id
}
const aliases = this.query.tableAliases || {}
return aliases[name] ? aliases[name] : name
}
addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder { addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder {
const primary = this.table.primary if (!this.table.primary) {
const aliases = this.query.tableAliases
const aliased =
this.table.name && aliases?.[this.table.name]
? aliases[this.table.name]
: this.table.name
if (!primary) {
throw new Error("SQL counting requires primary key to be supplied") throw new Error("SQL counting requires primary key to be supplied")
} }
return query.countDistinct(`${aliased}.${primary[0]} as total`) return query.countDistinct(
`${this.getTableName()}.${this.table.primary[0]} as ${COUNT_FIELD_NAME}`
)
}
addAggregations(
query: Knex.QueryBuilder,
aggregations: Aggregation[]
): Knex.QueryBuilder {
const fields = this.query.resource?.fields || []
const tableName = this.getTableName()
if (fields.length > 0) {
query = query.groupBy(fields.map(field => `${tableName}.${field}`))
query = query.select(fields.map(field => `${tableName}.${field}`))
}
for (const aggregation of aggregations) {
const op = aggregation.calculationType
const field = `${tableName}.${aggregation.field} as ${aggregation.name}`
switch (op) {
case CalculationType.COUNT:
query = query.count(field)
break
case CalculationType.SUM:
query = query.sum(field)
break
case CalculationType.AVG:
query = query.avg(field)
break
case CalculationType.MIN:
query = query.min(field)
break
case CalculationType.MAX:
query = query.max(field)
break
}
}
return query
} }
addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder { addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
let { sort } = this.query let { sort, resource } = this.query
const primaryKey = this.table.primary const primaryKey = this.table.primary
const tableName = getTableName(this.table) const aliased = this.getTableName()
const aliases = this.query.tableAliases
const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : this.table?.name
if (!Array.isArray(primaryKey)) { if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table") throw new Error("Sorting requires primary key to be specified for table")
} }
@ -862,7 +921,8 @@ class InternalBuilder {
// add sorting by the primary key if the result isn't already sorted by it, // add sorting by the primary key if the result isn't already sorted by it,
// to make sure result is deterministic // to make sure result is deterministic
if (!sort || sort[primaryKey[0]] === undefined) { const hasAggregations = (resource?.aggregations?.length ?? 0) > 0
if (!hasAggregations && (!sort || sort[primaryKey[0]] === undefined)) {
query = query.orderBy(`${aliased}.${primaryKey[0]}`) query = query.orderBy(`${aliased}.${primaryKey[0]}`)
} }
return query return query
@ -1246,10 +1306,15 @@ class InternalBuilder {
} }
} }
// if counting, use distinct count, else select const aggregations = this.query.resource?.aggregations || []
query = !counting if (counting) {
? query.select(this.generateSelectStatement()) query = this.addDistinctCount(query)
: this.addDistinctCount(query) } else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
} else {
query = query.select(this.generateSelectStatement())
}
// have to add after as well (this breaks MS-SQL) // have to add after as well (this breaks MS-SQL)
if (!counting) { if (!counting) {
query = this.addSorting(query) query = this.addSorting(query)
@ -1468,23 +1533,40 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return results.length ? results : [{ [operation.toLowerCase()]: true }] return results.length ? results : [{ [operation.toLowerCase()]: true }]
} }
private getTableName(
table: Table,
aliases?: Record<string, string>
): string | undefined {
let name = table.name
if (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
if (!table._id) {
return
}
// SQS uses the table ID rather than the table name
name = table._id
}
return aliases?.[name] || name
}
convertJsonStringColumns<T extends Record<string, any>>( convertJsonStringColumns<T extends Record<string, any>>(
table: Table, table: Table,
results: T[], results: T[],
aliases?: Record<string, string> aliases?: Record<string, string>
): T[] { ): T[] {
const tableName = getTableName(table) const tableName = this.getTableName(table, aliases)
for (const [name, field] of Object.entries(table.schema)) { for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) { if (!this._isJsonColumn(field)) {
continue continue
} }
const aliasedTableName = (tableName && aliases?.[tableName]) || tableName const fullName = `${tableName}.${name}` as keyof T
const fullName = `${aliasedTableName}.${name}`
for (let row of results) { for (let row of results) {
if (typeof row[fullName as keyof T] === "string") { if (typeof row[fullName] === "string") {
row[fullName as keyof T] = JSON.parse(row[fullName]) row[fullName] = JSON.parse(row[fullName])
} }
if (typeof row[name as keyof T] === "string") { if (typeof row[name] === "string") {
row[name as keyof T] = JSON.parse(row[name]) row[name as keyof T] = JSON.parse(row[name])
} }
} }

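addAggregations groups by the view's plain fields and applies one Knex aggregate per calculation, while addDistinctCount now aliases its total as COUNT_FIELD_NAME ("__bb_total"). A rough sketch of the kind of query this produces for a SUM plus a COUNT; the table and column names are invented, and the driver is only needed to execute, not to compile:

import knex from "knex"

const client = knex({ client: "pg" })

// roughly what addAggregations builds for fields ["sales.region"] and two aggregations
const query = client("sales")
  .select(["sales.region"])
  .groupBy(["sales.region"])
  .sum("sales.amount as totalAmount") // CalculationType.SUM
  .count("sales.id as orderCount")    // CalculationType.COUNT

console.log(query.toString())
// select "sales"."region", sum("sales"."amount") as "totalAmount",
//   count("sales"."id") as "orderCount" from "sales" group by "sales"."region"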
View File

@ -17,11 +17,8 @@ import {
ContextUser, ContextUser,
CouchFindOptions, CouchFindOptions,
DatabaseQueryOpts, DatabaseQueryOpts,
SearchFilters,
SearchUsersRequest, SearchUsersRequest,
User, User,
BasicOperator,
ArrayOperator,
} from "@budibase/types" } from "@budibase/types"
import * as context from "../context" import * as context from "../context"
import { getGlobalDB } from "../context" import { getGlobalDB } from "../context"
@ -45,32 +42,6 @@ function removeUserPassword(users: User | User[]) {
return users return users
} }
export function isSupportedUserSearch(query: SearchFilters) {
const allowed = [
{ op: BasicOperator.STRING, key: "email" },
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
export async function bulkGetGlobalUsersById( export async function bulkGetGlobalUsersById(
userIds: string[], userIds: string[],
opts?: GetOpts opts?: GetOpts

View File

@ -0,0 +1,29 @@
<script>
export let width
export let height
</script>
<svg
{width}
{height}
viewBox="0 0 13 12"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M9.4179 4.13222C9.4179 3.73121 9.26166 3.35428 8.97913 3.07175C8.41342 2.50538 7.4239 2.50408 6.85753 3.07175L5.64342 4.28586C5.6291 4.30018 5.61543 4.3158 5.60305 4.33143C5.58678 4.3438 5.5718 4.35747 5.55683 4.37244L0.491426 9.43785C0.208245 9.72103 0.052002 10.098 0.052002 10.4983C0.052002 10.8987 0.208245 11.2756 0.491426 11.5588C0.774607 11.842 1.15153 11.9982 1.5519 11.9982C1.95227 11.9982 2.32919 11.842 2.61238 11.5588L8.97848 5.1927C9.26166 4.90952 9.4179 4.53259 9.4179 4.13222ZM1.90539 10.8518C1.7166 11.0406 1.3872 11.0406 1.1984 10.8518C1.10401 10.7574 1.05193 10.6318 1.05193 10.4983C1.05193 10.3649 1.104 10.2392 1.1984 10.1448L5.99821 5.34503L6.70845 6.04875L1.90539 10.8518ZM8.2715 4.48571L7.41544 5.34178L6.7052 4.63805L7.56452 3.77873C7.7533 3.58995 8.08271 3.58929 8.2715 3.77939C8.36589 3.87313 8.41798 3.99877 8.41798 4.13223C8.41798 4.26569 8.3659 4.39132 8.2715 4.48571Z"
fill="#C8C8C8"
/>
<path
d="M11.8552 6.55146L11.0144 6.21913L10.879 5.32449C10.8356 5.03919 10.3737 4.98776 10.2686 5.255L9.93606 6.09642L9.04143 6.23085C8.89951 6.25216 8.78884 6.36658 8.77257 6.50947C8.75629 6.65253 8.83783 6.78826 8.97193 6.84148L9.81335 7.17464L9.94794 8.06862C9.9691 8.21053 10.0835 8.32121 10.2266 8.33748C10.3695 8.35375 10.5052 8.27221 10.5586 8.13811L10.8914 7.29751L11.7855 7.1621C11.9283 7.1403 12.0381 7.02637 12.0544 6.88348C12.0707 6.74058 11.9887 6.60403 11.8552 6.55146Z"
fill="#F9634C"
/>
<path
d="M8.94215 1.76145L9.78356 2.0946L9.91815 2.9885C9.93931 3.13049 10.0539 3.24117 10.1968 3.25744C10.3398 3.27371 10.4756 3.19218 10.5288 3.05807L10.8618 2.21739L11.7559 2.08207C11.8985 2.06034 12.0085 1.94633 12.0248 1.80344C12.0411 1.66054 11.959 1.524 11.8254 1.47143L10.9847 1.13909L10.8494 0.244456C10.806 -0.0409246 10.3439 -0.0922745 10.2388 0.174881L9.90643 1.0163L9.0118 1.15089C8.86972 1.17213 8.75905 1.28654 8.74278 1.42952C8.72651 1.57249 8.80804 1.70823 8.94215 1.76145Z"
fill="#8488FD"
/>
<path
d="M3.2379 2.46066L3.92063 2.73091L4.02984 3.45637C4.04709 3.57151 4.14002 3.66135 4.25606 3.67453C4.37194 3.6878 4.48212 3.62163 4.52541 3.51276L4.79557 2.83059L5.52094 2.72074C5.63682 2.70316 5.72601 2.61072 5.73936 2.49468C5.75254 2.37864 5.68597 2.26797 5.57758 2.22533L4.89533 1.95565L4.78548 1.22963C4.75016 0.998038 4.37535 0.956375 4.29007 1.17315L4.0204 1.85597L3.29437 1.96517C3.17915 1.98235 3.08931 2.07527 3.07613 2.19131C3.06294 2.30727 3.12902 2.41737 3.2379 2.46066Z"
fill="#F7D804"
/>
</svg>

View File

@ -67,6 +67,7 @@
"@spectrum-css/vars": "^3.0.1", "@spectrum-css/vars": "^3.0.1",
"@zerodevx/svelte-json-view": "^1.0.7", "@zerodevx/svelte-json-view": "^1.0.7",
"codemirror": "^5.65.16", "codemirror": "^5.65.16",
"cron-parser": "^4.9.0",
"dayjs": "^1.10.8", "dayjs": "^1.10.8",
"downloadjs": "1.4.7", "downloadjs": "1.4.7",
"fast-json-patch": "^3.1.1", "fast-json-patch": "^3.1.1",

View File

@ -641,6 +641,8 @@
let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id] let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id]
if (isLoopBlock) { if (isLoopBlock) {
runtimeName = `loop.${name}` runtimeName = `loop.${name}`
} else if (idx === 0) {
runtimeName = `trigger.${name}`
} else if (block.name.startsWith("JS")) { } else if (block.name.startsWith("JS")) {
runtimeName = hasUserDefinedName runtimeName = hasUserDefinedName
? `stepsByName["${bindingName}"].${name}` ? `stepsByName["${bindingName}"].${name}`
@ -650,7 +652,7 @@
? `stepsByName.${bindingName}.${name}` ? `stepsByName.${bindingName}.${name}`
: `steps.${idx - loopBlockCount}.${name}` : `steps.${idx - loopBlockCount}.${name}`
} }
return idx === 0 ? `trigger.${name}` : runtimeName return runtimeName
} }
const determineCategoryName = (idx, isLoopBlock, bindingName) => { const determineCategoryName = (idx, isLoopBlock, bindingName) => {
@ -677,7 +679,7 @@
) )
return { return {
readableBinding: readableBinding:
bindingName && !isLoopBlock bindingName && !isLoopBlock && idx !== 0
? `steps.${bindingName}.${name}` ? `steps.${bindingName}.${name}`
: runtimeBinding, : runtimeBinding,
runtimeBinding, runtimeBinding,
@ -1048,7 +1050,7 @@
{:else if value.customType === "cron"} {:else if value.customType === "cron"}
<CronBuilder <CronBuilder
on:change={e => onChange({ [key]: e.detail })} on:change={e => onChange({ [key]: e.detail })}
value={inputData[key]} cronExpression={inputData[key]}
/> />
{:else if value.customType === "automationFields"} {:else if value.customType === "automationFields"}
<AutomationSelector <AutomationSelector

View File

@ -1,41 +1,70 @@
<script> <script>
import { Button, Select, Input, Label } from "@budibase/bbui" import {
Select,
InlineAlert,
Input,
Label,
Layout,
notifications,
} from "@budibase/bbui"
import { onMount, createEventDispatcher } from "svelte" import { onMount, createEventDispatcher } from "svelte"
import { flags } from "stores/builder" import { flags } from "stores/builder"
import { licensing } from "stores/portal"
import { API } from "api"
import MagicWand from "../../../../assets/MagicWand.svelte"
import { helpers, REBOOT_CRON } from "@budibase/shared-core" import { helpers, REBOOT_CRON } from "@budibase/shared-core"
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
export let value export let cronExpression
let error let error
let nextExecutions
// AI prompt
let aiCronPrompt = ""
let loadingAICronExpression = false
$: aiEnabled =
$licensing.customAIConfigsEnabled || $licensing.budibaseAIEnabled
$: { $: {
const exists = CRON_EXPRESSIONS.some(cron => cron.value === value) if (cronExpression) {
const customIndex = CRON_EXPRESSIONS.findIndex( try {
cron => cron.label === "Custom" nextExecutions = helpers.cron
) .getNextExecutionDates(cronExpression)
.join("\n")
if (!exists && customIndex === -1) { } catch (err) {
CRON_EXPRESSIONS[0] = { label: "Custom", value: value } nextExecutions = null
} else if (exists && customIndex !== -1) { }
CRON_EXPRESSIONS.splice(customIndex, 1)
} }
} }
const onChange = e => { const onChange = e => {
if (value !== REBOOT_CRON) { if (e.detail !== REBOOT_CRON) {
error = helpers.cron.validate(e.detail).err error = helpers.cron.validate(e.detail).err
} }
if (e.detail === value || error) { if (e.detail === cronExpression || error) {
return return
} }
value = e.detail cronExpression = e.detail
dispatch("change", e.detail) dispatch("change", e.detail)
} }
const updatePreset = e => {
aiCronPrompt = ""
onChange(e)
}
const updateCronExpression = e => {
aiCronPrompt = ""
cronExpression = null
nextExecutions = null
onChange(e)
}
let touched = false let touched = false
let presets = false
const CRON_EXPRESSIONS = [ const CRON_EXPRESSIONS = [
{ {
@ -64,45 +93,130 @@
}) })
} }
}) })
async function generateAICronExpression() {
loadingAICronExpression = true
try {
const response = await API.generateCronExpression({
prompt: aiCronPrompt,
})
cronExpression = response.message
dispatch("change", response.message)
} catch (err) {
notifications.error(err.message)
} finally {
loadingAICronExpression = false
}
}
</script> </script>
<div class="block-field"> <!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<Layout noPadding gap="S">
<Select
on:change={updatePreset}
value={cronExpression || "Custom"}
secondary
extraThin
label="Use a Preset (Optional)"
options={CRON_EXPRESSIONS}
/>
{#if aiEnabled}
<div class="cron-ai-generator">
<Input
bind:value={aiCronPrompt}
label="Generate Cron Expression with AI"
size="S"
placeholder="Run every hour between 1pm to 4pm everyday of the week"
/>
{#if aiCronPrompt}
<div
class="icon"
class:pulsing-text={loadingAICronExpression}
on:click={generateAICronExpression}
>
<MagicWand height="17" width="17" />
</div>
{/if}
</div>
{/if}
<Input <Input
label="Cron Expression"
{error} {error}
on:change={onChange} on:change={updateCronExpression}
{value} value={cronExpression}
on:blur={() => (touched = true)} on:blur={() => (touched = true)}
updateOnChange={false} updateOnChange={false}
/> />
{#if touched && !value} {#if touched && !cronExpression}
<Label><div class="error">Please specify a CRON expression</div></Label> <Label><div class="error">Please specify a CRON expression</div></Label>
{/if} {/if}
<div class="presets"> {#if nextExecutions}
<Button on:click={() => (presets = !presets)} <InlineAlert
>{presets ? "Hide" : "Show"} Presets</Button type="info"
> header="Next Executions"
{#if presets} message={nextExecutions}
<Select />
on:change={onChange} {/if}
value={value || "Custom"} </Layout>
secondary
extraThin
label="Presets"
options={CRON_EXPRESSIONS}
/>
{/if}
</div>
</div>
<style> <style>
.presets { .cron-ai-generator {
margin-top: var(--spacing-m); flex: 1;
position: relative;
} }
.block-field { .icon {
padding-top: var(--spacing-s); right: 1px;
bottom: 1px;
position: absolute;
justify-content: center;
align-items: center;
display: flex;
flex-direction: row;
box-sizing: border-box;
border-left: 1px solid var(--spectrum-alias-border-color);
border-top-right-radius: var(--spectrum-alias-border-radius-regular);
border-bottom-right-radius: var(--spectrum-alias-border-radius-regular);
width: 31px;
color: var(--spectrum-alias-text-color);
background-color: var(--spectrum-global-color-gray-75);
transition: background-color
var(--spectrum-global-animation-duration-100, 130ms),
box-shadow var(--spectrum-global-animation-duration-100, 130ms),
border-color var(--spectrum-global-animation-duration-100, 130ms);
height: calc(var(--spectrum-alias-item-height-m) - 2px);
} }
.icon:hover {
cursor: pointer;
color: var(--spectrum-alias-text-color-hover);
background-color: var(--spectrum-global-color-gray-50);
border-color: var(--spectrum-alias-border-color-hover);
}
.error { .error {
padding-top: var(--spacing-xs); padding-top: var(--spacing-xs);
color: var(--spectrum-global-color-red-500); color: var(--spectrum-global-color-red-500);
} }
.pulsing-text {
font-size: 24px;
font-weight: bold;
animation: pulse 1.5s infinite;
}
@keyframes pulse {
0% {
opacity: 0.3;
transform: scale(1);
}
50% {
opacity: 1;
transform: scale(1.05);
}
100% {
opacity: 0.3;
transform: scale(1);
}
}
</style> </style>
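The "Next Executions" preview and the new cron-parser dependency go together. helpers.cron.getNextExecutionDates is not shown in this diff, but a sketch of what it plausibly does with cron-parser looks like this (the function body is an assumption):

import parser from "cron-parser"

// assumed shape: return the next n run times for a cron expression
function getNextExecutionDates(expression: string, n = 4): string[] {
  const interval = parser.parseExpression(expression)
  const dates: string[] = []
  for (let i = 0; i < n; i++) {
    dates.push(interval.next().toDate().toLocaleString())
  }
  return dates
}

getNextExecutionDates("0 9 * * 1-5")
// => the next four weekday 09:00 run times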

View File

@ -21,6 +21,7 @@
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
canHaveDefaultColumn, canHaveDefaultColumn,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { makePropSafe } from "@budibase/string-templates"
import { createEventDispatcher, getContext, onMount } from "svelte" import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { tables, datasources } from "stores/builder" import { tables, datasources } from "stores/builder"
@ -46,6 +47,7 @@
import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte" import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
import OptionsEditor from "./OptionsEditor.svelte" import OptionsEditor from "./OptionsEditor.svelte"
import { isEnabled } from "helpers/featureFlags" import { isEnabled } from "helpers/featureFlags"
import { getUserBindings } from "dataBinding"
const AUTO_TYPE = FieldType.AUTO const AUTO_TYPE = FieldType.AUTO
const FORMULA_TYPE = FieldType.FORMULA const FORMULA_TYPE = FieldType.FORMULA
@ -191,6 +193,19 @@
fieldId: makeFieldId(t.type, t.subtype), fieldId: makeFieldId(t.type, t.subtype),
...t, ...t,
})) }))
$: defaultValueBindings = [
{
type: "context",
runtimeBinding: `${makePropSafe("now")}`,
readableBinding: `Date`,
category: "Date",
icon: "Date",
display: {
name: "Server date",
},
},
...getUserBindings(),
]
const fieldDefinitions = Object.values(FIELDS).reduce( const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id // Storing the fields by complex field id
@ -781,9 +796,8 @@
setRequired(false) setRequired(false)
} }
}} }}
bindings={getBindings({ table })} bindings={defaultValueBindings}
allowJS allowJS
context={rowGoldenSample}
/> />
</div> </div>
{/if} {/if}

View File

@ -2,7 +2,12 @@
import { getContext } from "svelte" import { getContext } from "svelte"
import CreateEditColumn from "components/backend/DataTable/modals/CreateEditColumn.svelte" import CreateEditColumn from "components/backend/DataTable/modals/CreateEditColumn.svelte"
const { datasource } = getContext("grid") const { datasource, rows } = getContext("grid")
const onUpdate = async () => {
await datasource.actions.refreshDefinition()
await rows.actions.refreshData()
}
</script> </script>
<CreateEditColumn on:updatecolumns={datasource.actions.refreshDefinition} /> <CreateEditColumn on:updatecolumns={onUpdate} />

View File

@ -63,7 +63,7 @@
// Look up the component tree and find something that is provided by an // Look up the component tree and find something that is provided by an
// ancestor that matches our datasource. This is for backwards compatibility // ancestor that matches our datasource. This is for backwards compatibility
// as previously we could use the "closest" context. // as previously we could use the "closest" context.
for (let id of path.reverse().slice(1)) { for (let id of path.toReversed().slice(1)) {
// Check for matching view datasource // Check for matching view datasource
if ( if (
dataSource.type === "viewV2" && dataSource.type === "viewV2" &&

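Switching from reverse() to toReversed() avoids a subtle side effect: Array.prototype.reverse mutates the array in place, so the shared component path would be left permanently reversed after this loop ran. A small illustration:

const path = ["root", "screen", "form", "field"]

// non-mutating: path stays in its original order
const ancestors = path.toReversed().slice(1) // ["form", "screen", "root"]

// the old call reversed the shared array as a side effect:
// path.reverse() would leave path as ["field", "form", "screen", "root"]

// toReversed() needs Node 20+ / a recent browser; [...path].reverse() is the older-compatible equivalent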
View File

@ -0,0 +1,11 @@
export const buildAIEndpoints = API => ({
/**
* Generates a cron expression from a prompt
*/
generateCronExpression: async ({ prompt }) => {
return await API.post({
url: "/api/ai/cron",
body: { prompt },
})
},
})
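Once buildAIEndpoints is attached to the client (see the api/index changes below), CronBuilder can call the endpoint directly; the response carries the generated expression in a message field. The concrete cron value below is illustrative:

// API here is the client returned by createAPIClient
const response = await API.generateCronExpression({
  prompt: "Run every hour between 1pm and 4pm every day of the week",
})
console.log(response.message) // e.g. "0 13-16 * * *"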

View File

@ -2,6 +2,7 @@ import { Helpers } from "@budibase/bbui"
import { Header } from "@budibase/shared-core" import { Header } from "@budibase/shared-core"
import { ApiVersion } from "../constants" import { ApiVersion } from "../constants"
import { buildAnalyticsEndpoints } from "./analytics" import { buildAnalyticsEndpoints } from "./analytics"
import { buildAIEndpoints } from "./ai"
import { buildAppEndpoints } from "./app" import { buildAppEndpoints } from "./app"
import { buildAttachmentEndpoints } from "./attachments" import { buildAttachmentEndpoints } from "./attachments"
import { buildAuthEndpoints } from "./auth" import { buildAuthEndpoints } from "./auth"
@ -268,6 +269,7 @@ export const createAPIClient = config => {
// Attach all endpoints // Attach all endpoints
return { return {
...API, ...API,
...buildAIEndpoints(API),
...buildAnalyticsEndpoints(API), ...buildAnalyticsEndpoints(API),
...buildAppEndpoints(API), ...buildAppEndpoints(API),
...buildAttachmentEndpoints(API), ...buildAttachmentEndpoints(API),

@ -1 +1 @@
Subproject commit e2fe0f9cc856b4ee1a97df96d623b2d87d4e8733 Subproject commit aca9828117bb97f54f40ee359f1a3f6e259174e7

View File

@ -1,9 +1,7 @@
import { permissions, roles, context } from "@budibase/backend-core" import { permissions, roles, context } from "@budibase/backend-core"
import { import {
UserCtx, UserCtx,
Database,
Role, Role,
PermissionLevel,
GetResourcePermsResponse, GetResourcePermsResponse,
ResourcePermissionInfo, ResourcePermissionInfo,
GetDependantResourcesResponse, GetDependantResourcesResponse,
@ -12,107 +10,15 @@ import {
RemovePermissionRequest, RemovePermissionRequest,
RemovePermissionResponse, RemovePermissionResponse,
} from "@budibase/types" } from "@budibase/types"
import { getRoleParams } from "../../db/utils"
import { import {
CURRENTLY_SUPPORTED_LEVELS, CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions, getBasePermissions,
} from "../../utilities/security" } from "../../utilities/security"
import { removeFromArray } from "../../utilities"
import sdk from "../../sdk" import sdk from "../../sdk"
import { PermissionUpdateType } from "../../sdk/app/permissions"
const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
// utility function to stop this repetition - permissions always stored under roles
async function getAllDBRoles(db: Database) {
const body = await db.allDocs<Role>(
getRoleParams(null, {
include_docs: true,
})
)
return body.rows.map(row => row.doc!)
}
async function updatePermissionOnRole(
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: PermissionUpdateType
) {
const db = context.getAppDB()
const remove = updateType === PermissionUpdateType.REMOVE
const isABuiltin = roles.isBuiltin(roleId)
const dbRoleId = roles.getDBRoleID(roleId)
const dbRoles = await getAllDBRoles(db)
const docUpdates: Role[] = []
// the permission is for a built in, make sure it exists
if (isABuiltin && !dbRoles.some(role => role._id === dbRoleId)) {
const builtin = roles.getBuiltinRoles()[roleId]
builtin._id = roles.getDBRoleID(builtin._id!)
dbRoles.push(builtin)
}
// now try to find any roles which need updated, e.g. removing the
// resource from another role and then adding to the new role
for (let role of dbRoles) {
let updated = false
const rolePermissions: Record<string, PermissionLevel[]> = role.permissions
? role.permissions
: {}
// make sure its an array, also handle migrating
if (
!rolePermissions[resourceId] ||
!Array.isArray(rolePermissions[resourceId])
) {
rolePermissions[resourceId] =
typeof rolePermissions[resourceId] === "string"
? [rolePermissions[resourceId] as unknown as PermissionLevel]
: []
}
// handle the removal/updating the role which has this permission first
// the updating (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
rolePermissions[resourceId].indexOf(level) !== -1
) {
removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the adding, we're on the correct role, at it to this
if (!remove && role._id === dbRoleId) {
const set = new Set(rolePermissions[resourceId])
rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
if (updated) {
role.permissions = rolePermissions
docUpdates.push(role)
}
}
const response = await db.bulkDocs(docUpdates)
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}
export function fetchBuiltin(ctx: UserCtx) { export function fetchBuiltin(ctx: UserCtx) {
ctx.body = Object.values(permissions.getBuiltinPermissions()) ctx.body = Object.values(permissions.getBuiltinPermissions())
} }
@ -124,7 +30,7 @@ export function fetchLevels(ctx: UserCtx) {
export async function fetch(ctx: UserCtx) { export async function fetch(ctx: UserCtx) {
const db = context.getAppDB() const db = context.getAppDB()
const dbRoles: Role[] = await getAllDBRoles(db) const dbRoles: Role[] = await sdk.permissions.getAllDBRoles(db)
let permissions: any = {} let permissions: any = {}
// create an object with structure role ID -> resource ID -> level // create an object with structure role ID -> resource ID -> level
for (let role of dbRoles) { for (let role of dbRoles) {
@ -186,12 +92,18 @@ export async function getDependantResources(
export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) { export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
const params: AddPermissionRequest = ctx.params const params: AddPermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD) ctx.body = await sdk.permissions.updatePermissionOnRole(
params,
PermissionUpdateType.ADD
)
} }
export async function removePermission( export async function removePermission(
ctx: UserCtx<void, RemovePermissionResponse> ctx: UserCtx<void, RemovePermissionResponse>
) { ) {
const params: RemovePermissionRequest = ctx.params const params: RemovePermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE) ctx.body = await sdk.permissions.updatePermissionOnRole(
params,
PermissionUpdateType.REMOVE
)
} }

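The role/permission bookkeeping has moved out of this controller into sdk.permissions, so add and remove now share one implementation. A hedged sketch of the call the controllers make; the role ID, resource ID, and level values are illustrative:

import { PermissionLevel } from "@budibase/types"
import sdk from "../../sdk"
import { PermissionUpdateType } from "../../sdk/app/permissions"

// grant WRITE on a resource to the POWER role (shape mirrors addPermission above)
await sdk.permissions.updatePermissionOnRole(
  { roleId: "POWER", resourceId: "ta_employees", level: PermissionLevel.WRITE },
  PermissionUpdateType.ADD
)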
View File

@ -1,5 +1,6 @@
import dayjs from "dayjs" import dayjs from "dayjs"
import { import {
Aggregation,
AutoFieldSubType, AutoFieldSubType,
AutoReason, AutoReason,
Datasource, Datasource,
@ -19,6 +20,7 @@ import {
SortJson, SortJson,
SortType, SortType,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
breakExternalTableId, breakExternalTableId,
@ -46,7 +48,7 @@ import { db as dbCore } from "@budibase/backend-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import env from "../../../environment" import env from "../../../environment"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import { dataFilters } from "@budibase/shared-core" import { dataFilters, helpers } from "@budibase/shared-core"
export interface ManyRelationship { export interface ManyRelationship {
tableId?: string tableId?: string
@ -159,17 +161,41 @@ function isEditableColumn(column: FieldSchema) {
export class ExternalRequest<T extends Operation> { export class ExternalRequest<T extends Operation> {
private readonly operation: T private readonly operation: T
private readonly tableId: string private readonly source: Table | ViewV2
private datasource?: Datasource private datasource: Datasource
private tables: { [key: string]: Table } = {}
constructor(operation: T, tableId: string, datasource?: Datasource) { public static async for<T extends Operation>(
this.operation = operation operation: T,
this.tableId = tableId source: Table | ViewV2,
this.datasource = datasource opts: { datasource?: Datasource } = {}
if (datasource && datasource.entities) { ) {
this.tables = datasource.entities if (!opts.datasource) {
if (sdk.views.isView(source)) {
const table = await sdk.views.getTable(source.id)
opts.datasource = await sdk.datasources.get(table.sourceId!)
} else {
opts.datasource = await sdk.datasources.get(source.sourceId!)
}
} }
return new ExternalRequest(operation, source, opts.datasource)
}
private get tables(): { [key: string]: Table } {
if (!this.datasource.entities) {
throw new Error("Datasource does not have entities")
}
return this.datasource.entities
}
private constructor(
operation: T,
source: Table | ViewV2,
datasource: Datasource
) {
this.operation = operation
this.source = source
this.datasource = datasource
} }
private prepareFilters( private prepareFilters(
@ -290,20 +316,6 @@ export class ExternalRequest<T extends Operation> {
return this.tables[tableName] return this.tables[tableName]
} }
// seeds the object with table and datasource information
async retrieveMetadata(
datasourceId: string
): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
if (!this.datasource) {
this.datasource = await sdk.datasources.get(datasourceId)
if (!this.datasource || !this.datasource.entities) {
throw "No tables found, fetch tables before query."
}
this.tables = this.datasource.entities
}
return { tables: this.tables, datasource: this.datasource }
}
async getRow(table: Table, rowId: string): Promise<Row> { async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ), endpoint: getEndpoint(table._id!, Operation.READ),
@ -619,24 +631,16 @@ export class ExternalRequest<T extends Operation> {
} }
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> { async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
const { operation, tableId } = this const { operation } = this
if (!tableId) { let table: Table
throw new Error("Unable to run without a table ID") if (sdk.views.isView(this.source)) {
} table = await sdk.views.getTable(this.source.id)
let { datasourceId, tableName } = breakExternalTableId(tableId) } else {
let datasource = this.datasource table = this.source
if (!datasource) {
const { datasource: ds } = await this.retrieveMetadata(datasourceId)
datasource = ds
}
const tables = this.tables
const table = tables[tableName]
let isSql = isSQL(datasource)
if (!table) {
throw new Error(
`Unable to process query, table "${tableName}" not defined.`
)
} }
let isSql = isSQL(this.datasource)
// look for specific components of config which may not be considered acceptable // look for specific components of config which may not be considered acceptable
let { id, row, filters, sort, paginate, rows } = cleanupConfig( let { id, row, filters, sort, paginate, rows } = cleanupConfig(
config, config,
@ -679,25 +683,40 @@ export class ExternalRequest<T extends Operation> {
} }
} }
} }
if ( if (
operation === Operation.DELETE && operation === Operation.DELETE &&
(filters == null || Object.keys(filters).length === 0) (filters == null || Object.keys(filters).length === 0)
) { ) {
throw "Deletion must be filtered" throw "Deletion must be filtered"
} }
let aggregations: Aggregation[] = []
if (sdk.views.isView(this.source)) {
const calculationFields = helpers.views.calculationFields(this.source)
for (const [key, field] of Object.entries(calculationFields)) {
aggregations.push({
name: key,
field: field.field,
calculationType: field.calculationType,
})
}
}
let json: QueryJson = { let json: QueryJson = {
endpoint: { endpoint: {
datasourceId: datasourceId!, datasourceId: this.datasource._id!,
entityId: tableName, entityId: table.name,
operation, operation,
}, },
resource: { resource: {
// have to specify the fields to avoid column overlap (for SQL) // have to specify the fields to avoid column overlap (for SQL)
fields: isSql fields: isSql
? buildSqlFieldList(table, this.tables, { ? await buildSqlFieldList(this.source, this.tables, {
relationships: incRelationships, relationships: incRelationships,
}) })
: [], : [],
aggregations,
}, },
filters, filters,
sort, sort,
@ -714,7 +733,7 @@ export class ExternalRequest<T extends Operation> {
}, },
meta: { meta: {
table, table,
tables: tables, tables: this.tables,
}, },
} }
@ -745,7 +764,7 @@ export class ExternalRequest<T extends Operation> {
} }
const output = await sqlOutputProcessing( const output = await sqlOutputProcessing(
response, response,
table, this.source,
this.tables, this.tables,
relationships relationships
) )

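ExternalRequest now takes a Table or ViewV2 as its source and is built through a static for() factory, so the datasource can be resolved asynchronously before run() executes. A sketch of the calling pattern, equivalent to what handleRequest does in the next file; the config values are illustrative:

import { Operation, IncludeRelationship } from "@budibase/types"
// assumed: ExternalRequest imported from the external row controller, source is a Table | ViewV2

const request = await ExternalRequest.for(Operation.READ, source, {
  // datasource is optional; when omitted it is looked up from the table or view
})
const result = await request.run({
  filters: {},
  includeSqlRelationships: IncludeRelationship.INCLUDE,
})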
View File

@ -17,6 +17,7 @@ import {
Row, Row,
Table, Table,
UserCtx, UserCtx,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import * as utils from "./utils" import * as utils from "./utils"
@ -29,39 +30,40 @@ import { generateIdForRow } from "./utils"
export async function handleRequest<T extends Operation>( export async function handleRequest<T extends Operation>(
operation: T, operation: T,
tableId: string, source: Table | ViewV2,
opts?: RunConfig opts?: RunConfig
): Promise<ExternalRequestReturnType<T>> { ): Promise<ExternalRequestReturnType<T>> {
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run( return (
opts || {} await ExternalRequest.for<T>(operation, source, {
) datasource: opts?.datasource,
})
).run(opts || {})
} }
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
const { _id, ...rowData } = ctx.request.body const { _id, ...rowData } = ctx.request.body
const table = await sdk.tables.getTable(tableId)
const { row: dataToUpdate } = await inputProcessing( const dataToUpdate = await inputProcessing(
ctx.user?._id, ctx.user?._id,
cloneDeep(table), cloneDeep(source),
rowData rowData
) )
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row: dataToUpdate, row: dataToUpdate,
tableId, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
throw { validation: validateResult.errors } throw { validation: validateResult.errors }
} }
const beforeRow = await sdk.rows.external.getRow(tableId, _id, { const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
relationships: true, relationships: true,
}) })
const response = await handleRequest(Operation.UPDATE, tableId, { const response = await handleRequest(Operation.UPDATE, source, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
row: dataToUpdate, row: dataToUpdate,
}) })
@ -69,17 +71,16 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// The id might have been changed, so the refetching would fail. Recalculating the id just in case // The id might have been changed, so the refetching would fail. Recalculating the id just in case
const updatedId = const updatedId =
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
const row = await sdk.rows.external.getRow(tableId, updatedId, { const row = await sdk.rows.external.getRow(table._id!, updatedId, {
relationships: true, relationships: true,
}) })
const [enrichedRow, oldRow] = await Promise.all([ const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, { outputProcessing(source, row, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
fromViewId: viewId,
}), }),
outputProcessing(table, beforeRow, { outputProcessing(source, beforeRow, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
}), }),
@ -94,9 +95,9 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
} }
export async function destroy(ctx: UserCtx) { export async function destroy(ctx: UserCtx) {
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const _id = ctx.request.body._id const _id = ctx.request.body._id
const { row } = await handleRequest(Operation.DELETE, tableId, { const { row } = await handleRequest(Operation.DELETE, source, {
id: breakRowIdField(_id), id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE, includeSqlRelationships: IncludeRelationship.EXCLUDE,
}) })
@ -105,11 +106,11 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) { export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body const { rows } = ctx.request.body
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
let promises: Promise<{ row: Row; table: Table }>[] = [] let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) { for (let row of rows) {
promises.push( promises.push(
handleRequest(Operation.DELETE, tableId, { handleRequest(Operation.DELETE, source, {
id: breakRowIdField(row._id), id: breakRowIdField(row._id),
includeSqlRelationships: IncludeRelationship.EXCLUDE, includeSqlRelationships: IncludeRelationship.EXCLUDE,
}) })
@ -124,6 +125,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) { export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId const id = ctx.params.rowId
const source = await utils.getSource(ctx)
const { tableId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId) const datasource: Datasource = await sdk.datasources.get(datasourceId)
@ -131,7 +133,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.") ctx.throw(400, "Datasource has not been configured for plus API.")
} }
const tables = datasource.entities const tables = datasource.entities
const response = await handleRequest(Operation.READ, tableId, { const response = await handleRequest(Operation.READ, source, {
id, id,
datasource, datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
@ -155,7 +157,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
// don't support composite keys right now // don't support composite keys right now
const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0]) const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
const primaryLink = linkedTable.primary?.[0] as string const primaryLink = linkedTable.primary?.[0] as string
const relatedRows = await handleRequest(Operation.READ, linkedTableId!, { const relatedRows = await handleRequest(Operation.READ, linkedTable, {
tables, tables,
filters: { filters: {
oneOf: { oneOf: {

View File

@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
} }
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) { export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const { tableId } = utils.getSourceId(ctx) const { tableId, viewId } = utils.getSourceId(ctx)
await context.ensureSnippetContext(true) await context.ensureSnippetContext(true)
@ -222,6 +222,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
...ctx.request.body, ...ctx.request.body,
query: enrichedQuery, query: enrichedQuery,
tableId, tableId,
viewId,
} }
ctx.status = 200 ctx.status = 200
@ -229,14 +230,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
} }
export async function validate(ctx: Ctx<Row, ValidateResponse>) { export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const { tableId } = utils.getSourceId(ctx) const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently // external tables are hard to validate currently
if (isExternalTableID(tableId)) { if (isExternalTableID(table._id!)) {
ctx.body = { valid: true, errors: {} } ctx.body = { valid: true, errors: {} }
} else { } else {
ctx.body = await sdk.rows.utils.validate({ ctx.body = await sdk.rows.utils.validate({
row: ctx.request.body, row: ctx.request.body,
tableId, source,
}) })
} }
} }

View File

@ -21,18 +21,19 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils" import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
import { flatten } from "lodash" import { flatten } from "lodash"
import { findRow } from "../../../sdk/app/rows/internal"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) { export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx) const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
const inputs = ctx.request.body const inputs = ctx.request.body
const isUserTable = tableId === InternalTables.USER_METADATA const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow let oldRow
const dbTable = await sdk.tables.getTable(tableId)
try { try {
oldRow = await outputProcessing( oldRow = await outputProcessing(source, await findRow(tableId, inputs._id!))
dbTable,
await utils.findRow(tableId, inputs._id!)
)
} catch (err) { } catch (err) {
if (isUserTable) { if (isUserTable) {
// don't include the rev, it'll be the global rev // don't include the rev, it'll be the global rev
@ -48,22 +49,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// need to build up full patch fields before coerce // need to build up full patch fields before coerce
let combinedRow: any = cloneDeep(oldRow) let combinedRow: any = cloneDeep(oldRow)
for (let key of Object.keys(inputs)) { for (let key of Object.keys(inputs)) {
if (!dbTable.schema[key]) continue if (!table.schema[key]) continue
combinedRow[key] = inputs[key] combinedRow[key] = inputs[key]
} }
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
// this returns the table and row incase they have been updated // this returns the table and row incase they have been updated
let { table, row } = await inputProcessing( let row = await inputProcessing(ctx.user?._id, source, combinedRow)
ctx.user?._id,
tableClone,
combinedRow
)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
table, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
@ -87,10 +81,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
return { row: ctx.body as Row, table, oldRow } return { row: ctx.body as Row, table, oldRow }
} }
const result = await finaliseRow(table, row, { const result = await finaliseRow(source, row, {
oldTable: dbTable,
updateFormula: true, updateFormula: true,
fromViewId: viewId,
}) })
return { ...result, oldRow } return { ...result, oldRow }
@ -186,7 +178,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
sdk.tables.getTable(tableId), sdk.tables.getTable(tableId),
linkRows.getLinkDocuments({ tableId, rowId, fieldName }), linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
]) ])
let row = await utils.findRow(tableId, rowId) let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row) row = await outputProcessing(table, row)
const linkVals = links as LinkDocumentValue[] const linkVals = links as LinkDocumentValue[]

View File

@ -4,10 +4,11 @@ import {
processFormulas, processFormulas,
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { Table, Row, FormulaType, FieldType } from "@budibase/types" import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
import * as linkRows from "../../../db/linkedRows" import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
/** /**
* This function runs through a list of enriched rows, looks at the rows which * This function runs through a list of enriched rows, looks at the rows which
@ -121,33 +122,26 @@ export async function updateAllFormulasInTable(table: Table) {
* expects the row to be totally enriched/contain all relationships. * expects the row to be totally enriched/contain all relationships.
*/ */
export async function finaliseRow( export async function finaliseRow(
table: Table, source: Table | ViewV2,
row: Row, row: Row,
{ opts?: { updateFormula: boolean }
oldTable,
updateFormula,
fromViewId,
}: { oldTable?: Table; updateFormula: boolean; fromViewId?: string } = {
updateFormula: true,
}
) { ) {
const db = context.getAppDB() const db = context.getAppDB()
const { updateFormula = true } = opts || {}
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
row.type = "row" row.type = "row"
// process the row before return, to include relationships // process the row before return, to include relationships
let enrichedRow = (await outputProcessing(table, cloneDeep(row), { let enrichedRow = await outputProcessing(source, cloneDeep(row), {
squash: false, squash: false,
})) as Row })
// use enriched row to generate formulas for saving, specifically only use as context // use enriched row to generate formulas for saving, specifically only use as context
row = await processFormulas(table, row, { row = await processFormulas(table, row, {
dynamic: false, dynamic: false,
contextRows: [enrichedRow], contextRows: [enrichedRow],
}) })
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (oldTable && !isEqual(oldTable, table)) {
await db.put(table)
}
const response = await db.put(row) const response = await db.put(row)
// for response, calculate the formulas for the enriched row // for response, calculate the formulas for the enriched row
enrichedRow._rev = response.rev enrichedRow._rev = response.rev
@ -158,8 +152,6 @@ export async function finaliseRow(
if (updateFormula) { if (updateFormula) {
await updateRelatedFormula(table, enrichedRow) await updateRelatedFormula(table, enrichedRow)
} }
const squashed = await linkRows.squashLinks(table, enrichedRow, { const squashed = await linkRows.squashLinks(source, enrichedRow)
fromViewId,
})
return { row: enrichedRow, squashed, table } return { row: enrichedRow, squashed, table }
} }

View File

@ -1,11 +1,19 @@
// need to handle table name + field or just field, depending on if relationships used // need to handle table name + field or just field, depending on if relationships used
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types" import {
FieldSchema,
FieldType,
Row,
Table,
JsonTypes,
ViewV2,
} from "@budibase/types"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils" import { generateRowIdField } from "../../../../integrations/utils"
import sdk from "../../../../sdk"
function extractFieldValue({ function extractFieldValue({
row, row,
@ -78,20 +86,30 @@ function fixJsonTypes(row: Row, table: Table) {
return row return row
} }
export function basicProcessing({ export async function basicProcessing({
row, row,
table, source,
tables, tables,
isLinked, isLinked,
sqs, sqs,
}: { }: {
row: Row row: Row
table: Table source: Table | ViewV2
tables: Table[] tables: Table[]
isLinked: boolean isLinked: boolean
sqs?: boolean sqs?: boolean
}): Row { }): Promise<Row> {
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
const thisRow: Row = {} const thisRow: Row = {}
// filter the row down to what is actually the row (not joined) // filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) { for (let fieldName of Object.keys(table.schema)) {
let value = extractFieldValue({ let value = extractFieldValue({
@ -108,13 +126,20 @@ export function basicProcessing({
thisRow[fieldName] = value thisRow[fieldName] = value
} }
} }
if (sdk.views.isView(source)) {
for (const key of Object.keys(helpers.views.calculationFields(source))) {
thisRow[key] = row[key]
}
}
let columns: string[] = Object.keys(table.schema) let columns: string[] = Object.keys(table.schema)
if (!sqs) { if (!sqs && !isCalculationView) {
thisRow._id = generateIdForRow(row, table, isLinked) thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id thisRow.tableId = table._id
thisRow._rev = "rev" thisRow._rev = "rev"
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS) columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
} else { } else if (!isCalculationView) {
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS) columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) { for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({ thisRow[internalColumn] = extractFieldValue({
@ -149,28 +174,30 @@ export function basicProcessing({
thisRow[col] = array thisRow[col] = array
// make sure all of them have an _id // make sure all of them have an _id
const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]! const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]!
thisRow[col] = (thisRow[col] as Row[]) thisRow[col] = (
.map(relatedRow => await Promise.all(
basicProcessing({ (thisRow[col] as Row[]).map(relatedRow =>
row: relatedRow, basicProcessing({
table: relatedTable, row: relatedRow,
tables, source: relatedTable,
isLinked: false, tables,
sqs, isLinked: false,
}) sqs,
})
)
) )
.sort((a, b) => { ).sort((a, b) => {
const aField = a?.[sortField], const aField = a?.[sortField],
bField = b?.[sortField] bField = b?.[sortField]
if (!aField) { if (!aField) {
return 1 return 1
} else if (!bField) { } else if (!bField) {
return -1 return -1
} }
return aField.localeCompare return aField.localeCompare
? aField.localeCompare(bField) ? aField.localeCompare(bField)
: aField - bField : aField - bField
}) })
} }
} }
return fixJsonTypes(thisRow, table) return fixJsonTypes(thisRow, table)
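basicProcessing is now async (it may look up a view's backing table), so callers have to await it; the related-row mapping above uses Promise.all for exactly that reason. A trimmed sketch of the call shape, assuming the argument names from this diff and that relatedRows, relatedTable and tables are in scope:

// Sketch only - not the exact production code.
const processed: Row[] = await Promise.all(
  relatedRows.map(relatedRow =>
    basicProcessing({
      row: relatedRow,
      source: relatedTable, // a plain Table here, but a ViewV2 is also accepted
      tables,
      isLinked: false,
    })
  )
)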

View File

@ -7,10 +7,14 @@ import {
ManyToManyRelationshipFieldMetadata, ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata, RelationshipFieldMetadata,
RelationshipsJson, RelationshipsJson,
Row,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { breakExternalTableId } from "../../../../integrations/utils" import { breakExternalTableId } from "../../../../integrations/utils"
import { generateJunctionTableID } from "../../../../db/utils" import { generateJunctionTableID } from "../../../../db/utils"
import sdk from "../../../../sdk"
import { helpers } from "@budibase/shared-core"
type TableMap = Record<string, Table> type TableMap = Record<string, Table>
@ -108,37 +112,51 @@ export function buildInternalRelationships(
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario. * is more performant and has the added benefit of protecting against this scenario.
*/ */
export function buildSqlFieldList( export async function buildSqlFieldList(
table: Table, source: Table | ViewV2,
tables: TableMap, tables: TableMap,
opts?: { relationships: boolean } opts?: { relationships: boolean }
) { ) {
const { relationships } = opts || {}
function extractRealFields(table: Table, existing: string[] = []) { function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema) return Object.entries(table.schema)
.filter( .filter(
([columnName, column]) => ([columnName, column]) =>
column.type !== FieldType.LINK && column.type !== FieldType.LINK &&
column.type !== FieldType.FORMULA && column.type !== FieldType.FORMULA &&
!existing.find((field: string) => field === columnName) !existing.find(
(field: string) => field === `${table.name}.${columnName}`
)
) )
.map(column => `${table.name}.${column[0]}`) .map(([columnName]) => `${table.name}.${columnName}`)
} }
let fields = extractRealFields(table)
let fields: string[] = []
if (sdk.views.isView(source)) {
fields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
fields = extractRealFields(source)
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
for (let field of Object.values(table.schema)) { for (let field of Object.values(table.schema)) {
if ( if (field.type !== FieldType.LINK || !relationships || !field.tableId) {
field.type !== FieldType.LINK ||
!opts?.relationships ||
!field.tableId
) {
continue continue
} }
const { tableName: linkTableName } = breakExternalTableId(field.tableId) const { tableName } = breakExternalTableId(field.tableId)
const linkTable = tables[linkTableName] if (tables[tableName]) {
if (linkTable) { fields = fields.concat(extractRealFields(tables[tableName], fields))
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
} }
} }
return fields return fields
} }
@ -149,3 +167,7 @@ export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
(DSPlusOperation.READ in resp[0] && resp[0].read === true) (DSPlusOperation.READ in resp[0] && resp[0].read === true)
) )
} }
export function isKnexRows(resp: DatasourcePlusQueryResponse): resp is Row[] {
return !isKnexEmptyReadResponse(resp)
}
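isKnexRows is the positive counterpart of isKnexEmptyReadResponse and doubles as a type guard, so callers can narrow the response without casting. A small usage sketch, assuming the types referenced above:

import { DatasourcePlusQueryResponse, Row } from "@budibase/types"

function rowsOrEmpty(resp: DatasourcePlusQueryResponse): Row[] {
  // an empty "read" marker response means no rows; anything else is already Row[]
  return isKnexRows(resp) ? resp : []
}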

View File

@ -1,6 +1,6 @@
import * as utils from "../../../../db/utils" import * as utils from "../../../../db/utils"
import { context } from "@budibase/backend-core" import { docIds } from "@budibase/backend-core"
import { import {
Ctx, Ctx,
DatasourcePlusQueryResponse, DatasourcePlusQueryResponse,
@ -8,17 +8,18 @@ import {
RelationshipsJson, RelationshipsJson,
Row, Row,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
processDates, processDates,
processFormulas, processFormulas,
} from "../../../../utilities/rowProcessor" } from "../../../../utilities/rowProcessor"
import { isKnexEmptyReadResponse } from "./sqlUtils" import { isKnexRows } from "./sqlUtils"
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic" import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
import sdk from "../../../../sdk" import sdk from "../../../../sdk"
import { processStringSync } from "@budibase/string-templates" import { processStringSync } from "@budibase/string-templates"
import validateJs from "validate.js" import validateJs from "validate.js"
import { getFullUser } from "../../../../utilities/users" import { helpers } from "@budibase/shared-core"
validateJs.extend(validateJs.validators.datetime, { validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) { parse: function (value: string) {
@ -58,26 +59,11 @@ export async function processRelationshipFields(
return row return row
} }
export async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB()
let row: Row
// TODO remove special user case in future
if (tableId === utils.InternalTables.USER_METADATA) {
row = await getFullUser(rowId)
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } { export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
// top priority, use the URL first // top priority, use the URL first
if (ctx.params?.sourceId) { if (ctx.params?.sourceId) {
const { sourceId } = ctx.params const { sourceId } = ctx.params
if (utils.isViewID(sourceId)) { if (docIds.isViewId(sourceId)) {
return { return {
tableId: utils.extractViewInfoFromID(sourceId).tableId, tableId: utils.extractViewInfoFromID(sourceId).tableId,
viewId: sourceId, viewId: sourceId,
@ -96,22 +82,22 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
throw new Error("Unable to find table ID in request") throw new Error("Unable to find table ID in request")
} }
export async function validate( export async function getSource(ctx: Ctx): Promise<Table | ViewV2> {
opts: { row: Row } & ({ tableId: string } | { table: Table }) const { tableId, viewId } = getSourceId(ctx)
) { if (viewId) {
let fetchedTable: Table return sdk.views.get(viewId)
if ("tableId" in opts) {
fetchedTable = await sdk.tables.getTable(opts.tableId)
} else {
fetchedTable = opts.table
} }
return sdk.rows.utils.validate({ return sdk.tables.getTable(tableId)
...opts,
table: fetchedTable,
})
} }
function fixBooleanFields({ row, table }: { row: Row; table: Table }) { export async function getTableFromSource(source: Table | ViewV2) {
if (sdk.views.isView(source)) {
return await sdk.views.getTable(source.id)
}
return source
}
function fixBooleanFields(row: Row, table: Table) {
for (let col of Object.values(table.schema)) { for (let col of Object.values(table.schema)) {
if (col.type === FieldType.BOOLEAN) { if (col.type === FieldType.BOOLEAN) {
if (row[col.name] === 1) { if (row[col.name] === 1) {
@ -126,49 +112,45 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function sqlOutputProcessing( export async function sqlOutputProcessing(
rows: DatasourcePlusQueryResponse, rows: DatasourcePlusQueryResponse,
table: Table, source: Table | ViewV2,
tables: Record<string, Table>, tables: Record<string, Table>,
relationships: RelationshipsJson[], relationships: RelationshipsJson[],
opts?: { sqs?: boolean } opts?: { sqs?: boolean }
): Promise<Row[]> { ): Promise<Row[]> {
if (isKnexEmptyReadResponse(rows)) { if (!isKnexRows(rows)) {
return [] return []
} }
let finalRows: { [key: string]: Row } = {}
for (let row of rows as Row[]) { let table: Table
let rowId = row._id let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
let processedRows: Row[] = []
for (let row of rows) {
if (opts?.sqs) { if (opts?.sqs) {
rowId = getInternalRowId(row, table) row._id = getInternalRowId(row, table)
row._id = rowId } else if (row._id == null && !isCalculationView) {
} else if (!rowId) { row._id = generateIdForRow(row, table)
rowId = generateIdForRow(row, table)
row._id = rowId
} }
const thisRow = basicProcessing({
row = await basicProcessing({
row, row,
table, source,
tables: Object.values(tables), tables: Object.values(tables),
isLinked: false, isLinked: false,
sqs: opts?.sqs, sqs: opts?.sqs,
}) })
if (thisRow._id == null) { row = fixBooleanFields(row, table)
throw new Error("Unable to generate row ID for SQL rows") row = await processRelationshipFields(table, tables, row, relationships)
} processedRows.push(row)
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
} }
// make sure all related rows are correct return processDates(table, processedRows)
let finalRowArray = []
for (let row of Object.values(finalRows)) {
finalRowArray.push(
await processRelationshipFields(table, tables, row, relationships)
)
}
// process some additional types
finalRowArray = processDates(table, finalRowArray)
return finalRowArray
} }
export function isUserMetadataTable(tableId: string) { export function isUserMetadataTable(tableId: string) {
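getSource and getTableFromSource centralise the table-versus-view resolution that used to be scattered across row handlers. A hypothetical controller sketch using the helpers defined above, assuming the Ctx shape referenced in this diff:

export async function fetchHandler(ctx: Ctx) {
  const source = await getSource(ctx) // Table or ViewV2, based on the request's source id
  const table = await getTableFromSource(source) // always the backing Table
  ctx.body = { sourceId: sdk.views.isView(source) ? source.id : table._id }
}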

View File

@ -5,14 +5,9 @@ import {
SearchViewRowRequest, SearchViewRowRequest,
RequiredKeys, RequiredKeys,
RowSearchParams, RowSearchParams,
SearchFilterKey,
LogicalOperator,
} from "@budibase/types" } from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { db, context, features } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { enrichSearchContext } from "./utils"
import { isExternalTableID } from "../../../integrations/utils"
export async function searchView( export async function searchView(
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse> ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
@ -32,55 +27,15 @@ export async function searchView(
.map(([key]) => key) .map(([key]) => key)
const { body } = ctx.request const { body } = ctx.request
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let query = dataFilters.buildQuery(view.query || [])
if (body.query) {
// Delete extraneous search params that cannot be overridden
delete body.query.onEmptyFilter
if (
!isExternalTableID(view.tableId) &&
!(await features.flags.isEnabled("SQS"))
) {
// Extract existing fields
const existingFields =
view.query
?.filter(filter => filter.field)
.map(filter => db.removeKeyNumbering(filter.field)) || []
// Carry over filters for unused fields
Object.keys(body.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(body.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
query[operator]![field] = body.query[operator]![field]
}
})
})
} else {
query = {
$and: {
conditions: [query, body.query],
},
}
}
}
await context.ensureSnippetContext(true) await context.ensureSnippetContext(true)
const enrichedQuery = await enrichSearchContext(query, {
user: sdk.users.getUserContextBindings(ctx.user),
})
const searchOptions: RequiredKeys<SearchViewRowRequest> & const searchOptions: RequiredKeys<SearchViewRowRequest> &
RequiredKeys< RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields"> Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = { > = {
tableId: view.tableId, tableId: view.tableId,
viewId: view.id, viewId: view.id,
query: enrichedQuery, query: body.query,
fields: viewFields, fields: viewFields,
...getSortOptions(body, view), ...getSortOptions(body, view),
limit: body.limit, limit: body.limit,
@ -89,11 +44,12 @@ export async function searchView(
countRows: body.countRows, countRows: body.countRows,
} }
const result = await sdk.rows.search(searchOptions) const result = await sdk.rows.search(searchOptions, {
user: sdk.users.getUserContextBindings(ctx.user),
})
result.rows.forEach(r => (r._viewId = view.id)) result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result ctx.body = result
} }
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) { function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
if (request.sort) { if (request.sort) {
return { return {

View File

@ -113,11 +113,10 @@ export async function bulkImport(
const processed = await inputProcessing(ctx.user?._id, table, row, { const processed = await inputProcessing(ctx.user?._id, table, row, {
noAutoRelationships: true, noAutoRelationships: true,
}) })
parsedRows.push(processed.row) parsedRows.push(processed)
table = processed.table
} }
await handleRequest(Operation.BULK_UPSERT, table._id!, { await handleRequest(Operation.BULK_UPSERT, table, {
rows: parsedRows, rows: parsedRows,
}) })
await events.rows.imported(table, parsedRows.length) await events.rows.imported(table, parsedRows.length)

View File

@ -33,7 +33,7 @@ import {
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv" import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets" import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash" import { cloneDeep } from "lodash"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
@ -149,12 +149,7 @@ export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse> ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) { ) {
const tableId = ctx.params.tableId const tableId = ctx.params.tableId
let tableBefore = await sdk.tables.getTable(tableId) await pickApi({ tableId }).bulkImport(ctx)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
// right now we don't trigger anything for bulk import because it // right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to // can only be done in the builder, but in the future we may need to

View File

@ -3,7 +3,6 @@ import { handleDataImport } from "./utils"
import { import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
FieldType,
RenameColumn, RenameColumn,
SaveTableRequest, SaveTableRequest,
SaveTableResponse, SaveTableResponse,
@ -70,22 +69,10 @@ export async function bulkImport(
) { ) {
const table = await sdk.tables.getTable(ctx.params.tableId) const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body const { rows, identifierFields } = ctx.request.body
await handleDataImport( await handleDataImport(table, {
{ importRows: rows,
...table, identifierFields,
schema: { user: ctx.user,
_id: { })
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
return table return table
} }

View File

@ -139,8 +139,7 @@ export async function importToRows(
const processed = await inputProcessing(user?._id, table, row, { const processed = await inputProcessing(user?._id, table, row, {
noAutoRelationships: true, noAutoRelationships: true,
}) })
row = processed.row row = processed
table = processed.table
// However here we must reference the original table, as we want to mutate // However here we must reference the original table, as we want to mutate
// the real schema of the table passed in, not the clone used for // the real schema of the table passed in, not the clone used for

View File

@ -7,10 +7,49 @@ import {
ViewResponse, ViewResponse,
ViewResponseEnriched, ViewResponseEnriched,
ViewV2, ViewV2,
ViewFieldMetadata, BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
RelationSchemaField, RelationSchemaField,
ViewFieldMetadata,
} from "@budibase/types" } from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets" import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core"
function stripUnknownFields(
field: BasicViewFieldMetadata
): RequiredKeys<BasicViewFieldMetadata> {
if (helpers.views.isCalculationField(field)) {
const strippedField: RequiredKeys<ViewCalculationFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
calculationType: field.calculationType,
field: field.field,
columns: field.columns,
}
return strippedField
} else {
const strippedField: RequiredKeys<BasicViewFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
columns: field.columns,
}
return strippedField
}
}
function stripUndefinedFields(obj: Record<string, any>): void {
Object.keys(obj)
.filter(key => obj[key] === undefined)
.forEach(key => {
delete obj[key]
})
}
async function parseSchema(view: CreateViewRequest) { async function parseSchema(view: CreateViewRequest) {
if (!view.schema) { if (!view.schema) {
@ -22,6 +61,7 @@ async function parseSchema(view: CreateViewRequest) {
let fieldRelatedSchema: let fieldRelatedSchema:
| Record<string, RequiredKeys<RelationSchemaField>> | Record<string, RequiredKeys<RelationSchemaField>>
| undefined | undefined
if (schemaValue.columns) { if (schemaValue.columns) {
fieldRelatedSchema = Object.entries(schemaValue.columns).reduce< fieldRelatedSchema = Object.entries(schemaValue.columns).reduce<
NonNullable<typeof fieldRelatedSchema> NonNullable<typeof fieldRelatedSchema>
@ -35,25 +75,12 @@ async function parseSchema(view: CreateViewRequest) {
} }
return acc return acc
}, {}) }, {})
schemaValue.columns = fieldRelatedSchema
} }
const fieldSchema: RequiredKeys< const fieldSchema = stripUnknownFields(schemaValue)
ViewFieldMetadata & { stripUndefinedFields(fieldSchema)
columns: typeof fieldRelatedSchema
}
> = {
order: schemaValue.order,
width: schemaValue.width,
visible: schemaValue.visible,
readonly: schemaValue.readonly,
icon: schemaValue.icon,
columns: fieldRelatedSchema,
}
Object.entries(fieldSchema)
.filter(([, val]) => val === undefined)
.forEach(([key]) => {
delete fieldSchema[key as keyof ViewFieldMetadata]
})
p[fieldName] = fieldSchema p[fieldName] = fieldSchema
return p return p
}, {} as Record<string, RequiredKeys<ViewFieldMetadata>>) }, {} as Record<string, RequiredKeys<ViewFieldMetadata>>)
@ -76,6 +103,7 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
name: view.name, name: view.name,
tableId: view.tableId, tableId: view.tableId,
query: view.query, query: view.query,
queryUI: view.queryUI,
sort: view.sort, sort: view.sort,
schema, schema,
primaryDisplay: view.primaryDisplay, primaryDisplay: view.primaryDisplay,
@ -111,6 +139,7 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
version: view.version, version: view.version,
tableId: view.tableId, tableId: view.tableId,
query: view.query, query: view.query,
queryUI: view.queryUI,
sort: view.sort, sort: view.sort,
schema, schema,
primaryDisplay: view.primaryDisplay, primaryDisplay: view.primaryDisplay,
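stripUnknownFields branches on whether a schema entry is a calculation field, so a single view schema can mix plain column metadata with calculation definitions. An illustrative (not exhaustive) schema of that shape, assuming the CalculationType enum used elsewhere in this diff:

const schema = {
  quantity: { visible: true }, // basic field metadata
  "Total Price": {
    visible: true,
    calculationType: CalculationType.SUM, // aggregate over the grouped rows
    field: "price", // the table column being summed
  },
}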

View File

@ -33,6 +33,7 @@ import rowActionRoutes from "./rowAction"
export { default as staticRoutes } from "./static" export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public" export { default as publicRoutes } from "./public"
const aiRoutes = pro.ai
const appBackupRoutes = pro.appBackups const appBackupRoutes = pro.appBackups
const environmentVariableRoutes = pro.environmentVariables const environmentVariableRoutes = pro.environmentVariables
@ -67,6 +68,7 @@ export const mainRoutes: Router[] = [
debugRoutes, debugRoutes,
environmentVariableRoutes, environmentVariableRoutes,
rowActionRoutes, rowActionRoutes,
aiRoutes,
// these need to be handled last as they still use /api/:tableId // these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this // this could be breaking as koa may recognise other routes as this
tableRoutes, tableRoutes,

View File

@ -125,6 +125,13 @@ describe("/permission", () => {
}) })
it("should be able to access the view data when the table is set to public and with no view permissions overrides", async () => { it("should be able to access the view data when the table is set to public and with no view permissions overrides", async () => {
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()
@ -138,6 +145,14 @@ describe("/permission", () => {
resourceId: table._id, resourceId: table._id,
level: PermissionLevel.READ, level: PermissionLevel.READ,
}) })
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions // replicate changes before checking permissions
await config.publish() await config.publish()

View File

@ -76,7 +76,7 @@ async function waitForEvent(
} }
describe.each([ describe.each([
["internal", undefined], ["lucene", undefined],
["sqs", undefined], ["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -695,6 +695,69 @@ describe.each([
}) })
}) })
describe("options column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
status: {
name: "status",
type: FieldType.OPTIONS,
default: "requested",
constraints: {
inclusion: ["requested", "approved"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.status).toEqual("requested")
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
status: "approved",
})
expect(row.status).toEqual("approved")
})
})
describe("array column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
food: {
name: "food",
type: FieldType.ARRAY,
default: ["apple", "orange"],
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: ["apple", "orange", "banana"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.food).toEqual(["apple", "orange"])
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
food: ["orange"],
})
expect(row.food).toEqual(["orange"])
})
})
describe("bindings", () => { describe("bindings", () => {
describe("string column", () => { describe("string column", () => {
beforeAll(async () => { beforeAll(async () => {
@ -2453,9 +2516,15 @@ describe.each([
let flagCleanup: (() => void) | undefined let flagCleanup: (() => void) | undefined
beforeAll(async () => { beforeAll(async () => {
flagCleanup = setCoreEnv({ const env = {
TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`, TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
}) }
if (isSqs) {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
} else {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
}
flagCleanup = setCoreEnv(env)
const aux2Table = await config.api.table.save(saveTableRequest()) const aux2Table = await config.api.table.save(saveTableRequest())
const aux2Data = await config.api.row.save(aux2Table._id!, {}) const aux2Data = await config.api.row.save(aux2Table._id!, {})

View File

@ -826,11 +826,20 @@ describe("/rowsActions", () => {
) )
).id ).id
// Allow row action on view
await config.api.rowAction.setViewPermission( await config.api.rowAction.setViewPermission(
tableId, tableId,
viewId, viewId,
rowAction.id rowAction.id
) )
// Delete explicit view permissions so they inherit table permissions
await config.api.permission.revoke({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission
level: PermissionLevel.READ,
resourceId: viewId,
})
return { permissionResource: tableId, triggerResouce: viewId } return { permissionResource: tableId, triggerResouce: viewId }
}, },
], ],

File diff suppressed because it is too large

View File

@ -18,12 +18,14 @@ import {
ViewV2, ViewV2,
SearchResponse, SearchResponse,
BasicOperator, BasicOperator,
CalculationType,
RelationshipType, RelationshipType,
TableSchema, TableSchema,
ViewFieldMetadata,
RenameColumn, RenameColumn,
FeatureFlag, FeatureFlag,
BBReferenceFieldSubType, BBReferenceFieldSubType,
ViewV2Schema,
ViewCalculationFieldMetadata,
} from "@budibase/types" } from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests" import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -36,7 +38,6 @@ import {
setEnv as setCoreEnv, setEnv as setCoreEnv,
env, env,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import sdk from "../../../sdk"
describe.each([ describe.each([
["lucene", undefined], ["lucene", undefined],
@ -154,7 +155,7 @@ describe.each([
}) })
it("can persist views with all fields", async () => { it("can persist views with all fields", async () => {
const newView: Required<CreateViewRequest> = { const newView: Required<Omit<CreateViewRequest, "queryUI">> = {
name: generator.name(), name: generator.name(),
tableId: table._id!, tableId: table._id!,
primaryDisplay: "id", primaryDisplay: "id",
@ -540,6 +541,33 @@ describe.each([
status: 201, status: 201,
}) })
}) })
it("can create a view with calculation fields", async () => {
let view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
sum: {
visible: true,
calculationType: CalculationType.SUM,
field: "Price",
},
},
})
expect(Object.keys(view.schema!)).toHaveLength(1)
let sum = view.schema!.sum as ViewCalculationFieldMetadata
expect(sum).toBeDefined()
expect(sum.calculationType).toEqual(CalculationType.SUM)
expect(sum.field).toEqual("Price")
view = await config.api.viewV2.get(view.id)
sum = view.schema!.sum as ViewCalculationFieldMetadata
expect(sum).toBeDefined()
expect(sum.calculationType).toEqual(CalculationType.SUM)
expect(sum.field).toEqual("Price")
})
}) })
describe("update", () => { describe("update", () => {
@ -584,7 +612,7 @@ describe.each([
it("can update all fields", async () => { it("can update all fields", async () => {
const tableId = table._id! const tableId = table._id!
const updatedData: Required<UpdateViewRequest> = { const updatedData: Required<Omit<UpdateViewRequest, "queryUI">> = {
version: view.version, version: view.version,
id: view.id, id: view.id,
tableId, tableId,
@ -1152,10 +1180,7 @@ describe.each([
return table return table
} }
const createView = async ( const createView = async (tableId: string, schema: ViewV2Schema) =>
tableId: string,
schema: Record<string, ViewFieldMetadata>
) =>
await config.api.viewV2.create({ await config.api.viewV2.create({
name: generator.guid(), name: generator.guid(),
tableId, tableId,
@ -1738,6 +1763,40 @@ describe.each([
}) })
}) })
it("views filters are respected even if the column is hidden", async () => {
await config.api.row.save(table._id!, {
one: "foo",
two: "bar",
})
const two = await config.api.row.save(table._id!, {
one: "foo2",
two: "bar2",
})
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
query: [
{
operator: BasicOperator.EQUAL,
field: "two",
value: "bar2",
},
],
schema: {
id: { visible: true },
one: { visible: false },
two: { visible: false },
},
})
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual([
expect.objectContaining({ _id: two._id }),
])
})
it("views without data can be returned", async () => { it("views without data can be returned", async () => {
const response = await config.api.viewV2.search(view.id) const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(0) expect(response.rows).toHaveLength(0)
@ -2196,28 +2255,6 @@ describe.each([
expect(response.rows).toHaveLength(0) expect(response.rows).toHaveLength(0)
}) })
it("queries the row api passing the view fields only", async () => {
const searchSpy = jest.spyOn(sdk.rows, "search")
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
id: { visible: true },
one: { visible: false },
},
})
await config.api.viewV2.search(view.id, { query: {} })
expect(searchSpy).toHaveBeenCalledTimes(1)
expect(searchSpy).toHaveBeenCalledWith(
expect.objectContaining({
fields: ["id"],
})
)
})
describe("foreign relationship columns", () => { describe("foreign relationship columns", () => {
let envCleanup: () => void let envCleanup: () => void
beforeAll(() => { beforeAll(() => {
@ -2382,6 +2419,203 @@ describe.each([
]) ])
}) })
}) })
!isLucene &&
describe("calculations", () => {
let table: Table
let rows: Row[]
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
quantity: {
type: FieldType.NUMBER,
name: "quantity",
},
price: {
type: FieldType.NUMBER,
name: "price",
},
},
})
)
rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {
quantity: generator.natural({ min: 1, max: 10 }),
price: generator.natural({ min: 1, max: 10 }),
})
)
)
})
it("should be able to search by calculations", async () => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
"Quantity Sum": {
visible: true,
calculationType: CalculationType.SUM,
field: "quantity",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
}),
])
)
// Calculation views do not return rows that can be linked back to
// the source table, and so should not have an _id field.
for (const row of response.rows) {
expect("_id" in row).toBe(false)
}
})
it("should be able to group by a basic field", async () => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
quantity: {
visible: true,
field: "quantity",
},
"Total Price": {
visible: true,
calculationType: CalculationType.SUM,
field: "price",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
const priceByQuantity: Record<number, number> = {}
for (const row of rows) {
priceByQuantity[row.quantity] ??= 0
priceByQuantity[row.quantity] += row.price
}
for (const row of response.rows) {
expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
}
})
it.each([
CalculationType.COUNT,
CalculationType.SUM,
CalculationType.AVG,
CalculationType.MIN,
CalculationType.MAX,
])("should be able to calculate $type", async type => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
aggregate: {
visible: true,
calculationType: type,
field: "price",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
function calculate(
type: CalculationType,
numbers: number[]
): number {
switch (type) {
case CalculationType.COUNT:
return numbers.length
case CalculationType.SUM:
return numbers.reduce((a, b) => a + b, 0)
case CalculationType.AVG:
return numbers.reduce((a, b) => a + b, 0) / numbers.length
case CalculationType.MIN:
return Math.min(...numbers)
case CalculationType.MAX:
return Math.max(...numbers)
}
}
const prices = rows.map(row => row.price)
const expected = calculate(type, prices)
const actual = response.rows[0].aggregate
if (type === CalculationType.AVG) {
// The average calculation can introduce floating point rounding
// errors, so we need to compare to within a small margin of
// error.
expect(actual).toBeCloseTo(expected)
} else {
expect(actual).toEqual(expected)
}
})
})
!isLucene &&
it("should not need required fields to be present", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
},
},
})
)
await Promise.all([
config.api.row.save(table._id!, { name: "Steve", age: 30 }),
config.api.row.save(table._id!, { name: "Jane", age: 31 }),
])
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
sum: {
visible: true,
calculationType: CalculationType.SUM,
field: "age",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
expect(response.rows).toHaveLength(1)
expect(response.rows[0].sum).toEqual(61)
})
}) })
describe("permissions", () => { describe("permissions", () => {
@ -2417,6 +2651,11 @@ describe.each([
level: PermissionLevel.READ, level: PermissionLevel.READ,
resourceId: table._id!, resourceId: table._id!,
}) })
await config.api.permission.revoke({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission
level: PermissionLevel.READ,
resourceId: view.id,
})
await config.publish() await config.publish()
const response = await config.api.viewV2.publicSearch(view.id) const response = await config.api.viewV2.publicSearch(view.id)

View File

@ -17,44 +17,65 @@ describe("Branching automations", () => {
afterAll(setup.afterAll) afterAll(setup.afterAll)
it("should run a multiple nested branching automation", async () => { it("should run a multiple nested branching automation", async () => {
const firstLogId = "11111111-1111-1111-1111-111111111111"
const branch1LogId = "22222222-2222-2222-2222-222222222222"
const branch2LogId = "33333333-3333-3333-3333-333333333333"
const branch2Id = "44444444-4444-4444-4444-444444444444"
const builder = createAutomationBuilder({ const builder = createAutomationBuilder({
name: "Test Trigger with Loop and Create Row", name: "Test Trigger with Loop and Create Row",
}) })
const results = await builder const results = await builder
.appAction({ fields: {} }) .appAction({ fields: {} })
.serverLog({ text: "Starting automation" }) .serverLog(
{ text: "Starting automation" },
{ stepName: "FirstLog", stepId: firstLogId }
)
.branch({ .branch({
topLevelBranch1: { topLevelBranch1: {
steps: stepBuilder => steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 1" }).branch({ stepBuilder
branch1: { .serverLog(
steps: stepBuilder => { text: "Branch 1" },
stepBuilder.serverLog({ text: "Branch 1.1" }), { stepId: "66666666-6666-6666-6666-666666666666" }
condition: { )
equal: { "{{steps.1.success}}": true }, .branch({
branch1: {
steps: stepBuilder =>
stepBuilder.serverLog(
{ text: "Branch 1.1" },
{ stepId: branch1LogId }
),
condition: {
equal: { [`{{ steps.${firstLogId}.success }}`]: true },
},
}, },
}, branch2: {
branch2: { steps: stepBuilder =>
steps: stepBuilder => stepBuilder.serverLog(
stepBuilder.serverLog({ text: "Branch 1.2" }), { text: "Branch 1.2" },
condition: { { stepId: branch2LogId }
equal: { "{{steps.1.success}}": false }, ),
condition: {
equal: { [`{{ steps.${firstLogId}.success }}`]: false },
},
}, },
}, }),
}),
condition: { condition: {
equal: { "{{steps.1.success}}": true }, equal: { [`{{ steps.${firstLogId}.success }}`]: true },
}, },
}, },
topLevelBranch2: { topLevelBranch2: {
steps: stepBuilder => stepBuilder.serverLog({ text: "Branch 2" }), steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 2" }, { stepId: branch2Id }),
condition: { condition: {
equal: { "{{steps.1.success}}": false }, equal: { [`{{ steps.${firstLogId}.success }}`]: false },
}, },
}, },
}) })
.run() .run()
expect(results.steps[3].outputs.status).toContain("branch1 branch taken") expect(results.steps[3].outputs.status).toContain("branch1 branch taken")
expect(results.steps[4].outputs.message).toContain("Branch 1.1") expect(results.steps[4].outputs.message).toContain("Branch 1.1")
}) })

View File

@ -64,18 +64,18 @@ class BaseStepBuilder {
stepId: TStep, stepId: TStep,
stepSchema: Omit<AutomationStep, "id" | "stepId" | "inputs">, stepSchema: Omit<AutomationStep, "id" | "stepId" | "inputs">,
inputs: AutomationStepInputs<TStep>, inputs: AutomationStepInputs<TStep>,
stepName?: string opts?: { stepName?: string; stepId?: string }
): this { ): this {
const id = uuidv4() const id = opts?.stepId || uuidv4()
this.steps.push({ this.steps.push({
...stepSchema, ...stepSchema,
inputs: inputs as any, inputs: inputs as any,
id, id,
stepId, stepId,
name: stepName || stepSchema.name, name: opts?.stepName || stepSchema.name,
}) })
if (stepName) { if (opts?.stepName) {
this.stepNames[id] = stepName this.stepNames[id] = opts.stepName
} }
return this return this
} }
@ -95,7 +95,6 @@ class BaseStepBuilder {
}) })
branchStepInputs.children![key] = stepBuilder.build() branchStepInputs.children![key] = stepBuilder.build()
}) })
const branchStep: AutomationStep = { const branchStep: AutomationStep = {
...definition, ...definition,
id: uuidv4(), id: uuidv4(),
@ -106,80 +105,98 @@ class BaseStepBuilder {
} }
// STEPS // STEPS
createRow(inputs: CreateRowStepInputs, opts?: { stepName?: string }): this { createRow(
inputs: CreateRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.CREATE_ROW, AutomationActionStepId.CREATE_ROW,
BUILTIN_ACTION_DEFINITIONS.CREATE_ROW, BUILTIN_ACTION_DEFINITIONS.CREATE_ROW,
inputs, inputs,
opts?.stepName opts
) )
} }
updateRow(inputs: UpdateRowStepInputs, opts?: { stepName?: string }): this { updateRow(
inputs: UpdateRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.UPDATE_ROW, AutomationActionStepId.UPDATE_ROW,
BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW, BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW,
inputs, inputs,
opts?.stepName opts
) )
} }
deleteRow(inputs: DeleteRowStepInputs, opts?: { stepName?: string }): this { deleteRow(
inputs: DeleteRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.DELETE_ROW, AutomationActionStepId.DELETE_ROW,
BUILTIN_ACTION_DEFINITIONS.DELETE_ROW, BUILTIN_ACTION_DEFINITIONS.DELETE_ROW,
inputs, inputs,
opts?.stepName opts
) )
} }
sendSmtpEmail( sendSmtpEmail(
inputs: SmtpEmailStepInputs, inputs: SmtpEmailStepInputs,
opts?: { stepName?: string } opts?: { stepName?: string; stepId?: string }
): this { ): this {
return this.step( return this.step(
AutomationActionStepId.SEND_EMAIL_SMTP, AutomationActionStepId.SEND_EMAIL_SMTP,
BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP, BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP,
inputs, inputs,
opts?.stepName opts
) )
} }
executeQuery( executeQuery(
inputs: ExecuteQueryStepInputs, inputs: ExecuteQueryStepInputs,
opts?: { stepName?: string } opts?: { stepName?: string; stepId?: string }
): this { ): this {
return this.step( return this.step(
AutomationActionStepId.EXECUTE_QUERY, AutomationActionStepId.EXECUTE_QUERY,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY, BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY,
inputs, inputs,
opts?.stepName opts
) )
} }
queryRows(inputs: QueryRowsStepInputs, opts?: { stepName?: string }): this { queryRows(
inputs: QueryRowsStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.QUERY_ROWS, AutomationActionStepId.QUERY_ROWS,
BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS, BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS,
inputs, inputs,
opts?.stepName opts
) )
} }
loop(inputs: LoopStepInputs, opts?: { stepName?: string }): this { loop(
inputs: LoopStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.LOOP, AutomationActionStepId.LOOP,
BUILTIN_ACTION_DEFINITIONS.LOOP, BUILTIN_ACTION_DEFINITIONS.LOOP,
inputs, inputs,
opts?.stepName opts
) )
} }
serverLog(input: ServerLogStepInputs, opts?: { stepName?: string }): this { serverLog(
input: ServerLogStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step( return this.step(
AutomationActionStepId.SERVER_LOG, AutomationActionStepId.SERVER_LOG,
BUILTIN_ACTION_DEFINITIONS.SERVER_LOG, BUILTIN_ACTION_DEFINITIONS.SERVER_LOG,
input, input,
opts?.stepName opts
) )
} }
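The builder steps now accept an optional stepId alongside stepName, which is what lets the branching test above reference step outputs with stable bindings instead of positional ones. A short sketch, assuming the builder API shown in this diff:

// Hypothetical usage: a fixed id makes {{ steps.<id>.success }} bindings deterministic.
const firstLogId = "11111111-1111-1111-1111-111111111111"
builder
  .serverLog({ text: "Starting" }, { stepName: "FirstLog", stepId: firstLogId })
  .serverLog({ text: `First step ok: {{ steps.${firstLogId}.success }}` })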

View File

@ -23,10 +23,11 @@ import {
Row, Row,
Table, Table,
TableSchema, TableSchema,
ViewFieldMetadata,
ViewV2, ViewV2,
ViewV2Schema,
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../sdk" import sdk from "../../sdk"
import { helpers } from "@budibase/shared-core"
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils" export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
@ -247,26 +248,36 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
export type SquashTableFields = Record<string, { visibleFieldNames: string[] }> export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
/** /**
* This function will take the given enriched rows and squash the links to only contain the primary display field. * This function will take the given enriched rows and squash the links to only
* @returns The rows after having their links squashed to only contain the ID and primary display. * contain the primary display field.
*
* @returns The rows after having their links squashed to only contain the ID
* and primary display.
*/ */
export async function squashLinks<T = Row[] | Row>( export async function squashLinks<T = Row[] | Row>(
table: Table, source: Table | ViewV2,
enriched: T, enriched: T
options?: {
fromViewId?: string
}
): Promise<T> { ): Promise<T> {
const allowRelationshipSchemas = await features.flags.isEnabled( const allowRelationshipSchemas = await features.flags.isEnabled(
FeatureFlag.ENRICHED_RELATIONSHIPS FeatureFlag.ENRICHED_RELATIONSHIPS
) )
let viewSchema: Record<string, ViewFieldMetadata> = {} let viewSchema: ViewV2Schema = {}
if (options?.fromViewId && allowRelationshipSchemas) { if (sdk.views.isView(source)) {
const view = Object.values(table.views || {}).find( if (helpers.views.isCalculationView(source)) {
(v): v is ViewV2 => sdk.views.isV2(v) && v.id === options?.fromViewId return enriched
) }
viewSchema = view?.schema || {}
if (allowRelationshipSchemas) {
viewSchema = source.schema || {}
}
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
} }
// will populate this as we find them // will populate this as we find them

View File

@ -1,4 +1,4 @@
import { context, db as dbCore, utils } from "@budibase/backend-core" import { context, db as dbCore, docIds, utils } from "@budibase/backend-core"
import { import {
DatabaseQueryOpts, DatabaseQueryOpts,
Datasource, Datasource,
@ -318,12 +318,8 @@ export function generateViewID(tableId: string) {
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}` }${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
} }
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
}
export function extractViewInfoFromID(viewId: string) { export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) { if (!docIds.isViewId(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID") throw new Error("Unable to extract table ID, is not a view ID")
} }
const split = viewId.split(SEPARATOR) const split = viewId.split(SEPARATOR)

View File

@ -15,7 +15,8 @@ export interface TriggerOutput {
export interface AutomationContext extends AutomationResults { export interface AutomationContext extends AutomationResults {
steps: any[] steps: any[]
stepsByName?: Record<string, any> stepsById: Record<string, any>
stepsByName: Record<string, any>
env?: Record<string, string> env?: Record<string, string>
trigger: any trigger: any
} }

View File

@ -15,7 +15,7 @@ export function triggerRowActionAuthorised(
const rowActionId: string = ctx.params[actionPath] const rowActionId: string = ctx.params[actionPath]
const isTableId = docIds.isTableId(sourceId) const isTableId = docIds.isTableId(sourceId)
const isViewId = utils.isViewID(sourceId) const isViewId = docIds.isViewId(sourceId)
if (!isTableId && !isViewId) { if (!isTableId && !isViewId) {
ctx.throw(400, `'${sourceId}' is not a valid source id`) ctx.throw(400, `'${sourceId}' is not a valid source id`)
} }

View File

@ -1,26 +1,34 @@
import { db, roles } from "@budibase/backend-core" import { db, roles, context, docIds } from "@budibase/backend-core"
import { import {
PermissionLevel, PermissionLevel,
PermissionSource, PermissionSource,
VirtualDocumentType, VirtualDocumentType,
Role,
Database,
} from "@budibase/types" } from "@budibase/types"
import { extractViewInfoFromID, isViewID } from "../../../db/utils" import { extractViewInfoFromID, getRoleParams } from "../../../db/utils"
import { import {
CURRENTLY_SUPPORTED_LEVELS, CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions, getBasePermissions,
} from "../../../utilities/security" } from "../../../utilities/security"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { isV2 } from "../views" import { isV2 } from "../views"
import { removeFromArray } from "../../../utilities"
type ResourcePermissions = Record< type ResourcePermissions = Record<
string, string,
{ role: string; type: PermissionSource } { role: string; type: PermissionSource }
> >
export const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}
export async function getInheritablePermissions( export async function getInheritablePermissions(
resourceId: string resourceId: string
): Promise<ResourcePermissions | undefined> { ): Promise<ResourcePermissions | undefined> {
if (isViewID(resourceId)) { if (docIds.isViewId(resourceId)) {
return await getResourcePerms(extractViewInfoFromID(resourceId).tableId) return await getResourcePerms(extractViewInfoFromID(resourceId).tableId)
} }
} }
@ -100,3 +108,89 @@ export async function getDependantResources(
return return
} }
export async function updatePermissionOnRole(
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: PermissionUpdateType
) {
const db = context.getAppDB()
const remove = updateType === PermissionUpdateType.REMOVE
const isABuiltin = roles.isBuiltin(roleId)
const dbRoleId = roles.getDBRoleID(roleId)
const dbRoles = await getAllDBRoles(db)
const docUpdates: Role[] = []
// the permission is for a built-in role, make sure it exists
if (isABuiltin && !dbRoles.some(role => role._id === dbRoleId)) {
const builtin = roles.getBuiltinRoles()[roleId]
builtin._id = roles.getDBRoleID(builtin._id!)
dbRoles.push(builtin)
}
// now try to find any roles which need to be updated, e.g. removing the
// resource from another role and then adding it to the new role
for (let role of dbRoles) {
let updated = false
const rolePermissions: Record<string, PermissionLevel[]> = role.permissions
? role.permissions
: {}
// make sure it's an array, also handling migration from the old single-value format
if (
!rolePermissions[resourceId] ||
!Array.isArray(rolePermissions[resourceId])
) {
rolePermissions[resourceId] =
typeof rolePermissions[resourceId] === "string"
? [rolePermissions[resourceId] as unknown as PermissionLevel]
: []
}
// handle removing/updating the role which currently has this permission first
// the update check (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
rolePermissions[resourceId].indexOf(level) !== -1
) {
removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the add: we're on the correct role, so add the level to it
if (!remove && role._id === dbRoleId) {
const set = new Set(rolePermissions[resourceId])
rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
if (updated) {
role.permissions = rolePermissions
docUpdates.push(role)
}
}
const response = await db.bulkDocs(docUpdates)
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}
// utility function to avoid repeating this lookup - permissions are always stored under roles
export async function getAllDBRoles(db: Database) {
const body = await db.allDocs<Role>(
getRoleParams(null, {
include_docs: true,
})
)
return body.rows.map(row => row.doc!)
}
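updatePermissionOnRole is the shared add/remove entry point; the same call shape covers both directions via PermissionUpdateType. A usage sketch with hypothetical ids, assuming the exports added above:

// Grant READ on a view to a role.
await updatePermissionOnRole(
  { roleId: "ROLE_ID", resourceId: "VIEW_ID", level: PermissionLevel.READ },
  PermissionUpdateType.ADD
)

// Revoking uses the same shape with REMOVE.
await updatePermissionOnRole(
  { roleId: "ROLE_ID", resourceId: "VIEW_ID", level: PermissionLevel.READ },
  PermissionUpdateType.REMOVE
)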

View File

@ -1,11 +1,11 @@
import { context, HTTPError, utils } from "@budibase/backend-core" import { context, docIds, HTTPError, utils } from "@budibase/backend-core"
import { import {
AutomationTriggerStepId, AutomationTriggerStepId,
SEPARATOR, SEPARATOR,
TableRowActions, TableRowActions,
VirtualDocumentType, VirtualDocumentType,
} from "@budibase/types" } from "@budibase/types"
import { generateRowActionsID, isViewID } from "../../db/utils" import { generateRowActionsID } from "../../db/utils"
import automations from "./automations" import automations from "./automations"
import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo" import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo"
import * as triggers from "../../automations/triggers" import * as triggers from "../../automations/triggers"
@ -155,7 +155,7 @@ export async function update(
async function guardView(tableId: string, viewId: string) { async function guardView(tableId: string, viewId: string) {
let view let view
if (isViewID(viewId)) { if (docIds.isViewId(viewId)) {
view = await sdk.views.get(viewId) view = await sdk.views.get(viewId)
} }
if (!view || view.tableId !== tableId) { if (!view || view.tableId !== tableId) {

View File

@ -1,5 +1,11 @@
import { IncludeRelationship, Operation, Row } from "@budibase/types" import {
import { HTTPError } from "@budibase/backend-core" IncludeRelationship,
Operation,
Row,
Table,
ViewV2,
} from "@budibase/types"
import { docIds, HTTPError } from "@budibase/backend-core"
import { handleRequest } from "../../../api/controllers/row/external" import { handleRequest } from "../../../api/controllers/row/external"
import { breakRowIdField } from "../../../integrations/utils" import { breakRowIdField } from "../../../integrations/utils"
import sdk from "../../../sdk" import sdk from "../../../sdk"
@ -8,15 +14,24 @@ import {
outputProcessing, outputProcessing,
} from "../../../utilities/rowProcessor" } from "../../../utilities/rowProcessor"
import cloneDeep from "lodash/fp/cloneDeep" import cloneDeep from "lodash/fp/cloneDeep"
import isEqual from "lodash/fp/isEqual"
import { tryExtractingTableAndViewId } from "./utils" import { tryExtractingTableAndViewId } from "./utils"
export async function getRow( export async function getRow(
tableId: string, sourceId: string | Table | ViewV2,
rowId: string, rowId: string,
opts?: { relationships?: boolean } opts?: { relationships?: boolean }
) { ) {
const response = await handleRequest(Operation.READ, tableId, { let source: Table | ViewV2
if (typeof sourceId === "string") {
if (docIds.isViewId(sourceId)) {
source = await sdk.views.get(sourceId)
} else {
source = await sdk.tables.getTable(sourceId)
}
} else {
source = sourceId
}
const response = await handleRequest(Operation.READ, source, {
id: breakRowIdField(rowId), id: breakRowIdField(rowId),
includeSqlRelationships: opts?.relationships includeSqlRelationships: opts?.relationships
? IncludeRelationship.INCLUDE ? IncludeRelationship.INCLUDE
@ -27,45 +42,42 @@ export async function getRow(
} }
export async function save( export async function save(
tableOrViewId: string, sourceId: string,
inputs: Row, inputs: Row,
userId: string | undefined userId: string | undefined
) { ) {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(sourceId)
const table = await sdk.tables.getTable(tableId) let source: Table | ViewV2
const { table: updatedTable, row } = await inputProcessing( if (viewId) {
userId, source = await sdk.views.get(viewId)
cloneDeep(table), } else {
inputs source = await sdk.tables.getTable(tableId)
) }
const row = await inputProcessing(userId, cloneDeep(source), inputs)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
tableId, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
throw { validation: validateResult.errors } throw { validation: validateResult.errors }
} }
const response = await handleRequest(Operation.CREATE, tableId, { const response = await handleRequest(Operation.CREATE, source, {
row, row,
}) })
if (!isEqual(table, updatedTable)) {
await sdk.tables.saveTable(updatedTable)
}
const rowId = response.row._id const rowId = response.row._id
if (rowId) { if (rowId) {
const row = await getRow(tableId, rowId, { const row = await getRow(source, rowId, {
relationships: true, relationships: true,
}) })
return { return {
...response, ...response,
row: await outputProcessing(table, row, { row: await outputProcessing(source, row, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: viewId,
}), }),
} }
} else { } else {
@ -76,7 +88,14 @@ export async function save(
export async function find(tableOrViewId: string, rowId: string): Promise<Row> { export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
const row = await getRow(tableId, rowId, { let source: Table | ViewV2
if (viewId) {
source = await sdk.views.get(viewId)
} else {
source = await sdk.tables.getTable(tableId)
}
const row = await getRow(source, rowId, {
relationships: true, relationships: true,
}) })
@ -84,11 +103,10 @@ export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
throw new HTTPError("Row not found", 404) throw new HTTPError("Row not found", 404)
} }
const table = await sdk.tables.getTable(tableId) // Preserving links, as the outputProcessing does not support external rows
// Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case // yet and we don't need it in this use case
return await outputProcessing(table, row, { return await outputProcessing(source, row, {
squash: true, squash: true,
preserveLinks: true, preserveLinks: true,
fromViewId: viewId,
}) })
} }
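
The pattern above recurs throughout this commit, so a condensed sketch may help: the row helpers now accept either a table ID, a view ID, or an already-loaded Table/ViewV2 and normalise it before calling handleRequest. The snippet below only reuses identifiers visible in this diff (docIds.isViewId, sdk.views.get, sdk.tables.getTable); the function name and import paths are illustrative, not the shipped code.

import { docIds } from "@budibase/backend-core"
import { Table, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk"

// Illustrative normalisation of a row "source": a view ID loads the view,
// a table ID loads the table, and a pre-loaded object passes straight through.
async function resolveSource(
  sourceId: string | Table | ViewV2
): Promise<Table | ViewV2> {
  if (typeof sourceId !== "string") {
    return sourceId
  }
  return docIds.isViewId(sourceId)
    ? sdk.views.get(sourceId)
    : sdk.tables.getTable(sourceId)
}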

View File

@ -1,7 +1,6 @@
import { context, db } from "@budibase/backend-core" import { context, db } from "@budibase/backend-core"
import { Row } from "@budibase/types" import { Row, Table, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import cloneDeep from "lodash/fp/cloneDeep"
import { finaliseRow } from "../../../api/controllers/row/staticFormula" import { finaliseRow } from "../../../api/controllers/row/staticFormula"
import { import {
inputProcessing, inputProcessing,
@ -10,7 +9,7 @@ import {
import * as linkRows from "../../../db/linkedRows" import * as linkRows from "../../../db/linkedRows"
import { InternalTables } from "../../../db/utils" import { InternalTables } from "../../../db/utils"
import { getFullUser } from "../../../utilities/users" import { getFullUser } from "../../../utilities/users"
import { tryExtractingTableAndViewId } from "./utils" import { getSource, tryExtractingTableAndViewId } from "./utils"
export async function save( export async function save(
tableOrViewId: string, tableOrViewId: string,
@ -20,21 +19,25 @@ export async function save(
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
inputs.tableId = tableId inputs.tableId = tableId
let source: Table | ViewV2
let table: Table
if (viewId) {
source = await sdk.views.get(viewId)
table = await sdk.views.getTable(viewId)
} else {
source = await sdk.tables.getTable(tableId)
table = source
}
if (!inputs._rev && !inputs._id) { if (!inputs._rev && !inputs._id) {
inputs._id = db.generateRowID(inputs.tableId) inputs._id = db.generateRowID(inputs.tableId)
} }
// this returns the table and row in case they have been updated let row = await inputProcessing(userId, source, inputs)
const dbTable = await sdk.tables.getTable(inputs.tableId)
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
let { table, row } = await inputProcessing(userId, tableClone, inputs)
const validateResult = await sdk.rows.utils.validate({ const validateResult = await sdk.rows.utils.validate({
row, row,
table, source,
}) })
if (!validateResult.valid) { if (!validateResult.valid) {
@ -49,24 +52,18 @@ export async function save(
table, table,
})) as Row })) as Row
return finaliseRow(table, row, { return finaliseRow(source, row, { updateFormula: true })
oldTable: dbTable, }
updateFormula: true,
fromViewId: viewId, export async function find(sourceId: string, rowId: string): Promise<Row> {
const source = await getSource(sourceId)
return await outputProcessing(source, await findRow(sourceId, rowId), {
squash: true,
}) })
} }
export async function find(tableOrViewId: string, rowId: string): Promise<Row> { export async function findRow(sourceId: string, rowId: string) {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId) const { tableId } = tryExtractingTableAndViewId(sourceId)
const table = await sdk.tables.getTable(tableId)
let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row, { squash: true, fromViewId: viewId })
return row
}
async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB() const db = context.getAppDB()
let row: Row let row: Row
// TODO remove special user case in future // TODO remove special user case in future

View File

@ -16,11 +16,11 @@ export const removeInvalidFilters = (
validFields = validFields.map(f => f.toLowerCase()) validFields = validFields.map(f => f.toLowerCase())
for (const filterKey of Object.keys(result) as (keyof SearchFilters)[]) { for (const filterKey of Object.keys(result) as (keyof SearchFilters)[]) {
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
if (isLogicalSearchOperator(filterKey)) { if (isLogicalSearchOperator(filterKey)) {
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
const resultingConditions: SearchFilters[] = [] const resultingConditions: SearchFilters[] = []
for (const condition of filter.conditions) { for (const condition of filter.conditions) {
const resultingCondition = removeInvalidFilters(condition, validFields) const resultingCondition = removeInvalidFilters(condition, validFields)
@ -36,6 +36,11 @@ export const removeInvalidFilters = (
continue continue
} }
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
for (const columnKey of Object.keys(filter)) { for (const columnKey of Object.keys(filter)) {
const possibleKeys = [columnKey, db.removeKeyNumbering(columnKey)].map( const possibleKeys = [columnKey, db.removeKeyNumbering(columnKey)].map(
c => c.toLowerCase() c => c.toLowerCase()
@ -53,8 +58,8 @@ export const removeInvalidFilters = (
} }
export const getQueryableFields = async ( export const getQueryableFields = async (
fields: string[], table: Table,
table: Table fields?: string[]
): Promise<string[]> => { ): Promise<string[]> => {
const extractTableFields = async ( const extractTableFields = async (
table: Table, table: Table,
@ -110,6 +115,9 @@ export const getQueryableFields = async (
"_id", // Querying by _id is always allowed, even if it's never part of the schema "_id", // Querying by _id is always allowed, even if it's never part of the schema
] ]
if (fields == null) {
fields = Object.keys(table.schema)
}
result.push(...(await extractTableFields(table, fields, [table._id!]))) result.push(...(await extractTableFields(table, fields, [table._id!])))
return result return result
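
With the reordered signature the field list becomes optional: when it is omitted, every key in the table schema is considered, and "_id" is always allowed. A hedged usage sketch follows; the wrapper function, its import paths and the tableId argument are assumptions for illustration only.

import sdk from "../../../sdk"
import { getQueryableFields } from "./queryUtils"

// `tableId` is a placeholder; assumes this runs inside the app context,
// as the tests further down in this diff do via config.doInContext.
async function exampleQueryableFields(tableId: string) {
  const table = await sdk.tables.getTable(tableId)
  // every schema field plus "_id" is considered by default...
  const allQueryable = await getQueryableFields(table)
  // ...or the check can be limited to an explicit subset of fields
  const nameOnly = await getQueryableFields(table, ["name"])
  return { allQueryable, nameOnly }
}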

View File

@ -1,10 +1,6 @@
import { db as dbCore, context } from "@budibase/backend-core" import { db as dbCore, context, docIds } from "@budibase/backend-core"
import { Database, Row } from "@budibase/types" import { Database, Row } from "@budibase/types"
import { import { extractViewInfoFromID, getRowParams } from "../../../db/utils"
extractViewInfoFromID,
getRowParams,
isViewID,
} from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
@ -26,7 +22,7 @@ export async function getAllInternalRows(appId?: string) {
function pickApi(tableOrViewId: string) { function pickApi(tableOrViewId: string) {
let tableId = tableOrViewId let tableId = tableOrViewId
if (isViewID(tableOrViewId)) { if (docIds.isViewId(tableOrViewId)) {
tableId = extractViewInfoFromID(tableOrViewId).tableId tableId = extractViewInfoFromID(tableOrViewId).tableId
} }
@ -37,13 +33,13 @@ function pickApi(tableOrViewId: string) {
} }
export async function save( export async function save(
tableOrViewId: string, sourceId: string,
row: Row, row: Row,
userId: string | undefined userId: string | undefined
) { ) {
return pickApi(tableOrViewId).save(tableOrViewId, row, userId) return pickApi(sourceId).save(sourceId, row, userId)
} }
export async function find(tableOrViewId: string, rowId: string) { export async function find(sourceId: string, rowId: string) {
return pickApi(tableOrViewId).find(tableOrViewId, rowId) return pickApi(sourceId).find(sourceId, rowId)
} }
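
A minimal sketch of the routing above: a view ID is translated to its parent table ID with extractViewInfoFromID before deciding between the internal and external row implementations. All names come from this diff; the function itself and its import paths are illustrative.

import { docIds } from "@budibase/backend-core"
import { extractViewInfoFromID } from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal"
import * as external from "./external"

// Illustrative version of pickApi: a view routes via its parent table ID.
function pickRowApi(tableOrViewId: string) {
  const tableId = docIds.isViewId(tableOrViewId)
    ? extractViewInfoFromID(tableOrViewId).tableId
    : tableOrViewId
  return isExternalTableID(tableId) ? external : internal
}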

View File

@ -1,9 +1,14 @@
import { import {
EmptyFilterOption, EmptyFilterOption,
LegacyFilter,
LogicalOperator,
Row, Row,
RowSearchParams, RowSearchParams,
SearchFilterKey,
SearchResponse, SearchResponse,
SortOrder, SortOrder,
Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal" import * as internal from "./search/internal"
@ -12,9 +17,10 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core" import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index" import sdk from "../../index"
import { searchInputMapping } from "./search/utils" import { searchInputMapping } from "./search/utils"
import { features } from "@budibase/backend-core" import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace" import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils" import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"
export { isValidFilter } from "../../../integrations/utils" export { isValidFilter } from "../../../integrations/utils"
@ -32,11 +38,13 @@ function pickApi(tableId: any) {
} }
export async function search( export async function search(
options: RowSearchParams options: RowSearchParams,
context?: Record<string, any>
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
return await tracer.trace("search", async span => { return await tracer.trace("search", async span => {
span?.addTags({ span?.addTags({
tableId: options.tableId, tableId: options.tableId,
viewId: options.viewId,
query: options.query, query: options.query,
sort: options.sort, sort: options.sort,
sortOrder: options.sortOrder, sortOrder: options.sortOrder,
@ -48,20 +56,103 @@ export async function search(
countRows: options.countRows, countRows: options.countRows,
}) })
const isExternalTable = isExternalTableID(options.tableId) let source: Table | ViewV2
options.query = dataFilters.cleanupQuery(options.query || {}) let table: Table
if (options.viewId) {
source = await sdk.views.get(options.viewId)
table = await sdk.views.getTable(source)
} else if (options.tableId) {
source = await sdk.tables.getTable(options.tableId)
table = source
} else {
throw new Error(`Must supply either a view ID or a table ID`)
}
const isExternalTable = isExternalTableID(table._id!)
if (options.query) {
const visibleFields = (
options.fields || Object.keys(table.schema)
).filter(field => table.schema[field]?.visible !== false)
const queryableFields = await getQueryableFields(table, visibleFields)
options.query = removeInvalidFilters(options.query, queryableFields)
} else {
options.query = {}
}
// need to make sure filters are in the correct shape before checking for view
options = searchInputMapping(table, options)
if (options.viewId) {
// Delete extraneous search params that cannot be overridden
delete options.query.onEmptyFilter
const view = source as ViewV2
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let viewQuery = dataFilters.buildQueryLegacy(view.query) || {}
delete viewQuery?.onEmptyFilter
const sqsEnabled = await features.flags.isEnabled("SQS")
const supportsLogicalOperators =
isExternalTableID(view.tableId) || sqsEnabled
if (!supportsLogicalOperators) {
// In the unlikely event that a Grouped Filter is in a non-SQS environment
// It needs to be ignored entirely
let queryFilters: LegacyFilter[] = Array.isArray(view.query)
? view.query
: []
delete options.query.onEmptyFilter
// Extract existing fields
const existingFields =
queryFilters
?.filter(filter => filter.field)
.map(filter => db.removeKeyNumbering(filter.field)) || []
viewQuery ??= {}
// Carry over filters for unused fields
Object.keys(options.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(options.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
viewQuery![operator]![field] = options.query[operator]![field]
}
})
})
options.query = viewQuery
} else {
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
conditions: [...conditions, options.query],
},
}
if (viewQuery.onEmptyFilter) {
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
}
}
if (context) {
options.query = await enrichSearchContext(options.query, context)
}
options.query = dataFilters.cleanupQuery(options.query)
options.query = dataFilters.fixupFilterArrays(options.query) options.query = dataFilters.fixupFilterArrays(options.query)
span?.addTags({ span.addTags({
cleanedQuery: options.query, cleanedQuery: options.query,
isExternalTable,
}) })
if ( if (
!dataFilters.hasFilters(options.query) && !dataFilters.hasFilters(options.query) &&
options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
) { ) {
span?.addTags({ emptyQuery: true }) span.addTags({ emptyQuery: true })
return { return {
rows: [], rows: [],
} }
@ -71,34 +162,19 @@ export async function search(
options.sortOrder = options.sortOrder.toLowerCase() as SortOrder options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
} }
const table = await sdk.tables.getTable(options.tableId)
options = searchInputMapping(table, options)
if (options.query) {
const tableFields = Object.keys(table.schema).filter(
f => table.schema[f].visible !== false
)
const queriableFields = await getQueryableFields(
options.fields?.filter(f => tableFields.includes(f)) ?? tableFields,
table
)
options.query = removeInvalidFilters(options.query, queriableFields)
}
let result: SearchResponse<Row> let result: SearchResponse<Row>
if (isExternalTable) { if (isExternalTable) {
span?.addTags({ searchType: "external" }) span?.addTags({ searchType: "external" })
result = await external.search(options, table) result = await external.search(options, source)
} else if (await features.flags.isEnabled("SQS")) { } else if (await features.flags.isEnabled("SQS")) {
span?.addTags({ searchType: "sqs" }) span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, table) result = await internal.sqs.search(options, source)
} else { } else {
span?.addTags({ searchType: "lucene" }) span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, table) result = await internal.lucene.search(options, source)
} }
span?.addTags({ span.addTags({
foundRows: result.rows.length, foundRows: result.rows.length,
totalRows: result.totalRows, totalRows: result.totalRows,
}) })
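
To make the merge above concrete: when logical operators are supported (an external table, or SQS enabled), the view's saved filter and the caller's ephemeral filter are combined under a single $and block, so rows excluded by the view can never be reached by an ad-hoc search. The field names below are illustrative; the query shape matches the code above.

import { SearchFilters } from "@budibase/types"

// Saved on the view (after dataFilters.buildQueryLegacy):
const viewQuery: SearchFilters = { equal: { status: "open" } }

// Supplied by the caller at search time:
const ephemeralQuery: SearchFilters = { string: { name: "bud" } }

// Query handed on to the search implementations: both sets of
// conditions must hold for a row to be returned.
const merged: SearchFilters = {
  $and: {
    conditions: [viewQuery, ephemeralQuery],
  },
}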

View File

@ -9,6 +9,7 @@ import {
SortJson, SortJson,
SortOrder, SortOrder,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters" import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external" import { handleRequest } from "../../../../api/controllers/row/external"
@ -60,9 +61,8 @@ function getPaginationAndLimitParameters(
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table source: Table | ViewV2
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { tableId } = options
const { countRows, paginate, query, ...params } = options const { countRows, paginate, query, ...params } = options
const { limit } = params const { limit } = params
let bookmark = let bookmark =
@ -106,16 +106,15 @@ export async function search(
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
} }
const [{ rows, rawResponseSize }, totalRows] = await Promise.all([ const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
handleRequest(Operation.READ, tableId, parameters), handleRequest(Operation.READ, source, parameters),
countRows countRows
? handleRequest(Operation.COUNT, tableId, parameters) ? handleRequest(Operation.COUNT, source, parameters)
: Promise.resolve(undefined), : Promise.resolve(undefined),
]) ])
let processed = await outputProcessing(table, rows, { let processed = await outputProcessing(source, rows, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: options.viewId,
}) })
let hasNextPage = false let hasNextPage = false
@ -128,10 +127,13 @@ export async function search(
} }
} }
if (options.fields) { const visibleFields =
const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS] options.fields ||
processed = processed.map((r: any) => pick(r, fields)) Object.keys(source.schema || {}).filter(
} key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_EXTERNAL_COLUMNS]
processed = processed.map((r: any) => pick(r, allowedFields))
// need wrapper object for bookmarks etc when paginating // need wrapper object for bookmarks etc when paginating
const response: SearchResponse<Row> = { rows: processed, hasNextPage } const response: SearchResponse<Row> = { rows: processed, hasNextPage }
@ -201,7 +203,7 @@ export async function exportRows(
} }
let result = await search( let result = await search(
{ tableId, query: requestQuery, sort, sortOrder }, { tableId: table._id!, query: requestQuery, sort, sortOrder },
table table
) )
let rows: Row[] = [] let rows: Row[] = []
@ -257,10 +259,10 @@ export async function exportRows(
} }
export async function fetch(tableId: string): Promise<Row[]> { export async function fetch(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, { const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
const table = await sdk.tables.getTable(tableId)
return await outputProcessing(table, response.rows, { return await outputProcessing(table, response.rows, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
@ -268,7 +270,8 @@ export async function fetch(tableId: string): Promise<Row[]> {
} }
export async function fetchRaw(tableId: string): Promise<Row[]> { export async function fetchRaw(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, { const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) })
return response.rows return response.rows
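
The result filtering above no longer depends on the caller passing options.fields: by default every visible field of the source schema is kept, protected columns are always allowed, and everything else is stripped with lodash pick. Below is a hedged, stand-alone restatement of that step; the helper name is an assumption, and PROTECTED_EXTERNAL_COLUMNS is assumed to come from shared-core as elsewhere in this diff.

import pick from "lodash/pick"
import { PROTECTED_EXTERNAL_COLUMNS } from "@budibase/shared-core"
import { Row, Table, ViewV2 } from "@budibase/types"

// Keep only the requested (or visible) fields plus the protected columns.
function pickAllowedFields(
  source: Table | ViewV2,
  rows: Row[],
  fields?: string[]
): Row[] {
  const visibleFields =
    fields ||
    Object.keys(source.schema || {}).filter(
      key => source.schema?.[key].visible !== false
    )
  const allowedFields = [...visibleFields, ...PROTECTED_EXTERNAL_COLUMNS]
  return rows.map(r => pick(r, allowedFields))
}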

View File

@ -8,21 +8,29 @@ import {
SortType, SortType,
Table, Table,
User, User,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global" import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick" import pick from "lodash/pick"
import sdk from "../../../../"
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table source: Table | ViewV2
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { tableId } = options let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { paginate, query } = options const { paginate, query } = options
const params: RowSearchParams = { const params: RowSearchParams = {
tableId: options.tableId, tableId: options.tableId,
viewId: options.viewId,
sort: options.sort, sort: options.sort,
sortOrder: options.sortOrder, sortOrder: options.sortOrder,
sortType: options.sortType, sortType: options.sortType,
@ -50,18 +58,20 @@ export async function search(
// Enrich search results with relationships // Enrich search results with relationships
if (response.rows && response.rows.length) { if (response.rows && response.rows.length) {
// enrich with global users if from users table // enrich with global users if from users table
if (tableId === InternalTables.USER_METADATA) { if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[]) response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
} }
if (options.fields) { const visibleFields =
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS] options.fields ||
response.rows = response.rows.map((r: any) => pick(r, fields)) Object.keys(source.schema || {}).filter(
} key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))
response.rows = await outputProcessing(table, response.rows, { response.rows = await outputProcessing(source, response.rows, {
squash: true, squash: true,
fromViewId: options.viewId,
}) })
} }

View File

@ -1,4 +1,5 @@
import { import {
Aggregation,
Datasource, Datasource,
DocumentType, DocumentType,
FieldType, FieldType,
@ -15,6 +16,7 @@ import {
SortType, SortType,
SqlClient, SqlClient,
Table, Table,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { import {
buildInternalRelationships, buildInternalRelationships,
@ -44,10 +46,12 @@ import {
import { import {
dataFilters, dataFilters,
helpers, helpers,
isInternalColumnName,
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { isSearchingByRowID } from "../utils" import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace" import tracer from "dd-trace"
import { cloneDeep } from "lodash"
const builder = new sql.Sql(SqlClient.SQL_LITE) const builder = new sql.Sql(SqlClient.SQL_LITE)
const SQLITE_COLUMN_LIMIT = 2000 const SQLITE_COLUMN_LIMIT = 2000
@ -55,11 +59,34 @@ const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
const MISSING_TABLE_REGX = new RegExp(`no such table: .+`) const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`) const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
function buildInternalFieldList( async function buildInternalFieldList(
table: Table, source: Table | ViewV2,
tables: Table[], tables: Table[],
opts?: { relationships?: RelationshipsJson[] } opts?: { relationships?: RelationshipsJson[]; allowedFields?: string[] }
) { ) {
const { relationships, allowedFields } = opts || {}
let schemaFields: string[] = []
if (sdk.views.isView(source)) {
schemaFields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
schemaFields = Object.keys(source.schema).filter(
key => source.schema[key].visible !== false
)
}
if (allowedFields) {
schemaFields = schemaFields.filter(field => allowedFields.includes(field))
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
let fieldList: string[] = [] let fieldList: string[] = []
const getJunctionFields = (relatedTable: Table, fields: string[]) => { const getJunctionFields = (relatedTable: Table, fields: string[]) => {
const junctionFields: string[] = [] const junctionFields: string[] = []
@ -70,13 +97,18 @@ function buildInternalFieldList(
}) })
return junctionFields return junctionFields
} }
fieldList = fieldList.concat( if (sdk.tables.isTable(source)) {
PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`) for (const key of PROTECTED_INTERNAL_COLUMNS) {
) if (allowedFields && !allowedFields.includes(key)) {
for (let key of Object.keys(table.schema)) { continue
}
fieldList.push(`${table._id}.${key}`)
}
}
for (let key of schemaFields) {
const col = table.schema[key] const col = table.schema[key]
const isRelationship = col.type === FieldType.LINK const isRelationship = col.type === FieldType.LINK
if (!opts?.relationships && isRelationship) { if (!relationships && isRelationship) {
continue continue
} }
if (!isRelationship) { if (!isRelationship) {
@ -87,7 +119,9 @@ function buildInternalFieldList(
if (!relatedTable) { if (!relatedTable) {
continue continue
} }
const relatedFields = buildInternalFieldList(relatedTable, tables).concat( const relatedFields = (
await buildInternalFieldList(relatedTable, tables)
).concat(
getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"]) getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
) )
// break out of the loop if we have reached the max number of columns // break out of the loop if we have reached the max number of columns
@ -128,15 +162,22 @@ function cleanupFilters(
// generate a map of all possible column names (these can be duplicated across tables // generate a map of all possible column names (these can be duplicated across tables
// the map of them will always be the same // the map of them will always be the same
const userColumnMap: Record<string, string> = {} const userColumnMap: Record<string, string> = {}
allTables.forEach(table => for (const table of allTables) {
Object.keys(table.schema).forEach( for (const key of Object.keys(table.schema)) {
key => (userColumnMap[key] = mapToUserColumn(key)) if (isInternalColumnName(key)) {
) continue
) }
userColumnMap[key] = mapToUserColumn(key)
}
}
// update the keys of filters to manage user columns // update the keys of filters to manage user columns
const keyInAnyTable = (key: string): boolean => const keyInAnyTable = (key: string): boolean => {
allTables.some(table => table.schema[key]) if (isInternalColumnName(key)) {
return false
}
return allTables.some(table => table.schema[key])
}
const splitter = new dataFilters.ColumnSplitter(allTables) const splitter = new dataFilters.ColumnSplitter(allTables)
@ -291,16 +332,23 @@ function resyncDefinitionsRequired(status: number, message: string) {
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table, source: Table | ViewV2,
opts?: { retrying?: boolean } opts?: { retrying?: boolean }
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
let { paginate, query, ...params } = options let { paginate, query, ...params } = cloneDeep(options)
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const allTables = await sdk.tables.getAllInternalTables() const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables) const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table // make sure we have the mapped/latest table
if (table?._id) { if (table._id) {
table = allTablesMap[table?._id] table = allTablesMap[table._id]
} }
if (!table) { if (!table) {
throw new Error("Unable to find table") throw new Error("Unable to find table")
@ -312,6 +360,23 @@ export async function search(
...cleanupFilters(query, table, allTables), ...cleanupFilters(query, table, allTables),
documentType: DocumentType.ROW, documentType: DocumentType.ROW,
} }
let aggregations: Aggregation[] = []
if (sdk.views.isView(source)) {
const calculationFields = helpers.views.calculationFields(source)
for (const [key, field] of Object.entries(calculationFields)) {
if (options.fields && !options.fields.includes(key)) {
continue
}
aggregations.push({
name: key,
field: mapToUserColumn(field.field),
calculationType: field.calculationType,
})
}
}
const request: QueryJson = { const request: QueryJson = {
endpoint: { endpoint: {
// not important, we query ourselves // not important, we query ourselves
@ -327,7 +392,11 @@ export async function search(
columnPrefix: USER_COLUMN_PREFIX, columnPrefix: USER_COLUMN_PREFIX,
}, },
resource: { resource: {
fields: buildInternalFieldList(table, allTables, { relationships }), fields: await buildInternalFieldList(source, allTables, {
relationships,
allowedFields: options.fields,
}),
aggregations,
}, },
relationships, relationships,
} }
@ -372,7 +441,7 @@ export async function search(
// make sure JSON columns are corrected // make sure JSON columns are corrected
const processed = builder.convertJsonStringColumns<Row>( const processed = builder.convertJsonStringColumns<Row>(
table, table,
await sqlOutputProcessing(rows, table!, allTablesMap, relationships, { await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
sqs: true, sqs: true,
}) })
) )
@ -388,17 +457,18 @@ export async function search(
} }
// get the rows // get the rows
let finalRows = await outputProcessing(table, processed, { let finalRows = await outputProcessing(source, processed, {
preserveLinks: true, preserveLinks: true,
squash: true, squash: true,
fromViewId: options.viewId,
}) })
// check if we need to pick specific rows out const visibleFields =
if (options.fields) { options.fields ||
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS] Object.keys(source.schema || {}).filter(
finalRows = finalRows.map((r: any) => pick(r, fields)) key => source.schema?.[key].visible !== false
} )
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
finalRows = finalRows.map((r: any) => pick(r, allowedFields))
const response: SearchResponse<Row> = { const response: SearchResponse<Row> = {
rows: finalRows, rows: finalRows,
@ -419,7 +489,7 @@ export async function search(
const msg = typeof err === "string" ? err : err.message const msg = typeof err === "string" ? err : err.message
if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) { if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) {
await sdk.tables.sqs.syncDefinition() await sdk.tables.sqs.syncDefinition()
return search(options, table, { retrying: true }) return search(options, source, { retrying: true })
} }
// previously the internal table didn't error when a column didn't exist in search // previously the internal table didn't error when a column didn't exist in search
if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) { if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) {
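
One more sketch for the SQS path: calculation views are turned into SQL aggregations, one Aggregation per calculation field the caller asked for (or all of them when no field list is given), with the target column run through the same user-column mapping as the rest of the query. This restates the loop in search() above; mapToUserColumn is the internal helper referenced in this diff and is passed in as a parameter only to keep the sketch self-contained.

import { Aggregation, ViewV2 } from "@budibase/types"
import { helpers } from "@budibase/shared-core"

// Illustrative restatement of the aggregation-building loop in search().
function buildAggregations(
  view: ViewV2,
  mapToUserColumn: (column: string) => string,
  requestedFields?: string[]
): Aggregation[] {
  const aggregations: Aggregation[] = []
  const calculationFields = helpers.views.calculationFields(view)
  for (const [key, field] of Object.entries(calculationFields)) {
    if (requestedFields && !requestedFields.includes(key)) {
      continue
    }
    aggregations.push({
      name: key,
      field: mapToUserColumn(field.field),
      calculationType: field.calculationType,
    })
  }
  return aggregations
}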

View File

@ -90,10 +90,8 @@ describe.each([tableWithUserCol, tableWithUsersCol])(
}) })
it("shouldn't error if no query supplied", () => { it("shouldn't error if no query supplied", () => {
const params: any = { // @ts-expect-error - intentionally passing in a bad type
tableId, const output = searchInputMapping(col, { tableId })
}
const output = searchInputMapping(col, params)
expect(output.query).toBeUndefined() expect(output.query).toBeUndefined()
}) })
} }

View File

@ -11,7 +11,7 @@ import {
RowSearchParams, RowSearchParams,
} from "@budibase/types" } from "@budibase/types"
import { db as dbCore, context } from "@budibase/backend-core" import { db as dbCore, context } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core" import { utils, dataFilters } from "@budibase/shared-core"
export async function paginatedSearch( export async function paginatedSearch(
query: SearchFilters, query: SearchFilters,
@ -31,13 +31,13 @@ export async function fullSearch(
function findColumnInQueries( function findColumnInQueries(
column: string, column: string,
options: RowSearchParams, filters: SearchFilters,
callback: (filter: any) => any callback: (filter: any) => any
) { ) {
if (!options.query) { if (!filters) {
return return
} }
for (let filterBlock of Object.values(options.query)) { for (let filterBlock of Object.values(filters)) {
if (typeof filterBlock !== "object") { if (typeof filterBlock !== "object") {
continue continue
} }
@ -49,8 +49,8 @@ function findColumnInQueries(
} }
} }
function userColumnMapping(column: string, options: RowSearchParams) { function userColumnMapping(column: string, filters: SearchFilters) {
findColumnInQueries(column, options, (filterValue: any): any => { findColumnInQueries(column, filters, (filterValue: any): any => {
const isArray = Array.isArray(filterValue), const isArray = Array.isArray(filterValue),
isString = typeof filterValue === "string" isString = typeof filterValue === "string"
if (!isString && !isArray) { if (!isString && !isArray) {
@ -83,28 +83,32 @@ function userColumnMapping(column: string, options: RowSearchParams) {
// maps through the search parameters to check if any of the inputs are invalid // maps through the search parameters to check if any of the inputs are invalid
// based on the table schema, converts them to something that is valid. // based on the table schema, converts them to something that is valid.
export function searchInputMapping(table: Table, options: RowSearchParams) { export function searchInputMapping(table: Table, options: RowSearchParams) {
if (!table?.schema) { // need an internal function to loop over filters, because this takes the full options
return options function checkFilters(filters: SearchFilters) {
} for (let [key, column] of Object.entries(table.schema || {})) {
for (let [key, column] of Object.entries(table.schema)) { switch (column.type) {
switch (column.type) { case FieldType.BB_REFERENCE_SINGLE: {
case FieldType.BB_REFERENCE_SINGLE: { const subtype = column.subtype
const subtype = column.subtype switch (subtype) {
switch (subtype) { case BBReferenceFieldSubType.USER:
case BBReferenceFieldSubType.USER: userColumnMapping(key, filters)
userColumnMapping(key, options) break
break
default: default:
utils.unreachable(subtype) utils.unreachable(subtype)
}
break
}
case FieldType.BB_REFERENCE: {
userColumnMapping(key, filters)
break
} }
break
}
case FieldType.BB_REFERENCE: {
userColumnMapping(key, options)
break
} }
} }
return dataFilters.recurseLogicalOperators(filters, checkFilters)
}
if (options.query) {
options.query = checkFilters(options.query)
} }
return options return options
} }
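
searchInputMapping previously only walked the top-level query, so user-column mappings were lost inside grouped filters; the recurseLogicalOperators call above fixes that. The traversal below is a hand-written illustration of the idea (visit every condition group nested under $and/$or), not the shared-core implementation.

import { LogicalOperator, SearchFilters } from "@budibase/types"

// Illustration only: apply `fn` to each condition group nested under
// $and / $or so that per-column fixes reach every level of the query.
function visitNestedFilters(
  filters: SearchFilters,
  fn: (f: SearchFilters) => SearchFilters
): SearchFilters {
  for (const operator of [LogicalOperator.AND, LogicalOperator.OR]) {
    const block = filters[operator]
    if (block?.conditions) {
      block.conditions = block.conditions.map(condition =>
        visitNestedFilters(fn(condition), fn)
      )
    }
  }
  return filters
}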

View File

@ -203,7 +203,7 @@ describe("query utils", () => {
}, },
}) })
const result = await getQueryableFields(Object.keys(table.schema), table) const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name", "age"]) expect(result).toEqual(["_id", "name", "age"])
}) })
@ -216,7 +216,7 @@ describe("query utils", () => {
}, },
}) })
const result = await getQueryableFields(Object.keys(table.schema), table) const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name"]) expect(result).toEqual(["_id", "name"])
}) })
@ -245,7 +245,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -282,7 +282,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"]) expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
}) })
@ -313,7 +313,7 @@ describe("query utils", () => {
}) })
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual(["_id", "name"]) expect(result).toEqual(["_id", "name"])
}) })
@ -381,7 +381,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => { it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -398,7 +398,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 1 table", async () => { it("includes nested relationship fields from aux 1 table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux1.schema), aux1) return getQueryableFields(aux1)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -420,7 +420,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 2 table", async () => { it("includes nested relationship fields from aux 2 table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux2.schema), aux2) return getQueryableFields(aux2)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -474,7 +474,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => { it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table) return getQueryableFields(table)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",
@ -488,7 +488,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux table", async () => { it("includes nested relationship fields from aux table", async () => {
const result = await config.doInContext(config.appId, () => { const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux.schema), aux) return getQueryableFields(aux)
}) })
expect(result).toEqual([ expect(result).toEqual([
"_id", "_id",

View File

@ -33,7 +33,7 @@ describe("validate", () => {
it("should accept empty values", async () => { it("should accept empty values", async () => {
const row = {} const row = {}
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
expect(output.errors).toEqual({}) expect(output.errors).toEqual({})
}) })
@ -43,7 +43,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}`, time: `${hour()}:${minute()}`,
} }
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -52,7 +52,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}:${second()}`, time: `${hour()}:${minute()}:${second()}`,
} }
const table = getTable() const table = getTable()
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -67,7 +67,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ['"time" is not a valid time'] }) expect(output.errors).toEqual({ time: ['"time" is not a valid time'] })
}) })
@ -91,7 +91,7 @@ describe("validate", () => {
`${generator.integer({ min: 11, max: 23 })}:${minute()}`, `${generator.integer({ min: 11, max: 23 })}:${minute()}`,
])("should accept values after config value (%s)", async time => { ])("should accept values after config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -100,7 +100,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before config value (%s)", async time => { ])("should reject values before config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -125,7 +125,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 12 })}:${minute()}`, `${generator.integer({ min: 0, max: 12 })}:${minute()}`,
])("should accept values before config value (%s)", async time => { ])("should accept values before config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
}) })
@ -134,7 +134,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after config value (%s)", async time => { ])("should reject values after config value (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:16:17"], time: ["must be no later than 15:16:17"],
@ -156,7 +156,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -166,7 +166,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -178,7 +178,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],
@ -199,7 +199,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -208,7 +208,7 @@ describe("validate", () => {
"should reject values out range (%s)", "should reject values out range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 10:00"], time: ["must be no later than 10:00"],
@ -226,7 +226,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] }) expect(output.errors).toEqual({ time: ["can't be blank"] })
}) })
@ -237,7 +237,7 @@ describe("validate", () => {
table.schema.time.constraints = { table.schema.time.constraints = {
presence: true, presence: true,
} }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] }) expect(output.errors).toEqual({ time: ["can't be blank"] })
}) })
@ -257,7 +257,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -267,7 +267,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -279,7 +279,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],
@ -301,7 +301,7 @@ describe("validate", () => {
"should accept values in range (%s)", "should accept values in range (%s)",
async time => { async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(true) expect(output.valid).toBe(true)
} }
) )
@ -311,7 +311,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`, `${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => { ])("should reject values before range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"], time: ["must be no earlier than 10:00"],
@ -323,7 +323,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`, `${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => { ])("should reject values after range (%s)", async time => {
const row = { time } const row = { time }
const output = await validate({ table, tableId: table._id!, row }) const output = await validate({ source: table, row })
expect(output.valid).toBe(false) expect(output.valid).toBe(false)
expect(output.errors).toEqual({ expect(output.errors).toEqual({
time: ["must be no later than 15:00"], time: ["must be no later than 15:00"],

View File

@ -13,16 +13,15 @@ import {
TableSchema, TableSchema,
SqlClient, SqlClient,
ArrayOperator, ArrayOperator,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query" import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters" import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.." import sdk from "../.."
import { import { extractViewInfoFromID, isRelationshipColumn } from "../../../db/utils"
extractViewInfoFromID,
isRelationshipColumn,
isViewID,
} from "../../../db/utils"
import { isSQL } from "../../../integrations/utils" import { isSQL } from "../../../integrations/utils"
import { docIds, sql } from "@budibase/backend-core"
import { getTableFromSource } from "../../../api/controllers/row/utils"
const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = { const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = {
[SourceName.POSTGRES]: SqlClient.POSTGRES, [SourceName.POSTGRES]: SqlClient.POSTGRES,
@ -58,8 +57,12 @@ export function getSQLClient(datasource: Datasource): SqlClient {
export function processRowCountResponse( export function processRowCountResponse(
response: DatasourcePlusQueryResponse response: DatasourcePlusQueryResponse
): number { ): number {
if (response && response.length === 1 && "total" in response[0]) { if (
const total = response[0].total response &&
response.length === 1 &&
sql.COUNT_FIELD_NAME in response[0]
) {
const total = response[0][sql.COUNT_FIELD_NAME]
return typeof total === "number" ? total : parseInt(total) return typeof total === "number" ? total : parseInt(total)
} else { } else {
throw new Error("Unable to count rows in query - no count response") throw new Error("Unable to count rows in query - no count response")
@ -142,37 +145,27 @@ function isForeignKey(key: string, table: Table) {
} }
export async function validate({ export async function validate({
tableId, source,
row, row,
table,
}: { }: {
tableId?: string source: Table | ViewV2
row: Row row: Row
table?: Table
}): Promise<{ }): Promise<{
valid: boolean valid: boolean
errors: Record<string, any> errors: Record<string, any>
}> { }> {
let fetchedTable: Table | undefined const table = await getTableFromSource(source)
if (!table && tableId) {
fetchedTable = await sdk.tables.getTable(tableId)
} else if (table) {
fetchedTable = table
}
if (fetchedTable === undefined) {
throw new Error("Unable to fetch table for validation")
}
const errors: Record<string, any> = {} const errors: Record<string, any> = {}
const disallowArrayTypes = [ const disallowArrayTypes = [
FieldType.ATTACHMENT_SINGLE, FieldType.ATTACHMENT_SINGLE,
FieldType.BB_REFERENCE_SINGLE, FieldType.BB_REFERENCE_SINGLE,
] ]
for (let fieldName of Object.keys(fetchedTable.schema)) { for (let fieldName of Object.keys(table.schema)) {
const column = fetchedTable.schema[fieldName] const column = table.schema[fieldName]
const constraints = cloneDeep(column.constraints) const constraints = cloneDeep(column.constraints)
const type = column.type const type = column.type
// foreign keys are likely to be enriched // foreign keys are likely to be enriched
if (isForeignKey(fieldName, fetchedTable)) { if (isForeignKey(fieldName, table)) {
continue continue
} }
// formulas shouldn't be validated, data will be deleted anyway // formulas shouldn't be validated, data will be deleted anyway
@ -323,7 +316,7 @@ export function isArrayFilter(operator: any): operator is ArrayOperator {
} }
export function tryExtractingTableAndViewId(tableOrViewId: string) { export function tryExtractingTableAndViewId(tableOrViewId: string) {
if (isViewID(tableOrViewId)) { if (docIds.isViewId(tableOrViewId)) {
return { return {
tableId: extractViewInfoFromID(tableOrViewId).tableId, tableId: extractViewInfoFromID(tableOrViewId).tableId,
viewId: tableOrViewId, viewId: tableOrViewId,
@ -332,3 +325,10 @@ export function tryExtractingTableAndViewId(tableOrViewId: string) {
return { tableId: tableOrViewId } return { tableId: tableOrViewId }
} }
export function getSource(tableOrViewId: string) {
if (docIds.isViewId(tableOrViewId)) {
return sdk.views.get(tableOrViewId)
}
return sdk.tables.getTable(tableOrViewId)
}
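
Taken together, these helpers mean callers only need the raw ID: getSource resolves it to a Table or ViewV2 and validate now accepts that source directly instead of a tableId/table pair. A small hedged usage sketch follows; the wrapper function and its import paths are assumptions.

import { Row } from "@budibase/types"
import sdk from "../.."
import { getSource } from "./utils"

// Validate a row against whichever source (table or view) the ID refers to.
async function validateAgainstSource(tableOrViewId: string, row: Row) {
  const source = await getSource(tableOrViewId)
  return sdk.rows.utils.validate({ source, row })
}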

View File

@ -1,5 +1,6 @@
import { Table, TableSourceType } from "@budibase/types" import { Table, TableSourceType } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { docIds } from "@budibase/backend-core"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean { export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) { if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
@ -9,3 +10,7 @@ export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
} }
return false return false
} }
export function isTable(table: any): table is Table {
return table._id && docIds.isTableId(table._id)
}
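
isTable here, together with sdk.views.isView further down in this commit, gives shared code a cheap way to branch on a Table | ViewV2 union. The helper below simply restates the pattern used by the search implementations above; the function name and import paths are illustrative.

import { Table, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk"

// Recover the underlying table regardless of whether a table or a view
// was passed in, mirroring the branching used by the search code above.
async function underlyingTable(source: Table | ViewV2): Promise<Table> {
  if (sdk.views.isView(source)) {
    return sdk.views.getTable(source.id)
  }
  return source
}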

View File

@ -1,5 +1,6 @@
import { import {
FieldType, FieldType,
PermissionLevel,
RelationSchemaField, RelationSchemaField,
RenameColumn, RenameColumn,
Table, Table,
@ -9,7 +10,7 @@ import {
ViewV2ColumnEnriched, ViewV2ColumnEnriched,
ViewV2Enriched, ViewV2Enriched,
} from "@budibase/types" } from "@budibase/types"
import { HTTPError } from "@budibase/backend-core" import { context, docIds, HTTPError, roles } from "@budibase/backend-core"
import { import {
helpers, helpers,
PROTECTED_EXTERNAL_COLUMNS, PROTECTED_EXTERNAL_COLUMNS,
@ -22,6 +23,7 @@ import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import { PermissionUpdateType, updatePermissionOnRole } from "../permissions"
function pickApi(tableId: any) { function pickApi(tableId: any) {
if (isExternalTableID(tableId)) { if (isExternalTableID(tableId)) {
@ -40,16 +42,85 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
return pickApi(tableId).getEnriched(viewId) return pickApi(tableId).getEnriched(viewId)
} }
export async function getTable(view: string | ViewV2): Promise<Table> {
const viewId = typeof view === "string" ? view : view.id
const cached = context.getTableForView(viewId)
if (cached) {
return cached
}
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
context.setTableForView(viewId, table)
return table
}
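The lookup above memoises a view's parent table on the per-request context (via getTableForView/setTableForView) so repeated row operations against the same view only hit the tables SDK once. A minimal standalone sketch of the same pattern, with the request context replaced by a plain map and a made-up fetch function:

// Sketch only: fetchTable and this Table shape are stand-ins, not Budibase APIs.
type Table = { _id: string; name: string }

const viewToTableCache = new Map<string, Table>()

async function fetchTable(tableId: string): Promise<Table> {
  // pretend this is an expensive database read
  return { _id: tableId, name: `table ${tableId}` }
}

async function tableForView(viewId: string, tableId: string): Promise<Table> {
  const cached = viewToTableCache.get(viewId)
  if (cached) {
    return cached
  }
  const table = await fetchTable(tableId)
  viewToTableCache.set(viewId, table)
  return table
}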
export function isView(view: any): view is ViewV2 {
return view.id && docIds.isViewId(view.id) && view.version === 2
}
async function guardCalculationViewSchema(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const calculationFields = helpers.views.calculationFields(view)
for (const calculationFieldName of Object.keys(calculationFields)) {
const schema = calculationFields[calculationFieldName]
const targetSchema = table.schema[schema.field]
if (!targetSchema) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which does not exist in the table schema`,
400
)
}
if (!helpers.schema.isNumeric(targetSchema)) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which is not a numeric field`,
400
)
}
}
const groupByFields = helpers.views.basicFields(view)
for (const groupByFieldName of Object.keys(groupByFields)) {
const targetSchema = table.schema[groupByFieldName]
if (!targetSchema) {
throw new HTTPError(
`Group by field "${groupByFieldName}" does not exist in the table schema`,
400
)
}
}
}
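As a rough illustration of what this guard accepts and rejects (the table and field names below are invented, not from the codebase): a calculation over a numeric column passes, while a calculation over a text column or a group-by on an unknown column triggers the 400 errors above.

// Hypothetical shapes, loosely mirroring the view/table schema types.
const table = {
  schema: {
    amount: { type: "number" },
    region: { type: "string" },
  },
}

const okView = {
  schema: {
    total: { calculationType: "sum", field: "amount", visible: true }, // numeric target: passes
    region: { visible: true },                                         // plain group-by column: passes
  },
}

const badView = {
  schema: {
    total: { calculationType: "sum", field: "region", visible: true },     // "region" is not numeric: 400
    average: { calculationType: "avg", field: "missing", visible: true },  // "missing" is not in the table: 400
  },
}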
async function guardViewSchema( async function guardViewSchema(
tableId: string, tableId: string,
view: Omit<ViewV2, "id" | "version"> view: Omit<ViewV2, "id" | "version">
) { ) {
const viewSchema = view.schema || {}
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
if (helpers.views.isCalculationView(view)) {
await guardCalculationViewSchema(table, view)
}
await checkReadonlyFields(table, view)
checkRequiredFields(table, view)
checkDisplayField(view)
}
async function checkReadonlyFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const viewSchema = view.schema || {}
for (const field of Object.keys(viewSchema)) { for (const field of Object.keys(viewSchema)) {
const tableSchemaField = table.schema[field] const viewFieldSchema = viewSchema[field]
if (!tableSchemaField) { if (helpers.views.isCalculationField(viewFieldSchema)) {
continue
}
const tableFieldSchema = table.schema[field]
if (!tableFieldSchema) {
throw new HTTPError( throw new HTTPError(
`Field "${field}" is not valid for the requested table`, `Field "${field}" is not valid for the requested table`,
400 400
@ -65,18 +136,33 @@ async function guardViewSchema(
} }
} }
} }
}
const existingView = function checkDisplayField(view: Omit<ViewV2, "id" | "version">) {
table?.views && (table.views[view.name] as ViewV2 | undefined) if (view.primaryDisplay) {
const viewSchemaField = view.schema?.[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
}
function checkRequiredFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const existingView = table.views?.[view.name] as ViewV2 | undefined
for (const field of Object.values(table.schema)) { for (const field of Object.values(table.schema)) {
if (!helpers.schema.isRequired(field.constraints)) { if (!helpers.schema.isRequired(field.constraints)) {
continue continue
} }
const viewSchemaField = viewSchema[field.name] const viewSchemaField = view.schema?.[field.name]
const existingViewSchema = const existingViewSchema = existingView?.schema?.[field.name]
existingView?.schema && existingView.schema[field.name]
if (!viewSchemaField && !existingViewSchema?.visible) { if (!viewSchemaField && !existingViewSchema?.visible) {
// Supporting existing configs with required columns but hidden in views // Supporting existing configs with required columns but hidden in views
continue continue
@ -89,24 +175,16 @@ async function guardViewSchema(
) )
} }
if (viewSchemaField.readonly) { if (
helpers.views.isBasicViewField(viewSchemaField) &&
viewSchemaField.readonly
) {
throw new HTTPError( throw new HTTPError(
`You can't make "${field.name}" readonly because it is a required field.`, `You can't make "${field.name}" readonly because it is a required field.`,
400 400
) )
} }
} }
if (view.primaryDisplay) {
const viewSchemaField = viewSchema[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
} }
export async function create( export async function create(
@ -115,7 +193,30 @@ export async function create(
): Promise<ViewV2> { ): Promise<ViewV2> {
await guardViewSchema(tableId, viewRequest) await guardViewSchema(tableId, viewRequest)
return pickApi(tableId).create(tableId, viewRequest) const view = await pickApi(tableId).create(tableId, viewRequest)
// Set permissions to be the same as the table
const tablePerms = await sdk.permissions.getResourcePerms(tableId)
const readRole = tablePerms[PermissionLevel.READ]?.role
const writeRole = tablePerms[PermissionLevel.WRITE]?.role
await updatePermissionOnRole(
{
roleId: readRole || roles.BUILTIN_ROLE_IDS.BASIC,
resourceId: view.id,
level: PermissionLevel.READ,
},
PermissionUpdateType.ADD
)
await updatePermissionOnRole(
{
roleId: writeRole || roles.BUILTIN_ROLE_IDS.BASIC,
resourceId: view.id,
level: PermissionLevel.WRITE,
},
PermissionUpdateType.ADD
)
return view
} }
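The net effect of the two calls above is that a freshly created view starts out with the same read and write roles as its parent table, falling back to the BASIC built-in role when the table has no explicit permission set. A hedged sketch of just that decision (the role IDs and perms shape here are illustrative):

type PermLevel = "read" | "write"
type ResourcePerms = Partial<Record<PermLevel, { role: string }>>

const BASIC_ROLE = "BASIC" // stand-in for roles.BUILTIN_ROLE_IDS.BASIC

function rolesForNewView(tablePerms: ResourcePerms): Record<PermLevel, string> {
  return {
    read: tablePerms.read?.role ?? BASIC_ROLE,
    write: tablePerms.write?.role ?? BASIC_ROLE,
  }
}

// A table that restricts writes to POWER users but has no explicit read rule:
console.log(rolesForNewView({ write: { role: "POWER" } }))
// -> { read: "BASIC", write: "POWER" }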
export async function update(tableId: string, view: ViewV2): Promise<ViewV2> { export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
@ -157,19 +258,12 @@ export async function enrichSchema(
view: ViewV2, view: ViewV2,
tableSchema: TableSchema tableSchema: TableSchema
): Promise<ViewV2Enriched> { ): Promise<ViewV2Enriched> {
const tableCache: Record<string, Table> = {}
async function populateRelTableSchema( async function populateRelTableSchema(
tableId: string, tableId: string,
viewFields: Record<string, RelationSchemaField> viewFields: Record<string, RelationSchemaField>
) { ) {
if (!tableCache[tableId]) { const relTable = await sdk.tables.getTable(tableId)
tableCache[tableId] = await sdk.tables.getTable(tableId)
}
const relTable = tableCache[tableId]
const result: Record<string, ViewV2ColumnEnriched> = {} const result: Record<string, ViewV2ColumnEnriched> = {}
for (const relTableFieldName of Object.keys(relTable.schema)) { for (const relTableFieldName of Object.keys(relTable.schema)) {
const relTableField = relTable.schema[relTableFieldName] const relTableField = relTable.schema[relTableFieldName]
if ([FieldType.LINK, FieldType.FORMULA].includes(relTableField.type)) { if ([FieldType.LINK, FieldType.FORMULA].includes(relTableField.type)) {
@ -198,15 +292,24 @@ export async function enrichSchema(
const viewSchema = view.schema || {} const viewSchema = view.schema || {}
const anyViewOrder = Object.values(viewSchema).some(ui => ui.order != null) const anyViewOrder = Object.values(viewSchema).some(ui => ui.order != null)
for (const key of Object.keys(tableSchema).filter(
k => tableSchema[k].visible !== false const visibleSchemaFields = Object.keys(viewSchema).filter(key => {
)) { if (helpers.views.isCalculationField(viewSchema[key])) {
return viewSchema[key].visible !== false
}
return key in tableSchema && tableSchema[key].visible !== false
})
const visibleTableFields = Object.keys(tableSchema).filter(
key => tableSchema[key].visible !== false
)
const visibleFields = new Set([...visibleSchemaFields, ...visibleTableFields])
for (const key of visibleFields) {
// if nothing specified in view, then it is not visible // if nothing specified in view, then it is not visible
const ui = viewSchema[key] || { visible: false } const ui = viewSchema[key] || { visible: false }
schema[key] = { schema[key] = {
...tableSchema[key], ...tableSchema[key],
...ui, ...ui,
order: anyViewOrder ? ui?.order ?? undefined : tableSchema[key].order, order: anyViewOrder ? ui?.order ?? undefined : tableSchema[key]?.order,
columns: undefined, columns: undefined,
} }
@ -218,10 +321,7 @@ export async function enrichSchema(
} }
} }
return { return { ...view, schema }
...view,
schema: schema,
}
} }
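In short, each enriched column is the table definition overlaid with the view's UI metadata: a column the view says nothing about defaults to hidden, and per-view ordering is only honoured once any column in the view defines an order. A reduced sketch of that merge (types and names are illustrative):

type UIMeta = { visible?: boolean; order?: number }

function mergeColumn(
  tableCol: UIMeta,
  viewCol: UIMeta | undefined,
  anyViewOrder: boolean
): UIMeta {
  const ui = viewCol ?? { visible: false } // nothing specified in the view means not visible
  return {
    ...tableCol,
    ...ui,
    order: anyViewOrder ? ui.order : tableCol.order,
  }
}

console.log(mergeColumn({ visible: true, order: 2 }, undefined, false))
// -> { visible: false, order: 2 }
console.log(mergeColumn({ visible: true, order: 2 }, { visible: true, order: 1 }, true))
// -> { visible: true, order: 1 }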
export function syncSchema( export function syncSchema(


@ -130,6 +130,26 @@ export function getUserContextBindings(user: ContextUser) {
return {} return {}
} }
// Current user context for bindable search // Current user context for bindable search
const { _id, _rev, firstName, lastName, email, status, roleId } = user const {
return { _id, _rev, firstName, lastName, email, status, roleId } _id,
_rev,
firstName,
lastName,
email,
status,
roleId,
globalId,
userId,
} = user
return {
_id,
_rev,
firstName,
lastName,
email,
status,
roleId,
globalId,
userId,
}
} }


@ -7,11 +7,11 @@ import {
BulkImportRequest, BulkImportRequest,
BulkImportResponse, BulkImportResponse,
SearchRowResponse, SearchRowResponse,
RowSearchParams,
DeleteRows, DeleteRows,
DeleteRow, DeleteRow,
PaginatedSearchRowResponse, PaginatedSearchRowResponse,
RowExportFormat, RowExportFormat,
SearchRowRequest,
} from "@budibase/types" } from "@budibase/types"
import { Expectations, TestAPI } from "./base" import { Expectations, TestAPI } from "./base"
@ -136,7 +136,7 @@ export class RowAPI extends TestAPI {
) )
} }
search = async <T extends RowSearchParams>( search = async <T extends SearchRowRequest>(
sourceId: string, sourceId: string,
params?: T, params?: T,
expectations?: Expectations expectations?: Expectations


@ -74,7 +74,7 @@ class Orchestrator {
private job: Job private job: Job
private loopStepOutputs: LoopStep[] private loopStepOutputs: LoopStep[]
private stopped: boolean private stopped: boolean
private executionOutput: AutomationContext private executionOutput: Omit<AutomationContext, "stepsByName" | "stepsById">
constructor(job: AutomationJob) { constructor(job: AutomationJob) {
let automation = job.data.automation let automation = job.data.automation
@ -91,6 +91,7 @@ class Orchestrator {
// step zero is never used as the template string is zero indexed for customer facing // step zero is never used as the template string is zero indexed for customer facing
this.context = { this.context = {
steps: [{}], steps: [{}],
stepsById: {},
stepsByName: {}, stepsByName: {},
trigger: triggerOutput, trigger: triggerOutput,
} }
@ -457,8 +458,9 @@ class Orchestrator {
inputs: steps[stepToLoopIndex].inputs, inputs: steps[stepToLoopIndex].inputs,
}) })
this.context.stepsById[steps[stepToLoopIndex].id] = tempOutput
const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id
this.context.stepsByName![stepName] = tempOutput this.context.stepsByName[stepName] = tempOutput
this.context.steps[this.context.steps.length] = tempOutput this.context.steps[this.context.steps.length] = tempOutput
this.context.steps = this.context.steps.filter( this.context.steps = this.context.steps.filter(
item => !item.hasOwnProperty.call(item, "currentItem") item => !item.hasOwnProperty.call(item, "currentItem")
@ -517,7 +519,10 @@ class Orchestrator {
Object.entries(filter).forEach(([_, value]) => { Object.entries(filter).forEach(([_, value]) => {
Object.entries(value).forEach(([field, _]) => { Object.entries(value).forEach(([field, _]) => {
const updatedField = field.replace("{{", "{{ literal ") const updatedField = field.replace("{{", "{{ literal ")
const fromContext = processStringSync(updatedField, this.context) const fromContext = processStringSync(
updatedField,
this.processContext(this.context)
)
toFilter[field] = fromContext toFilter[field] = fromContext
}) })
}) })
@ -563,9 +568,9 @@ class Orchestrator {
} }
const stepFn = await this.getStepFunctionality(step.stepId) const stepFn = await this.getStepFunctionality(step.stepId)
let inputs = await this.addContextAndProcess( let inputs = await processObject(
originalStepInput, originalStepInput,
this.context this.processContext(this.context)
) )
inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs) inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs)
@ -594,16 +599,16 @@ class Orchestrator {
return null return null
} }
private async addContextAndProcess(inputs: any, context: any) { private processContext(context: AutomationContext) {
const processContext = { const processContext = {
...context, ...context,
steps: { steps: {
...context.steps, ...context.steps,
...context.stepsById,
...context.stepsByName, ...context.stepsByName,
}, },
} }
return processContext
return processObject(inputs, processContext)
} }
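processContext folds the stepsById and stepsByName maps into the positional steps array before anything is handed to template processing, so a binding can reference a step by index, by id, or by its display name and resolve to the same outputs. A standalone sketch of that merge, assuming nothing about the real binding syntax beyond plain property lookup (the ids and names below are invented):

type StepOutputs = Record<string, unknown>

const steps: StepOutputs[] = [{}, { rowsCreated: 3 }]
const stepsById: Record<string, StepOutputs> = { step_abc123: { rowsCreated: 3 } }
const stepsByName: Record<string, StepOutputs> = { "Create Rows": { rowsCreated: 3 } }

// Mirrors what processContext exposes under `steps`: array indices, step ids and
// step names all become keys on the same object that bindings are resolved against.
const mergedSteps = { ...steps, ...stepsById, ...stepsByName }

console.log(mergedSteps[1], mergedSteps["step_abc123"], mergedSteps["Create Rows"])
// all three log { rowsCreated: 3 }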
private handleStepOutput( private handleStepOutput(
@ -623,6 +628,7 @@ class Orchestrator {
} else { } else {
this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs) this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs)
this.context.steps[this.context.steps.length] = outputs this.context.steps[this.context.steps.length] = outputs
this.context.stepsById![step.id] = outputs
const stepName = step.name || step.id const stepName = step.name || step.id
this.context.stepsByName![stepName] = outputs this.context.stepsByName![stepName] = outputs
} }


@ -18,6 +18,7 @@ import {
RowAttachment, RowAttachment,
Table, Table,
User, User,
ViewV2,
} from "@budibase/types" } from "@budibase/types"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { import {
@ -33,7 +34,11 @@ import {
PROTECTED_INTERNAL_COLUMNS, PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core" } from "@budibase/shared-core"
import { processString } from "@budibase/string-templates" import { processString } from "@budibase/string-templates"
import { isUserMetadataTable } from "../../api/controllers/row/utils" import {
getTableFromSource,
isUserMetadataTable,
} from "../../api/controllers/row/utils"
import sdk from "../../sdk"
export * from "./utils" export * from "./utils"
export * from "./attachments" export * from "./attachments"
@ -67,6 +72,7 @@ export async function processAutoColumn(
// check its not user table, or whether any of the processing options have been disabled // check its not user table, or whether any of the processing options have been disabled
const shouldUpdateUserFields = const shouldUpdateUserFields =
!isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser !isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser
let tableMutated = false
for (let [key, schema] of Object.entries(table.schema)) { for (let [key, schema] of Object.entries(table.schema)) {
if (!schema.autocolumn) { if (!schema.autocolumn) {
continue continue
@ -99,10 +105,17 @@ export async function processAutoColumn(
row[key] = schema.lastID + 1 row[key] = schema.lastID + 1
schema.lastID++ schema.lastID++
table.schema[key] = schema table.schema[key] = schema
tableMutated = true
} }
break break
} }
} }
if (tableMutated) {
const db = context.getAppDB()
const resp = await db.put(table)
table._rev = resp.rev
}
} }
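Number-type auto columns keep their running lastID counter on the table document itself, so the block above now writes the table back once after the counters are bumped and keeps the in-memory revision in sync for any later writes. A minimal sketch of the same bump-and-persist pattern against a CouchDB-style store (the db shape here is illustrative):

type AutoSchema = { autocolumn?: boolean; lastID?: number }
type TableDoc = { _id: string; _rev?: string; schema: Record<string, AutoSchema> }
type Db = { put(doc: TableDoc): Promise<{ rev: string }> }

async function assignAutoIds(db: Db, table: TableDoc, row: Record<string, unknown>) {
  let mutated = false
  for (const [key, schema] of Object.entries(table.schema)) {
    if (!schema.autocolumn || schema.lastID == null) {
      continue
    }
    row[key] = schema.lastID + 1
    schema.lastID++
    mutated = true
  }
  if (mutated) {
    const resp = await db.put(table) // persist the new counters
    table._rev = resp.rev            // keep the cached doc usable for further updates
  }
}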
async function processDefaultValues(table: Table, row: Row) { async function processDefaultValues(table: Table, row: Row) {
@ -121,8 +134,10 @@ async function processDefaultValues(table: Table, row: Row) {
for (const [key, schema] of Object.entries(table.schema)) { for (const [key, schema] of Object.entries(table.schema)) {
if ("default" in schema && schema.default != null && row[key] == null) { if ("default" in schema && schema.default != null && row[key] == null) {
const processed = await processString(schema.default, ctx) const processed =
typeof schema.default === "string"
? await processString(schema.default, ctx)
: schema.default
try { try {
row[key] = coerce(processed, schema.type) row[key] = coerce(processed, schema.type)
} catch (err: any) { } catch (err: any) {
@ -169,11 +184,12 @@ export function coerce(row: any, type: string) {
*/ */
export async function inputProcessing( export async function inputProcessing(
userId: string | null | undefined, userId: string | null | undefined,
table: Table, source: Table | ViewV2,
row: Row, row: Row,
opts?: AutoColumnProcessingOpts opts?: AutoColumnProcessingOpts
) { ) {
const clonedRow = cloneDeep(row) const clonedRow = cloneDeep(row)
const table = await getTableFromSource(source)
const dontCleanseKeys = ["type", "_id", "_rev", "tableId"] const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
for (const [key, value] of Object.entries(clonedRow)) { for (const [key, value] of Object.entries(clonedRow)) {
@ -228,8 +244,7 @@ export async function inputProcessing(
await processAutoColumn(userId, table, clonedRow, opts) await processAutoColumn(userId, table, clonedRow, opts)
await processDefaultValues(table, clonedRow) await processDefaultValues(table, clonedRow)
return clonedRow
return { table, row: clonedRow }
} }
/** /**
@ -242,14 +257,13 @@ export async function inputProcessing(
* @returns the enriched rows will be returned. * @returns the enriched rows will be returned.
*/ */
export async function outputProcessing<T extends Row[] | Row>( export async function outputProcessing<T extends Row[] | Row>(
table: Table, source: Table | ViewV2,
rows: T, rows: T,
opts: { opts: {
squash?: boolean squash?: boolean
preserveLinks?: boolean preserveLinks?: boolean
fromRow?: Row fromRow?: Row
skipBBReferences?: boolean skipBBReferences?: boolean
fromViewId?: string
} = { } = {
squash: true, squash: true,
preserveLinks: false, preserveLinks: false,
@ -264,6 +278,14 @@ export async function outputProcessing<T extends Row[] | Row>(
} else { } else {
safeRows = rows safeRows = rows
} }
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// SQS returns the rows with full relationship contents // SQS returns the rows with full relationship contents
// attach any linked row information // attach any linked row information
let enriched = !opts.preserveLinks let enriched = !opts.preserveLinks
@ -276,25 +298,25 @@ export async function outputProcessing<T extends Row[] | Row>(
opts.squash = true opts.squash = true
} }
enriched = await coreOutputProcessing(table, enriched, opts) enriched = await coreOutputProcessing(source, enriched, opts)
if (opts.squash) { if (opts.squash) {
enriched = await linkRows.squashLinks(table, enriched, { enriched = await linkRows.squashLinks(source, enriched)
fromViewId: opts?.fromViewId,
})
} }
return (wasArray ? enriched : enriched[0]) as T return (wasArray ? enriched : enriched[0]) as T
} }
/** /**
* This function is similar to the outputProcessing function above, it makes sure that all the provided * This function is similar to the outputProcessing function above, it makes
* rows are ready for output, but does not have enrichment for squash capabilities which can cause performance issues. * sure that all the provided rows are ready for output, but does not have
* outputProcessing should be used when responding from the API, while this should be used when internally processing * enrichment for squash capabilities which can cause performance issues.
* rows for any reason (like part of view operations). * outputProcessing should be used when responding from the API, while this
* should be used when internally processing rows for any reason (like part of
* view operations).
*/ */
export async function coreOutputProcessing( export async function coreOutputProcessing(
table: Table, source: Table | ViewV2,
rows: Row[], rows: Row[],
opts: { opts: {
preserveLinks?: boolean preserveLinks?: boolean
@ -305,6 +327,13 @@ export async function coreOutputProcessing(
skipBBReferences: false, skipBBReferences: false,
} }
): Promise<Row[]> { ): Promise<Row[]> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// process complex types: attachments, bb references... // process complex types: attachments, bb references...
for (const [property, column] of Object.entries(table.schema)) { for (const [property, column] of Object.entries(table.schema)) {
if ( if (
@ -399,6 +428,25 @@ export async function coreOutputProcessing(
} }
} }
} }
if (sdk.views.isView(source)) {
const calculationFields = Object.keys(
helpers.views.calculationFields(source)
)
// We ensure all calculation fields are returned as numbers. During the
// testing of this feature it was discovered that the COUNT operation
// returns a string for MySQL, MariaDB, and Postgres. But given that all
// calculation fields should be numbers, we blanket make sure of that
// here.
for (const key of calculationFields) {
for (const row of rows) {
if (typeof row[key] === "string") {
row[key] = parseFloat(row[key])
}
}
}
}
} }
if (!isUserMetadataTable(table._id!)) { if (!isUserMetadataTable(table._id!)) {
@ -409,9 +457,18 @@ export async function coreOutputProcessing(
const tableFields = Object.keys(table.schema).filter( const tableFields = Object.keys(table.schema).filter(
f => table.schema[f].visible !== false f => table.schema[f].visible !== false
) )
const fields = [...tableFields, ...protectedColumns].map(f => const fields = [...tableFields, ...protectedColumns].map(f =>
f.toLowerCase() f.toLowerCase()
) )
if (sdk.views.isView(source)) {
const aggregations = helpers.views.calculationFields(source)
for (const key of Object.keys(aggregations)) {
fields.push(key.toLowerCase())
}
}
for (const row of rows) { for (const row of rows) {
for (const key of Object.keys(row)) { for (const key of Object.keys(row)) {
if (!fields.includes(key.toLowerCase())) { if (!fields.includes(key.toLowerCase())) {


@ -65,7 +65,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferenceMock.mockResolvedValue(user) processInputBBReferenceMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes( expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes(
1 1
@ -117,7 +117,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferencesMock.mockResolvedValue(user) processInputBBReferencesMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes( expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes(
1 1
@ -164,7 +164,7 @@ describe("rowProcessor - inputProcessing", () => {
name: "Jack", name: "Jack",
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled() expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({ ...newRow, user: undefined }) expect(row).toEqual({ ...newRow, user: undefined })
@ -207,7 +207,7 @@ describe("rowProcessor - inputProcessing", () => {
user: userValue, user: userValue,
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
if (userValue === undefined) { if (userValue === undefined) {
// The 'user' field is omitted // The 'user' field is omitted
@ -262,7 +262,7 @@ describe("rowProcessor - inputProcessing", () => {
user: "123", user: "123",
} }
const { row } = await inputProcessing(userId, table, newRow) const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled() expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({ expect(row).toEqual({


@ -148,9 +148,16 @@ export function parse(rows: Rows, table: Table): Rows {
Object.keys(row).forEach(columnName => { Object.keys(row).forEach(columnName => {
const columnData = row[columnName] const columnData = row[columnName]
if (columnName === "_id") {
parsedRow[columnName] = columnData
return
}
const schema = table.schema const schema = table.schema
if (!(columnName in schema)) { if (!(columnName in schema)) {
// Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case // Objects can be present in the row data but not in the schema, so make
// sure we don't proceed in such a case
return return
} }


@ -3,7 +3,7 @@ import {
BBReferenceFieldSubType, BBReferenceFieldSubType,
FieldType, FieldType,
FormulaType, FormulaType,
SearchFilter, LegacyFilter,
SearchFilters, SearchFilters,
SearchQueryFields, SearchQueryFields,
ArrayOperator, ArrayOperator,
@ -19,9 +19,12 @@ import {
RangeOperator, RangeOperator,
LogicalOperator, LogicalOperator,
isLogicalSearchOperator, isLogicalSearchOperator,
SearchFilterGroup,
FilterGroupLogicalOperator,
} from "@budibase/types" } from "@budibase/types"
import dayjs from "dayjs" import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { processSearchFilters } from "./utils"
import { deepGet, schema } from "./helpers" import { deepGet, schema } from "./helpers"
import { isPlainObject, isEmpty } from "lodash" import { isPlainObject, isEmpty } from "lodash"
import { decodeNonAscii } from "./helpers/schema" import { decodeNonAscii } from "./helpers/schema"
@ -160,9 +163,6 @@ export function recurseSearchFilters(
* https://github.com/Budibase/budibase/issues/10118 * https://github.com/Budibase/budibase/issues/10118
*/ */
export const cleanupQuery = (query: SearchFilters) => { export const cleanupQuery = (query: SearchFilters) => {
if (!query) {
return query
}
for (let filterField of NoEmptyFilterStrings) { for (let filterField of NoEmptyFilterStrings) {
if (!query[filterField]) { if (!query[filterField]) {
continue continue
@ -304,10 +304,143 @@ export class ColumnSplitter {
} }
/** /**
 * Builds a JSON query from the filter structure generated in the builder * Builds a JSON query from a SearchFilter definition
* @param filter the builder filter structure * @param filter the builder filter structure
*/ */
export const buildQuery = (filter: SearchFilter[]) => {
const buildCondition = (expression: LegacyFilter) => {
// Filter body
let query: SearchFilters = {
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
}
let { operator, field, type, value, externalType, onEmptyFilter } = expression
if (!operator || !field) {
return
}
const queryOperator = operator as SearchFilterOperator
const isHbs =
typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
// Parse all values into correct types
if (operator === "allOr") {
query.allOr = true
return
}
if (onEmptyFilter) {
query.onEmptyFilter = onEmptyFilter
return
}
// Default the value for noValue fields to ensure they are correctly added
// to the final query
if (queryOperator === "empty" || queryOperator === "notEmpty") {
value = null
}
if (
type === "datetime" &&
!isHbs &&
queryOperator !== "empty" &&
queryOperator !== "notEmpty"
) {
// Ensure date value is a valid date and parse into correct format
if (!value) {
return
}
try {
value = new Date(value).toISOString()
} catch (error) {
return
}
}
if (type === "number" && typeof value === "string" && !isHbs) {
if (queryOperator === "oneOf") {
value = value.split(",").map(item => parseFloat(item))
} else {
value = parseFloat(value)
}
}
if (type === "boolean") {
value = `${value}`?.toLowerCase() === "true"
}
if (
["contains", "notContains", "containsAny"].includes(
operator.toLocaleString()
) &&
type === "array" &&
typeof value === "string"
) {
value = value.split(",")
}
if (operator.toLocaleString().startsWith("range") && query.range) {
const minint =
SqlNumberTypeRangeMap[externalType as keyof typeof SqlNumberTypeRangeMap]
?.min || Number.MIN_SAFE_INTEGER
const maxint =
SqlNumberTypeRangeMap[externalType as keyof typeof SqlNumberTypeRangeMap]
?.max || Number.MAX_SAFE_INTEGER
if (!query.range[field]) {
query.range[field] = {
low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
}
}
if (operator === "rangeLow" && value != null && value !== "") {
query.range[field] = {
...query.range[field],
low: value,
}
} else if (operator === "rangeHigh" && value != null && value !== "") {
query.range[field] = {
...query.range[field],
high: value,
}
}
} else if (isLogicalSearchOperator(queryOperator)) {
// TODO
} else if (query[queryOperator] && operator !== "onEmptyFilter") {
if (type === "boolean") {
// Transform boolean filters to cope with null.
// "equals false" needs to be "not equals true"
// "not equals false" needs to be "equals true"
if (queryOperator === "equal" && value === false) {
query.notEqual = query.notEqual || {}
query.notEqual[field] = true
} else if (queryOperator === "notEqual" && value === false) {
query.equal = query.equal || {}
query.equal[field] = true
} else {
query[queryOperator] ??= {}
query[queryOperator]![field] = value
}
} else {
query[queryOperator] ??= {}
query[queryOperator]![field] = value
}
}
return query
}
export const buildQueryLegacy = (
filter?: LegacyFilter[] | SearchFilters
): SearchFilters | undefined => {
// this is of type SearchFilters or is undefined
if (!Array.isArray(filter)) {
return filter
}
let query: SearchFilters = { let query: SearchFilters = {
string: {}, string: {},
fuzzy: {}, fuzzy: {},
@ -368,13 +501,15 @@ export const buildQuery = (filter: SearchFilter[]) => {
value = `${value}`?.toLowerCase() === "true" value = `${value}`?.toLowerCase() === "true"
} }
if ( if (
["contains", "notContains", "containsAny"].includes(operator) && ["contains", "notContains", "containsAny"].includes(
operator.toLocaleString()
) &&
type === "array" && type === "array" &&
typeof value === "string" typeof value === "string"
) { ) {
value = value.split(",") value = value.split(",")
} }
if (operator.startsWith("range") && query.range) { if (operator.toLocaleString().startsWith("range") && query.range) {
const minint = const minint =
SqlNumberTypeRangeMap[ SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap externalType as keyof typeof SqlNumberTypeRangeMap
@ -401,7 +536,7 @@ export const buildQuery = (filter: SearchFilter[]) => {
} }
} }
} else if (isLogicalSearchOperator(queryOperator)) { } else if (isLogicalSearchOperator(queryOperator)) {
// TODO // ignore
} else if (query[queryOperator] && operator !== "onEmptyFilter") { } else if (query[queryOperator] && operator !== "onEmptyFilter") {
if (type === "boolean") { if (type === "boolean") {
// Transform boolean filters to cope with null. // Transform boolean filters to cope with null.
@ -423,10 +558,60 @@ export const buildQuery = (filter: SearchFilter[]) => {
} }
} }
}) })
return query return query
} }
/**
* Converts a **SearchFilterGroup** filter definition into a grouped
* search query of type **SearchFilters**
*
* Legacy support remains for the old **SearchFilter[]** format.
* These will be migrated to an appropriate **SearchFilters** object, if encountered
*
* @param filter
*
* @returns {SearchFilters}
*/
export const buildQuery = (
filter?: SearchFilterGroup | LegacyFilter[]
): SearchFilters | undefined => {
const parsedFilter: SearchFilterGroup | undefined =
processSearchFilters(filter)
if (!parsedFilter) {
return
}
const operatorMap: { [key in FilterGroupLogicalOperator]: LogicalOperator } =
{
[FilterGroupLogicalOperator.ALL]: LogicalOperator.AND,
[FilterGroupLogicalOperator.ANY]: LogicalOperator.OR,
}
const globalOnEmpty = parsedFilter.onEmptyFilter
? parsedFilter.onEmptyFilter
: null
const globalOperator: LogicalOperator =
operatorMap[parsedFilter.logicalOperator as FilterGroupLogicalOperator]
return {
...(globalOnEmpty ? { onEmptyFilter: globalOnEmpty } : {}),
[globalOperator]: {
conditions: parsedFilter.groups?.map((group: SearchFilterGroup) => {
return {
[operatorMap[group.logicalOperator]]: {
conditions: group.filters
?.map(x => buildCondition(x))
.filter(filter => filter),
},
}
}),
},
}
}
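For reference, a two-level group in the new format maps onto the grouped SearchFilters output roughly like this; the field names and values are invented, and it assumes LogicalOperator.AND/OR serialise to "$and"/"$or" (the enum values are not shown in this diff).

// UI-side definition: match rows where status is "open" OR priority is "high"
const group = {
  logicalOperator: "all", // FilterGroupLogicalOperator.ALL
  groups: [
    {
      logicalOperator: "any", // FilterGroupLogicalOperator.ANY
      filters: [
        { field: "status", operator: "equal", type: "string", value: "open" },
        { field: "priority", operator: "equal", type: "string", value: "high" },
      ],
    },
  ],
}

// Approximate shape returned by buildQuery(group): one outer AND wrapping an OR group.
// Each condition also carries the empty operator maps (string: {}, fuzzy: {}, ...) that
// buildCondition initialises; they are elided here for brevity.
const expected = {
  $and: {
    conditions: [
      {
        $or: {
          conditions: [
            { equal: { status: "open" } },
            { equal: { priority: "high" } },
          ],
        },
      },
    ],
  },
}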
// The frontend can send single values for array fields sometimes, so to handle // The frontend can send single values for array fields sometimes, so to handle
// this we convert them to arrays at the controller level so that nothing below // this we convert them to arrays at the controller level so that nothing below
// this has to worry about the non-array values. // this has to worry about the non-array values.


@ -1,4 +1,5 @@
import cronValidate from "cron-validate" import cronValidate from "cron-validate"
import cronParser from "cron-parser"
const INPUT_CRON_START = "(Input cron: " const INPUT_CRON_START = "(Input cron: "
const ERROR_SWAPS = { const ERROR_SWAPS = {
@ -30,6 +31,19 @@ function improveErrors(errors: string[]): string[] {
return finalErrors return finalErrors
} }
export function getNextExecutionDates(
cronExpression: string,
limit: number = 4
): string[] {
const parsed = cronParser.parseExpression(cronExpression)
const nextRuns = []
for (let i = 0; i < limit; i++) {
nextRuns.push(parsed.next().toString())
}
return nextRuns
}
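getNextExecutionDates just walks the cron-parser iterator, so for a weekday-morning schedule it returns the next few concrete run times as strings. A small standalone equivalent (output obviously depends on the current date and timezone):

import cronParser from "cron-parser"

// The next three runs of "every weekday at 09:00"
const interval = cronParser.parseExpression("0 9 * * 1-5")
const nextRuns = [1, 2, 3].map(() => interval.next().toString())
console.log(nextRuns)
// e.g. [ 'Fri Oct 04 2024 09:00:00 GMT+0100', 'Mon Oct 07 2024 09:00:00 GMT+0100', ... ]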
export function validate( export function validate(
cronExpression: string cronExpression: string
): { valid: false; err: string[] } | { valid: true } { ): { valid: false; err: string[] } | { valid: true } {


@ -2,3 +2,4 @@ export * from "./helpers"
export * from "./integrations" export * from "./integrations"
export * as cron from "./cron" export * as cron from "./cron"
export * as schema from "./schema" export * as schema from "./schema"
export * as views from "./views"


@ -45,3 +45,7 @@ export function decodeNonAscii(str: string): string {
String.fromCharCode(parseInt(p1, 16)) String.fromCharCode(parseInt(p1, 16))
) )
} }
export function isNumeric(field: FieldSchema) {
return field.type === FieldType.NUMBER || field.type === FieldType.BIGINT
}


@ -0,0 +1,33 @@
import {
BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
ViewFieldMetadata,
ViewV2,
} from "@budibase/types"
import { pickBy } from "lodash"
export function isCalculationField(
field: ViewFieldMetadata
): field is ViewCalculationFieldMetadata {
return "calculationType" in field
}
export function isBasicViewField(
field: ViewFieldMetadata
): field is BasicViewFieldMetadata {
return !isCalculationField(field)
}
type UnsavedViewV2 = Omit<ViewV2, "id" | "version">
export function isCalculationView(view: UnsavedViewV2) {
return Object.values(view.schema || {}).some(isCalculationField)
}
export function calculationFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, isCalculationField)
}
export function basicFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, field => !isCalculationField(field))
}
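These helpers hinge entirely on whether a view column carries a calculationType, which is the same property the schema guards and output processing key off. A quick illustrative split over a made-up view schema:

import { pickBy } from "lodash"

const viewSchema: Record<string, { visible?: boolean; calculationType?: string; field?: string }> = {
  total: { calculationType: "sum", field: "amount", visible: true },
  region: { visible: true },
}

const isCalc = (field: { calculationType?: string }) => "calculationType" in field

console.log(Object.keys(pickBy(viewSchema, isCalc)))          // [ 'total' ]  (calculationFields)
console.log(Object.keys(pickBy(viewSchema, f => !isCalc(f)))) // [ 'region' ] (basicFields)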


@ -53,8 +53,9 @@ const allowDefaultColumnByType: Record<FieldType, boolean> = {
[FieldType.DATETIME]: true, [FieldType.DATETIME]: true,
[FieldType.LONGFORM]: true, [FieldType.LONGFORM]: true,
[FieldType.STRING]: true, [FieldType.STRING]: true,
[FieldType.OPTIONS]: true,
[FieldType.ARRAY]: true,
[FieldType.OPTIONS]: false,
[FieldType.AUTO]: false, [FieldType.AUTO]: false,
[FieldType.INTERNAL]: false, [FieldType.INTERNAL]: false,
[FieldType.BARCODEQR]: false, [FieldType.BARCODEQR]: false,
@ -64,7 +65,6 @@ const allowDefaultColumnByType: Record<FieldType, boolean> = {
[FieldType.ATTACHMENTS]: false, [FieldType.ATTACHMENTS]: false,
[FieldType.ATTACHMENT_SINGLE]: false, [FieldType.ATTACHMENT_SINGLE]: false,
[FieldType.SIGNATURE_SINGLE]: false, [FieldType.SIGNATURE_SINGLE]: false,
[FieldType.ARRAY]: false,
[FieldType.LINK]: false, [FieldType.LINK]: false,
[FieldType.BB_REFERENCE]: false, [FieldType.BB_REFERENCE]: false,
[FieldType.BB_REFERENCE_SINGLE]: false, [FieldType.BB_REFERENCE_SINGLE]: false,


@ -1,4 +1,17 @@
import {
LegacyFilter,
SearchFilterGroup,
FilterGroupLogicalOperator,
SearchFilters,
BasicOperator,
ArrayOperator,
} from "@budibase/types"
import * as Constants from "./constants" import * as Constants from "./constants"
import { removeKeyNumbering } from "./filters"
// an array of keys from filter type to properties that are in the type
// this can then be converted using .fromEntries to an object
type AllowedFilters = [keyof LegacyFilter, LegacyFilter[keyof LegacyFilter]][]
export function unreachable( export function unreachable(
value: never, value: never,
@ -77,3 +90,124 @@ export function trimOtherProps(object: any, allowedProps: string[]) {
) )
return result return result
} }
export function isSupportedUserSearch(query: SearchFilters) {
const allowed = [
{ op: BasicOperator.STRING, key: "email" },
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
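Only three query shapes get through this guard: a string match on email, an equality match on _id, or a oneOf match on _id; anything else produces the 400 thrown in the users controller further down. Illustrative calls (IDs invented), using the same shared-core utils export the controller imports:

import { utils } from "@budibase/shared-core"

console.log(utils.isSupportedUserSearch({ string: { email: "jo" } }))              // true
console.log(utils.isSupportedUserSearch({ equal: { _id: "us_123" } }))             // true
console.log(utils.isSupportedUserSearch({ oneOf: { _id: ["us_123", "us_456"] } })) // true

console.log(utils.isSupportedUserSearch({ fuzzy: { email: "jo" } }))               // false: operator not allowed
console.log(utils.isSupportedUserSearch({ equal: { firstName: "Jo" } }))           // false: field not allowed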
/**
* Processes the filter config. Filters are migrated from
* SearchFilter[] to SearchFilterGroup
*
* If filters is not an array, the migration is skipped
*
* @param {LegacyFilter[] | SearchFilterGroup} filters
*/
export const processSearchFilters = (
filters: LegacyFilter[] | SearchFilterGroup | undefined
): SearchFilterGroup | undefined => {
if (!filters) {
return
}
// Base search config.
const defaultCfg: SearchFilterGroup = {
logicalOperator: FilterGroupLogicalOperator.ALL,
groups: [],
}
const filterAllowedKeys = [
"field",
"operator",
"value",
"type",
"externalType",
"valueType",
"noValue",
"formulaType",
]
if (Array.isArray(filters)) {
let baseGroup: SearchFilterGroup = {
filters: [],
logicalOperator: FilterGroupLogicalOperator.ALL,
}
return filters.reduce((acc: SearchFilterGroup, filter: LegacyFilter) => {
// Sort the properties for easier debugging
const filterPropertyKeys = (Object.keys(filter) as (keyof LegacyFilter)[])
.sort((a, b) => {
return a.localeCompare(b)
})
.filter(key => key in filter)
if (filterPropertyKeys.length == 1) {
const key = filterPropertyKeys[0],
value = filter[key]
// Global
if (key === "onEmptyFilter") {
// unset otherwise
acc.onEmptyFilter = value
} else if (key === "operator" && value === "allOr") {
// Group 1 logical operator
baseGroup.logicalOperator = FilterGroupLogicalOperator.ANY
}
return acc
}
const allowedFilterSettings: AllowedFilters = filterPropertyKeys.reduce(
(acc: AllowedFilters, key) => {
const value = filter[key]
if (filterAllowedKeys.includes(key)) {
if (key === "field") {
acc.push([key, removeKeyNumbering(value)])
} else {
acc.push([key, value])
}
}
return acc
},
[]
)
const migratedFilter: LegacyFilter = Object.fromEntries(
allowedFilterSettings
) as LegacyFilter
baseGroup.filters!.push(migratedFilter)
if (!acc.groups || !acc.groups.length) {
// init the base group
acc.groups = [baseGroup]
}
return acc
}, defaultCfg)
} else if (!filters?.groups) {
return
}
return filters
}
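Concretely, a flat legacy filter array is folded into a single group: an allOr marker flips that group's logicalOperator to ANY, an onEmptyFilter entry is hoisted to the top level, and the remaining filters are kept with only the allowed keys (the "1:" field prefix below assumes that is the key numbering removeKeyNumbering strips). Sketch with invented fields:

// Input (LegacyFilter[]):
const legacy = [
  { operator: "allOr" },
  { onEmptyFilter: "all" },
  { field: "1:status", operator: "equal", type: "string", value: "open" },
]

// Approximate result of processSearchFilters(legacy):
const migrated = {
  logicalOperator: "all",       // FilterGroupLogicalOperator.ALL on the top-level group
  onEmptyFilter: "all",         // hoisted global setting
  groups: [
    {
      logicalOperator: "any",   // flipped by the allOr marker
      filters: [
        { field: "status", operator: "equal", type: "string", value: "open" },
      ],
    },
  ],
}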


@ -1,7 +1,11 @@
import { FieldType } from "../../documents" import { FieldType } from "../../documents"
import { EmptyFilterOption, SearchFilters } from "../../sdk" import {
EmptyFilterOption,
FilterGroupLogicalOperator,
SearchFilters,
} from "../../sdk"
export type SearchFilter = { export type LegacyFilter = {
operator: keyof SearchFilters | "rangeLow" | "rangeHigh" operator: keyof SearchFilters | "rangeLow" | "rangeHigh"
onEmptyFilter?: EmptyFilterOption onEmptyFilter?: EmptyFilterOption
field: string field: string
@ -9,3 +13,11 @@ export type SearchFilter = {
value: any value: any
externalType?: string externalType?: string
} }
// this is a type purely used by the UI
export type SearchFilterGroup = {
logicalOperator: FilterGroupLogicalOperator
onEmptyFilter?: EmptyFilterOption
groups?: SearchFilterGroup[]
filters?: LegacyFilter[]
}


@ -161,6 +161,7 @@ export interface OptionsFieldMetadata extends BaseFieldSchema {
constraints: FieldConstraints & { constraints: FieldConstraints & {
inclusion: string[] inclusion: string[]
} }
default?: string
} }
export interface ArrayFieldMetadata extends BaseFieldSchema { export interface ArrayFieldMetadata extends BaseFieldSchema {
@ -169,6 +170,7 @@ export interface ArrayFieldMetadata extends BaseFieldSchema {
type: JsonFieldSubType.ARRAY type: JsonFieldSubType.ARRAY
inclusion: string[] inclusion: string[]
} }
default?: string[]
} }
interface BaseFieldSchema extends UIFieldMetadata { interface BaseFieldSchema extends UIFieldMetadata {


@ -1,7 +1,7 @@
import { SearchFilter, SortOrder, SortType } from "../../api" import { LegacyFilter, SearchFilterGroup, SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table" import { UIFieldMetadata } from "./table"
import { Document } from "../document" import { Document } from "../document"
import { DBView } from "../../sdk" import { DBView, SearchFilters } from "../../sdk"
export type ViewTemplateOpts = { export type ViewTemplateOpts = {
field: string field: string
@ -33,15 +33,24 @@ export interface View {
groupBy?: string groupBy?: string
} }
export type ViewFieldMetadata = UIFieldMetadata & { export interface BasicViewFieldMetadata extends UIFieldMetadata {
readonly?: boolean readonly?: boolean
columns?: Record<string, RelationSchemaField> columns?: Record<string, RelationSchemaField>
} }
export type RelationSchemaField = UIFieldMetadata & { export interface RelationSchemaField extends UIFieldMetadata {
readonly?: boolean readonly?: boolean
} }
export interface ViewCalculationFieldMetadata extends BasicViewFieldMetadata {
calculationType: CalculationType
field: string
}
export type ViewFieldMetadata =
| BasicViewFieldMetadata
| ViewCalculationFieldMetadata
export enum CalculationType { export enum CalculationType {
SUM = "sum", SUM = "sum",
AVG = "avg", AVG = "avg",
@ -50,26 +59,25 @@ export enum CalculationType {
MAX = "max", MAX = "max",
} }
export type ViewCalculationFieldMetadata = ViewFieldMetadata & {
calculationType: CalculationType
field: string
}
export interface ViewV2 { export interface ViewV2 {
version: 2 version: 2
id: string id: string
name: string name: string
primaryDisplay?: string primaryDisplay?: string
tableId: string tableId: string
query?: SearchFilter[] query?: LegacyFilter[] | SearchFilters
// duplicate to store UI information about filters
queryUI?: SearchFilterGroup
sort?: { sort?: {
field: string field: string
order?: SortOrder order?: SortOrder
type?: SortType type?: SortType
} }
schema?: Record<string, ViewFieldMetadata | ViewCalculationFieldMetadata> schema?: ViewV2Schema
} }
export type ViewV2Schema = Record<string, ViewFieldMetadata>
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema
export interface ViewCountOrSumSchema { export interface ViewCountOrSumSchema {


@ -1,8 +1,14 @@
import { SortOrder, SortType } from "../api" import { SortOrder, SortType } from "../api"
import { SearchFilters } from "./search" import { SearchFilters } from "./search"
import { Row } from "../documents" import { CalculationType, Row } from "../documents"
import { WithRequired } from "../shared" import { WithRequired } from "../shared"
export interface Aggregation {
name: string
calculationType: CalculationType
field: string
}
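Each aggregation names an output column, a calculation type and the source field, which the SQL layer can turn into an aggregate select. A hedged sketch of that translation (not the real query builder, just the idea):

type Aggregation = { name: string; calculationType: string; field: string }

function toAggregateSelect(aggregations: Aggregation[]): string[] {
  // e.g. SUM("amount") AS "total"
  return aggregations.map(
    a => `${a.calculationType.toUpperCase()}("${a.field}") AS "${a.name}"`
  )
}

console.log(toAggregateSelect([{ name: "total", calculationType: "sum", field: "amount" }]))
// [ 'SUM("amount") AS "total"' ]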
export interface SearchParams { export interface SearchParams {
tableId?: string tableId?: string
viewId?: string viewId?: string


@ -2,6 +2,7 @@ import { Operation } from "./datasources"
import { Row, Table, DocumentType } from "../documents" import { Row, Table, DocumentType } from "../documents"
import { SortOrder, SortType } from "../api" import { SortOrder, SortType } from "../api"
import { Knex } from "knex" import { Knex } from "knex"
import { Aggregation } from "./row"
export enum BasicOperator { export enum BasicOperator {
EQUAL = "equal", EQUAL = "equal",
@ -67,6 +68,8 @@ type RangeFilter = Record<
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never [InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
} }
type LogicalFilter = { conditions: SearchFilters[] }
export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter
export interface SearchFilters { export interface SearchFilters {
@ -91,12 +94,8 @@ export interface SearchFilters {
// specific document type (such as just rows) // specific document type (such as just rows)
documentType?: DocumentType documentType?: DocumentType
[LogicalOperator.AND]?: { [LogicalOperator.AND]?: LogicalFilter
conditions: SearchFilters[] [LogicalOperator.OR]?: LogicalFilter
}
[LogicalOperator.OR]?: {
conditions: SearchFilters[]
}
} }
export type SearchFilterKey = keyof Omit< export type SearchFilterKey = keyof Omit<
@ -154,6 +153,7 @@ export interface QueryJson {
} }
resource?: { resource?: {
fields: string[] fields: string[]
aggregations?: Aggregation[]
} }
filters?: SearchFilters filters?: SearchFilters
sort?: SortJson sort?: SortJson
@ -191,6 +191,11 @@ export enum EmptyFilterOption {
RETURN_NONE = "none", RETURN_NONE = "none",
} }
export enum FilterGroupLogicalOperator {
ALL = "all",
ANY = "any",
}
export enum SqlClient { export enum SqlClient {
MS_SQL = "mssql", MS_SQL = "mssql",
POSTGRES = "pg", POSTGRES = "pg",


@ -4,6 +4,29 @@ export type DeepPartial<T> = {
export type ISO8601 = string export type ISO8601 = string
/**
* RequiredKeys make it such that you _must_ assign a value to every key in the
* type. It differs subtly from Required<T> in that it doesn't change the type
* of the fields, you can specify undefined as a value and that's fine.
*
* Example:
*
* ```ts
* interface Foo {
* bar: string
* baz?: string
* }
*
* type FooRequiredKeys = RequiredKeys<Foo>
* type FooRequired = Required<Foo>
*
* const a: FooRequiredKeys = { bar: "hello", baz: undefined }
* const b: FooRequired = { bar: "hello", baz: undefined }
* ```
*
* In this code, a passes type checking whereas b does not. This is because
* Required<Foo> makes baz non-optional.
*/
export type RequiredKeys<T> = { export type RequiredKeys<T> = {
[K in keyof Required<T>]: T[K] [K in keyof Required<T>]: T[K]
} }


@ -37,7 +37,7 @@ import {
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users" import { checkAnyUserExists } from "../../../utilities/users"
import { isEmailConfigured } from "../../../utilities/email" import { isEmailConfigured } from "../../../utilities/email"
import { BpmStatusKey, BpmStatusValue } from "@budibase/shared-core" import { BpmStatusKey, BpmStatusValue, utils } from "@budibase/shared-core"
const MAX_USERS_UPLOAD_LIMIT = 1000 const MAX_USERS_UPLOAD_LIMIT = 1000
@ -256,7 +256,7 @@ export const search = async (ctx: Ctx<SearchUsersRequest>) => {
} }
} }
// Validate we aren't trying to search on any illegal fields // Validate we aren't trying to search on any illegal fields
if (!userSdk.core.isSupportedUserSearch(body.query)) { if (!utils.isSupportedUserSearch(body.query)) {
ctx.throw(400, "Can only search by string.email, equal._id or oneOf._id") ctx.throw(400, "Can only search by string.email, equal._id or oneOf._id")
} }
} }

yarn.lock: 591 lines changed (diff suppressed because it is too large)