Merge branch 'master' of github.com:Budibase/budibase into feature/role-multi-inheritance

Author: mike12345567
Date: 2024-10-03 17:22:09 +01:00
Commit: bfbc576ed1
86 changed files with 5315 additions and 4156 deletions

View File

@ -23,6 +23,7 @@ jobs:
PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
PAYLOAD_LICENSE_TYPE: "free"
PAYLOAD_DEPLOY: true
with:
repository: budibase/budibase-deploys
event: featurebranch-qa-deploy

View File

@ -1,3 +1,3 @@
nodejs 20.10.0
python 3.10.0
yarn 1.22.19
yarn 1.22.22

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.8",
"version": "2.32.11",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit 558a32dfd1f55bd894804a503e7e1090937df88c
Subproject commit 3e24f6293ff5ee5f9b42822e001504e3bbf19cc0

View File

@ -10,7 +10,7 @@ import {
StaticDatabases,
DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext, Snippet, App } from "@budibase/types"
import { Database, IdentityContext, Snippet, App, Table } from "@budibase/types"
import { ContextMap } from "./types"
let TEST_APP_ID: string | null = null
@ -394,3 +394,20 @@ export function setFeatureFlags(key: string, value: Record<string, any>) {
context.featureFlagCache ??= {}
context.featureFlagCache[key] = value
}
export function getTableForView(viewId: string): Table | undefined {
const context = getCurrentContext()
if (!context) {
return
}
return context.viewToTableCache?.[viewId]
}
export function setTableForView(viewId: string, table: Table) {
const context = getCurrentContext()
if (!context) {
return
}
context.viewToTableCache ??= {}
context.viewToTableCache[viewId] = table
}
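
The two helpers above add a per-request view → table cache to the context map. A minimal usage sketch (not part of the commit), assuming the helpers are re-exported on the `context` namespace like the rest of this file; `loadTableForView` is a hypothetical resolver standing in for the real lookup:

```typescript
import { context } from "@budibase/backend-core"
import { Table } from "@budibase/types"

async function resolveTableForView(
  viewId: string,
  loadTableForView: (viewId: string) => Promise<Table>
): Promise<Table> {
  // cache hit: skip the database lookup for the rest of this request
  const cached = context.getTableForView(viewId)
  if (cached) {
    return cached
  }
  const table = await loadTableForView(viewId)
  // remember the result for subsequent lookups within the same context
  context.setTableForView(viewId, table)
  return table
}
```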

View File

@ -1,4 +1,4 @@
import { IdentityContext, Snippet, VM } from "@budibase/types"
import { IdentityContext, Snippet, Table, VM } from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { GoogleSpreadsheet } from "google-spreadsheet"
@ -21,4 +21,5 @@ export type ContextMap = {
featureFlagCache?: {
[key: string]: Record<string, any>
}
viewToTableCache?: Record<string, Table>
}

View File

@ -612,7 +612,6 @@ async function runQuery<T>(
* limit {number} The number of results to fetch
* bookmark {string|null} Current bookmark in the recursive search
* rows {array|null} Current results in the recursive search
* @returns {Promise<*[]|*>}
*/
async function recursiveSearch<T>(
dbName: string,

View File

@ -6,7 +6,7 @@ import {
ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"
/**
* If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -66,9 +66,8 @@ export function getQueryIndex(viewName: ViewName) {
/**
* Check if a given ID is that of a table.
* @returns {boolean}
*/
export const isTableId = (id: string) => {
export const isTableId = (id: string): boolean => {
// this includes datasource plus tables
return (
!!id &&
@ -77,13 +76,16 @@ export const isTableId = (id: string) => {
)
}
export function isViewId(id: string): boolean {
return !!id && id.startsWith(`${VirtualDocumentType.VIEW}${SEPARATOR}`)
}
/**
* Check if a given ID is that of a datasource or datasource plus.
* @returns {boolean}
*/
export const isDatasourceId = (id: string) => {
export const isDatasourceId = (id: string): boolean => {
// this covers both datasources and datasource plus
return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
return !!id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}
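
A sketch of how these guards discriminate document IDs. It assumes the helpers are exposed through the `docIds` utilities, which is how a later change in this commit calls `docIds.isViewId`; the classifier itself is illustrative only.

```typescript
import { docIds } from "@budibase/backend-core"

type ResourceKind = "table" | "view" | "datasource" | "unknown"

function classifyResourceId(id: string): ResourceKind {
  // note: per the comments above, a datasource-plus table ID can satisfy
  // both isTableId and isDatasourceId, so the check order matters here
  if (docIds.isViewId(id)) {
    return "view"
  }
  if (docIds.isTableId(id)) {
    return "table"
  }
  if (docIds.isDatasourceId(id)) {
    return "datasource"
  }
  return "unknown"
}
```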
/**

View File

@ -1,5 +1,5 @@
export * as utils from "./utils"
export { default as Sql } from "./sql"
export { default as Sql, COUNT_FIELD_NAME } from "./sql"
export { default as SqlTable } from "./sqlTable"
export * as designDoc from "./designDoc"

View File

@ -11,10 +11,12 @@ import {
} from "./utils"
import SqlTableQueryBuilder from "./sqlTable"
import {
Aggregation,
AnySearchFilter,
ArrayOperator,
BasicOperator,
BBReferenceFieldMetadata,
CalculationType,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
@ -41,6 +43,8 @@ import { cloneDeep } from "lodash"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
export const COUNT_FIELD_NAME = "__bb_total"
function getBaseLimit() {
const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS)
@ -69,18 +73,6 @@ function prioritisedArraySort(toSort: string[], priorities: string[]) {
})
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
if (Array.isArray(query)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
@ -97,6 +89,13 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
return query
}
function isSqs(table: Table): boolean {
return (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
)
}
class InternalBuilder {
private readonly client: SqlClient
private readonly query: QueryJson
@ -178,15 +177,13 @@ class InternalBuilder {
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { meta, endpoint, resource, tableAliases } = this.query
const { meta, endpoint, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = tableAliases?.[endpoint.entityId]
? tableAliases?.[endpoint.entityId]
: endpoint.entityId
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)]
@ -811,26 +808,88 @@ class InternalBuilder {
return query
}
isSqs(): boolean {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.meta.table?.name === name) {
table = this.query.meta.table
} else if (!this.query.meta.tables?.[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.meta.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
name = table._id
}
const aliases = this.query.tableAliases || {}
return aliases[name] ? aliases[name] : name
}
addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder {
const primary = this.table.primary
const aliases = this.query.tableAliases
const aliased =
this.table.name && aliases?.[this.table.name]
? aliases[this.table.name]
: this.table.name
if (!primary) {
if (!this.table.primary) {
throw new Error("SQL counting requires primary key to be supplied")
}
return query.countDistinct(`${aliased}.${primary[0]} as total`)
return query.countDistinct(
`${this.getTableName()}.${this.table.primary[0]} as ${COUNT_FIELD_NAME}`
)
}
addAggregations(
query: Knex.QueryBuilder,
aggregations: Aggregation[]
): Knex.QueryBuilder {
const fields = this.query.resource?.fields || []
const tableName = this.getTableName()
if (fields.length > 0) {
query = query.groupBy(fields.map(field => `${tableName}.${field}`))
query = query.select(fields.map(field => `${tableName}.${field}`))
}
for (const aggregation of aggregations) {
const op = aggregation.calculationType
const field = `${tableName}.${aggregation.field} as ${aggregation.name}`
switch (op) {
case CalculationType.COUNT:
query = query.count(field)
break
case CalculationType.SUM:
query = query.sum(field)
break
case CalculationType.AVG:
query = query.avg(field)
break
case CalculationType.MIN:
query = query.min(field)
break
case CalculationType.MAX:
query = query.max(field)
break
}
}
return query
}
addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
let { sort } = this.query
let { sort, resource } = this.query
const primaryKey = this.table.primary
const tableName = getTableName(this.table)
const aliases = this.query.tableAliases
const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : this.table?.name
const aliased = this.getTableName()
if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table")
}
@ -862,7 +921,8 @@ class InternalBuilder {
// add sorting by the primary key if the result isn't already sorted by it
// (and no aggregations are applied), to make sure the result is deterministic
if (!sort || sort[primaryKey[0]] === undefined) {
const hasAggregations = (resource?.aggregations?.length ?? 0) > 0
if (!hasAggregations && (!sort || sort[primaryKey[0]] === undefined)) {
query = query.orderBy(`${aliased}.${primaryKey[0]}`)
}
return query
@ -1246,10 +1306,15 @@ class InternalBuilder {
}
}
// if counting use a distinct count, if aggregating add the aggregations, otherwise select the requested fields
query = !counting
? query.select(this.generateSelectStatement())
: this.addDistinctCount(query)
const aggregations = this.query.resource?.aggregations || []
if (counting) {
query = this.addDistinctCount(query)
} else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
} else {
query = query.select(this.generateSelectStatement())
}
// have to add after as well (this breaks MS-SQL)
if (!counting) {
query = this.addSorting(query)
@ -1468,23 +1533,40 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}
private getTableName(
table: Table,
aliases?: Record<string, string>
): string | undefined {
let name = table.name
if (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
if (!table._id) {
return
}
// SQS uses the table ID rather than the table name
name = table._id
}
return aliases?.[name] || name
}
convertJsonStringColumns<T extends Record<string, any>>(
table: Table,
results: T[],
aliases?: Record<string, string>
): T[] {
const tableName = getTableName(table)
const tableName = this.getTableName(table, aliases)
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
const fullName = `${aliasedTableName}.${name}`
const fullName = `${tableName}.${name}` as keyof T
for (let row of results) {
if (typeof row[fullName as keyof T] === "string") {
row[fullName as keyof T] = JSON.parse(row[fullName])
if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName])
}
if (typeof row[name as keyof T] === "string") {
if (typeof row[name] === "string") {
row[name as keyof T] = JSON.parse(row[name])
}
}
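
To make the new `addAggregations` branch concrete, here is a rough, standalone sketch of the Knex query shape it produces for a calculation view: group by and select the plain fields, then add one aggregate per calculation with the calculation name as the alias. The sqlite3 client, the `sales` table and its columns are invented for illustration.

```typescript
import Knex from "knex"

// arbitrary client purely so the builder can render SQL
const knex = Knex({ client: "sqlite3", useNullAsDefault: true })

const query = knex("sales")
  .groupBy(["sales.region"])
  .select(["sales.region"])
  .sum("sales.amount as totalAmount")
  .avg("sales.amount as averageAmount")

console.log(query.toString())
// roughly: select `sales`.`region`, sum(`sales`.`amount`) as `totalAmount`,
//          avg(`sales`.`amount`) as `averageAmount`
//          from `sales` group by `sales`.`region`
```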

View File

@ -17,11 +17,8 @@ import {
ContextUser,
CouchFindOptions,
DatabaseQueryOpts,
SearchFilters,
SearchUsersRequest,
User,
BasicOperator,
ArrayOperator,
} from "@budibase/types"
import * as context from "../context"
import { getGlobalDB } from "../context"
@ -45,32 +42,6 @@ function removeUserPassword(users: User | User[]) {
return users
}
export function isSupportedUserSearch(query: SearchFilters) {
const allowed = [
{ op: BasicOperator.STRING, key: "email" },
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
export async function bulkGetGlobalUsersById(
userIds: string[],
opts?: GetOpts

View File

@ -0,0 +1,29 @@
<script>
export let width
export let height
</script>
<svg
{width}
{height}
viewBox="0 0 13 12"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M9.4179 4.13222C9.4179 3.73121 9.26166 3.35428 8.97913 3.07175C8.41342 2.50538 7.4239 2.50408 6.85753 3.07175L5.64342 4.28586C5.6291 4.30018 5.61543 4.3158 5.60305 4.33143C5.58678 4.3438 5.5718 4.35747 5.55683 4.37244L0.491426 9.43785C0.208245 9.72103 0.052002 10.098 0.052002 10.4983C0.052002 10.8987 0.208245 11.2756 0.491426 11.5588C0.774607 11.842 1.15153 11.9982 1.5519 11.9982C1.95227 11.9982 2.32919 11.842 2.61238 11.5588L8.97848 5.1927C9.26166 4.90952 9.4179 4.53259 9.4179 4.13222ZM1.90539 10.8518C1.7166 11.0406 1.3872 11.0406 1.1984 10.8518C1.10401 10.7574 1.05193 10.6318 1.05193 10.4983C1.05193 10.3649 1.104 10.2392 1.1984 10.1448L5.99821 5.34503L6.70845 6.04875L1.90539 10.8518ZM8.2715 4.48571L7.41544 5.34178L6.7052 4.63805L7.56452 3.77873C7.7533 3.58995 8.08271 3.58929 8.2715 3.77939C8.36589 3.87313 8.41798 3.99877 8.41798 4.13223C8.41798 4.26569 8.3659 4.39132 8.2715 4.48571Z"
fill="#C8C8C8"
/>
<path
d="M11.8552 6.55146L11.0144 6.21913L10.879 5.32449C10.8356 5.03919 10.3737 4.98776 10.2686 5.255L9.93606 6.09642L9.04143 6.23085C8.89951 6.25216 8.78884 6.36658 8.77257 6.50947C8.75629 6.65253 8.83783 6.78826 8.97193 6.84148L9.81335 7.17464L9.94794 8.06862C9.9691 8.21053 10.0835 8.32121 10.2266 8.33748C10.3695 8.35375 10.5052 8.27221 10.5586 8.13811L10.8914 7.29751L11.7855 7.1621C11.9283 7.1403 12.0381 7.02637 12.0544 6.88348C12.0707 6.74058 11.9887 6.60403 11.8552 6.55146Z"
fill="#F9634C"
/>
<path
d="M8.94215 1.76145L9.78356 2.0946L9.91815 2.9885C9.93931 3.13049 10.0539 3.24117 10.1968 3.25744C10.3398 3.27371 10.4756 3.19218 10.5288 3.05807L10.8618 2.21739L11.7559 2.08207C11.8985 2.06034 12.0085 1.94633 12.0248 1.80344C12.0411 1.66054 11.959 1.524 11.8254 1.47143L10.9847 1.13909L10.8494 0.244456C10.806 -0.0409246 10.3439 -0.0922745 10.2388 0.174881L9.90643 1.0163L9.0118 1.15089C8.86972 1.17213 8.75905 1.28654 8.74278 1.42952C8.72651 1.57249 8.80804 1.70823 8.94215 1.76145Z"
fill="#8488FD"
/>
<path
d="M3.2379 2.46066L3.92063 2.73091L4.02984 3.45637C4.04709 3.57151 4.14002 3.66135 4.25606 3.67453C4.37194 3.6878 4.48212 3.62163 4.52541 3.51276L4.79557 2.83059L5.52094 2.72074C5.63682 2.70316 5.72601 2.61072 5.73936 2.49468C5.75254 2.37864 5.68597 2.26797 5.57758 2.22533L4.89533 1.95565L4.78548 1.22963C4.75016 0.998038 4.37535 0.956375 4.29007 1.17315L4.0204 1.85597L3.29437 1.96517C3.17915 1.98235 3.08931 2.07527 3.07613 2.19131C3.06294 2.30727 3.12902 2.41737 3.2379 2.46066Z"
fill="#F7D804"
/>
</svg>

View File

@ -67,6 +67,7 @@
"@spectrum-css/vars": "^3.0.1",
"@zerodevx/svelte-json-view": "^1.0.7",
"codemirror": "^5.65.16",
"cron-parser": "^4.9.0",
"dayjs": "^1.10.8",
"downloadjs": "1.4.7",
"fast-json-patch": "^3.1.1",

View File

@ -641,6 +641,8 @@
let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id]
if (isLoopBlock) {
runtimeName = `loop.${name}`
} else if (idx === 0) {
runtimeName = `trigger.${name}`
} else if (block.name.startsWith("JS")) {
runtimeName = hasUserDefinedName
? `stepsByName["${bindingName}"].${name}`
@ -650,7 +652,7 @@
? `stepsByName.${bindingName}.${name}`
: `steps.${idx - loopBlockCount}.${name}`
}
return idx === 0 ? `trigger.${name}` : runtimeName
return runtimeName
}
const determineCategoryName = (idx, isLoopBlock, bindingName) => {
@ -677,7 +679,7 @@
)
return {
readableBinding:
bindingName && !isLoopBlock
bindingName && !isLoopBlock && idx !== 0
? `steps.${bindingName}.${name}`
: runtimeBinding,
runtimeBinding,
@ -1048,7 +1050,7 @@
{:else if value.customType === "cron"}
<CronBuilder
on:change={e => onChange({ [key]: e.detail })}
value={inputData[key]}
cronExpression={inputData[key]}
/>
{:else if value.customType === "automationFields"}
<AutomationSelector

View File

@ -1,41 +1,70 @@
<script>
import { Button, Select, Input, Label } from "@budibase/bbui"
import {
Select,
InlineAlert,
Input,
Label,
Layout,
notifications,
} from "@budibase/bbui"
import { onMount, createEventDispatcher } from "svelte"
import { flags } from "stores/builder"
import { licensing } from "stores/portal"
import { API } from "api"
import MagicWand from "../../../../assets/MagicWand.svelte"
import { helpers, REBOOT_CRON } from "@budibase/shared-core"
const dispatch = createEventDispatcher()
export let value
export let cronExpression
let error
let nextExecutions
// AI prompt
let aiCronPrompt = ""
let loadingAICronExpression = false
$: aiEnabled =
$licensing.customAIConfigsEnabled || $licensing.budibaseAIEnabled
$: {
const exists = CRON_EXPRESSIONS.some(cron => cron.value === value)
const customIndex = CRON_EXPRESSIONS.findIndex(
cron => cron.label === "Custom"
)
if (!exists && customIndex === -1) {
CRON_EXPRESSIONS[0] = { label: "Custom", value: value }
} else if (exists && customIndex !== -1) {
CRON_EXPRESSIONS.splice(customIndex, 1)
if (cronExpression) {
try {
nextExecutions = helpers.cron
.getNextExecutionDates(cronExpression)
.join("\n")
} catch (err) {
nextExecutions = null
}
}
}
const onChange = e => {
if (value !== REBOOT_CRON) {
if (e.detail !== REBOOT_CRON) {
error = helpers.cron.validate(e.detail).err
}
if (e.detail === value || error) {
if (e.detail === cronExpression || error) {
return
}
value = e.detail
cronExpression = e.detail
dispatch("change", e.detail)
}
const updatePreset = e => {
aiCronPrompt = ""
onChange(e)
}
const updateCronExpression = e => {
aiCronPrompt = ""
cronExpression = null
nextExecutions = null
onChange(e)
}
let touched = false
let presets = false
const CRON_EXPRESSIONS = [
{
@ -64,45 +93,130 @@
})
}
})
async function generateAICronExpression() {
loadingAICronExpression = true
try {
const response = await API.generateCronExpression({
prompt: aiCronPrompt,
})
cronExpression = response.message
dispatch("change", response.message)
} catch (err) {
notifications.error(err.message)
} finally {
loadingAICronExpression = false
}
}
</script>
<div class="block-field">
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<Layout noPadding gap="S">
<Select
on:change={updatePreset}
value={cronExpression || "Custom"}
secondary
extraThin
label="Use a Preset (Optional)"
options={CRON_EXPRESSIONS}
/>
{#if aiEnabled}
<div class="cron-ai-generator">
<Input
bind:value={aiCronPrompt}
label="Generate Cron Expression with AI"
size="S"
placeholder="Run every hour between 1pm and 4pm, every day of the week"
/>
{#if aiCronPrompt}
<div
class="icon"
class:pulsing-text={loadingAICronExpression}
on:click={generateAICronExpression}
>
<MagicWand height="17" width="17" />
</div>
{/if}
</div>
{/if}
<Input
label="Cron Expression"
{error}
on:change={onChange}
{value}
on:change={updateCronExpression}
value={cronExpression}
on:blur={() => (touched = true)}
updateOnChange={false}
/>
{#if touched && !value}
{#if touched && !cronExpression}
<Label><div class="error">Please specify a CRON expression</div></Label>
{/if}
<div class="presets">
<Button on:click={() => (presets = !presets)}
>{presets ? "Hide" : "Show"} Presets</Button
>
{#if presets}
<Select
on:change={onChange}
value={value || "Custom"}
secondary
extraThin
label="Presets"
options={CRON_EXPRESSIONS}
/>
{/if}
</div>
</div>
{#if nextExecutions}
<InlineAlert
type="info"
header="Next Executions"
message={nextExecutions}
/>
{/if}
</Layout>
<style>
.presets {
margin-top: var(--spacing-m);
.cron-ai-generator {
flex: 1;
position: relative;
}
.block-field {
padding-top: var(--spacing-s);
.icon {
right: 1px;
bottom: 1px;
position: absolute;
justify-content: center;
align-items: center;
display: flex;
flex-direction: row;
box-sizing: border-box;
border-left: 1px solid var(--spectrum-alias-border-color);
border-top-right-radius: var(--spectrum-alias-border-radius-regular);
border-bottom-right-radius: var(--spectrum-alias-border-radius-regular);
width: 31px;
color: var(--spectrum-alias-text-color);
background-color: var(--spectrum-global-color-gray-75);
transition: background-color
var(--spectrum-global-animation-duration-100, 130ms),
box-shadow var(--spectrum-global-animation-duration-100, 130ms),
border-color var(--spectrum-global-animation-duration-100, 130ms);
height: calc(var(--spectrum-alias-item-height-m) - 2px);
}
.icon:hover {
cursor: pointer;
color: var(--spectrum-alias-text-color-hover);
background-color: var(--spectrum-global-color-gray-50);
border-color: var(--spectrum-alias-border-color-hover);
}
.error {
padding-top: var(--spacing-xs);
color: var(--spectrum-global-color-red-500);
}
.pulsing-text {
font-size: 24px;
font-weight: bold;
animation: pulse 1.5s infinite;
}
@keyframes pulse {
0% {
opacity: 0.3;
transform: scale(1);
}
50% {
opacity: 1;
transform: scale(1.05);
}
100% {
opacity: 0.3;
transform: scale(1);
}
}
</style>
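
The component above leans on the shared cron helpers for validation and for previewing upcoming run times. A minimal sketch of that flow outside Svelte, based on the calls used above; the return shapes (`{ err }` and an array of upcoming dates) are inferred from how the component consumes them.

```typescript
import { helpers } from "@budibase/shared-core"

function describeCron(expression: string): string {
  // validate first, mirroring the component: only surface `err` when present
  const { err } = helpers.cron.validate(expression)
  if (err) {
    return `Invalid expression: ${err}`
  }
  // same call the "Next Executions" preview uses, joined for display
  const nextRuns = helpers.cron.getNextExecutionDates(expression)
  return `Next executions:\n${nextRuns.join("\n")}`
}

console.log(describeCron("0 * * * *")) // top of every hour
```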

View File

@ -21,6 +21,7 @@
PROTECTED_EXTERNAL_COLUMNS,
canHaveDefaultColumn,
} from "@budibase/shared-core"
import { makePropSafe } from "@budibase/string-templates"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
import { tables, datasources } from "stores/builder"
@ -46,6 +47,7 @@
import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
import OptionsEditor from "./OptionsEditor.svelte"
import { isEnabled } from "helpers/featureFlags"
import { getUserBindings } from "dataBinding"
const AUTO_TYPE = FieldType.AUTO
const FORMULA_TYPE = FieldType.FORMULA
@ -191,6 +193,19 @@
fieldId: makeFieldId(t.type, t.subtype),
...t,
}))
$: defaultValueBindings = [
{
type: "context",
runtimeBinding: `${makePropSafe("now")}`,
readableBinding: `Date`,
category: "Date",
icon: "Date",
display: {
name: "Server date",
},
},
...getUserBindings(),
]
const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id
@ -781,9 +796,8 @@
setRequired(false)
}
}}
bindings={getBindings({ table })}
bindings={defaultValueBindings}
allowJS
context={rowGoldenSample}
/>
</div>
{/if}

View File

@ -2,7 +2,12 @@
import { getContext } from "svelte"
import CreateEditColumn from "components/backend/DataTable/modals/CreateEditColumn.svelte"
const { datasource } = getContext("grid")
const { datasource, rows } = getContext("grid")
const onUpdate = async () => {
await datasource.actions.refreshDefinition()
await rows.actions.refreshData()
}
</script>
<CreateEditColumn on:updatecolumns={datasource.actions.refreshDefinition} />
<CreateEditColumn on:updatecolumns={onUpdate} />

View File

@ -63,7 +63,7 @@
// Look up the component tree and find something that is provided by an
// ancestor that matches our datasource. This is for backwards compatibility
// as previously we could use the "closest" context.
for (let id of path.reverse().slice(1)) {
for (let id of path.toReversed().slice(1)) {
// Check for matching view datasource
if (
dataSource.type === "viewV2" &&

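The switch from `reverse()` to `toReversed()` matters because `reverse()` mutates the array in place, so walking the component path would have permanently reordered the shared `path` value; `toReversed()` (ES2023) returns a copy. A small sketch of the difference, with invented path entries:

```typescript
const path = ["root", "screen", "container", "provider"]

// reverse() mutates its receiver: had the loop kept using path.reverse(),
// every other holder of the same array reference would see the component
// path flipped after the lookup ran.
const mutated = [...path]
mutated.reverse() // reversed copy; only safe because we spread first

// toReversed() (ES2023, available on the Node 20 runtime pinned in
// .tool-versions earlier in this commit) returns a new array instead.
for (const id of path.toReversed().slice(1)) {
  console.log("ancestor:", id)
}

console.log(path[0]) // "root" – original order preserved
```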
View File

@ -0,0 +1,11 @@
export const buildAIEndpoints = API => ({
/**
* Generates a cron expression from a prompt
*/
generateCronExpression: async ({ prompt }) => {
return await API.post({
url: "/api/ai/cron",
body: { prompt },
})
},
})

View File

@ -2,6 +2,7 @@ import { Helpers } from "@budibase/bbui"
import { Header } from "@budibase/shared-core"
import { ApiVersion } from "../constants"
import { buildAnalyticsEndpoints } from "./analytics"
import { buildAIEndpoints } from "./ai"
import { buildAppEndpoints } from "./app"
import { buildAttachmentEndpoints } from "./attachments"
import { buildAuthEndpoints } from "./auth"
@ -268,6 +269,7 @@ export const createAPIClient = config => {
// Attach all endpoints
return {
...API,
...buildAIEndpoints(API),
...buildAnalyticsEndpoints(API),
...buildAppEndpoints(API),
...buildAttachmentEndpoints(API),

@ -1 +1 @@
Subproject commit e2fe0f9cc856b4ee1a97df96d623b2d87d4e8733
Subproject commit aca9828117bb97f54f40ee359f1a3f6e259174e7

View File

@ -1,9 +1,7 @@
import { permissions, roles, context } from "@budibase/backend-core"
import {
UserCtx,
Database,
Role,
PermissionLevel,
GetResourcePermsResponse,
ResourcePermissionInfo,
GetDependantResourcesResponse,
@ -12,107 +10,15 @@ import {
RemovePermissionRequest,
RemovePermissionResponse,
} from "@budibase/types"
import { getRoleParams } from "../../db/utils"
import {
CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions,
} from "../../utilities/security"
import { removeFromArray } from "../../utilities"
import sdk from "../../sdk"
const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}
import { PermissionUpdateType } from "../../sdk/app/permissions"
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
// utility function to stop this repetition - permissions always stored under roles
async function getAllDBRoles(db: Database) {
const body = await db.allDocs<Role>(
getRoleParams(null, {
include_docs: true,
})
)
return body.rows.map(row => row.doc!)
}
async function updatePermissionOnRole(
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: PermissionUpdateType
) {
const db = context.getAppDB()
const remove = updateType === PermissionUpdateType.REMOVE
const isABuiltin = roles.isBuiltin(roleId)
const dbRoleId = roles.getDBRoleID(roleId)
const dbRoles = await getAllDBRoles(db)
const docUpdates: Role[] = []
// the permission is for a built-in, make sure it exists
if (isABuiltin && !dbRoles.some(role => role._id === dbRoleId)) {
const builtin = roles.getBuiltinRoles()[roleId]
builtin._id = roles.getDBRoleID(builtin._id!)
dbRoles.push(builtin)
}
// now try to find any roles which need updating, e.g. removing the
// resource from another role and then adding to the new role
for (let role of dbRoles) {
let updated = false
const rolePermissions: Record<string, PermissionLevel[]> = role.permissions
? role.permissions
: {}
// make sure it's an array, also handle migrating
if (
!rolePermissions[resourceId] ||
!Array.isArray(rolePermissions[resourceId])
) {
rolePermissions[resourceId] =
typeof rolePermissions[resourceId] === "string"
? [rolePermissions[resourceId] as unknown as PermissionLevel]
: []
}
// handle the removal/updating the role which has this permission first
// the updating (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
rolePermissions[resourceId].indexOf(level) !== -1
) {
removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the adding - we're on the correct role, add it to this one
if (!remove && role._id === dbRoleId) {
const set = new Set(rolePermissions[resourceId])
rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
if (updated) {
role.permissions = rolePermissions
docUpdates.push(role)
}
}
const response = await db.bulkDocs(docUpdates)
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}
export function fetchBuiltin(ctx: UserCtx) {
ctx.body = Object.values(permissions.getBuiltinPermissions())
}
@ -124,7 +30,7 @@ export function fetchLevels(ctx: UserCtx) {
export async function fetch(ctx: UserCtx) {
const db = context.getAppDB()
const dbRoles: Role[] = await getAllDBRoles(db)
const dbRoles: Role[] = await sdk.permissions.getAllDBRoles(db)
let permissions: any = {}
// create an object with structure role ID -> resource ID -> level
for (let role of dbRoles) {
@ -186,12 +92,18 @@ export async function getDependantResources(
export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
const params: AddPermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
ctx.body = await sdk.permissions.updatePermissionOnRole(
params,
PermissionUpdateType.ADD
)
}
export async function removePermission(
ctx: UserCtx<void, RemovePermissionResponse>
) {
const params: RemovePermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
ctx.body = await sdk.permissions.updatePermissionOnRole(
params,
PermissionUpdateType.REMOVE
)
}

View File

@ -1,5 +1,6 @@
import dayjs from "dayjs"
import {
Aggregation,
AutoFieldSubType,
AutoReason,
Datasource,
@ -19,6 +20,7 @@ import {
SortJson,
SortType,
Table,
ViewV2,
} from "@budibase/types"
import {
breakExternalTableId,
@ -46,7 +48,7 @@ import { db as dbCore } from "@budibase/backend-core"
import sdk from "../../../sdk"
import env from "../../../environment"
import { makeExternalQuery } from "../../../integrations/base/query"
import { dataFilters } from "@budibase/shared-core"
import { dataFilters, helpers } from "@budibase/shared-core"
export interface ManyRelationship {
tableId?: string
@ -159,17 +161,41 @@ function isEditableColumn(column: FieldSchema) {
export class ExternalRequest<T extends Operation> {
private readonly operation: T
private readonly tableId: string
private datasource?: Datasource
private tables: { [key: string]: Table } = {}
private readonly source: Table | ViewV2
private datasource: Datasource
constructor(operation: T, tableId: string, datasource?: Datasource) {
this.operation = operation
this.tableId = tableId
this.datasource = datasource
if (datasource && datasource.entities) {
this.tables = datasource.entities
public static async for<T extends Operation>(
operation: T,
source: Table | ViewV2,
opts: { datasource?: Datasource } = {}
) {
if (!opts.datasource) {
if (sdk.views.isView(source)) {
const table = await sdk.views.getTable(source.id)
opts.datasource = await sdk.datasources.get(table.sourceId!)
} else {
opts.datasource = await sdk.datasources.get(source.sourceId!)
}
}
return new ExternalRequest(operation, source, opts.datasource)
}
private get tables(): { [key: string]: Table } {
if (!this.datasource.entities) {
throw new Error("Datasource does not have entities")
}
return this.datasource.entities
}
private constructor(
operation: T,
source: Table | ViewV2,
datasource: Datasource
) {
this.operation = operation
this.source = source
this.datasource = datasource
}
private prepareFilters(
@ -290,20 +316,6 @@ export class ExternalRequest<T extends Operation> {
return this.tables[tableName]
}
// seeds the object with table and datasource information
async retrieveMetadata(
datasourceId: string
): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
if (!this.datasource) {
this.datasource = await sdk.datasources.get(datasourceId)
if (!this.datasource || !this.datasource.entities) {
throw "No tables found, fetch tables before query."
}
this.tables = this.datasource.entities
}
return { tables: this.tables, datasource: this.datasource }
}
async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
@ -619,24 +631,16 @@ export class ExternalRequest<T extends Operation> {
}
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
const { operation, tableId } = this
if (!tableId) {
throw new Error("Unable to run without a table ID")
}
let { datasourceId, tableName } = breakExternalTableId(tableId)
let datasource = this.datasource
if (!datasource) {
const { datasource: ds } = await this.retrieveMetadata(datasourceId)
datasource = ds
}
const tables = this.tables
const table = tables[tableName]
let isSql = isSQL(datasource)
if (!table) {
throw new Error(
`Unable to process query, table "${tableName}" not defined.`
)
const { operation } = this
let table: Table
if (sdk.views.isView(this.source)) {
table = await sdk.views.getTable(this.source.id)
} else {
table = this.source
}
let isSql = isSQL(this.datasource)
// look for specific components of config which may not be considered acceptable
let { id, row, filters, sort, paginate, rows } = cleanupConfig(
config,
@ -679,25 +683,40 @@ export class ExternalRequest<T extends Operation> {
}
}
}
if (
operation === Operation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
let aggregations: Aggregation[] = []
if (sdk.views.isView(this.source)) {
const calculationFields = helpers.views.calculationFields(this.source)
for (const [key, field] of Object.entries(calculationFields)) {
aggregations.push({
name: key,
field: field.field,
calculationType: field.calculationType,
})
}
}
let json: QueryJson = {
endpoint: {
datasourceId: datasourceId!,
entityId: tableName,
datasourceId: this.datasource._id!,
entityId: table.name,
operation,
},
resource: {
// have to specify the fields to avoid column overlap (for SQL)
fields: isSql
? buildSqlFieldList(table, this.tables, {
? await buildSqlFieldList(this.source, this.tables, {
relationships: incRelationships,
})
: [],
aggregations,
},
filters,
sort,
@ -714,7 +733,7 @@ export class ExternalRequest<T extends Operation> {
},
meta: {
table,
tables: tables,
tables: this.tables,
},
}
@ -745,7 +764,7 @@ export class ExternalRequest<T extends Operation> {
}
const output = await sqlOutputProcessing(
response,
table,
this.source,
this.tables,
relationships
)
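
A hedged sketch of the new factory in use: callers now hand `ExternalRequest.for` a `Table` or `ViewV2` and let it resolve the datasource itself. The handler function and the import path are illustrative; the real call site is the `handleRequest` wrapper in the next file.

```typescript
import { Operation, Table, ViewV2 } from "@budibase/types"
// illustrative relative path – the class is a named export of the file above
import { ExternalRequest } from "./ExternalRequest"

async function readExternal(source: Table | ViewV2) {
  // the factory resolves the datasource: for a view it first looks up the
  // backing table, then fetches that table's datasource
  const request = await ExternalRequest.for(Operation.READ, source)
  // run() takes the same RunConfig as before; the return shape depends on
  // the operation
  return request.run({})
}
```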

View File

@ -17,6 +17,7 @@ import {
Row,
Table,
UserCtx,
ViewV2,
} from "@budibase/types"
import sdk from "../../../sdk"
import * as utils from "./utils"
@ -29,39 +30,40 @@ import { generateIdForRow } from "./utils"
export async function handleRequest<T extends Operation>(
operation: T,
tableId: string,
source: Table | ViewV2,
opts?: RunConfig
): Promise<ExternalRequestReturnType<T>> {
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
opts || {}
)
return (
await ExternalRequest.for<T>(operation, source, {
datasource: opts?.datasource,
})
).run(opts || {})
}
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
const { _id, ...rowData } = ctx.request.body
const table = await sdk.tables.getTable(tableId)
const { row: dataToUpdate } = await inputProcessing(
const dataToUpdate = await inputProcessing(
ctx.user?._id,
cloneDeep(table),
cloneDeep(source),
rowData
)
const validateResult = await sdk.rows.utils.validate({
row: dataToUpdate,
tableId,
source,
})
if (!validateResult.valid) {
throw { validation: validateResult.errors }
}
const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
relationships: true,
})
const response = await handleRequest(Operation.UPDATE, tableId, {
const response = await handleRequest(Operation.UPDATE, source, {
id: breakRowIdField(_id),
row: dataToUpdate,
})
@ -69,17 +71,16 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// The id might have been changed, so the refetching would fail. Recalculating the id just in case
const updatedId =
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
const row = await sdk.rows.external.getRow(tableId, updatedId, {
const row = await sdk.rows.external.getRow(table._id!, updatedId, {
relationships: true,
})
const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, {
outputProcessing(source, row, {
squash: true,
preserveLinks: true,
fromViewId: viewId,
}),
outputProcessing(table, beforeRow, {
outputProcessing(source, beforeRow, {
squash: true,
preserveLinks: true,
}),
@ -94,9 +95,9 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
}
export async function destroy(ctx: UserCtx) {
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const _id = ctx.request.body._id
const { row } = await handleRequest(Operation.DELETE, tableId, {
const { row } = await handleRequest(Operation.DELETE, source, {
id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
@ -105,11 +106,11 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) {
promises.push(
handleRequest(Operation.DELETE, tableId, {
handleRequest(Operation.DELETE, source, {
id: breakRowIdField(row._id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
@ -124,6 +125,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId
const source = await utils.getSource(ctx)
const { tableId } = utils.getSourceId(ctx)
const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId)
@ -131,7 +133,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.")
}
const tables = datasource.entities
const response = await handleRequest(Operation.READ, tableId, {
const response = await handleRequest(Operation.READ, source, {
id,
datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE,
@ -155,7 +157,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
// don't support composite keys right now
const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
const primaryLink = linkedTable.primary?.[0] as string
const relatedRows = await handleRequest(Operation.READ, linkedTableId!, {
const relatedRows = await handleRequest(Operation.READ, linkedTable, {
tables,
filters: {
oneOf: {

View File

@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
}
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const { tableId } = utils.getSourceId(ctx)
const { tableId, viewId } = utils.getSourceId(ctx)
await context.ensureSnippetContext(true)
@ -222,6 +222,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
...ctx.request.body,
query: enrichedQuery,
tableId,
viewId,
}
ctx.status = 200
@ -229,14 +230,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
}
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently
if (isExternalTableID(tableId)) {
if (isExternalTableID(table._id!)) {
ctx.body = { valid: true, errors: {} }
} else {
ctx.body = await sdk.rows.utils.validate({
row: ctx.request.body,
tableId,
source,
})
}
}

View File

@ -21,18 +21,19 @@ import {
import sdk from "../../../sdk"
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
import { flatten } from "lodash"
import { findRow } from "../../../sdk/app/rows/internal"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx)
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
const inputs = ctx.request.body
const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow
const dbTable = await sdk.tables.getTable(tableId)
try {
oldRow = await outputProcessing(
dbTable,
await utils.findRow(tableId, inputs._id!)
)
oldRow = await outputProcessing(source, await findRow(tableId, inputs._id!))
} catch (err) {
if (isUserTable) {
// don't include the rev, it'll be the global rev
@ -48,22 +49,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// need to build up full patch fields before coerce
let combinedRow: any = cloneDeep(oldRow)
for (let key of Object.keys(inputs)) {
if (!dbTable.schema[key]) continue
if (!table.schema[key]) continue
combinedRow[key] = inputs[key]
}
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
// this returns the table and row in case they have been updated
let { table, row } = await inputProcessing(
ctx.user?._id,
tableClone,
combinedRow
)
let row = await inputProcessing(ctx.user?._id, source, combinedRow)
const validateResult = await sdk.rows.utils.validate({
row,
table,
source,
})
if (!validateResult.valid) {
@ -87,10 +81,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
return { row: ctx.body as Row, table, oldRow }
}
const result = await finaliseRow(table, row, {
oldTable: dbTable,
const result = await finaliseRow(source, row, {
updateFormula: true,
fromViewId: viewId,
})
return { ...result, oldRow }
@ -186,7 +178,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
sdk.tables.getTable(tableId),
linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
])
let row = await utils.findRow(tableId, rowId)
let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row)
const linkVals = links as LinkDocumentValue[]

View File

@ -4,10 +4,11 @@ import {
processFormulas,
} from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core"
import { Table, Row, FormulaType, FieldType } from "@budibase/types"
import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
/**
* This function runs through a list of enriched rows, looks at the rows which
@ -121,33 +122,26 @@ export async function updateAllFormulasInTable(table: Table) {
* expects the row to be totally enriched/contain all relationships.
*/
export async function finaliseRow(
table: Table,
source: Table | ViewV2,
row: Row,
{
oldTable,
updateFormula,
fromViewId,
}: { oldTable?: Table; updateFormula: boolean; fromViewId?: string } = {
updateFormula: true,
}
opts?: { updateFormula: boolean }
) {
const db = context.getAppDB()
const { updateFormula = true } = opts || {}
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
row.type = "row"
// process the row before return, to include relationships
let enrichedRow = (await outputProcessing(table, cloneDeep(row), {
let enrichedRow = await outputProcessing(source, cloneDeep(row), {
squash: false,
})) as Row
})
// use enriched row to generate formulas for saving, specifically only use as context
row = await processFormulas(table, row, {
dynamic: false,
contextRows: [enrichedRow],
})
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (oldTable && !isEqual(oldTable, table)) {
await db.put(table)
}
const response = await db.put(row)
// for response, calculate the formulas for the enriched row
enrichedRow._rev = response.rev
@ -158,8 +152,6 @@ export async function finaliseRow(
if (updateFormula) {
await updateRelatedFormula(table, enrichedRow)
}
const squashed = await linkRows.squashLinks(table, enrichedRow, {
fromViewId,
})
const squashed = await linkRows.squashLinks(source, enrichedRow)
return { row: enrichedRow, squashed, table }
}

View File

@ -1,11 +1,19 @@
// need to handle table name + field or just field, depending on whether relationships are used
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types"
import {
FieldSchema,
FieldType,
Row,
Table,
JsonTypes,
ViewV2,
} from "@budibase/types"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils"
import sdk from "../../../../sdk"
function extractFieldValue({
row,
@ -78,20 +86,30 @@ function fixJsonTypes(row: Row, table: Table) {
return row
}
export function basicProcessing({
export async function basicProcessing({
row,
table,
source,
tables,
isLinked,
sqs,
}: {
row: Row
table: Table
source: Table | ViewV2
tables: Table[]
isLinked: boolean
sqs?: boolean
}): Row {
}): Promise<Row> {
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
const thisRow: Row = {}
// filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) {
let value = extractFieldValue({
@ -108,13 +126,20 @@ export function basicProcessing({
thisRow[fieldName] = value
}
}
if (sdk.views.isView(source)) {
for (const key of Object.keys(helpers.views.calculationFields(source))) {
thisRow[key] = row[key]
}
}
let columns: string[] = Object.keys(table.schema)
if (!sqs) {
if (!sqs && !isCalculationView) {
thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id
thisRow._rev = "rev"
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
} else {
} else if (!isCalculationView) {
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({
@ -149,28 +174,30 @@ export function basicProcessing({
thisRow[col] = array
// make sure all of them have an _id
const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]!
thisRow[col] = (thisRow[col] as Row[])
.map(relatedRow =>
basicProcessing({
row: relatedRow,
table: relatedTable,
tables,
isLinked: false,
sqs,
})
thisRow[col] = (
await Promise.all(
(thisRow[col] as Row[]).map(relatedRow =>
basicProcessing({
row: relatedRow,
source: relatedTable,
tables,
isLinked: false,
sqs,
})
)
)
.sort((a, b) => {
const aField = a?.[sortField],
bField = b?.[sortField]
if (!aField) {
return 1
} else if (!bField) {
return -1
}
return aField.localeCompare
? aField.localeCompare(bField)
: aField - bField
})
).sort((a, b) => {
const aField = a?.[sortField],
bField = b?.[sortField]
if (!aField) {
return 1
} else if (!bField) {
return -1
}
return aField.localeCompare
? aField.localeCompare(bField)
: aField - bField
})
}
}
return fixJsonTypes(thisRow, table)

View File

@ -7,10 +7,14 @@ import {
ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
RelationshipsJson,
Row,
Table,
ViewV2,
} from "@budibase/types"
import { breakExternalTableId } from "../../../../integrations/utils"
import { generateJunctionTableID } from "../../../../db/utils"
import sdk from "../../../../sdk"
import { helpers } from "@budibase/shared-core"
type TableMap = Record<string, Table>
@ -108,37 +112,51 @@ export function buildInternalRelationships(
* Creating the specific list of fields that we desire, and excluding the ones that are of no use to us
* is more performant and has the added benefit of protecting against this scenario.
*/
export function buildSqlFieldList(
table: Table,
export async function buildSqlFieldList(
source: Table | ViewV2,
tables: TableMap,
opts?: { relationships: boolean }
) {
const { relationships } = opts || {}
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
([columnName, column]) =>
column.type !== FieldType.LINK &&
column.type !== FieldType.FORMULA &&
!existing.find((field: string) => field === columnName)
!existing.find(
(field: string) => field === `${table.name}.${columnName}`
)
)
.map(column => `${table.name}.${column[0]}`)
.map(([columnName]) => `${table.name}.${columnName}`)
}
let fields = extractRealFields(table)
let fields: string[] = []
if (sdk.views.isView(source)) {
fields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
fields = extractRealFields(source)
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
for (let field of Object.values(table.schema)) {
if (
field.type !== FieldType.LINK ||
!opts?.relationships ||
!field.tableId
) {
if (field.type !== FieldType.LINK || !relationships || !field.tableId) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = tables[linkTableName]
if (linkTable) {
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
const { tableName } = breakExternalTableId(field.tableId)
if (tables[tableName]) {
fields = fields.concat(extractRealFields(tables[tableName], fields))
}
}
return fields
}
@ -149,3 +167,7 @@ export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
(DSPlusOperation.READ in resp[0] && resp[0].read === true)
)
}
export function isKnexRows(resp: DatasourcePlusQueryResponse): resp is Row[] {
return !isKnexEmptyReadResponse(resp)
}
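
The `isKnexRows` guard narrows the raw Knex response before row processing, replacing the negative `isKnexEmptyReadResponse` check at the call site. A small sketch of the pattern, with the wrapper function invented for illustration:

```typescript
import { DatasourcePlusQueryResponse, Row } from "@budibase/types"
import { isKnexRows } from "./sqlUtils" // path as used by sqlOutputProcessing

function toRows(resp: DatasourcePlusQueryResponse): Row[] {
  // empty reads come back as a marker object rather than row data; the guard
  // filters that case and narrows the type to Row[] for the caller
  if (!isKnexRows(resp)) {
    return []
  }
  return resp // now typed as Row[]
}
```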

View File

@ -1,6 +1,6 @@
import * as utils from "../../../../db/utils"
import { context } from "@budibase/backend-core"
import { docIds } from "@budibase/backend-core"
import {
Ctx,
DatasourcePlusQueryResponse,
@ -8,17 +8,18 @@ import {
RelationshipsJson,
Row,
Table,
ViewV2,
} from "@budibase/types"
import {
processDates,
processFormulas,
} from "../../../../utilities/rowProcessor"
import { isKnexEmptyReadResponse } from "./sqlUtils"
import { isKnexRows } from "./sqlUtils"
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
import sdk from "../../../../sdk"
import { processStringSync } from "@budibase/string-templates"
import validateJs from "validate.js"
import { getFullUser } from "../../../../utilities/users"
import { helpers } from "@budibase/shared-core"
validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) {
@ -58,26 +59,11 @@ export async function processRelationshipFields(
return row
}
export async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB()
let row: Row
// TODO remove special user case in future
if (tableId === utils.InternalTables.USER_METADATA) {
row = await getFullUser(rowId)
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the row's tableId"
}
return row
}
export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
// top priority, use the URL first
if (ctx.params?.sourceId) {
const { sourceId } = ctx.params
if (utils.isViewID(sourceId)) {
if (docIds.isViewId(sourceId)) {
return {
tableId: utils.extractViewInfoFromID(sourceId).tableId,
viewId: sourceId,
@ -96,22 +82,22 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
throw new Error("Unable to find table ID in request")
}
export async function validate(
opts: { row: Row } & ({ tableId: string } | { table: Table })
) {
let fetchedTable: Table
if ("tableId" in opts) {
fetchedTable = await sdk.tables.getTable(opts.tableId)
} else {
fetchedTable = opts.table
export async function getSource(ctx: Ctx): Promise<Table | ViewV2> {
const { tableId, viewId } = getSourceId(ctx)
if (viewId) {
return sdk.views.get(viewId)
}
return sdk.rows.utils.validate({
...opts,
table: fetchedTable,
})
return sdk.tables.getTable(tableId)
}
function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function getTableFromSource(source: Table | ViewV2) {
if (sdk.views.isView(source)) {
return await sdk.views.getTable(source.id)
}
return source
}
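
A sketch of the controller pattern these two helpers enable: resolve the request's source (view or table) once, then derive the underlying table only where table-level details are needed. The wrapper function is invented; the import mirrors how the row controllers pull in these utils.

```typescript
import { Ctx, Table, ViewV2 } from "@budibase/types"
import * as utils from "./utils" // illustrative, as in the row controllers

async function resolveRequestSource(
  ctx: Ctx
): Promise<{ source: Table | ViewV2; table: Table }> {
  // getSource prefers the view when the request carries a view ID,
  // otherwise it returns the table
  const source = await utils.getSource(ctx)
  // getTableFromSource unwraps a view to its backing table, or returns the
  // table unchanged
  const table = await utils.getTableFromSource(source)
  return { source, table }
}
```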
function fixBooleanFields(row: Row, table: Table) {
for (let col of Object.values(table.schema)) {
if (col.type === FieldType.BOOLEAN) {
if (row[col.name] === 1) {
@ -126,49 +112,45 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function sqlOutputProcessing(
rows: DatasourcePlusQueryResponse,
table: Table,
source: Table | ViewV2,
tables: Record<string, Table>,
relationships: RelationshipsJson[],
opts?: { sqs?: boolean }
): Promise<Row[]> {
if (isKnexEmptyReadResponse(rows)) {
if (!isKnexRows(rows)) {
return []
}
let finalRows: { [key: string]: Row } = {}
for (let row of rows as Row[]) {
let rowId = row._id
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
let processedRows: Row[] = []
for (let row of rows) {
if (opts?.sqs) {
rowId = getInternalRowId(row, table)
row._id = rowId
} else if (!rowId) {
rowId = generateIdForRow(row, table)
row._id = rowId
row._id = getInternalRowId(row, table)
} else if (row._id == null && !isCalculationView) {
row._id = generateIdForRow(row, table)
}
const thisRow = basicProcessing({
row = await basicProcessing({
row,
table,
source,
tables: Object.values(tables),
isLinked: false,
sqs: opts?.sqs,
})
if (thisRow._id == null) {
throw new Error("Unable to generate row ID for SQL rows")
}
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
row = fixBooleanFields(row, table)
row = await processRelationshipFields(table, tables, row, relationships)
processedRows.push(row)
}
// make sure all related rows are correct
let finalRowArray = []
for (let row of Object.values(finalRows)) {
finalRowArray.push(
await processRelationshipFields(table, tables, row, relationships)
)
}
// process some additional types
finalRowArray = processDates(table, finalRowArray)
return finalRowArray
return processDates(table, processedRows)
}
export function isUserMetadataTable(tableId: string) {

View File

@ -5,14 +5,9 @@ import {
SearchViewRowRequest,
RequiredKeys,
RowSearchParams,
SearchFilterKey,
LogicalOperator,
} from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../../sdk"
import { db, context, features } from "@budibase/backend-core"
import { enrichSearchContext } from "./utils"
import { isExternalTableID } from "../../../integrations/utils"
import { context } from "@budibase/backend-core"
export async function searchView(
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
@ -32,55 +27,15 @@ export async function searchView(
.map(([key]) => key)
const { body } = ctx.request
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let query = dataFilters.buildQuery(view.query || [])
if (body.query) {
// Delete extraneous search params that cannot be overridden
delete body.query.onEmptyFilter
if (
!isExternalTableID(view.tableId) &&
!(await features.flags.isEnabled("SQS"))
) {
// Extract existing fields
const existingFields =
view.query
?.filter(filter => filter.field)
.map(filter => db.removeKeyNumbering(filter.field)) || []
// Carry over filters for unused fields
Object.keys(body.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(body.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
query[operator]![field] = body.query[operator]![field]
}
})
})
} else {
query = {
$and: {
conditions: [query, body.query],
},
}
}
}
await context.ensureSnippetContext(true)
const enrichedQuery = await enrichSearchContext(query, {
user: sdk.users.getUserContextBindings(ctx.user),
})
const searchOptions: RequiredKeys<SearchViewRowRequest> &
RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = {
tableId: view.tableId,
viewId: view.id,
query: enrichedQuery,
query: body.query,
fields: viewFields,
...getSortOptions(body, view),
limit: body.limit,
@ -89,11 +44,12 @@ export async function searchView(
countRows: body.countRows,
}
const result = await sdk.rows.search(searchOptions)
const result = await sdk.rows.search(searchOptions, {
user: sdk.users.getUserContextBindings(ctx.user),
})
result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result
}
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
if (request.sort) {
return {

View File

@ -113,11 +113,10 @@ export async function bulkImport(
const processed = await inputProcessing(ctx.user?._id, table, row, {
noAutoRelationships: true,
})
parsedRows.push(processed.row)
table = processed.table
parsedRows.push(processed)
}
await handleRequest(Operation.BULK_UPSERT, table._id!, {
await handleRequest(Operation.BULK_UPSERT, table, {
rows: parsedRows,
})
await events.rows.imported(table, parsedRows.length)

View File

@ -33,7 +33,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
import { cloneDeep } from "lodash"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@ -149,12 +149,7 @@ export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to

View File

@ -3,7 +3,6 @@ import { handleDataImport } from "./utils"
import {
BulkImportRequest,
BulkImportResponse,
FieldType,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@ -70,22 +69,10 @@ export async function bulkImport(
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(
{
...table,
schema: {
_id: {
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
await handleDataImport(table, {
importRows: rows,
identifierFields,
user: ctx.user,
})
return table
}

View File

@ -139,8 +139,7 @@ export async function importToRows(
const processed = await inputProcessing(user?._id, table, row, {
noAutoRelationships: true,
})
row = processed.row
table = processed.table
row = processed
// However here we must reference the original table, as we want to mutate
// the real schema of the table passed in, not the clone used for

View File

@ -7,10 +7,49 @@ import {
ViewResponse,
ViewResponseEnriched,
ViewV2,
ViewFieldMetadata,
BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
RelationSchemaField,
ViewFieldMetadata,
} from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core"
function stripUnknownFields(
field: BasicViewFieldMetadata
): RequiredKeys<BasicViewFieldMetadata> {
if (helpers.views.isCalculationField(field)) {
const strippedField: RequiredKeys<ViewCalculationFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
calculationType: field.calculationType,
field: field.field,
columns: field.columns,
}
return strippedField
} else {
const strippedField: RequiredKeys<BasicViewFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
columns: field.columns,
}
return strippedField
}
}
function stripUndefinedFields(obj: Record<string, any>): void {
Object.keys(obj)
.filter(key => obj[key] === undefined)
.forEach(key => {
delete obj[key]
})
}
async function parseSchema(view: CreateViewRequest) {
if (!view.schema) {
@ -22,6 +61,7 @@ async function parseSchema(view: CreateViewRequest) {
let fieldRelatedSchema:
| Record<string, RequiredKeys<RelationSchemaField>>
| undefined
if (schemaValue.columns) {
fieldRelatedSchema = Object.entries(schemaValue.columns).reduce<
NonNullable<typeof fieldRelatedSchema>
@ -35,25 +75,12 @@ async function parseSchema(view: CreateViewRequest) {
}
return acc
}, {})
schemaValue.columns = fieldRelatedSchema
}
const fieldSchema: RequiredKeys<
ViewFieldMetadata & {
columns: typeof fieldRelatedSchema
}
> = {
order: schemaValue.order,
width: schemaValue.width,
visible: schemaValue.visible,
readonly: schemaValue.readonly,
icon: schemaValue.icon,
columns: fieldRelatedSchema,
}
Object.entries(fieldSchema)
.filter(([, val]) => val === undefined)
.forEach(([key]) => {
delete fieldSchema[key as keyof ViewFieldMetadata]
})
const fieldSchema = stripUnknownFields(schemaValue)
stripUndefinedFields(fieldSchema)
p[fieldName] = fieldSchema
return p
}, {} as Record<string, RequiredKeys<ViewFieldMetadata>>)
@ -76,6 +103,7 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
name: view.name,
tableId: view.tableId,
query: view.query,
queryUI: view.queryUI,
sort: view.sort,
schema,
primaryDisplay: view.primaryDisplay,
@ -111,6 +139,7 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
version: view.version,
tableId: view.tableId,
query: view.query,
queryUI: view.queryUI,
sort: view.sort,
schema,
primaryDisplay: view.primaryDisplay,

View File

@ -33,6 +33,7 @@ import rowActionRoutes from "./rowAction"
export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"
const aiRoutes = pro.ai
const appBackupRoutes = pro.appBackups
const environmentVariableRoutes = pro.environmentVariables
@ -67,6 +68,7 @@ export const mainRoutes: Router[] = [
debugRoutes,
environmentVariableRoutes,
rowActionRoutes,
aiRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking, as koa may recognise other routes as matching this one
tableRoutes,

View File

@ -125,6 +125,13 @@ describe("/permission", () => {
})
it("should be able to access the view data when the table is set to public and with no view permissions overrides", async () => {
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
@ -138,6 +145,14 @@ describe("/permission", () => {
resourceId: table._id,
level: PermissionLevel.READ,
})
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: view.id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()

View File

@ -76,7 +76,7 @@ async function waitForEvent(
}
describe.each([
["internal", undefined],
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
@ -695,6 +695,69 @@ describe.each([
})
})
describe("options column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
status: {
name: "status",
type: FieldType.OPTIONS,
default: "requested",
constraints: {
inclusion: ["requested", "approved"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.status).toEqual("requested")
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
status: "approved",
})
expect(row.status).toEqual("approved")
})
})
describe("array column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
food: {
name: "food",
type: FieldType.ARRAY,
default: ["apple", "orange"],
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: ["apple", "orange", "banana"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.food).toEqual(["apple", "orange"])
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
food: ["orange"],
})
expect(row.food).toEqual(["orange"])
})
})
describe("bindings", () => {
describe("string column", () => {
beforeAll(async () => {
@ -2453,9 +2516,15 @@ describe.each([
let flagCleanup: (() => void) | undefined
beforeAll(async () => {
flagCleanup = setCoreEnv({
const env = {
TENANT_FEATURE_FLAGS: `*:${FeatureFlag.ENRICHED_RELATIONSHIPS}`,
})
}
if (isSqs) {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:SQS`
} else {
env.TENANT_FEATURE_FLAGS = `${env.TENANT_FEATURE_FLAGS},*:!SQS`
}
flagCleanup = setCoreEnv(env)
const aux2Table = await config.api.table.save(saveTableRequest())
const aux2Data = await config.api.row.save(aux2Table._id!, {})

View File

@ -826,11 +826,20 @@ describe("/rowsActions", () => {
)
).id
// Allow row action on view
await config.api.rowAction.setViewPermission(
tableId,
viewId,
rowAction.id
)
// Delete explicit view permissions so they inherit table permissions
await config.api.permission.revoke({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission
level: PermissionLevel.READ,
resourceId: viewId,
})
return { permissionResource: tableId, triggerResouce: viewId }
},
],

File diff suppressed because it is too large

View File

@ -18,12 +18,14 @@ import {
ViewV2,
SearchResponse,
BasicOperator,
CalculationType,
RelationshipType,
TableSchema,
ViewFieldMetadata,
RenameColumn,
FeatureFlag,
BBReferenceFieldSubType,
ViewV2Schema,
ViewCalculationFieldMetadata,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -36,7 +38,6 @@ import {
setEnv as setCoreEnv,
env,
} from "@budibase/backend-core"
import sdk from "../../../sdk"
describe.each([
["lucene", undefined],
@ -154,7 +155,7 @@ describe.each([
})
it("can persist views with all fields", async () => {
const newView: Required<CreateViewRequest> = {
const newView: Required<Omit<CreateViewRequest, "queryUI">> = {
name: generator.name(),
tableId: table._id!,
primaryDisplay: "id",
@ -540,6 +541,33 @@ describe.each([
status: 201,
})
})
it("can create a view with calculation fields", async () => {
let view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
sum: {
visible: true,
calculationType: CalculationType.SUM,
field: "Price",
},
},
})
expect(Object.keys(view.schema!)).toHaveLength(1)
let sum = view.schema!.sum as ViewCalculationFieldMetadata
expect(sum).toBeDefined()
expect(sum.calculationType).toEqual(CalculationType.SUM)
expect(sum.field).toEqual("Price")
view = await config.api.viewV2.get(view.id)
sum = view.schema!.sum as ViewCalculationFieldMetadata
expect(sum).toBeDefined()
expect(sum.calculationType).toEqual(CalculationType.SUM)
expect(sum.field).toEqual("Price")
})
})
describe("update", () => {
@ -584,7 +612,7 @@ describe.each([
it("can update all fields", async () => {
const tableId = table._id!
const updatedData: Required<UpdateViewRequest> = {
const updatedData: Required<Omit<UpdateViewRequest, "queryUI">> = {
version: view.version,
id: view.id,
tableId,
@ -1152,10 +1180,7 @@ describe.each([
return table
}
const createView = async (
tableId: string,
schema: Record<string, ViewFieldMetadata>
) =>
const createView = async (tableId: string, schema: ViewV2Schema) =>
await config.api.viewV2.create({
name: generator.guid(),
tableId,
@ -1738,6 +1763,40 @@ describe.each([
})
})
it("views filters are respected even if the column is hidden", async () => {
await config.api.row.save(table._id!, {
one: "foo",
two: "bar",
})
const two = await config.api.row.save(table._id!, {
one: "foo2",
two: "bar2",
})
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
query: [
{
operator: BasicOperator.EQUAL,
field: "two",
value: "bar2",
},
],
schema: {
id: { visible: true },
one: { visible: false },
two: { visible: false },
},
})
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual([
expect.objectContaining({ _id: two._id }),
])
})
it("views without data can be returned", async () => {
const response = await config.api.viewV2.search(view.id)
expect(response.rows).toHaveLength(0)
@ -2196,28 +2255,6 @@ describe.each([
expect(response.rows).toHaveLength(0)
})
it("queries the row api passing the view fields only", async () => {
const searchSpy = jest.spyOn(sdk.rows, "search")
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
id: { visible: true },
one: { visible: false },
},
})
await config.api.viewV2.search(view.id, { query: {} })
expect(searchSpy).toHaveBeenCalledTimes(1)
expect(searchSpy).toHaveBeenCalledWith(
expect.objectContaining({
fields: ["id"],
})
)
})
describe("foreign relationship columns", () => {
let envCleanup: () => void
beforeAll(() => {
@ -2382,6 +2419,203 @@ describe.each([
])
})
})
!isLucene &&
describe("calculations", () => {
let table: Table
let rows: Row[]
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
quantity: {
type: FieldType.NUMBER,
name: "quantity",
},
price: {
type: FieldType.NUMBER,
name: "price",
},
},
})
)
rows = await Promise.all(
Array.from({ length: 10 }, () =>
config.api.row.save(table._id!, {
quantity: generator.natural({ min: 1, max: 10 }),
price: generator.natural({ min: 1, max: 10 }),
})
)
)
})
it("should be able to search by calculations", async () => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
"Quantity Sum": {
visible: true,
calculationType: CalculationType.SUM,
field: "quantity",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
expect(response.rows).toHaveLength(1)
expect(response.rows).toEqual(
expect.arrayContaining([
expect.objectContaining({
"Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0),
}),
])
)
// Calculation views do not return rows that can be linked back to
// the source table, and so should not have an _id field.
for (const row of response.rows) {
expect("_id" in row).toBe(false)
}
})
it("should be able to group by a basic field", async () => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
quantity: {
visible: true,
field: "quantity",
},
"Total Price": {
visible: true,
calculationType: CalculationType.SUM,
field: "price",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
const priceByQuantity: Record<number, number> = {}
for (const row of rows) {
priceByQuantity[row.quantity] ??= 0
priceByQuantity[row.quantity] += row.price
}
for (const row of response.rows) {
expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
}
})
it.each([
CalculationType.COUNT,
CalculationType.SUM,
CalculationType.AVG,
CalculationType.MIN,
CalculationType.MAX,
])("should be able to calculate $type", async type => {
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
aggregate: {
visible: true,
calculationType: type,
field: "price",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
function calculate(
type: CalculationType,
numbers: number[]
): number {
switch (type) {
case CalculationType.COUNT:
return numbers.length
case CalculationType.SUM:
return numbers.reduce((a, b) => a + b, 0)
case CalculationType.AVG:
return numbers.reduce((a, b) => a + b, 0) / numbers.length
case CalculationType.MIN:
return Math.min(...numbers)
case CalculationType.MAX:
return Math.max(...numbers)
}
}
const prices = rows.map(row => row.price)
const expected = calculate(type, prices)
const actual = response.rows[0].aggregate
if (type === CalculationType.AVG) {
// The average calculation can introduce floating point rounding
// errors, so we need to compare to within a small margin of
// error.
expect(actual).toBeCloseTo(expected)
} else {
expect(actual).toEqual(expected)
}
})
})
!isLucene &&
it("should not need required fields to be present", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
age: {
name: "age",
type: FieldType.NUMBER,
},
},
})
)
await Promise.all([
config.api.row.save(table._id!, { name: "Steve", age: 30 }),
config.api.row.save(table._id!, { name: "Jane", age: 31 }),
])
const view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
schema: {
sum: {
visible: true,
calculationType: CalculationType.SUM,
field: "age",
},
},
})
const response = await config.api.viewV2.search(view.id, {
query: {},
})
expect(response.rows).toHaveLength(1)
expect(response.rows[0].sum).toEqual(61)
})
})
describe("permissions", () => {
@ -2417,6 +2651,11 @@ describe.each([
level: PermissionLevel.READ,
resourceId: table._id!,
})
await config.api.permission.revoke({
roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission
level: PermissionLevel.READ,
resourceId: view.id,
})
await config.publish()
const response = await config.api.viewV2.publicSearch(view.id)

View File

@ -17,44 +17,65 @@ describe("Branching automations", () => {
afterAll(setup.afterAll)
it("should run a multiple nested branching automation", async () => {
const firstLogId = "11111111-1111-1111-1111-111111111111"
const branch1LogId = "22222222-2222-2222-2222-222222222222"
const branch2LogId = "33333333-3333-3333-3333-333333333333"
const branch2Id = "44444444-4444-4444-4444-444444444444"
const builder = createAutomationBuilder({
name: "Test Trigger with Loop and Create Row",
})
const results = await builder
.appAction({ fields: {} })
.serverLog({ text: "Starting automation" })
.serverLog(
{ text: "Starting automation" },
{ stepName: "FirstLog", stepId: firstLogId }
)
.branch({
topLevelBranch1: {
steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 1" }).branch({
branch1: {
steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 1.1" }),
condition: {
equal: { "{{steps.1.success}}": true },
stepBuilder
.serverLog(
{ text: "Branch 1" },
{ stepId: "66666666-6666-6666-6666-666666666666" }
)
.branch({
branch1: {
steps: stepBuilder =>
stepBuilder.serverLog(
{ text: "Branch 1.1" },
{ stepId: branch1LogId }
),
condition: {
equal: { [`{{ steps.${firstLogId}.success }}`]: true },
},
},
},
branch2: {
steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 1.2" }),
condition: {
equal: { "{{steps.1.success}}": false },
branch2: {
steps: stepBuilder =>
stepBuilder.serverLog(
{ text: "Branch 1.2" },
{ stepId: branch2LogId }
),
condition: {
equal: { [`{{ steps.${firstLogId}.success }}`]: false },
},
},
},
}),
}),
condition: {
equal: { "{{steps.1.success}}": true },
equal: { [`{{ steps.${firstLogId}.success }}`]: true },
},
},
topLevelBranch2: {
steps: stepBuilder => stepBuilder.serverLog({ text: "Branch 2" }),
steps: stepBuilder =>
stepBuilder.serverLog({ text: "Branch 2" }, { stepId: branch2Id }),
condition: {
equal: { "{{steps.1.success}}": false },
equal: { [`{{ steps.${firstLogId}.success }}`]: false },
},
},
})
.run()
expect(results.steps[3].outputs.status).toContain("branch1 branch taken")
expect(results.steps[4].outputs.message).toContain("Branch 1.1")
})

View File

@ -64,18 +64,18 @@ class BaseStepBuilder {
stepId: TStep,
stepSchema: Omit<AutomationStep, "id" | "stepId" | "inputs">,
inputs: AutomationStepInputs<TStep>,
stepName?: string
opts?: { stepName?: string; stepId?: string }
): this {
const id = uuidv4()
const id = opts?.stepId || uuidv4()
this.steps.push({
...stepSchema,
inputs: inputs as any,
id,
stepId,
name: stepName || stepSchema.name,
name: opts?.stepName || stepSchema.name,
})
if (stepName) {
this.stepNames[id] = stepName
if (opts?.stepName) {
this.stepNames[id] = opts.stepName
}
return this
}
@ -95,7 +95,6 @@ class BaseStepBuilder {
})
branchStepInputs.children![key] = stepBuilder.build()
})
const branchStep: AutomationStep = {
...definition,
id: uuidv4(),
@ -106,80 +105,98 @@ class BaseStepBuilder {
}
// STEPS
createRow(inputs: CreateRowStepInputs, opts?: { stepName?: string }): this {
createRow(
inputs: CreateRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.CREATE_ROW,
BUILTIN_ACTION_DEFINITIONS.CREATE_ROW,
inputs,
opts?.stepName
opts
)
}
updateRow(inputs: UpdateRowStepInputs, opts?: { stepName?: string }): this {
updateRow(
inputs: UpdateRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.UPDATE_ROW,
BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW,
inputs,
opts?.stepName
opts
)
}
deleteRow(inputs: DeleteRowStepInputs, opts?: { stepName?: string }): this {
deleteRow(
inputs: DeleteRowStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.DELETE_ROW,
BUILTIN_ACTION_DEFINITIONS.DELETE_ROW,
inputs,
opts?.stepName
opts
)
}
sendSmtpEmail(
inputs: SmtpEmailStepInputs,
opts?: { stepName?: string }
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.SEND_EMAIL_SMTP,
BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP,
inputs,
opts?.stepName
opts
)
}
executeQuery(
inputs: ExecuteQueryStepInputs,
opts?: { stepName?: string }
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.EXECUTE_QUERY,
BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY,
inputs,
opts?.stepName
opts
)
}
queryRows(inputs: QueryRowsStepInputs, opts?: { stepName?: string }): this {
queryRows(
inputs: QueryRowsStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.QUERY_ROWS,
BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS,
inputs,
opts?.stepName
opts
)
}
loop(inputs: LoopStepInputs, opts?: { stepName?: string }): this {
loop(
inputs: LoopStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.LOOP,
BUILTIN_ACTION_DEFINITIONS.LOOP,
inputs,
opts?.stepName
opts
)
}
serverLog(input: ServerLogStepInputs, opts?: { stepName?: string }): this {
serverLog(
input: ServerLogStepInputs,
opts?: { stepName?: string; stepId?: string }
): this {
return this.step(
AutomationActionStepId.SERVER_LOG,
BUILTIN_ACTION_DEFINITIONS.SERVER_LOG,
input,
opts?.stepName
opts
)
}

View File

@ -23,10 +23,11 @@ import {
Row,
Table,
TableSchema,
ViewFieldMetadata,
ViewV2,
ViewV2Schema,
} from "@budibase/types"
import sdk from "../../sdk"
import { helpers } from "@budibase/shared-core"
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
@ -247,26 +248,36 @@ function getPrimaryDisplayValue(row: Row, table?: Table) {
export type SquashTableFields = Record<string, { visibleFieldNames: string[] }>
/**
* This function will take the given enriched rows and squash the links to only contain the primary display field.
* @returns The rows after having their links squashed to only contain the ID and primary display.
* This function will take the given enriched rows and squash the links to only
* contain the primary display field.
*
* @returns The rows after having their links squashed to only contain the ID
* and primary display.
*/
export async function squashLinks<T = Row[] | Row>(
table: Table,
enriched: T,
options?: {
fromViewId?: string
}
source: Table | ViewV2,
enriched: T
): Promise<T> {
const allowRelationshipSchemas = await features.flags.isEnabled(
FeatureFlag.ENRICHED_RELATIONSHIPS
)
let viewSchema: Record<string, ViewFieldMetadata> = {}
if (options?.fromViewId && allowRelationshipSchemas) {
const view = Object.values(table.views || {}).find(
(v): v is ViewV2 => sdk.views.isV2(v) && v.id === options?.fromViewId
)
viewSchema = view?.schema || {}
let viewSchema: ViewV2Schema = {}
if (sdk.views.isView(source)) {
if (helpers.views.isCalculationView(source)) {
return enriched
}
if (allowRelationshipSchemas) {
viewSchema = source.schema || {}
}
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// will populate this as we find them
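To make the squashing described in the docstring concrete, here is a minimal standalone sketch (hypothetical types and helper name, not the real implementation): each relationship cell is reduced to the linked row's _id plus its primary display value.

interface LinkedRowStub {
  _id: string
  [key: string]: any
}

// Squash one relationship cell down to { _id, primaryDisplay } pairs.
function squashLinkCell(
  linked: LinkedRowStub[],
  primaryDisplayField: string
): { _id: string; primaryDisplay: any }[] {
  return linked.map(row => ({
    _id: row._id,
    primaryDisplay: row[primaryDisplayField],
  }))
}

// e.g. squashLinkCell([{ _id: "ro_1", name: "Alice", age: 30 }], "name")
// => [{ _id: "ro_1", primaryDisplay: "Alice" }]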

View File

@ -1,4 +1,4 @@
import { context, db as dbCore, utils } from "@budibase/backend-core"
import { context, db as dbCore, docIds, utils } from "@budibase/backend-core"
import {
DatabaseQueryOpts,
Datasource,
@ -318,12 +318,8 @@ export function generateViewID(tableId: string) {
}${SEPARATOR}${tableId}${SEPARATOR}${newid()}`
}
export function isViewID(viewId: string) {
return viewId?.split(SEPARATOR)[0] === VirtualDocumentType.VIEW
}
export function extractViewInfoFromID(viewId: string) {
if (!isViewID(viewId)) {
if (!docIds.isViewId(viewId)) {
throw new Error("Unable to extract table ID, is not a view ID")
}
const split = viewId.split(SEPARATOR)

View File

@ -15,7 +15,8 @@ export interface TriggerOutput {
export interface AutomationContext extends AutomationResults {
steps: any[]
stepsByName?: Record<string, any>
stepsById: Record<string, any>
stepsByName: Record<string, any>
env?: Record<string, string>
trigger: any
}
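For context on why AutomationContext now exposes stepsById alongside stepsByName: branch conditions can bind to a step's output through its stable UUID, as the branching test earlier in this diff does with {{ steps.<uuid>.success }}, instead of relying on positional indexes. A minimal standalone sketch of that lookup, using a simplified context shape and a hypothetical resolveStepBinding helper that is not part of the codebase:

// Simplified context shape; resolveStepBinding is hypothetical.
interface MiniAutomationContext {
  steps: any[]
  stepsById: Record<string, any>
  stepsByName: Record<string, any>
}

function resolveStepBinding(ctx: MiniAutomationContext, ref: string): any {
  // ref looks like "steps.<uuid>.success" or "steps.<name>.success"
  const [, idOrName, prop] = ref.split(".")
  const step = ctx.stepsById[idOrName] ?? ctx.stepsByName[idOrName]
  return step?.[prop]
}

// e.g. resolveStepBinding(ctx, "steps.11111111-1111-1111-1111-111111111111.success")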

View File

@ -15,7 +15,7 @@ export function triggerRowActionAuthorised(
const rowActionId: string = ctx.params[actionPath]
const isTableId = docIds.isTableId(sourceId)
const isViewId = utils.isViewID(sourceId)
const isViewId = docIds.isViewId(sourceId)
if (!isTableId && !isViewId) {
ctx.throw(400, `'${sourceId}' is not a valid source id`)
}

View File

@ -1,26 +1,34 @@
import { db, roles } from "@budibase/backend-core"
import { db, roles, context, docIds } from "@budibase/backend-core"
import {
PermissionLevel,
PermissionSource,
VirtualDocumentType,
Role,
Database,
} from "@budibase/types"
import { extractViewInfoFromID, isViewID } from "../../../db/utils"
import { extractViewInfoFromID, getRoleParams } from "../../../db/utils"
import {
CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions,
} from "../../../utilities/security"
import sdk from "../../../sdk"
import { isV2 } from "../views"
import { removeFromArray } from "../../../utilities"
type ResourcePermissions = Record<
string,
{ role: string; type: PermissionSource }
>
export const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}
export async function getInheritablePermissions(
resourceId: string
): Promise<ResourcePermissions | undefined> {
if (isViewID(resourceId)) {
if (docIds.isViewId(resourceId)) {
return await getResourcePerms(extractViewInfoFromID(resourceId).tableId)
}
}
@ -100,3 +108,89 @@ export async function getDependantResources(
return
}
export async function updatePermissionOnRole(
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: PermissionUpdateType
) {
const db = context.getAppDB()
const remove = updateType === PermissionUpdateType.REMOVE
const isABuiltin = roles.isBuiltin(roleId)
const dbRoleId = roles.getDBRoleID(roleId)
const dbRoles = await getAllDBRoles(db)
const docUpdates: Role[] = []
// the permission is for a built-in role, make sure it exists
if (isABuiltin && !dbRoles.some(role => role._id === dbRoleId)) {
const builtin = roles.getBuiltinRoles()[roleId]
builtin._id = roles.getDBRoleID(builtin._id!)
dbRoles.push(builtin)
}
// now try to find any roles which need to be updated, e.g. removing the
// resource from another role and then adding it to the new role
for (let role of dbRoles) {
let updated = false
const rolePermissions: Record<string, PermissionLevel[]> = role.permissions
? role.permissions
: {}
// make sure it's an array, also handle migrating from the legacy string format
if (
!rolePermissions[resourceId] ||
!Array.isArray(rolePermissions[resourceId])
) {
rolePermissions[resourceId] =
typeof rolePermissions[resourceId] === "string"
? [rolePermissions[resourceId] as unknown as PermissionLevel]
: []
}
// handle removing/updating the role which currently holds this permission first
// the update check (role._id !== dbRoleId) is required because a resource/level can
// only be permitted on a single role (this reduces hierarchy confusion and simplifies
// the UI, rather than needing to show everywhere the permission is used)
if (
(role._id !== dbRoleId || remove) &&
rolePermissions[resourceId].indexOf(level) !== -1
) {
removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the add: we're on the correct role, so add the level to it
if (!remove && role._id === dbRoleId) {
const set = new Set(rolePermissions[resourceId])
rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
if (updated) {
role.permissions = rolePermissions
docUpdates.push(role)
}
}
const response = await db.bulkDocs(docUpdates)
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}
// utility function to stop this repetition - permissions are always stored under roles
export async function getAllDBRoles(db: Database) {
const body = await db.allDocs<Role>(
getRoleParams(null, {
include_docs: true,
})
)
return body.rows.map(row => row.doc!)
}
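As the comments in updatePermissionOnRole note, a given resource/level pair is only ever held by one role, so granting it to a role also strips it from every other role. A minimal standalone sketch of that invariant, with simplified role objects rather than the real Role documents:

type MiniRole = { _id: string; permissions: Record<string, string[]> }

// Grant `level` on `resourceId` to `roleId`, removing it from all other roles.
function grantExclusive(
  roles: MiniRole[],
  roleId: string,
  resourceId: string,
  level: string
): MiniRole[] {
  return roles.map(role => {
    const current = role.permissions[resourceId] ?? []
    const withoutLevel = current.filter(l => l !== level)
    const next =
      role._id === roleId ? [...new Set([...withoutLevel, level])] : withoutLevel
    return {
      ...role,
      permissions: { ...role.permissions, [resourceId]: next },
    }
  })
}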

View File

@ -1,11 +1,11 @@
import { context, HTTPError, utils } from "@budibase/backend-core"
import { context, docIds, HTTPError, utils } from "@budibase/backend-core"
import {
AutomationTriggerStepId,
SEPARATOR,
TableRowActions,
VirtualDocumentType,
} from "@budibase/types"
import { generateRowActionsID, isViewID } from "../../db/utils"
import { generateRowActionsID } from "../../db/utils"
import automations from "./automations"
import { definitions as TRIGGER_DEFINITIONS } from "../../automations/triggerInfo"
import * as triggers from "../../automations/triggers"
@ -155,7 +155,7 @@ export async function update(
async function guardView(tableId: string, viewId: string) {
let view
if (isViewID(viewId)) {
if (docIds.isViewId(viewId)) {
view = await sdk.views.get(viewId)
}
if (!view || view.tableId !== tableId) {

View File

@ -1,5 +1,11 @@
import { IncludeRelationship, Operation, Row } from "@budibase/types"
import { HTTPError } from "@budibase/backend-core"
import {
IncludeRelationship,
Operation,
Row,
Table,
ViewV2,
} from "@budibase/types"
import { docIds, HTTPError } from "@budibase/backend-core"
import { handleRequest } from "../../../api/controllers/row/external"
import { breakRowIdField } from "../../../integrations/utils"
import sdk from "../../../sdk"
@ -8,15 +14,24 @@ import {
outputProcessing,
} from "../../../utilities/rowProcessor"
import cloneDeep from "lodash/fp/cloneDeep"
import isEqual from "lodash/fp/isEqual"
import { tryExtractingTableAndViewId } from "./utils"
export async function getRow(
tableId: string,
sourceId: string | Table | ViewV2,
rowId: string,
opts?: { relationships?: boolean }
) {
const response = await handleRequest(Operation.READ, tableId, {
let source: Table | ViewV2
if (typeof sourceId === "string") {
if (docIds.isViewId(sourceId)) {
source = await sdk.views.get(sourceId)
} else {
source = await sdk.tables.getTable(sourceId)
}
} else {
source = sourceId
}
const response = await handleRequest(Operation.READ, source, {
id: breakRowIdField(rowId),
includeSqlRelationships: opts?.relationships
? IncludeRelationship.INCLUDE
@ -27,45 +42,42 @@ export async function getRow(
}
export async function save(
tableOrViewId: string,
sourceId: string,
inputs: Row,
userId: string | undefined
) {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
const table = await sdk.tables.getTable(tableId)
const { table: updatedTable, row } = await inputProcessing(
userId,
cloneDeep(table),
inputs
)
const { tableId, viewId } = tryExtractingTableAndViewId(sourceId)
let source: Table | ViewV2
if (viewId) {
source = await sdk.views.get(viewId)
} else {
source = await sdk.tables.getTable(tableId)
}
const row = await inputProcessing(userId, cloneDeep(source), inputs)
const validateResult = await sdk.rows.utils.validate({
row,
tableId,
source,
})
if (!validateResult.valid) {
throw { validation: validateResult.errors }
}
const response = await handleRequest(Operation.CREATE, tableId, {
const response = await handleRequest(Operation.CREATE, source, {
row,
})
if (!isEqual(table, updatedTable)) {
await sdk.tables.saveTable(updatedTable)
}
const rowId = response.row._id
if (rowId) {
const row = await getRow(tableId, rowId, {
const row = await getRow(source, rowId, {
relationships: true,
})
return {
...response,
row: await outputProcessing(table, row, {
row: await outputProcessing(source, row, {
preserveLinks: true,
squash: true,
fromViewId: viewId,
}),
}
} else {
@ -76,7 +88,14 @@ export async function save(
export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
const row = await getRow(tableId, rowId, {
let source: Table | ViewV2
if (viewId) {
source = await sdk.views.get(viewId)
} else {
source = await sdk.tables.getTable(tableId)
}
const row = await getRow(source, rowId, {
relationships: true,
})
@ -84,11 +103,10 @@ export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
throw new HTTPError("Row not found", 404)
}
const table = await sdk.tables.getTable(tableId)
// Preserving links, as the outputProcessing does not support external rows yet and we don't need it in this use case
return await outputProcessing(table, row, {
// Preserving links, as the outputProcessing does not support external rows
// yet and we don't need it in this use case
return await outputProcessing(source, row, {
squash: true,
preserveLinks: true,
fromViewId: viewId,
})
}
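getRow, save and find above all repeat the same resolution step: a string ID is turned into a Table or ViewV2 before being handed to handleRequest. A rough sketch of that shared pattern, using hypothetical stand-in types and lookups rather than the real sdk calls:

type MiniSource = { kind: "table" | "view"; id: string }

// Stand-ins for sdk.tables.getTable / sdk.views.get.
async function getTableById(id: string): Promise<MiniSource> {
  return { kind: "table", id }
}
async function getViewById(id: string): Promise<MiniSource> {
  return { kind: "view", id }
}

const isViewId = (id: string) => id.startsWith("view_")

// Accept either an already-resolved source or a string ID.
async function resolveSource(sourceId: string | MiniSource): Promise<MiniSource> {
  if (typeof sourceId !== "string") {
    return sourceId
  }
  return isViewId(sourceId) ? getViewById(sourceId) : getTableById(sourceId)
}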

View File

@ -1,7 +1,6 @@
import { context, db } from "@budibase/backend-core"
import { Row } from "@budibase/types"
import { Row, Table, ViewV2 } from "@budibase/types"
import sdk from "../../../sdk"
import cloneDeep from "lodash/fp/cloneDeep"
import { finaliseRow } from "../../../api/controllers/row/staticFormula"
import {
inputProcessing,
@ -10,7 +9,7 @@ import {
import * as linkRows from "../../../db/linkedRows"
import { InternalTables } from "../../../db/utils"
import { getFullUser } from "../../../utilities/users"
import { tryExtractingTableAndViewId } from "./utils"
import { getSource, tryExtractingTableAndViewId } from "./utils"
export async function save(
tableOrViewId: string,
@ -20,21 +19,25 @@ export async function save(
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
inputs.tableId = tableId
let source: Table | ViewV2
let table: Table
if (viewId) {
source = await sdk.views.get(viewId)
table = await sdk.views.getTable(viewId)
} else {
source = await sdk.tables.getTable(tableId)
table = source
}
if (!inputs._rev && !inputs._id) {
inputs._id = db.generateRowID(inputs.tableId)
}
// this returns the table and row in case they have been updated
const dbTable = await sdk.tables.getTable(inputs.tableId)
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
let { table, row } = await inputProcessing(userId, tableClone, inputs)
let row = await inputProcessing(userId, source, inputs)
const validateResult = await sdk.rows.utils.validate({
row,
table,
source,
})
if (!validateResult.valid) {
@ -49,24 +52,18 @@ export async function save(
table,
})) as Row
return finaliseRow(table, row, {
oldTable: dbTable,
updateFormula: true,
fromViewId: viewId,
return finaliseRow(source, row, { updateFormula: true })
}
export async function find(sourceId: string, rowId: string): Promise<Row> {
const source = await getSource(sourceId)
return await outputProcessing(source, await findRow(sourceId, rowId), {
squash: true,
})
}
export async function find(tableOrViewId: string, rowId: string): Promise<Row> {
const { tableId, viewId } = tryExtractingTableAndViewId(tableOrViewId)
const table = await sdk.tables.getTable(tableId)
let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row, { squash: true, fromViewId: viewId })
return row
}
async function findRow(tableId: string, rowId: string) {
export async function findRow(sourceId: string, rowId: string) {
const { tableId } = tryExtractingTableAndViewId(sourceId)
const db = context.getAppDB()
let row: Row
// TODO remove special user case in future

View File

@ -16,11 +16,11 @@ export const removeInvalidFilters = (
validFields = validFields.map(f => f.toLowerCase())
for (const filterKey of Object.keys(result) as (keyof SearchFilters)[]) {
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
if (isLogicalSearchOperator(filterKey)) {
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
const resultingConditions: SearchFilters[] = []
for (const condition of filter.conditions) {
const resultingCondition = removeInvalidFilters(condition, validFields)
@ -36,6 +36,11 @@ export const removeInvalidFilters = (
continue
}
const filter = result[filterKey]
if (!filter || typeof filter !== "object") {
continue
}
for (const columnKey of Object.keys(filter)) {
const possibleKeys = [columnKey, db.removeKeyNumbering(columnKey)].map(
c => c.toLowerCase()
@ -53,8 +58,8 @@ export const removeInvalidFilters = (
}
export const getQueryableFields = async (
fields: string[],
table: Table
table: Table,
fields?: string[]
): Promise<string[]> => {
const extractTableFields = async (
table: Table,
@ -110,6 +115,9 @@ export const getQueryableFields = async (
"_id", // Querying by _id is always allowed, even if it's never part of the schema
]
if (fields == null) {
fields = Object.keys(table.schema)
}
result.push(...(await extractTableFields(table, fields, [table._id!])))
return result
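With the reordered signature, fields is now optional and defaults to every column in the table schema, with _id always included. A rough sketch of just that defaulting behaviour (ignoring the relationship traversal the real helper performs):

type MiniTable = { _id: string; schema: Record<string, { visible?: boolean }> }

// Default the queryable fields to the whole schema when none are supplied.
function queryableFieldsSketch(table: MiniTable, fields?: string[]): string[] {
  const requested = fields ?? Object.keys(table.schema)
  // _id is always queryable, even though it is never part of the schema
  return ["_id", ...requested]
}

// e.g. queryableFieldsSketch({ _id: "ta_1", schema: { name: {}, age: {} } })
// => ["_id", "name", "age"]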

View File

@ -1,10 +1,6 @@
import { db as dbCore, context } from "@budibase/backend-core"
import { db as dbCore, context, docIds } from "@budibase/backend-core"
import { Database, Row } from "@budibase/types"
import {
extractViewInfoFromID,
getRowParams,
isViewID,
} from "../../../db/utils"
import { extractViewInfoFromID, getRowParams } from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal"
import * as external from "./external"
@ -26,7 +22,7 @@ export async function getAllInternalRows(appId?: string) {
function pickApi(tableOrViewId: string) {
let tableId = tableOrViewId
if (isViewID(tableOrViewId)) {
if (docIds.isViewId(tableOrViewId)) {
tableId = extractViewInfoFromID(tableOrViewId).tableId
}
@ -37,13 +33,13 @@ function pickApi(tableOrViewId: string) {
}
export async function save(
tableOrViewId: string,
sourceId: string,
row: Row,
userId: string | undefined
) {
return pickApi(tableOrViewId).save(tableOrViewId, row, userId)
return pickApi(sourceId).save(sourceId, row, userId)
}
export async function find(tableOrViewId: string, rowId: string) {
return pickApi(tableOrViewId).find(tableOrViewId, rowId)
export async function find(sourceId: string, rowId: string) {
return pickApi(sourceId).find(sourceId, rowId)
}

View File

@ -1,9 +1,14 @@
import {
EmptyFilterOption,
LegacyFilter,
LogicalOperator,
Row,
RowSearchParams,
SearchFilterKey,
SearchResponse,
SortOrder,
Table,
ViewV2,
} from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./search/internal"
@ -12,9 +17,10 @@ import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { searchInputMapping } from "./search/utils"
import { features } from "@budibase/backend-core"
import { db, features } from "@budibase/backend-core"
import tracer from "dd-trace"
import { getQueryableFields, removeInvalidFilters } from "./queryUtils"
import { enrichSearchContext } from "../../../api/controllers/row/utils"
export { isValidFilter } from "../../../integrations/utils"
@ -32,11 +38,13 @@ function pickApi(tableId: any) {
}
export async function search(
options: RowSearchParams
options: RowSearchParams,
context?: Record<string, any>
): Promise<SearchResponse<Row>> {
return await tracer.trace("search", async span => {
span?.addTags({
tableId: options.tableId,
viewId: options.viewId,
query: options.query,
sort: options.sort,
sortOrder: options.sortOrder,
@ -48,20 +56,103 @@ export async function search(
countRows: options.countRows,
})
const isExternalTable = isExternalTableID(options.tableId)
options.query = dataFilters.cleanupQuery(options.query || {})
let source: Table | ViewV2
let table: Table
if (options.viewId) {
source = await sdk.views.get(options.viewId)
table = await sdk.views.getTable(source)
} else if (options.tableId) {
source = await sdk.tables.getTable(options.tableId)
table = source
} else {
throw new Error(`Must supply either a view ID or a table ID`)
}
const isExternalTable = isExternalTableID(table._id!)
if (options.query) {
const visibleFields = (
options.fields || Object.keys(table.schema)
).filter(field => table.schema[field]?.visible !== false)
const queryableFields = await getQueryableFields(table, visibleFields)
options.query = removeInvalidFilters(options.query, queryableFields)
} else {
options.query = {}
}
// need to make sure filters are in the correct shape before checking for a view
options = searchInputMapping(table, options)
if (options.viewId) {
// Delete extraneous search params that cannot be overridden
delete options.query.onEmptyFilter
const view = source as ViewV2
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let viewQuery = dataFilters.buildQueryLegacy(view.query) || {}
delete viewQuery?.onEmptyFilter
const sqsEnabled = await features.flags.isEnabled("SQS")
const supportsLogicalOperators =
isExternalTableID(view.tableId) || sqsEnabled
if (!supportsLogicalOperators) {
// In the unlikely event that a grouped filter is used in a non-SQS environment,
// it needs to be ignored entirely
let queryFilters: LegacyFilter[] = Array.isArray(view.query)
? view.query
: []
delete options.query.onEmptyFilter
// Extract existing fields
const existingFields =
queryFilters
?.filter(filter => filter.field)
.map(filter => db.removeKeyNumbering(filter.field)) || []
viewQuery ??= {}
// Carry over filters for unused fields
Object.keys(options.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(options.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
viewQuery![operator]![field] = options.query[operator]![field]
}
})
})
options.query = viewQuery
} else {
const conditions = viewQuery ? [viewQuery] : []
options.query = {
$and: {
conditions: [...conditions, options.query],
},
}
if (viewQuery.onEmptyFilter) {
options.query.onEmptyFilter = viewQuery.onEmptyFilter
}
}
}
if (context) {
options.query = await enrichSearchContext(options.query, context)
}
options.query = dataFilters.cleanupQuery(options.query)
options.query = dataFilters.fixupFilterArrays(options.query)
span?.addTags({
span.addTags({
cleanedQuery: options.query,
isExternalTable,
})
if (
!dataFilters.hasFilters(options.query) &&
options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE
) {
span?.addTags({ emptyQuery: true })
span.addTags({ emptyQuery: true })
return {
rows: [],
}
@ -71,34 +162,19 @@ export async function search(
options.sortOrder = options.sortOrder.toLowerCase() as SortOrder
}
const table = await sdk.tables.getTable(options.tableId)
options = searchInputMapping(table, options)
if (options.query) {
const tableFields = Object.keys(table.schema).filter(
f => table.schema[f].visible !== false
)
const queriableFields = await getQueryableFields(
options.fields?.filter(f => tableFields.includes(f)) ?? tableFields,
table
)
options.query = removeInvalidFilters(options.query, queriableFields)
}
let result: SearchResponse<Row>
if (isExternalTable) {
span?.addTags({ searchType: "external" })
result = await external.search(options, table)
result = await external.search(options, source)
} else if (await features.flags.isEnabled("SQS")) {
span?.addTags({ searchType: "sqs" })
result = await internal.sqs.search(options, table)
result = await internal.sqs.search(options, source)
} else {
span?.addTags({ searchType: "lucene" })
result = await internal.lucene.search(options, table)
result = await internal.lucene.search(options, source)
}
span?.addTags({
span.addTags({
foundRows: result.rows.length,
totalRows: result.totalRows,
})
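The view/query merge above has two shapes: when logical operators are supported (external tables or SQS) the saved view query and the ephemeral request query are nested under a single $and so neither can loosen the other, while on the legacy path request filters are only carried over for fields the view does not already filter on. A simplified standalone sketch of those two branches, using a loose filter type rather than the real SearchFilters:

type MiniFilters = Record<string, any>

function mergeViewAndRequestQuery(
  viewQuery: MiniFilters,
  requestQuery: MiniFilters,
  supportsLogicalOperators: boolean
): MiniFilters {
  if (supportsLogicalOperators) {
    // nest both queries under $and so saved view filters cannot be overridden
    return { $and: { conditions: [viewQuery, requestQuery] } }
  }
  // legacy path: only keep request filters on fields the view doesn't already filter
  const savedFields = new Set(
    Object.values(viewQuery).flatMap(block =>
      block && typeof block === "object" ? Object.keys(block) : []
    )
  )
  const merged: MiniFilters = { ...viewQuery }
  for (const [operator, block] of Object.entries(requestQuery)) {
    if (!block || typeof block !== "object") {
      continue
    }
    for (const [field, value] of Object.entries(block)) {
      if (!savedFields.has(field)) {
        merged[operator] = { ...(merged[operator] ?? {}), [field]: value }
      }
    }
  }
  return merged
}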

View File

@ -9,6 +9,7 @@ import {
SortJson,
SortOrder,
Table,
ViewV2,
} from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external"
@ -60,9 +61,8 @@ function getPaginationAndLimitParameters(
export async function search(
options: RowSearchParams,
table: Table
source: Table | ViewV2
): Promise<SearchResponse<Row>> {
const { tableId } = options
const { countRows, paginate, query, ...params } = options
const { limit } = params
let bookmark =
@ -106,16 +106,15 @@ export async function search(
includeSqlRelationships: IncludeRelationship.INCLUDE,
}
const [{ rows, rawResponseSize }, totalRows] = await Promise.all([
handleRequest(Operation.READ, tableId, parameters),
handleRequest(Operation.READ, source, parameters),
countRows
? handleRequest(Operation.COUNT, tableId, parameters)
? handleRequest(Operation.COUNT, source, parameters)
: Promise.resolve(undefined),
])
let processed = await outputProcessing(table, rows, {
let processed = await outputProcessing(source, rows, {
preserveLinks: true,
squash: true,
fromViewId: options.viewId,
})
let hasNextPage = false
@ -128,10 +127,13 @@ export async function search(
}
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_EXTERNAL_COLUMNS]
processed = processed.map((r: any) => pick(r, fields))
}
const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_EXTERNAL_COLUMNS]
processed = processed.map((r: any) => pick(r, allowedFields))
// need wrapper object for bookmarks etc when paginating
const response: SearchResponse<Row> = { rows: processed, hasNextPage }
@ -201,7 +203,7 @@ export async function exportRows(
}
let result = await search(
{ tableId, query: requestQuery, sort, sortOrder },
{ tableId: table._id!, query: requestQuery, sort, sortOrder },
table
)
let rows: Row[] = []
@ -257,10 +259,10 @@ export async function exportRows(
}
export async function fetch(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, {
const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
const table = await sdk.tables.getTable(tableId)
return await outputProcessing(table, response.rows, {
preserveLinks: true,
squash: true,
@ -268,7 +270,8 @@ export async function fetch(tableId: string): Promise<Row[]> {
}
export async function fetchRaw(tableId: string): Promise<Row[]> {
const response = await handleRequest(Operation.READ, tableId, {
const table = await sdk.tables.getTable(tableId)
const response = await handleRequest(Operation.READ, table, {
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
return response.rows

View File

@ -8,21 +8,29 @@ import {
SortType,
Table,
User,
ViewV2,
} from "@budibase/types"
import { getGlobalUsersFromMetadata } from "../../../../../utilities/global"
import { outputProcessing } from "../../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import sdk from "../../../../"
export async function search(
options: RowSearchParams,
table: Table
source: Table | ViewV2
): Promise<SearchResponse<Row>> {
const { tableId } = options
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { paginate, query } = options
const params: RowSearchParams = {
tableId: options.tableId,
viewId: options.viewId,
sort: options.sort,
sortOrder: options.sortOrder,
sortType: options.sortType,
@ -50,18 +58,20 @@ export async function search(
// Enrich search results with relationships
if (response.rows && response.rows.length) {
// enrich with global users if from users table
if (tableId === InternalTables.USER_METADATA) {
if (table._id === InternalTables.USER_METADATA) {
response.rows = await getGlobalUsersFromMetadata(response.rows as User[])
}
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, fields))
}
const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
response.rows = response.rows.map((r: any) => pick(r, allowedFields))
response.rows = await outputProcessing(table, response.rows, {
response.rows = await outputProcessing(source, response.rows, {
squash: true,
fromViewId: options.viewId,
})
}

View File

@ -1,4 +1,5 @@
import {
Aggregation,
Datasource,
DocumentType,
FieldType,
@ -15,6 +16,7 @@ import {
SortType,
SqlClient,
Table,
ViewV2,
} from "@budibase/types"
import {
buildInternalRelationships,
@ -44,10 +46,12 @@ import {
import {
dataFilters,
helpers,
isInternalColumnName,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { isSearchingByRowID } from "../utils"
import tracer from "dd-trace"
import { cloneDeep } from "lodash"
const builder = new sql.Sql(SqlClient.SQL_LITE)
const SQLITE_COLUMN_LIMIT = 2000
@ -55,11 +59,34 @@ const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`)
const MISSING_TABLE_REGX = new RegExp(`no such table: .+`)
const DUPLICATE_COLUMN_REGEX = new RegExp(`duplicate column name: .+`)
function buildInternalFieldList(
table: Table,
async function buildInternalFieldList(
source: Table | ViewV2,
tables: Table[],
opts?: { relationships?: RelationshipsJson[] }
opts?: { relationships?: RelationshipsJson[]; allowedFields?: string[] }
) {
const { relationships, allowedFields } = opts || {}
let schemaFields: string[] = []
if (sdk.views.isView(source)) {
schemaFields = Object.keys(helpers.views.basicFields(source)).filter(
key => source.schema?.[key]?.visible !== false
)
} else {
schemaFields = Object.keys(source.schema).filter(
key => source.schema[key].visible !== false
)
}
if (allowedFields) {
schemaFields = schemaFields.filter(field => allowedFields.includes(field))
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
let fieldList: string[] = []
const getJunctionFields = (relatedTable: Table, fields: string[]) => {
const junctionFields: string[] = []
@ -70,13 +97,18 @@ function buildInternalFieldList(
})
return junctionFields
}
fieldList = fieldList.concat(
PROTECTED_INTERNAL_COLUMNS.map(col => `${table._id}.${col}`)
)
for (let key of Object.keys(table.schema)) {
if (sdk.tables.isTable(source)) {
for (const key of PROTECTED_INTERNAL_COLUMNS) {
if (allowedFields && !allowedFields.includes(key)) {
continue
}
fieldList.push(`${table._id}.${key}`)
}
}
for (let key of schemaFields) {
const col = table.schema[key]
const isRelationship = col.type === FieldType.LINK
if (!opts?.relationships && isRelationship) {
if (!relationships && isRelationship) {
continue
}
if (!isRelationship) {
@ -87,7 +119,9 @@ function buildInternalFieldList(
if (!relatedTable) {
continue
}
const relatedFields = buildInternalFieldList(relatedTable, tables).concat(
const relatedFields = (
await buildInternalFieldList(relatedTable, tables)
).concat(
getJunctionFields(relatedTable, ["doc1.fieldName", "doc2.fieldName"])
)
// break out of the loop if we have reached the max number of columns
@ -128,15 +162,22 @@ function cleanupFilters(
// generate a map of all possible column names (these can be duplicated across tables,
// but the mapping for each name will always be the same)
const userColumnMap: Record<string, string> = {}
allTables.forEach(table =>
Object.keys(table.schema).forEach(
key => (userColumnMap[key] = mapToUserColumn(key))
)
)
for (const table of allTables) {
for (const key of Object.keys(table.schema)) {
if (isInternalColumnName(key)) {
continue
}
userColumnMap[key] = mapToUserColumn(key)
}
}
// update the keys of filters to manage user columns
const keyInAnyTable = (key: string): boolean =>
allTables.some(table => table.schema[key])
const keyInAnyTable = (key: string): boolean => {
if (isInternalColumnName(key)) {
return false
}
return allTables.some(table => table.schema[key])
}
const splitter = new dataFilters.ColumnSplitter(allTables)
@ -291,16 +332,23 @@ function resyncDefinitionsRequired(status: number, message: string) {
export async function search(
options: RowSearchParams,
table: Table,
source: Table | ViewV2,
opts?: { retrying?: boolean }
): Promise<SearchResponse<Row>> {
let { paginate, query, ...params } = options
let { paginate, query, ...params } = cloneDeep(options)
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table
if (table?._id) {
table = allTablesMap[table?._id]
if (table._id) {
table = allTablesMap[table._id]
}
if (!table) {
throw new Error("Unable to find table")
@ -312,6 +360,23 @@ export async function search(
...cleanupFilters(query, table, allTables),
documentType: DocumentType.ROW,
}
let aggregations: Aggregation[] = []
if (sdk.views.isView(source)) {
const calculationFields = helpers.views.calculationFields(source)
for (const [key, field] of Object.entries(calculationFields)) {
if (options.fields && !options.fields.includes(key)) {
continue
}
aggregations.push({
name: key,
field: mapToUserColumn(field.field),
calculationType: field.calculationType,
})
}
}
const request: QueryJson = {
endpoint: {
// not important, we query ourselves
@ -327,7 +392,11 @@ export async function search(
columnPrefix: USER_COLUMN_PREFIX,
},
resource: {
fields: buildInternalFieldList(table, allTables, { relationships }),
fields: await buildInternalFieldList(source, allTables, {
relationships,
allowedFields: options.fields,
}),
aggregations,
},
relationships,
}
@ -372,7 +441,7 @@ export async function search(
// make sure JSON columns are corrected
const processed = builder.convertJsonStringColumns<Row>(
table,
await sqlOutputProcessing(rows, table!, allTablesMap, relationships, {
await sqlOutputProcessing(rows, source, allTablesMap, relationships, {
sqs: true,
})
)
@ -388,17 +457,18 @@ export async function search(
}
// get the rows
let finalRows = await outputProcessing(table, processed, {
let finalRows = await outputProcessing(source, processed, {
preserveLinks: true,
squash: true,
fromViewId: options.viewId,
})
// check if we need to pick specific fields out of each row
if (options.fields) {
const fields = [...options.fields, ...PROTECTED_INTERNAL_COLUMNS]
finalRows = finalRows.map((r: any) => pick(r, fields))
}
const visibleFields =
options.fields ||
Object.keys(source.schema || {}).filter(
key => source.schema?.[key].visible !== false
)
const allowedFields = [...visibleFields, ...PROTECTED_INTERNAL_COLUMNS]
finalRows = finalRows.map((r: any) => pick(r, allowedFields))
const response: SearchResponse<Row> = {
rows: finalRows,
@ -419,7 +489,7 @@ export async function search(
const msg = typeof err === "string" ? err : err.message
if (!opts?.retrying && resyncDefinitionsRequired(err.status, msg)) {
await sdk.tables.sqs.syncDefinition()
return search(options, table, { retrying: true })
return search(options, source, { retrying: true })
}
// previously the internal table didn't error when a column didn't exist in search
if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) {

View File

@ -90,10 +90,8 @@ describe.each([tableWithUserCol, tableWithUsersCol])(
})
it("shouldn't error if no query supplied", () => {
const params: any = {
tableId,
}
const output = searchInputMapping(col, params)
// @ts-expect-error - intentionally passing in a bad type
const output = searchInputMapping(col, { tableId })
expect(output.query).toBeUndefined()
})
}

View File

@ -11,7 +11,7 @@ import {
RowSearchParams,
} from "@budibase/types"
import { db as dbCore, context } from "@budibase/backend-core"
import { utils } from "@budibase/shared-core"
import { utils, dataFilters } from "@budibase/shared-core"
export async function paginatedSearch(
query: SearchFilters,
@ -31,13 +31,13 @@ export async function fullSearch(
function findColumnInQueries(
column: string,
options: RowSearchParams,
filters: SearchFilters,
callback: (filter: any) => any
) {
if (!options.query) {
if (!filters) {
return
}
for (let filterBlock of Object.values(options.query)) {
for (let filterBlock of Object.values(filters)) {
if (typeof filterBlock !== "object") {
continue
}
@ -49,8 +49,8 @@ function findColumnInQueries(
}
}
function userColumnMapping(column: string, options: RowSearchParams) {
findColumnInQueries(column, options, (filterValue: any): any => {
function userColumnMapping(column: string, filters: SearchFilters) {
findColumnInQueries(column, filters, (filterValue: any): any => {
const isArray = Array.isArray(filterValue),
isString = typeof filterValue === "string"
if (!isString && !isArray) {
@ -83,28 +83,32 @@ function userColumnMapping(column: string, options: RowSearchParams) {
// maps through the search parameters to check if any of the inputs are invalid
// based on the table schema, converts them to something that is valid.
export function searchInputMapping(table: Table, options: RowSearchParams) {
if (!table?.schema) {
return options
}
for (let [key, column] of Object.entries(table.schema)) {
switch (column.type) {
case FieldType.BB_REFERENCE_SINGLE: {
const subtype = column.subtype
switch (subtype) {
case BBReferenceFieldSubType.USER:
userColumnMapping(key, options)
break
// need an internal function to loop over filters, because searchInputMapping itself takes the full options
function checkFilters(filters: SearchFilters) {
for (let [key, column] of Object.entries(table.schema || {})) {
switch (column.type) {
case FieldType.BB_REFERENCE_SINGLE: {
const subtype = column.subtype
switch (subtype) {
case BBReferenceFieldSubType.USER:
userColumnMapping(key, filters)
break
default:
utils.unreachable(subtype)
default:
utils.unreachable(subtype)
}
break
}
case FieldType.BB_REFERENCE: {
userColumnMapping(key, filters)
break
}
break
}
case FieldType.BB_REFERENCE: {
userColumnMapping(key, options)
break
}
}
return dataFilters.recurseLogicalOperators(filters, checkFilters)
}
if (options.query) {
options.query = checkFilters(options.query)
}
return options
}

View File

@ -203,7 +203,7 @@ describe("query utils", () => {
},
})
const result = await getQueryableFields(Object.keys(table.schema), table)
const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name", "age"])
})
@ -216,7 +216,7 @@ describe("query utils", () => {
},
})
const result = await getQueryableFields(Object.keys(table.schema), table)
const result = await getQueryableFields(table)
expect(result).toEqual(["_id", "name"])
})
@ -245,7 +245,7 @@ describe("query utils", () => {
})
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table)
return getQueryableFields(table)
})
expect(result).toEqual([
"_id",
@ -282,7 +282,7 @@ describe("query utils", () => {
})
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table)
return getQueryableFields(table)
})
expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
})
@ -313,7 +313,7 @@ describe("query utils", () => {
})
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table)
return getQueryableFields(table)
})
expect(result).toEqual(["_id", "name"])
})
@ -381,7 +381,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table)
return getQueryableFields(table)
})
expect(result).toEqual([
"_id",
@ -398,7 +398,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 1 table", async () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux1.schema), aux1)
return getQueryableFields(aux1)
})
expect(result).toEqual([
"_id",
@ -420,7 +420,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux 2 table", async () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux2.schema), aux2)
return getQueryableFields(aux2)
})
expect(result).toEqual([
"_id",
@ -474,7 +474,7 @@ describe("query utils", () => {
it("includes nested relationship fields from main table", async () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(table.schema), table)
return getQueryableFields(table)
})
expect(result).toEqual([
"_id",
@ -488,7 +488,7 @@ describe("query utils", () => {
it("includes nested relationship fields from aux table", async () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(Object.keys(aux.schema), aux)
return getQueryableFields(aux)
})
expect(result).toEqual([
"_id",

View File

@ -33,7 +33,7 @@ describe("validate", () => {
it("should accept empty values", async () => {
const row = {}
const table = getTable()
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
expect(output.errors).toEqual({})
})
@ -43,7 +43,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}`,
}
const table = getTable()
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
})
@ -52,7 +52,7 @@ describe("validate", () => {
time: `${hour()}:${minute()}:${second()}`,
}
const table = getTable()
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
})
@ -67,7 +67,7 @@ describe("validate", () => {
table.schema.time.constraints = {
presence: true,
}
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ['"time" is not a valid time'] })
})
@ -91,7 +91,7 @@ describe("validate", () => {
`${generator.integer({ min: 11, max: 23 })}:${minute()}`,
])("should accept values after config value (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
})
@ -100,7 +100,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before config value (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"],
@ -125,7 +125,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 12 })}:${minute()}`,
])("should accept values before config value (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
})
@ -134,7 +134,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after config value (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no later than 15:16:17"],
@ -156,7 +156,7 @@ describe("validate", () => {
"should accept values in range (%s)",
async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
}
)
@ -166,7 +166,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"],
@ -178,7 +178,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no later than 15:00"],
@ -199,7 +199,7 @@ describe("validate", () => {
"should accept values in range (%s)",
async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
}
)
@ -208,7 +208,7 @@ describe("validate", () => {
"should reject values out range (%s)",
async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no later than 10:00"],
@ -226,7 +226,7 @@ describe("validate", () => {
table.schema.time.constraints = {
presence: true,
}
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] })
})
@ -237,7 +237,7 @@ describe("validate", () => {
table.schema.time.constraints = {
presence: true,
}
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({ time: ["can't be blank"] })
})
@ -257,7 +257,7 @@ describe("validate", () => {
"should accept values in range (%s)",
async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
}
)
@ -267,7 +267,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"],
@ -279,7 +279,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no later than 15:00"],
@ -301,7 +301,7 @@ describe("validate", () => {
"should accept values in range (%s)",
async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(true)
}
)
@ -311,7 +311,7 @@ describe("validate", () => {
`${generator.integer({ min: 0, max: 9 })}:${minute()}`,
])("should reject values before range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no earlier than 10:00"],
@ -323,7 +323,7 @@ describe("validate", () => {
`${generator.integer({ min: 16, max: 23 })}:${minute()}`,
])("should reject values after range (%s)", async time => {
const row = { time }
const output = await validate({ table, tableId: table._id!, row })
const output = await validate({ source: table, row })
expect(output.valid).toBe(false)
expect(output.errors).toEqual({
time: ["must be no later than 15:00"],

View File

@ -13,16 +13,15 @@ import {
TableSchema,
SqlClient,
ArrayOperator,
ViewV2,
} from "@budibase/types"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Format } from "../../../api/controllers/view/exporters"
import sdk from "../.."
import {
extractViewInfoFromID,
isRelationshipColumn,
isViewID,
} from "../../../db/utils"
import { extractViewInfoFromID, isRelationshipColumn } from "../../../db/utils"
import { isSQL } from "../../../integrations/utils"
import { docIds, sql } from "@budibase/backend-core"
import { getTableFromSource } from "../../../api/controllers/row/utils"
const SQL_CLIENT_SOURCE_MAP: Record<SourceName, SqlClient | undefined> = {
[SourceName.POSTGRES]: SqlClient.POSTGRES,
@ -58,8 +57,12 @@ export function getSQLClient(datasource: Datasource): SqlClient {
export function processRowCountResponse(
response: DatasourcePlusQueryResponse
): number {
if (response && response.length === 1 && "total" in response[0]) {
const total = response[0].total
if (
response &&
response.length === 1 &&
sql.COUNT_FIELD_NAME in response[0]
) {
const total = response[0][sql.COUNT_FIELD_NAME]
return typeof total === "number" ? total : parseInt(total)
} else {
throw new Error("Unable to count rows in query - no count response")
@ -142,37 +145,27 @@ function isForeignKey(key: string, table: Table) {
}
export async function validate({
tableId,
source,
row,
table,
}: {
tableId?: string
source: Table | ViewV2
row: Row
table?: Table
}): Promise<{
valid: boolean
errors: Record<string, any>
}> {
let fetchedTable: Table | undefined
if (!table && tableId) {
fetchedTable = await sdk.tables.getTable(tableId)
} else if (table) {
fetchedTable = table
}
if (fetchedTable === undefined) {
throw new Error("Unable to fetch table for validation")
}
const table = await getTableFromSource(source)
const errors: Record<string, any> = {}
const disallowArrayTypes = [
FieldType.ATTACHMENT_SINGLE,
FieldType.BB_REFERENCE_SINGLE,
]
for (let fieldName of Object.keys(fetchedTable.schema)) {
const column = fetchedTable.schema[fieldName]
for (let fieldName of Object.keys(table.schema)) {
const column = table.schema[fieldName]
const constraints = cloneDeep(column.constraints)
const type = column.type
// foreign keys are likely to be enriched
if (isForeignKey(fieldName, fetchedTable)) {
if (isForeignKey(fieldName, table)) {
continue
}
// formulas shouldn't be validated, data will be deleted anyway
@ -323,7 +316,7 @@ export function isArrayFilter(operator: any): operator is ArrayOperator {
}
export function tryExtractingTableAndViewId(tableOrViewId: string) {
if (isViewID(tableOrViewId)) {
if (docIds.isViewId(tableOrViewId)) {
return {
tableId: extractViewInfoFromID(tableOrViewId).tableId,
viewId: tableOrViewId,
@ -332,3 +325,10 @@ export function tryExtractingTableAndViewId(tableOrViewId: string) {
return { tableId: tableOrViewId }
}
export function getSource(tableOrViewId: string) {
if (docIds.isViewId(tableOrViewId)) {
return sdk.views.get(tableOrViewId)
}
return sdk.tables.getTable(tableOrViewId)
}

View File

@ -1,5 +1,6 @@
import { Table, TableSourceType } from "@budibase/types"
import { isExternalTableID } from "../../../integrations/utils"
import { docIds } from "@budibase/backend-core"
export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
if (opts.table && opts.table.sourceType === TableSourceType.EXTERNAL) {
@ -9,3 +10,7 @@ export function isExternal(opts: { table?: Table; tableId?: string }): boolean {
}
return false
}
export function isTable(table: any): table is Table {
return table._id && docIds.isTableId(table._id)
}

View File

@ -1,5 +1,6 @@
import {
FieldType,
PermissionLevel,
RelationSchemaField,
RenameColumn,
Table,
@ -9,7 +10,7 @@ import {
ViewV2ColumnEnriched,
ViewV2Enriched,
} from "@budibase/types"
import { HTTPError } from "@budibase/backend-core"
import { context, docIds, HTTPError, roles } from "@budibase/backend-core"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@ -22,6 +23,7 @@ import { isExternalTableID } from "../../../integrations/utils"
import * as internal from "./internal"
import * as external from "./external"
import sdk from "../../../sdk"
import { PermissionUpdateType, updatePermissionOnRole } from "../permissions"
function pickApi(tableId: any) {
if (isExternalTableID(tableId)) {
@ -40,16 +42,85 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
return pickApi(tableId).getEnriched(viewId)
}
export async function getTable(view: string | ViewV2): Promise<Table> {
const viewId = typeof view === "string" ? view : view.id
const cached = context.getTableForView(viewId)
if (cached) {
return cached
}
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
context.setTableForView(viewId, table)
return table
}
export function isView(view: any): view is ViewV2 {
return view.id && docIds.isViewId(view.id) && view.version === 2
}
async function guardCalculationViewSchema(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const calculationFields = helpers.views.calculationFields(view)
for (const calculationFieldName of Object.keys(calculationFields)) {
const schema = calculationFields[calculationFieldName]
const targetSchema = table.schema[schema.field]
if (!targetSchema) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which does not exist in the table schema`,
400
)
}
if (!helpers.schema.isNumeric(targetSchema)) {
throw new HTTPError(
`Calculation field "${calculationFieldName}" references field "${schema.field}" which is not a numeric field`,
400
)
}
}
const groupByFields = helpers.views.basicFields(view)
for (const groupByFieldName of Object.keys(groupByFields)) {
const targetSchema = table.schema[groupByFieldName]
if (!targetSchema) {
throw new HTTPError(
`Group by field "${groupByFieldName}" does not exist in the table schema`,
400
)
}
}
}
async function guardViewSchema(
tableId: string,
view: Omit<ViewV2, "id" | "version">
) {
const viewSchema = view.schema || {}
const table = await sdk.tables.getTable(tableId)
if (helpers.views.isCalculationView(view)) {
await guardCalculationViewSchema(table, view)
}
await checkReadonlyFields(table, view)
checkRequiredFields(table, view)
checkDisplayField(view)
}
async function checkReadonlyFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const viewSchema = view.schema || {}
for (const field of Object.keys(viewSchema)) {
const tableSchemaField = table.schema[field]
if (!tableSchemaField) {
const viewFieldSchema = viewSchema[field]
if (helpers.views.isCalculationField(viewFieldSchema)) {
continue
}
const tableFieldSchema = table.schema[field]
if (!tableFieldSchema) {
throw new HTTPError(
`Field "${field}" is not valid for the requested table`,
400
@ -65,18 +136,33 @@ async function guardViewSchema(
}
}
}
}
const existingView =
table?.views && (table.views[view.name] as ViewV2 | undefined)
function checkDisplayField(view: Omit<ViewV2, "id" | "version">) {
if (view.primaryDisplay) {
const viewSchemaField = view.schema?.[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
}
function checkRequiredFields(
table: Table,
view: Omit<ViewV2, "id" | "version">
) {
const existingView = table.views?.[view.name] as ViewV2 | undefined
for (const field of Object.values(table.schema)) {
if (!helpers.schema.isRequired(field.constraints)) {
continue
}
const viewSchemaField = viewSchema[field.name]
const existingViewSchema =
existingView?.schema && existingView.schema[field.name]
const viewSchemaField = view.schema?.[field.name]
const existingViewSchema = existingView?.schema?.[field.name]
if (!viewSchemaField && !existingViewSchema?.visible) {
// Supporting existing configs with required columns but hidden in views
continue
@ -89,24 +175,16 @@ async function guardViewSchema(
)
}
if (viewSchemaField.readonly) {
if (
helpers.views.isBasicViewField(viewSchemaField) &&
viewSchemaField.readonly
) {
throw new HTTPError(
`You can't make "${field.name}" readonly because it is a required field.`,
400
)
}
}
if (view.primaryDisplay) {
const viewSchemaField = viewSchema[view.primaryDisplay]
if (!viewSchemaField?.visible) {
throw new HTTPError(
`You can't hide "${view.primaryDisplay}" because it is the display column.`,
400
)
}
}
}
export async function create(
@ -115,7 +193,30 @@ export async function create(
): Promise<ViewV2> {
await guardViewSchema(tableId, viewRequest)
return pickApi(tableId).create(tableId, viewRequest)
const view = await pickApi(tableId).create(tableId, viewRequest)
// Set permissions to be the same as the table
const tablePerms = await sdk.permissions.getResourcePerms(tableId)
const readRole = tablePerms[PermissionLevel.READ]?.role
const writeRole = tablePerms[PermissionLevel.WRITE]?.role
await updatePermissionOnRole(
{
roleId: readRole || roles.BUILTIN_ROLE_IDS.BASIC,
resourceId: view.id,
level: PermissionLevel.READ,
},
PermissionUpdateType.ADD
)
await updatePermissionOnRole(
{
roleId: writeRole || roles.BUILTIN_ROLE_IDS.BASIC,
resourceId: view.id,
level: PermissionLevel.WRITE,
},
PermissionUpdateType.ADD
)
return view
}
export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
@ -157,19 +258,12 @@ export async function enrichSchema(
view: ViewV2,
tableSchema: TableSchema
): Promise<ViewV2Enriched> {
const tableCache: Record<string, Table> = {}
async function populateRelTableSchema(
tableId: string,
viewFields: Record<string, RelationSchemaField>
) {
if (!tableCache[tableId]) {
tableCache[tableId] = await sdk.tables.getTable(tableId)
}
const relTable = tableCache[tableId]
const relTable = await sdk.tables.getTable(tableId)
const result: Record<string, ViewV2ColumnEnriched> = {}
for (const relTableFieldName of Object.keys(relTable.schema)) {
const relTableField = relTable.schema[relTableFieldName]
if ([FieldType.LINK, FieldType.FORMULA].includes(relTableField.type)) {
@ -198,15 +292,24 @@ export async function enrichSchema(
const viewSchema = view.schema || {}
const anyViewOrder = Object.values(viewSchema).some(ui => ui.order != null)
for (const key of Object.keys(tableSchema).filter(
k => tableSchema[k].visible !== false
)) {
const visibleSchemaFields = Object.keys(viewSchema).filter(key => {
if (helpers.views.isCalculationField(viewSchema[key])) {
return viewSchema[key].visible !== false
}
return key in tableSchema && tableSchema[key].visible !== false
})
const visibleTableFields = Object.keys(tableSchema).filter(
key => tableSchema[key].visible !== false
)
const visibleFields = new Set([...visibleSchemaFields, ...visibleTableFields])
for (const key of visibleFields) {
// if nothing specified in view, then it is not visible
const ui = viewSchema[key] || { visible: false }
schema[key] = {
...tableSchema[key],
...ui,
order: anyViewOrder ? ui?.order ?? undefined : tableSchema[key].order,
order: anyViewOrder ? ui?.order ?? undefined : tableSchema[key]?.order,
columns: undefined,
}
@ -218,10 +321,7 @@ export async function enrichSchema(
}
}
return {
...view,
schema: schema,
}
return { ...view, schema }
}
export function syncSchema(

View File

@ -130,6 +130,26 @@ export function getUserContextBindings(user: ContextUser) {
return {}
}
// Current user context for bindable search
const { _id, _rev, firstName, lastName, email, status, roleId } = user
return { _id, _rev, firstName, lastName, email, status, roleId }
const {
_id,
_rev,
firstName,
lastName,
email,
status,
roleId,
globalId,
userId,
} = user
return {
_id,
_rev,
firstName,
lastName,
email,
status,
roleId,
globalId,
userId,
}
}

View File

@ -7,11 +7,11 @@ import {
BulkImportRequest,
BulkImportResponse,
SearchRowResponse,
RowSearchParams,
DeleteRows,
DeleteRow,
PaginatedSearchRowResponse,
RowExportFormat,
SearchRowRequest,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@ -136,7 +136,7 @@ export class RowAPI extends TestAPI {
)
}
search = async <T extends RowSearchParams>(
search = async <T extends SearchRowRequest>(
sourceId: string,
params?: T,
expectations?: Expectations

View File

@ -74,7 +74,7 @@ class Orchestrator {
private job: Job
private loopStepOutputs: LoopStep[]
private stopped: boolean
private executionOutput: AutomationContext
private executionOutput: Omit<AutomationContext, "stepsByName" | "stepsById">
constructor(job: AutomationJob) {
let automation = job.data.automation
@ -91,6 +91,7 @@ class Orchestrator {
// step zero is never used as the template string is zero indexed for customer facing
this.context = {
steps: [{}],
stepsById: {},
stepsByName: {},
trigger: triggerOutput,
}
@ -457,8 +458,9 @@ class Orchestrator {
inputs: steps[stepToLoopIndex].inputs,
})
this.context.stepsById[steps[stepToLoopIndex].id] = tempOutput
const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id
this.context.stepsByName![stepName] = tempOutput
this.context.stepsByName[stepName] = tempOutput
this.context.steps[this.context.steps.length] = tempOutput
this.context.steps = this.context.steps.filter(
item => !item.hasOwnProperty.call(item, "currentItem")
@ -517,7 +519,10 @@ class Orchestrator {
Object.entries(filter).forEach(([_, value]) => {
Object.entries(value).forEach(([field, _]) => {
const updatedField = field.replace("{{", "{{ literal ")
const fromContext = processStringSync(updatedField, this.context)
const fromContext = processStringSync(
updatedField,
this.processContext(this.context)
)
toFilter[field] = fromContext
})
})
@ -563,9 +568,9 @@ class Orchestrator {
}
const stepFn = await this.getStepFunctionality(step.stepId)
let inputs = await this.addContextAndProcess(
let inputs = await processObject(
originalStepInput,
this.context
this.processContext(this.context)
)
inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs)
@ -594,16 +599,16 @@ class Orchestrator {
return null
}
private async addContextAndProcess(inputs: any, context: any) {
private processContext(context: AutomationContext) {
const processContext = {
...context,
steps: {
...context.steps,
...context.stepsById,
...context.stepsByName,
},
}
return processObject(inputs, processContext)
return processContext
}
private handleStepOutput(
@ -623,6 +628,7 @@ class Orchestrator {
} else {
this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs)
this.context.steps[this.context.steps.length] = outputs
this.context.stepsById![step.id] = outputs
const stepName = step.name || step.id
this.context.stepsByName![stepName] = outputs
}

View File

@ -18,6 +18,7 @@ import {
RowAttachment,
Table,
User,
ViewV2,
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import {
@ -33,7 +34,11 @@ import {
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { processString } from "@budibase/string-templates"
import { isUserMetadataTable } from "../../api/controllers/row/utils"
import {
getTableFromSource,
isUserMetadataTable,
} from "../../api/controllers/row/utils"
import sdk from "../../sdk"
export * from "./utils"
export * from "./attachments"
@ -67,6 +72,7 @@ export async function processAutoColumn(
// check it's not the user table, or whether any of the processing options have been disabled
const shouldUpdateUserFields =
!isUserTable && !opts?.reprocessing && !opts?.noAutoRelationships && !noUser
let tableMutated = false
for (let [key, schema] of Object.entries(table.schema)) {
if (!schema.autocolumn) {
continue
@ -99,10 +105,17 @@ export async function processAutoColumn(
row[key] = schema.lastID + 1
schema.lastID++
table.schema[key] = schema
tableMutated = true
}
break
}
}
if (tableMutated) {
const db = context.getAppDB()
const resp = await db.put(table)
table._rev = resp.rev
}
}
async function processDefaultValues(table: Table, row: Row) {
@ -121,8 +134,10 @@ async function processDefaultValues(table: Table, row: Row) {
for (const [key, schema] of Object.entries(table.schema)) {
if ("default" in schema && schema.default != null && row[key] == null) {
const processed = await processString(schema.default, ctx)
const processed =
typeof schema.default === "string"
? await processString(schema.default, ctx)
: schema.default
try {
row[key] = coerce(processed, schema.type)
} catch (err: any) {
@ -169,11 +184,12 @@ export function coerce(row: any, type: string) {
*/
export async function inputProcessing(
userId: string | null | undefined,
table: Table,
source: Table | ViewV2,
row: Row,
opts?: AutoColumnProcessingOpts
) {
const clonedRow = cloneDeep(row)
const table = await getTableFromSource(source)
const dontCleanseKeys = ["type", "_id", "_rev", "tableId"]
for (const [key, value] of Object.entries(clonedRow)) {
@ -228,8 +244,7 @@ export async function inputProcessing(
await processAutoColumn(userId, table, clonedRow, opts)
await processDefaultValues(table, clonedRow)
return { table, row: clonedRow }
return clonedRow
}
/**
@ -242,14 +257,13 @@ export async function inputProcessing(
* @returns the enriched rows will be returned.
*/
export async function outputProcessing<T extends Row[] | Row>(
table: Table,
source: Table | ViewV2,
rows: T,
opts: {
squash?: boolean
preserveLinks?: boolean
fromRow?: Row
skipBBReferences?: boolean
fromViewId?: string
} = {
squash: true,
preserveLinks: false,
@ -264,6 +278,14 @@ export async function outputProcessing<T extends Row[] | Row>(
} else {
safeRows = rows
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// SQS returns the rows with full relationship contents
// attach any linked row information
let enriched = !opts.preserveLinks
@ -276,25 +298,25 @@ export async function outputProcessing<T extends Row[] | Row>(
opts.squash = true
}
enriched = await coreOutputProcessing(table, enriched, opts)
enriched = await coreOutputProcessing(source, enriched, opts)
if (opts.squash) {
enriched = await linkRows.squashLinks(table, enriched, {
fromViewId: opts?.fromViewId,
})
enriched = await linkRows.squashLinks(source, enriched)
}
return (wasArray ? enriched : enriched[0]) as T
}
/**
* This function is similar to the outputProcessing function above, it makes sure that all the provided
* rows are ready for output, but does not have enrichment for squash capabilities which can cause performance issues.
* outputProcessing should be used when responding from the API, while this should be used when internally processing
* rows for any reason (like part of view operations).
* This function is similar to the outputProcessing function above, it makes
* sure that all the provided rows are ready for output, but does not have
* enrichment for squash capabilities which can cause performance issues.
* outputProcessing should be used when responding from the API, while this
* should be used when internally processing rows for any reason (like part of
* view operations).
*/
export async function coreOutputProcessing(
table: Table,
source: Table | ViewV2,
rows: Row[],
opts: {
preserveLinks?: boolean
@ -305,6 +327,13 @@ export async function coreOutputProcessing(
skipBBReferences: false,
}
): Promise<Row[]> {
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
// process complex types: attachments, bb references...
for (const [property, column] of Object.entries(table.schema)) {
if (
@ -399,6 +428,25 @@ export async function coreOutputProcessing(
}
}
}
if (sdk.views.isView(source)) {
const calculationFields = Object.keys(
helpers.views.calculationFields(source)
)
// We ensure all calculation fields are returned as numbers. During the
// testing of this feature it was discovered that the COUNT operation
// returns a string for MySQL, MariaDB, and Postgres. But given that all
// calculation fields should be numbers, we blanket make sure of that
// here.
for (const key of calculationFields) {
for (const row of rows) {
if (typeof row[key] === "string") {
row[key] = parseFloat(row[key])
}
}
}
}
}
if (!isUserMetadataTable(table._id!)) {
@ -409,9 +457,18 @@ export async function coreOutputProcessing(
const tableFields = Object.keys(table.schema).filter(
f => table.schema[f].visible !== false
)
const fields = [...tableFields, ...protectedColumns].map(f =>
f.toLowerCase()
)
if (sdk.views.isView(source)) {
const aggregations = helpers.views.calculationFields(source)
for (const key of Object.keys(aggregations)) {
fields.push(key.toLowerCase())
}
}
for (const row of rows) {
for (const key of Object.keys(row)) {
if (!fields.includes(key.toLowerCase())) {
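
To make the intent of the outputProcessing/coreOutputProcessing split above concrete, a minimal sketch of how callers are expected to use the two functions after this change; the relative import path and the option values are illustrative assumptions, not taken from this diff.

import { Row, Table, ViewV2 } from "@budibase/types"
// import path assumed: the rowProcessor module edited above
import {
  coreOutputProcessing,
  outputProcessing,
} from "../../utilities/rowProcessor"

// API responses: full processing, including link squashing.
async function respondWithRows(source: Table | ViewV2, rows: Row[]) {
  return outputProcessing(source, rows, { preserveLinks: true, squash: true })
}

// Internal row handling (e.g. view operations): skips the squash enrichment.
async function processRowsInternally(source: Table | ViewV2, rows: Row[]) {
  return coreOutputProcessing(source, rows, { preserveLinks: true })
}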

View File

@ -65,7 +65,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferenceMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow)
const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReference).toHaveBeenCalledTimes(
1
@ -117,7 +117,7 @@ describe("rowProcessor - inputProcessing", () => {
processInputBBReferencesMock.mockResolvedValue(user)
const { row } = await inputProcessing(userId, table, newRow)
const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).toHaveBeenCalledTimes(
1
@ -164,7 +164,7 @@ describe("rowProcessor - inputProcessing", () => {
name: "Jack",
}
const { row } = await inputProcessing(userId, table, newRow)
const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({ ...newRow, user: undefined })
@ -207,7 +207,7 @@ describe("rowProcessor - inputProcessing", () => {
user: userValue,
}
const { row } = await inputProcessing(userId, table, newRow)
const row = await inputProcessing(userId, table, newRow)
if (userValue === undefined) {
// The 'user' field is omitted
@ -262,7 +262,7 @@ describe("rowProcessor - inputProcessing", () => {
user: "123",
}
const { row } = await inputProcessing(userId, table, newRow)
const row = await inputProcessing(userId, table, newRow)
expect(bbReferenceProcessor.processInputBBReferences).not.toHaveBeenCalled()
expect(row).toEqual({

View File

@ -148,9 +148,16 @@ export function parse(rows: Rows, table: Table): Rows {
Object.keys(row).forEach(columnName => {
const columnData = row[columnName]
if (columnName === "_id") {
parsedRow[columnName] = columnData
return
}
const schema = table.schema
if (!(columnName in schema)) {
// Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
// Objects can be present in the row data but not in the schema, so make
// sure we don't proceed in such a case
return
}

View File

@ -3,7 +3,7 @@ import {
BBReferenceFieldSubType,
FieldType,
FormulaType,
SearchFilter,
LegacyFilter,
SearchFilters,
SearchQueryFields,
ArrayOperator,
@ -19,9 +19,12 @@ import {
RangeOperator,
LogicalOperator,
isLogicalSearchOperator,
SearchFilterGroup,
FilterGroupLogicalOperator,
} from "@budibase/types"
import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
import { processSearchFilters } from "./utils"
import { deepGet, schema } from "./helpers"
import { isPlainObject, isEmpty } from "lodash"
import { decodeNonAscii } from "./helpers/schema"
@ -160,9 +163,6 @@ export function recurseSearchFilters(
* https://github.com/Budibase/budibase/issues/10118
*/
export const cleanupQuery = (query: SearchFilters) => {
if (!query) {
return query
}
for (let filterField of NoEmptyFilterStrings) {
if (!query[filterField]) {
continue
@ -304,10 +304,143 @@ export class ColumnSplitter {
}
/**
* Builds a JSON query from the filter structure generated in the builder
* Builds a JSON query fragment from a single LegacyFilter condition
* @param filter the builder filter structure
*/
export const buildQuery = (filter: SearchFilter[]) => {
const buildCondition = (expression: LegacyFilter) => {
// Filter body
let query: SearchFilters = {
string: {},
fuzzy: {},
range: {},
equal: {},
notEqual: {},
empty: {},
notEmpty: {},
contains: {},
notContains: {},
oneOf: {},
containsAny: {},
}
let { operator, field, type, value, externalType, onEmptyFilter } = expression
if (!operator || !field) {
return
}
const queryOperator = operator as SearchFilterOperator
const isHbs =
typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
// Parse all values into correct types
if (operator === "allOr") {
query.allOr = true
return
}
if (onEmptyFilter) {
query.onEmptyFilter = onEmptyFilter
return
}
// Default the value for noValue fields to ensure they are correctly added
// to the final query
if (queryOperator === "empty" || queryOperator === "notEmpty") {
value = null
}
if (
type === "datetime" &&
!isHbs &&
queryOperator !== "empty" &&
queryOperator !== "notEmpty"
) {
// Ensure date value is a valid date and parse into correct format
if (!value) {
return
}
try {
value = new Date(value).toISOString()
} catch (error) {
return
}
}
if (type === "number" && typeof value === "string" && !isHbs) {
if (queryOperator === "oneOf") {
value = value.split(",").map(item => parseFloat(item))
} else {
value = parseFloat(value)
}
}
if (type === "boolean") {
value = `${value}`?.toLowerCase() === "true"
}
if (
["contains", "notContains", "containsAny"].includes(
operator.toLocaleString()
) &&
type === "array" &&
typeof value === "string"
) {
value = value.split(",")
}
if (operator.toLocaleString().startsWith("range") && query.range) {
const minint =
SqlNumberTypeRangeMap[externalType as keyof typeof SqlNumberTypeRangeMap]
?.min || Number.MIN_SAFE_INTEGER
const maxint =
SqlNumberTypeRangeMap[externalType as keyof typeof SqlNumberTypeRangeMap]
?.max || Number.MAX_SAFE_INTEGER
if (!query.range[field]) {
query.range[field] = {
low: type === "number" ? minint : "0000-00-00T00:00:00.000Z",
high: type === "number" ? maxint : "9999-00-00T00:00:00.000Z",
}
}
if (operator === "rangeLow" && value != null && value !== "") {
query.range[field] = {
...query.range[field],
low: value,
}
} else if (operator === "rangeHigh" && value != null && value !== "") {
query.range[field] = {
...query.range[field],
high: value,
}
}
} else if (isLogicalSearchOperator(queryOperator)) {
// TODO
} else if (query[queryOperator] && operator !== "onEmptyFilter") {
if (type === "boolean") {
// Transform boolean filters to cope with null.
// "equals false" needs to be "not equals true"
// "not equals false" needs to be "equals true"
if (queryOperator === "equal" && value === false) {
query.notEqual = query.notEqual || {}
query.notEqual[field] = true
} else if (queryOperator === "notEqual" && value === false) {
query.equal = query.equal || {}
query.equal[field] = true
} else {
query[queryOperator] ??= {}
query[queryOperator]![field] = value
}
} else {
query[queryOperator] ??= {}
query[queryOperator]![field] = value
}
}
return query
}
export const buildQueryLegacy = (
filter?: LegacyFilter[] | SearchFilters
): SearchFilters | undefined => {
// this is of type SearchFilters or is undefined
if (!Array.isArray(filter)) {
return filter
}
let query: SearchFilters = {
string: {},
fuzzy: {},
@ -368,13 +501,15 @@ export const buildQuery = (filter: SearchFilter[]) => {
value = `${value}`?.toLowerCase() === "true"
}
if (
["contains", "notContains", "containsAny"].includes(operator) &&
["contains", "notContains", "containsAny"].includes(
operator.toLocaleString()
) &&
type === "array" &&
typeof value === "string"
) {
value = value.split(",")
}
if (operator.startsWith("range") && query.range) {
if (operator.toLocaleString().startsWith("range") && query.range) {
const minint =
SqlNumberTypeRangeMap[
externalType as keyof typeof SqlNumberTypeRangeMap
@ -401,7 +536,7 @@ export const buildQuery = (filter: SearchFilter[]) => {
}
}
} else if (isLogicalSearchOperator(queryOperator)) {
// TODO
// ignore
} else if (query[queryOperator] && operator !== "onEmptyFilter") {
if (type === "boolean") {
// Transform boolean filters to cope with null.
@ -423,10 +558,60 @@ export const buildQuery = (filter: SearchFilter[]) => {
}
}
})
return query
}
/**
* Converts a **SearchFilterGroup** filter definition into a grouped
* search query of type **SearchFilters**
*
* Legacy support remains for the old **SearchFilter[]** format.
* These will be migrated to an appropriate **SearchFilters** object, if encountered
*
* @param filter
*
* @returns {SearchFilters}
*/
export const buildQuery = (
filter?: SearchFilterGroup | LegacyFilter[]
): SearchFilters | undefined => {
const parsedFilter: SearchFilterGroup | undefined =
processSearchFilters(filter)
if (!parsedFilter) {
return
}
const operatorMap: { [key in FilterGroupLogicalOperator]: LogicalOperator } =
{
[FilterGroupLogicalOperator.ALL]: LogicalOperator.AND,
[FilterGroupLogicalOperator.ANY]: LogicalOperator.OR,
}
const globalOnEmpty = parsedFilter.onEmptyFilter
? parsedFilter.onEmptyFilter
: null
const globalOperator: LogicalOperator =
operatorMap[parsedFilter.logicalOperator as FilterGroupLogicalOperator]
return {
...(globalOnEmpty ? { onEmptyFilter: globalOnEmpty } : {}),
[globalOperator]: {
conditions: parsedFilter.groups?.map((group: SearchFilterGroup) => {
return {
[operatorMap[group.logicalOperator]]: {
conditions: group.filters
?.map(x => buildCondition(x))
.filter(filter => filter),
},
}
}),
},
}
}
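
An illustrative sketch of the conversion buildQuery performs, using the enums introduced in this branch; the import paths, field names, values, and the exact LegacyFilter typing are assumptions for illustration only.

import { dataFilters } from "@budibase/shared-core"
import {
  FieldType,
  FilterGroupLogicalOperator,
  SearchFilterGroup,
} from "@budibase/types"

// a string match on "name" OR an equality on "age", inside the top-level ALL group
const group: SearchFilterGroup = {
  logicalOperator: FilterGroupLogicalOperator.ALL,
  groups: [
    {
      logicalOperator: FilterGroupLogicalOperator.ANY,
      filters: [
        { field: "name", operator: "string", type: FieldType.STRING, value: "foo" },
        // string number values are coerced to numbers by buildCondition
        { field: "age", operator: "equal", type: FieldType.NUMBER, value: "42" },
      ],
    },
  ],
}

// Roughly: { [LogicalOperator.AND]: { conditions: [
//   { [LogicalOperator.OR]: { conditions: [
//     { ...string: { name: "foo" } },
//     { ...equal: { age: 42 } },
//   ] } },
// ] } }
// (each condition object also carries the empty operator maps buildCondition initialises)
const query = dataFilters.buildQuery(group)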
// The frontend can send single values for array fields sometimes, so to handle
// this we convert them to arrays at the controller level so that nothing below
// this has to worry about the non-array values.

View File

@ -1,4 +1,5 @@
import cronValidate from "cron-validate"
import cronParser from "cron-parser"
const INPUT_CRON_START = "(Input cron: "
const ERROR_SWAPS = {
@ -30,6 +31,19 @@ function improveErrors(errors: string[]): string[] {
return finalErrors
}
export function getNextExecutionDates(
cronExpression: string,
limit: number = 4
): string[] {
const parsed = cronParser.parseExpression(cronExpression)
const nextRuns = []
for (let i = 0; i < limit; i++) {
nextRuns.push(parsed.next().toString())
}
return nextRuns
}
export function validate(
cronExpression: string
): { valid: false; err: string[] } | { valid: true } {

View File

@ -2,3 +2,4 @@ export * from "./helpers"
export * from "./integrations"
export * as cron from "./cron"
export * as schema from "./schema"
export * as views from "./views"

View File

@ -45,3 +45,7 @@ export function decodeNonAscii(str: string): string {
String.fromCharCode(parseInt(p1, 16))
)
}
export function isNumeric(field: FieldSchema) {
return field.type === FieldType.NUMBER || field.type === FieldType.BIGINT
}

View File

@ -0,0 +1,33 @@
import {
BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
ViewFieldMetadata,
ViewV2,
} from "@budibase/types"
import { pickBy } from "lodash"
export function isCalculationField(
field: ViewFieldMetadata
): field is ViewCalculationFieldMetadata {
return "calculationType" in field
}
export function isBasicViewField(
field: ViewFieldMetadata
): field is BasicViewFieldMetadata {
return !isCalculationField(field)
}
type UnsavedViewV2 = Omit<ViewV2, "id" | "version">
export function isCalculationView(view: UnsavedViewV2) {
return Object.values(view.schema || {}).some(isCalculationField)
}
export function calculationFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, isCalculationField)
}
export function basicFields(view: UnsavedViewV2) {
return pickBy(view.schema || {}, field => !isCalculationField(field))
}

View File

@ -53,8 +53,9 @@ const allowDefaultColumnByType: Record<FieldType, boolean> = {
[FieldType.DATETIME]: true,
[FieldType.LONGFORM]: true,
[FieldType.STRING]: true,
[FieldType.OPTIONS]: true,
[FieldType.ARRAY]: true,
[FieldType.OPTIONS]: false,
[FieldType.AUTO]: false,
[FieldType.INTERNAL]: false,
[FieldType.BARCODEQR]: false,
@ -64,7 +65,6 @@ const allowDefaultColumnByType: Record<FieldType, boolean> = {
[FieldType.ATTACHMENTS]: false,
[FieldType.ATTACHMENT_SINGLE]: false,
[FieldType.SIGNATURE_SINGLE]: false,
[FieldType.ARRAY]: false,
[FieldType.LINK]: false,
[FieldType.BB_REFERENCE]: false,
[FieldType.BB_REFERENCE_SINGLE]: false,

View File

@ -1,4 +1,17 @@
import {
LegacyFilter,
SearchFilterGroup,
FilterGroupLogicalOperator,
SearchFilters,
BasicOperator,
ArrayOperator,
} from "@budibase/types"
import * as Constants from "./constants"
import { removeKeyNumbering } from "./filters"
// an array of [key, value] entries taken from the filter type, restricted to the
// properties present in the type; this can then be converted to an object via Object.fromEntries
type AllowedFilters = [keyof LegacyFilter, LegacyFilter[keyof LegacyFilter]][]
export function unreachable(
value: never,
@ -77,3 +90,124 @@ export function trimOtherProps(object: any, allowedProps: string[]) {
)
return result
}
export function isSupportedUserSearch(query: SearchFilters) {
const allowed = [
{ op: BasicOperator.STRING, key: "email" },
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
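
A short sketch of what the guard above accepts, in line with the "Can only search by string.email, equal._id or oneOf._id" error used by the user search endpoint later in this change; the import path and example values are assumptions.

import { utils } from "@budibase/shared-core"

utils.isSupportedUserSearch({ string: { email: "foo@example.com" } }) // true
utils.isSupportedUserSearch({ oneOf: { _id: ["us_a", "us_b"] } }) // true
utils.isSupportedUserSearch({ equal: { firstName: "Foo" } }) // false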
/**
* Processes the filter config. Filters are migrated from
* LegacyFilter[] to SearchFilterGroup
*
* If filters is not an array, the migration is skipped
*
* @param {LegacyFilter[] | SearchFilterGroup} filters
*/
export const processSearchFilters = (
filters: LegacyFilter[] | SearchFilterGroup | undefined
): SearchFilterGroup | undefined => {
if (!filters) {
return
}
// Base search config.
const defaultCfg: SearchFilterGroup = {
logicalOperator: FilterGroupLogicalOperator.ALL,
groups: [],
}
const filterAllowedKeys = [
"field",
"operator",
"value",
"type",
"externalType",
"valueType",
"noValue",
"formulaType",
]
if (Array.isArray(filters)) {
let baseGroup: SearchFilterGroup = {
filters: [],
logicalOperator: FilterGroupLogicalOperator.ALL,
}
return filters.reduce((acc: SearchFilterGroup, filter: LegacyFilter) => {
// Sort the properties for easier debugging
const filterPropertyKeys = (Object.keys(filter) as (keyof LegacyFilter)[])
.sort((a, b) => {
return a.localeCompare(b)
})
.filter(key => key in filter)
if (filterPropertyKeys.length == 1) {
const key = filterPropertyKeys[0],
value = filter[key]
// Global
if (key === "onEmptyFilter") {
// unset otherwise
acc.onEmptyFilter = value
} else if (key === "operator" && value === "allOr") {
// Group 1 logical operator
baseGroup.logicalOperator = FilterGroupLogicalOperator.ANY
}
return acc
}
const allowedFilterSettings: AllowedFilters = filterPropertyKeys.reduce(
(acc: AllowedFilters, key) => {
const value = filter[key]
if (filterAllowedKeys.includes(key)) {
if (key === "field") {
acc.push([key, removeKeyNumbering(value)])
} else {
acc.push([key, value])
}
}
return acc
},
[]
)
const migratedFilter: LegacyFilter = Object.fromEntries(
allowedFilterSettings
) as LegacyFilter
baseGroup.filters!.push(migratedFilter)
if (!acc.groups || !acc.groups.length) {
// init the base group
acc.groups = [baseGroup]
}
return acc
}, defaultCfg)
} else if (!filters?.groups) {
return
}
return filters
}
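
A hedged sketch of the legacy migration processSearchFilters performs. The "1:" field prefix illustrates the key numbering that removeKeyNumbering strips, and the lone "allOr" marker flips the first group to ANY; the import paths, the LegacyFilter `type` property, and the prefix format are assumptions for illustration.

import { utils } from "@budibase/shared-core"
import { FieldType, LegacyFilter } from "@budibase/types"

const legacy: LegacyFilter[] = [
  // single-property legacy marker, cast because it carries no field/value
  { operator: "allOr" } as LegacyFilter,
  { field: "1:status", operator: "equal", type: FieldType.STRING, value: "open" },
]

// Expected shape (roughly):
// { logicalOperator: ALL, groups: [
//   { logicalOperator: ANY, filters: [{ field: "status", operator: "equal", ... }] },
// ] }
const migrated = utils.processSearchFilters(legacy)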

View File

@ -1,7 +1,11 @@
import { FieldType } from "../../documents"
import { EmptyFilterOption, SearchFilters } from "../../sdk"
import {
EmptyFilterOption,
FilterGroupLogicalOperator,
SearchFilters,
} from "../../sdk"
export type SearchFilter = {
export type LegacyFilter = {
operator: keyof SearchFilters | "rangeLow" | "rangeHigh"
onEmptyFilter?: EmptyFilterOption
field: string
@ -9,3 +13,11 @@ export type SearchFilter = {
value: any
externalType?: string
}
// this is a type purely used by the UI
export type SearchFilterGroup = {
logicalOperator: FilterGroupLogicalOperator
onEmptyFilter?: EmptyFilterOption
groups?: SearchFilterGroup[]
filters?: LegacyFilter[]
}

View File

@ -161,6 +161,7 @@ export interface OptionsFieldMetadata extends BaseFieldSchema {
constraints: FieldConstraints & {
inclusion: string[]
}
default?: string
}
export interface ArrayFieldMetadata extends BaseFieldSchema {
@ -169,6 +170,7 @@ export interface ArrayFieldMetadata extends BaseFieldSchema {
type: JsonFieldSubType.ARRAY
inclusion: string[]
}
default?: string[]
}
interface BaseFieldSchema extends UIFieldMetadata {

View File

@ -1,7 +1,7 @@
import { SearchFilter, SortOrder, SortType } from "../../api"
import { LegacyFilter, SearchFilterGroup, SortOrder, SortType } from "../../api"
import { UIFieldMetadata } from "./table"
import { Document } from "../document"
import { DBView } from "../../sdk"
import { DBView, SearchFilters } from "../../sdk"
export type ViewTemplateOpts = {
field: string
@ -33,15 +33,24 @@ export interface View {
groupBy?: string
}
export type ViewFieldMetadata = UIFieldMetadata & {
export interface BasicViewFieldMetadata extends UIFieldMetadata {
readonly?: boolean
columns?: Record<string, RelationSchemaField>
}
export type RelationSchemaField = UIFieldMetadata & {
export interface RelationSchemaField extends UIFieldMetadata {
readonly?: boolean
}
export interface ViewCalculationFieldMetadata extends BasicViewFieldMetadata {
calculationType: CalculationType
field: string
}
export type ViewFieldMetadata =
| BasicViewFieldMetadata
| ViewCalculationFieldMetadata
export enum CalculationType {
SUM = "sum",
AVG = "avg",
@ -50,26 +59,25 @@ export enum CalculationType {
MAX = "max",
}
export type ViewCalculationFieldMetadata = ViewFieldMetadata & {
calculationType: CalculationType
field: string
}
export interface ViewV2 {
version: 2
id: string
name: string
primaryDisplay?: string
tableId: string
query?: SearchFilter[]
query?: LegacyFilter[] | SearchFilters
// duplicate to store UI information about filters
queryUI?: SearchFilterGroup
sort?: {
field: string
order?: SortOrder
type?: SortType
}
schema?: Record<string, ViewFieldMetadata | ViewCalculationFieldMetadata>
schema?: ViewV2Schema
}
export type ViewV2Schema = Record<string, ViewFieldMetadata>
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema
export interface ViewCountOrSumSchema {

View File

@ -1,8 +1,14 @@
import { SortOrder, SortType } from "../api"
import { SearchFilters } from "./search"
import { Row } from "../documents"
import { CalculationType, Row } from "../documents"
import { WithRequired } from "../shared"
export interface Aggregation {
name: string
calculationType: CalculationType
field: string
}
export interface SearchParams {
tableId?: string
viewId?: string

View File

@ -2,6 +2,7 @@ import { Operation } from "./datasources"
import { Row, Table, DocumentType } from "../documents"
import { SortOrder, SortType } from "../api"
import { Knex } from "knex"
import { Aggregation } from "./row"
export enum BasicOperator {
EQUAL = "equal",
@ -67,6 +68,8 @@ type RangeFilter = Record<
[InternalSearchFilterOperator.COMPLEX_ID_OPERATOR]?: never
}
type LogicalFilter = { conditions: SearchFilters[] }
export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter
export interface SearchFilters {
@ -91,12 +94,8 @@ export interface SearchFilters {
// specific document type (such as just rows)
documentType?: DocumentType
[LogicalOperator.AND]?: {
conditions: SearchFilters[]
}
[LogicalOperator.OR]?: {
conditions: SearchFilters[]
}
[LogicalOperator.AND]?: LogicalFilter
[LogicalOperator.OR]?: LogicalFilter
}
export type SearchFilterKey = keyof Omit<
@ -154,6 +153,7 @@ export interface QueryJson {
}
resource?: {
fields: string[]
aggregations?: Aggregation[]
}
filters?: SearchFilters
sort?: SortJson
@ -191,6 +191,11 @@ export enum EmptyFilterOption {
RETURN_NONE = "none",
}
export enum FilterGroupLogicalOperator {
ALL = "all",
ANY = "any",
}
export enum SqlClient {
MS_SQL = "mssql",
POSTGRES = "pg",

View File

@ -4,6 +4,29 @@ export type DeepPartial<T> = {
export type ISO8601 = string
/**
* RequiredKeys make it such that you _must_ assign a value to every key in the
* type. It differs subtly from Required<T> in that it doesn't change the type
* of the fields, you can specify undefined as a value and that's fine.
*
* Example:
*
* ```ts
* interface Foo {
* bar: string
* baz?: string
* }
*
* type FooRequiredKeys = RequiredKeys<Foo>
* type FooRequired = Required<Foo>
*
* const a: FooRequiredKeys = { bar: "hello", baz: undefined }
* const b: FooRequired = { bar: "hello", baz: undefined }
* ```
*
* In this code, a passes type checking whereas b does not. This is because
* Required<Foo> makes baz non-optional.
*/
export type RequiredKeys<T> = {
[K in keyof Required<T>]: T[K]
}

View File

@ -37,7 +37,7 @@ import {
} from "@budibase/backend-core"
import { checkAnyUserExists } from "../../../utilities/users"
import { isEmailConfigured } from "../../../utilities/email"
import { BpmStatusKey, BpmStatusValue } from "@budibase/shared-core"
import { BpmStatusKey, BpmStatusValue, utils } from "@budibase/shared-core"
const MAX_USERS_UPLOAD_LIMIT = 1000
@ -256,7 +256,7 @@ export const search = async (ctx: Ctx<SearchUsersRequest>) => {
}
}
// Validate we aren't trying to search on any illegal fields
if (!userSdk.core.isSupportedUserSearch(body.query)) {
if (!utils.isSupportedUserSearch(body.query)) {
ctx.throw(400, "Can only search by string.email, equal._id or oneOf._id")
}
}

591 yarn.lock

File diff suppressed because it is too large