Merge branch 'master' into table-changes

Andrew Kingston 2024-06-26 09:31:45 +02:00 committed by GitHub
commit 6d1dd4fe51
61 changed files with 2035 additions and 982 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.29.0",
+  "version": "2.29.3",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -1 +1 @@
-Subproject commit 247f56d455abbd64da17d865275ed978f577549f
+Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2

View File

@@ -72,4 +72,4 @@ export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
 export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
 export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
 export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
-export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
+export { DEFAULT_BB_DATASOURCE_ID } from "@budibase/shared-core"

View File

@@ -1,10 +1,10 @@
 import { Knex, knex } from "knex"
 import * as dbCore from "../db"
 import {
-  isIsoDateString,
-  isValidFilter,
   getNativeSql,
   isExternalTable,
+  isIsoDateString,
+  isValidFilter,
 } from "./utils"
 import { SqlStatements } from "./sqlStatements"
 import SqlTableQueryBuilder from "./sqlTable"
@@ -12,21 +12,21 @@ import {
   BBReferenceFieldMetadata,
   FieldSchema,
   FieldType,
+  INTERNAL_TABLE_SOURCE_ID,
   JsonFieldMetadata,
+  JsonTypes,
   Operation,
+  prefixed,
   QueryJson,
-  SqlQuery,
+  QueryOptions,
   RelationshipsJson,
   SearchFilters,
+  SortOrder,
+  SqlClient,
+  SqlQuery,
   SqlQueryBinding,
   Table,
   TableSourceType,
-  INTERNAL_TABLE_SOURCE_ID,
-  SqlClient,
-  QueryOptions,
-  JsonTypes,
-  prefixed,
-  SortOrder,
 } from "@budibase/types"
 import environment from "../environment"
 import { helpers } from "@budibase/shared-core"
@@ -114,7 +114,7 @@ function generateSelectStatement(
 ): (string | Knex.Raw)[] | "*" {
   const { resource, meta } = json
-  if (!resource) {
+  if (!resource || !resource.fields || resource.fields.length === 0) {
     return "*"
   }
@@ -410,13 +410,32 @@ class InternalBuilder {
     return query
   }

-  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
-    let { sort, paginate } = json
+  addDistinctCount(
+    query: Knex.QueryBuilder,
+    json: QueryJson
+  ): Knex.QueryBuilder {
     const table = json.meta.table
+    const primary = table.primary
+    const aliases = json.tableAliases
+    const aliased =
+      table.name && aliases?.[table.name] ? aliases[table.name] : table.name
+    if (!primary) {
+      throw new Error("SQL counting requires primary key to be supplied")
+    }
+    return query.countDistinct(`${aliased}.${primary[0]} as total`)
+  }
+
+  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
+    let { sort } = json
+    const table = json.meta.table
+    const primaryKey = table.primary
     const tableName = getTableName(table)
     const aliases = json.tableAliases
     const aliased =
       tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
+    if (!Array.isArray(primaryKey)) {
+      throw new Error("Sorting requires primary key to be specified for table")
+    }
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
@@ -429,9 +448,12 @@ class InternalBuilder {
         query = query.orderBy(`${aliased}.${key}`, direction, nulls)
       }
-    } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
-      // @ts-ignore
-      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
+    }
+    // add sorting by the primary key if the result isn't already sorted by it,
+    // to make sure result is deterministic
+    if (!sort || sort[primaryKey[0]] === undefined) {
+      query = query.orderBy(`${aliased}.${primaryKey[0]}`)
     }
     return query
   }
@@ -522,7 +544,7 @@ class InternalBuilder {
         })
       }
     }
-    return query.limit(BASE_LIMIT)
+    return query
   }

   knexWithAlias(
@@ -533,13 +555,12 @@ class InternalBuilder {
     const tableName = endpoint.entityId
     const tableAlias = aliases?.[tableName]

-    const query = knex(
+    return knex(
       this.tableNameWithSchema(tableName, {
         alias: tableAlias,
         schema: endpoint.schema,
       })
     )
-
-    return query
   }

   create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
@@ -587,7 +608,8 @@ class InternalBuilder {
       if (!primary) {
         throw new Error("Primary key is required for upsert")
       }
-      return query.insert(parsedBody).onConflict(primary).merge()
+      const ret = query.insert(parsedBody).onConflict(primary).merge()
+      return ret
     } else if (this.client === SqlClient.MS_SQL) {
       // No upsert or onConflict support in MSSQL yet, see:
       // https://github.com/knex/knex/pull/6050
@@ -596,25 +618,23 @@ class InternalBuilder {
     return query.upsert(parsedBody)
   }

-  read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
-    let { endpoint, resource, filters, paginate, relationships, tableAliases } =
-      json
+  read(
+    knex: Knex,
+    json: QueryJson,
+    opts: {
+      limits?: { base: number; query: number }
+    } = {}
+  ): Knex.QueryBuilder {
+    let { endpoint, filters, paginate, relationships, tableAliases } = json
+    const { limits } = opts
+    const counting = endpoint.operation === Operation.COUNT
     const tableName = endpoint.entityId
-    // select all if not specified
-    if (!resource) {
-      resource = { fields: [] }
-    }
-    let selectStatement: string | (string | Knex.Raw)[] = "*"
-    // handle select
-    if (resource.fields && resource.fields.length > 0) {
-      // select the resources as the format "table.columnName" - this is what is provided
-      // by the resource builder further up
-      selectStatement = generateSelectStatement(json, knex)
-    }
-    let foundLimit = limit || BASE_LIMIT
+    // start building the query
+    let query = this.knexWithAlias(knex, endpoint, tableAliases)
     // handle pagination
     let foundOffset: number | null = null
+    let foundLimit = limits?.query || limits?.base
     if (paginate && paginate.page && paginate.limit) {
       // @ts-ignore
       const page = paginate.page <= 1 ? 0 : paginate.page - 1
@@ -627,24 +647,39 @@ class InternalBuilder {
     } else if (paginate && paginate.limit) {
       foundLimit = paginate.limit
     }
-    // start building the query
-    let query = this.knexWithAlias(knex, endpoint, tableAliases)
-    query = query.limit(foundLimit)
-    if (foundOffset) {
-      query = query.offset(foundOffset)
-    }
+    // counting should not sort, limit or offset
+    if (!counting) {
+      // add the found limit if supplied
+      if (foundLimit != null) {
+        query = query.limit(foundLimit)
+      }
+      // add overall pagination
+      if (foundOffset != null) {
+        query = query.offset(foundOffset)
+      }
+      // add sorting to pre-query
+      // no point in sorting when counting
+      query = this.addSorting(query, json)
+    }
+    // add filters to the query (where)
     query = this.addFilters(query, filters, json.meta.table, {
       aliases: tableAliases,
     })
-    // add sorting to pre-query
-    query = this.addSorting(query, json)
     const alias = tableAliases?.[tableName] || tableName
-    let preQuery = knex({
-      [alias]: query,
-    } as any).select(selectStatement) as any
+    let preQuery: Knex.QueryBuilder = knex({
+      // the typescript definition for the knex constructor doesn't support this
+      // syntax, but it is the only way to alias a pre-query result as part of
+      // a query - there is an alias dictionary type, but it assumes it can only
+      // be a table name, not a pre-query
+      [alias]: query as any,
+    })
+    // if counting, use distinct count, else select
+    preQuery = !counting
+      ? preQuery.select(generateSelectStatement(json, knex))
+      : this.addDistinctCount(preQuery, json)
     // have to add after as well (this breaks MS-SQL)
-    if (this.client !== SqlClient.MS_SQL) {
+    if (this.client !== SqlClient.MS_SQL && !counting) {
       preQuery = this.addSorting(preQuery, json)
     }
     // handle joins
@@ -655,6 +690,13 @@ class InternalBuilder {
       endpoint.schema,
       tableAliases
     )
+
+    // add a base limit over the whole query
+    // if counting we can't set this limit
+    if (limits?.base) {
+      query = query.limit(limits.base)
+    }
+
     return this.addFilters(query, filters, json.meta.table, {
       relationship: true,
       aliases: tableAliases,
@@ -699,6 +741,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
     this.limit = limit
   }

+  private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) {
+    const sqlClient = this.getSqlClient()
+    if (opts?.disableBindings) {
+      return { sql: query.toString() }
+    } else {
+      let native = getNativeSql(query)
+      if (sqlClient === SqlClient.SQL_LITE) {
+        native = convertBooleans(native)
+      }
+      return native
+    }
+  }
+
   /**
    * @param json The JSON query DSL which is to be converted to SQL.
    * @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@@ -722,7 +777,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         query = builder.create(client, json, opts)
         break
       case Operation.READ:
-        query = builder.read(client, json, this.limit)
+        query = builder.read(client, json, {
+          limits: {
+            query: this.limit,
+            base: BASE_LIMIT,
+          },
+        })
+        break
+      case Operation.COUNT:
+        // read without any limits to count
+        query = builder.read(client, json)
         break
       case Operation.UPDATE:
         query = builder.update(client, json, opts)
@@ -744,15 +808,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         throw `Operation type is not supported by SQL query builder`
     }

-    if (opts?.disableBindings) {
-      return { sql: query.toString() }
-    } else {
-      let native = getNativeSql(query)
-      if (sqlClient === SqlClient.SQL_LITE) {
-        native = convertBooleans(native)
-      }
-      return native
-    }
+    return this.convertToNative(query, opts)
   }

   async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
@@ -828,6 +884,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
         await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
       )
     }
+    if (operation === Operation.COUNT) {
+      return results
+    }
     if (operation !== Operation.READ) {
       return row
     }
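
Note on the read() changes above: there are now two tiers of limit. The query-level limit (user pagination) caps the inner query, while the base limit caps the whole joined result, and COUNT bypasses both. A rough TypeScript sketch of the resulting query shapes using plain knex (table, column, and connection details here are hypothetical, not from this commit):

    import { knex } from "knex"

    const client = knex({ client: "pg", connection: { database: "test" } })

    // inner query: filters/sorting plus the query-level limit
    const inner = client("persons").select("*").limit(100)

    // outer query: relationships join onto the aliased pre-query,
    // then the base limit caps the overall joined result
    const outer = client({ persons: inner as any })
      .select("*")
      .limit(5000)

    // COUNT: no limits or sorting, a distinct count over the primary key
    const total = client({ persons: inner as any }).countDistinct(
      "persons.id as total"
    )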

View File

@@ -109,8 +109,10 @@ function generateSchema(
       const { tableName } = breakExternalTableId(column.tableId)
       // @ts-ignore
       const relatedTable = tables[tableName]
-      if (!relatedTable) {
-        throw new Error("Referenced table doesn't exist")
+      if (!relatedTable || !relatedTable.primary) {
+        throw new Error(
+          "Referenced table doesn't exist or has no primary keys"
+        )
       }
       const relatedPrimary = relatedTable.primary[0]
       const externalType = relatedTable.schema[relatedPrimary].externalType

View File

@@ -55,10 +55,7 @@ export function buildExternalTableId(datasourceId: string, tableName: string) {
   return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}`
 }

-export function breakExternalTableId(tableId: string | undefined) {
-  if (!tableId) {
-    return {}
-  }
+export function breakExternalTableId(tableId: string) {
   const parts = tableId.split(DOUBLE_SEPARATOR)
   let datasourceId = parts.shift()
   // if they need joined
@@ -67,6 +64,9 @@ export function breakExternalTableId(tableId: string) {
   if (tableName.includes(ENCODED_SPACE)) {
     tableName = decodeURIComponent(tableName)
   }
+  if (!datasourceId || !tableName) {
+    throw new Error("Unable to get datasource/table name from table ID")
+  }
   return { datasourceId, tableName }
 }
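
Behavioural note: breakExternalTableId no longer accepts undefined and now throws rather than returning an empty object, which is what lets callers elsewhere in this commit drop their non-null assertions. A sketch of both paths (the IDs are made up, and this assumes DOUBLE_SEPARATOR is "__"):

    breakExternalTableId("datasource_plus_abc123__persons")
    // => { datasourceId: "datasource_plus_abc123", tableName: "persons" }

    breakExternalTableId("")
    // => throws "Unable to get datasource/table name from table ID"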

View File

@@ -24,7 +24,6 @@ export const account = (partial: Partial<Account> = {}): Account => {
     createdAt: Date.now(),
     verified: true,
     verificationSent: true,
-    tier: "FREE", // DEPRECATED
     authType: AuthType.PASSWORD,
     name: generator.name(),
     size: "10+",

View File

@@ -1,9 +1,14 @@
 <script>
-  import { FieldType, BBReferenceFieldSubType } from "@budibase/types"
+  import {
+    FieldType,
+    BBReferenceFieldSubType,
+    SourceName,
+  } from "@budibase/types"
   import { Select, Toggle, Multiselect } from "@budibase/bbui"
   import { DB_TYPE_INTERNAL } from "constants/backend"
   import { API } from "api"
   import { parseFile } from "./utils"
+  import { tables, datasources } from "stores/builder"

   let error = null
   let fileName = null
@@ -80,6 +85,9 @@
     schema = fetchSchema(tableId)
   }

+  $: table = $tables.list.find(table => table._id === tableId)
+  $: datasource = $datasources.list.find(ds => ds._id === table?.sourceId)
+
   async function fetchSchema(tableId) {
     try {
       const definition = await API.fetchTableDefinition(tableId)
@@ -185,20 +193,25 @@
       </div>
     {/each}
   </div>
-  {#if tableType === DB_TYPE_INTERNAL}
   <br />
+  <!-- SQL Server doesn't yet support overwriting rows by existing keys -->
+  {#if datasource?.source !== SourceName.SQL_SERVER}
     <Toggle
       bind:value={updateExistingRows}
       on:change={() => (identifierFields = [])}
       thin
       text="Update existing rows"
     />
+  {/if}
   {#if updateExistingRows}
+    {#if tableType === DB_TYPE_INTERNAL}
       <Multiselect
         label="Identifier field(s)"
         options={Object.keys(validation)}
         bind:value={identifierFields}
       />
+    {:else}
+      <p>Rows will be updated based on the table's primary key.</p>
+    {/if}
   {/if}
 {#if invalidColumns.length > 0}

View File

@@ -53,6 +53,12 @@
       placeholder="Are you sure you want to delete?"
       bind:value={parameters.confirmText}
     />
+
+    <Label small>Confirm Text</Label>
+    <Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
+
+    <Label small>Cancel Text</Label>
+    <Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
   {/if}
 </div>
 </div>

View File

@@ -83,6 +83,12 @@
       placeholder="Are you sure you want to duplicate this row?"
       bind:value={parameters.confirmText}
     />
+
+    <Label small>Confirm Text</Label>
+    <Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
+
+    <Label small>Cancel Text</Label>
+    <Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
   {/if}
 </div>

View File

@@ -74,6 +74,18 @@
       placeholder="Are you sure you want to execute this query?"
       bind:value={parameters.confirmText}
     />
+
+    <Input
+      label="Confirm Text"
+      placeholder="Confirm"
+      bind:value={parameters.confirmButtonText}
+    />
+
+    <Input
+      label="Cancel Text"
+      placeholder="Cancel"
+      bind:value={parameters.cancelButtonText}
+    />
   {/if}

 {#if query?.parameters?.length > 0}

View File

@@ -80,6 +80,12 @@
       placeholder="Are you sure you want to save this row?"
       bind:value={parameters.confirmText}
     />
+
+    <Label small>Confirm Text</Label>
+    <Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
+
+    <Label small>Cancel Text</Label>
+    <Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
   {/if}
 </div>

View File

@@ -233,9 +233,9 @@
     response.info = response.info || { code: 200 }
     // if existing schema, copy over what it is
     if (schema) {
-      for (let [name, field] of Object.entries(schema)) {
-        if (response.schema[name]) {
-          response.schema[name] = field
+      for (let [name, field] of Object.entries(response.schema)) {
+        if (!schema[name]) {
+          schema[name] = field
         }
       }
     }

View File

@@ -8,6 +8,8 @@
 <ModalContent
   title={$confirmationStore.title}
   onConfirm={confirmationStore.actions.confirm}
+  confirmText={$confirmationStore.confirmButtonText}
+  cancelText={$confirmationStore.cancelButtonText}
 >
   {$confirmationStore.text}
 </ModalContent>

View File

@@ -4,6 +4,8 @@ const initialState = {
   showConfirmation: false,
   title: null,
   text: null,
+  confirmButtonText: null,
+  cancelButtonText: null,
   onConfirm: null,
   onCancel: null,
 }
@@ -11,11 +13,20 @@ const initialState = {
 const createConfirmationStore = () => {
   const store = writable(initialState)

-  const showConfirmation = (title, text, onConfirm, onCancel) => {
+  const showConfirmation = (
+    title,
+    text,
+    onConfirm,
+    onCancel,
+    confirmButtonText,
+    cancelButtonText
+  ) => {
     store.set({
       showConfirmation: true,
       title,
       text,
+      confirmButtonText,
+      cancelButtonText,
       onConfirm,
       onCancel,
     })
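
A sketch of how a caller passes the two new labels through the extended store API (the label strings here are illustrative):

    confirmationStore.actions.showConfirmation(
      "Delete Row",                       // title
      "Are you sure you want to delete?", // text
      async () => { /* confirmed */ },    // onConfirm
      () => { /* cancelled */ },          // onCancel
      "Delete",                           // confirmButtonText
      "Keep"                              // cancelButtonText
    )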

View File

@@ -522,6 +522,7 @@ const confirmTextMap = {
   ["Execute Query"]: "Are you sure you want to execute this query?",
   ["Trigger Automation"]: "Are you sure you want to trigger this automation?",
   ["Prompt User"]: "Are you sure you want to continue?",
+  ["Duplicate Row"]: "Are you sure you want to duplicate this row?",
 }

 /**
@@ -582,6 +583,11 @@ export const enrichButtonActions = (actions, context) => {
       const defaultTitleText = action["##eventHandlerType"]
       const customTitleText =
         action.parameters?.customTitleText || defaultTitleText
+      const cancelButtonText =
+        action.parameters?.cancelButtonText || "Cancel"
+      const confirmButtonText =
+        action.parameters?.confirmButtonText || "Confirm"
+
       confirmationStore.actions.showConfirmation(
         customTitleText,
         confirmText,
@@ -612,7 +618,9 @@ export const enrichButtonActions = (actions, context) => {
         },
         () => {
           resolve(false)
-        }
+        },
+        confirmButtonText,
+        cancelButtonText
       )
     })
   }

View File

@@ -18,7 +18,7 @@
   import FilterUsers from "./FilterUsers.svelte"
   import { getFields } from "../utils/searchFields"

-  const { OperatorOptions } = Constants
+  const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants

   export let schemaFields
   export let filters = []
@@ -28,6 +28,23 @@
   export let allowBindings = false
   export let filtersLabel = "Filters"

+  $: {
+    if (
+      tables.find(
+        table =>
+          table._id === datasource.tableId &&
+          table.sourceId === DEFAULT_BB_DATASOURCE_ID
+      ) &&
+      !schemaFields.some(field => field.name === "_id")
+    ) {
+      schemaFields = [
+        ...schemaFields,
+        { name: "_id", type: "string" },
+        { name: "_rev", type: "string" },
+      ]
+    }
+  }
+
   $: matchAny = filters?.find(filter => filter.operator === "allOr") != null
   $: onEmptyFilter =
     filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
@@ -35,7 +52,6 @@
   $: fieldFilters = filters.filter(
     filter => filter.operator !== "allOr" && !filter.onEmptyFilter
   )
-
   const behaviourOptions = [
     { value: "and", label: "Match all filters" },
     { value: "or", label: "Match any filter" },
@@ -44,7 +60,6 @@
     { value: "all", label: "Return all table rows" },
     { value: "none", label: "Return no rows" },
   ]
-
   const context = getContext("context")

   $: fieldOptions = getFields(tables, schemaFields || [], {

View File

@@ -1,7 +1,11 @@
 /**
  * Operator options for lucene queries
  */
-export { OperatorOptions, SqlNumberTypeRangeMap } from "@budibase/shared-core"
+export {
+  OperatorOptions,
+  SqlNumberTypeRangeMap,
+  DEFAULT_BB_DATASOURCE_ID,
+} from "@budibase/shared-core"
 export { Feature as Features } from "@budibase/types"
 import { BpmCorrelationKey } from "@budibase/shared-core"
 import { FieldType, BBReferenceFieldSubType } from "@budibase/types"

@@ -1 +1 @@
-Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6
+Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a

View File

@@ -1,4 +1,4 @@
-FROM mcr.microsoft.com/mssql/server:2017-latest
+FROM mcr.microsoft.com/mssql/server:2022-latest

 ENV ACCEPT_EULA=Y
 ENV SA_PASSWORD=Passw0rd

View File

@@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993);
 INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support');
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
-INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993);
+INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993);
+
+-- insert a lot of tasks for testing
+WITH RECURSIVE generate_series AS (
+    SELECT 1 AS n
+    UNION ALL
+    SELECT n + 1 FROM generate_series WHERE n < 6000
+),
+random_data AS (
+    SELECT
+        n,
+        (random() * 9 + 1)::int AS ExecutorID,
+        (random() * 9 + 1)::int AS QaID,
+        'assembling' AS TaskName,
+        (random() < 0.5) AS Completed
+    FROM generate_series
+)
+INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed)
+SELECT ExecutorID, QaID, TaskName, Completed
+FROM random_data;
 INSERT INTO Products (ProductName) VALUES ('Computers');
 INSERT INTO Products (ProductName) VALUES ('Laptops');
 INSERT INTO Products (ProductName) VALUES ('Chairs');
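
The recursive CTE above generates n = 1..6000 and derives pseudo-random ExecutorID/QaID values (1 to 10) per row, so a single INSERT ... SELECT seeds roughly 6000 tasks. If this seeding were done from test code instead, a TypeScript/knex sketch along the same lines would be (the chunk size is an arbitrary choice):

    const tasks = Array.from({ length: 6000 }, () => ({
      ExecutorID: Math.floor(Math.random() * 10) + 1,
      QaID: Math.floor(Math.random() * 10) + 1,
      TaskName: "assembling",
      Completed: Math.random() < 0.5,
    }))
    await client.batchInsert("Tasks", tasks, 1000)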

View File

@@ -311,8 +311,8 @@ export async function preview(

     // if existing schema, update to include any previous schema keys
     if (existingSchema) {
-      for (let key of Object.keys(previewSchema)) {
-        if (existingSchema[key]) {
+      for (let key of Object.keys(existingSchema)) {
+        if (!previewSchema[key]) {
           previewSchema[key] = existingSchema[key]
         }
       }
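
The loop direction is the fix here: iterating existingSchema and only filling keys absent from previewSchema means freshly detected types always win, while columns that disappeared from the latest preview are still carried over. A worked sketch:

    const existingSchema = { data: { type: "number", name: "data" } }
    const previewSchema: Record<string, { type: string; name: string }> = {
      data: { type: "string", name: "data" }, // type changed at the source
    }
    for (const key of Object.keys(existingSchema)) {
      if (!previewSchema[key]) {
        previewSchema[key] = existingSchema[key]
      }
    }
    // previewSchema.data.type === "string" (the new type is kept)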

View File

@@ -7,6 +7,7 @@ import {
   FieldType,
   FilterType,
   IncludeRelationship,
+  isManyToOne,
   OneToManyRelationshipFieldMetadata,
   Operation,
   PaginationJson,
@@ -16,29 +17,33 @@ import {
   SortJson,
   SortType,
   Table,
-  isManyToOne,
 } from "@budibase/types"
 import {
   breakExternalTableId,
   breakRowIdField,
   convertRowId,
+  generateRowIdField,
   isRowId,
   isSQL,
-  generateRowIdField,
 } from "../../../integrations/utils"
 import {
   buildExternalRelationships,
   buildSqlFieldList,
   generateIdForRow,
-  sqlOutputProcessing,
+  isKnexEmptyReadResponse,
   isManyToMany,
+  sqlOutputProcessing,
 } from "./utils"
-import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
+import {
+  getDatasourceAndQuery,
+  processRowCountResponse,
+} from "../../../sdk/app/rows/utils"
 import { processObjectSync } from "@budibase/string-templates"
 import { cloneDeep } from "lodash/fp"
 import { db as dbCore } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import env from "../../../environment"
+import { makeExternalQuery } from "../../../integrations/base/query"

 export interface ManyRelationship {
   tableId?: string
@@ -60,91 +65,12 @@ export interface RunConfig {
   includeSqlRelationships?: IncludeRelationship
 }

-function buildFilters(
-  id: string | undefined | string[],
-  filters: SearchFilters,
-  table: Table
-) {
-  const primary = table.primary
-  // if passed in array need to copy for shifting etc
-  let idCopy: undefined | string | any[] = cloneDeep(id)
-  if (filters) {
-    // need to map over the filters and make sure the _id field isn't present
-    let prefix = 1
-    for (let operator of Object.values(filters)) {
-      for (let field of Object.keys(operator || {})) {
-        if (dbCore.removeKeyNumbering(field) === "_id") {
-          if (primary) {
-            const parts = breakRowIdField(operator[field])
-            for (let field of primary) {
-              operator[`${prefix}:${field}`] = parts.shift()
-            }
-            prefix++
-          }
-          // make sure this field doesn't exist on any filter
-          delete operator[field]
-        }
-      }
-    }
-  }
-  // there is no id, just use the user provided filters
-  if (!idCopy || !table) {
-    return filters
-  }
-  // if used as URL parameter it will have been joined
-  if (!Array.isArray(idCopy)) {
-    idCopy = breakRowIdField(idCopy)
-  }
-  const equal: any = {}
-  if (primary && idCopy) {
-    for (let field of primary) {
-      // work through the ID and get the parts
-      equal[field] = idCopy.shift()
-    }
-  }
-  return {
-    equal,
-  }
-}
-
-async function removeManyToManyRelationships(
-  rowId: string,
-  table: Table,
-  colName: string
-) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.DELETE),
-      body: { [colName]: null },
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
-
-async function removeOneToManyRelationships(rowId: string, table: Table) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.UPDATE),
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
+export type ExternalRequestReturnType<T extends Operation> =
+  T extends Operation.READ
+    ? Row[]
+    : T extends Operation.COUNT
+    ? number
+    : { row: Row; table: Table }

 /**
  * This function checks the incoming parameters to make sure all the inputs are
@@ -200,8 +126,8 @@ function getEndpoint(tableId: string | undefined, operation: string) {
   }
   const { datasourceId, tableName } = breakExternalTableId(tableId)
   return {
-    datasourceId: datasourceId!,
-    entityId: tableName!,
+    datasourceId: datasourceId,
+    entityId: tableName,
     operation: operation as Operation,
   }
 }
@@ -223,14 +149,12 @@ function isEditableColumn(column: FieldSchema) {
   return !(isExternalAutoColumn || isFormula)
 }

-export type ExternalRequestReturnType<T extends Operation> =
-  T extends Operation.READ ? Row[] : { row: Row; table: Table }
-
 export class ExternalRequest<T extends Operation> {
   private readonly operation: T
   private readonly tableId: string
   private datasource?: Datasource
   private tables: { [key: string]: Table } = {}
   private tableList: Table[]

   constructor(operation: T, tableId: string, datasource?: Datasource) {
     this.operation = operation
@@ -239,22 +163,134 @@ export class ExternalRequest<T extends Operation> {
     if (datasource && datasource.entities) {
       this.tables = datasource.entities
     }
-    this.tableList = Object.values(this.tables)
   }

+  private prepareFilters(
+    id: string | undefined | string[],
+    filters: SearchFilters,
+    table: Table
+  ): SearchFilters {
+    // replace any relationship columns initially, table names and relationship column names are acceptable
+    const relationshipColumns = sdk.rows.filters.getRelationshipColumns(table)
+    filters = sdk.rows.filters.updateFilterKeys(
+      filters,
+      relationshipColumns.map(({ name, definition }) => {
+        const { tableName } = breakExternalTableId(definition.tableId)
+        return {
+          original: name,
+          updated: tableName,
+        }
+      })
+    )
+    const primary = table.primary
+    // if passed in array need to copy for shifting etc
+    let idCopy: undefined | string | any[] = cloneDeep(id)
+    if (filters) {
+      // need to map over the filters and make sure the _id field isn't present
+      let prefix = 1
+      for (let operator of Object.values(filters)) {
+        for (let field of Object.keys(operator || {})) {
+          if (dbCore.removeKeyNumbering(field) === "_id") {
+            if (primary) {
+              const parts = breakRowIdField(operator[field])
+              for (let field of primary) {
+                operator[`${prefix}:${field}`] = parts.shift()
+              }
+              prefix++
+            }
+            // make sure this field doesn't exist on any filter
+            delete operator[field]
+          }
+        }
+      }
+    }
+    // there is no id, just use the user provided filters
+    if (!idCopy || !table) {
+      return filters
+    }
+    // if used as URL parameter it will have been joined
+    if (!Array.isArray(idCopy)) {
+      idCopy = breakRowIdField(idCopy)
+    }
+    const equal: SearchFilters["equal"] = {}
+    if (primary && idCopy) {
+      for (let field of primary) {
+        // work through the ID and get the parts
+        equal[field] = idCopy.shift()
+      }
+    }
+    return {
+      equal,
+    }
+  }
+
+  private async removeManyToManyRelationships(
+    rowId: string,
+    table: Table,
+    colName: string
+  ) {
+    const tableId = table._id!
+    const filters = this.prepareFilters(rowId, {}, table)
+    // safety check, if there are no filters on deletion bad things happen
+    if (Object.keys(filters).length !== 0) {
+      return getDatasourceAndQuery({
+        endpoint: getEndpoint(tableId, Operation.DELETE),
+        body: { [colName]: null },
+        filters,
+        meta: {
+          table,
+        },
+      })
+    } else {
+      return []
+    }
+  }
+
+  private async removeOneToManyRelationships(rowId: string, table: Table) {
+    const tableId = table._id!
+    const filters = this.prepareFilters(rowId, {}, table)
+    // safety check, if there are no filters on deletion bad things happen
+    if (Object.keys(filters).length !== 0) {
+      return getDatasourceAndQuery({
+        endpoint: getEndpoint(tableId, Operation.UPDATE),
+        filters,
+        meta: {
+          table,
+        },
+      })
+    } else {
+      return []
+    }
+  }
+
   getTable(tableId: string | undefined): Table | undefined {
     if (!tableId) {
-      throw "Table ID is unknown, cannot find table"
+      throw new Error("Table ID is unknown, cannot find table")
     }
     const { tableName } = breakExternalTableId(tableId)
-    if (tableName) {
-      return this.tables[tableName]
-    }
+    return this.tables[tableName]
+  }
+
+  // seeds the object with table and datasource information
+  async retrieveMetadata(
+    datasourceId: string
+  ): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
+    if (!this.datasource) {
+      this.datasource = await sdk.datasources.get(datasourceId)
+      if (!this.datasource || !this.datasource.entities) {
+        throw "No tables found, fetch tables before query."
+      }
+      this.tables = this.datasource.entities
+      this.tableList = Object.values(this.tables)
+    }
+    return { tables: this.tables, datasource: this.datasource }
   }

   async getRow(table: Table, rowId: string): Promise<Row> {
     const response = await getDatasourceAndQuery({
       endpoint: getEndpoint(table._id!, Operation.READ),
-      filters: buildFilters(rowId, {}, table),
+      filters: this.prepareFilters(rowId, {}, table),
       meta: {
         table,
       },
@@ -280,16 +316,20 @@ export class ExternalRequest<T extends Operation> {
     manyRelationships: ManyRelationship[] = []
     for (let [key, field] of Object.entries(table.schema)) {
       // if set already, or not set just skip it
-      if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) {
+      if (row[key] === undefined || newRow[key]) {
+        continue
+      }
+      if (
+        !(this.operation === Operation.BULK_UPSERT) &&
+        !isEditableColumn(field)
+      ) {
         continue
       }
       // parse floats/numbers
       if (field.type === FieldType.NUMBER && !isNaN(parseFloat(row[key]))) {
         newRow[key] = parseFloat(row[key])
       } else if (field.type === FieldType.LINK) {
-        const { tableName: linkTableName } = breakExternalTableId(
-          field?.tableId
-        )
+        const { tableName: linkTableName } = breakExternalTableId(field.tableId)
         // table has to exist for many to many
         if (!linkTableName || !this.tables[linkTableName]) {
           continue
@@ -370,9 +410,6 @@ export class ExternalRequest<T extends Operation> {
       [key: string]: { rows: Row[]; isMany: boolean; tableId: string }
     } = {}
     const { tableName } = breakExternalTableId(tableId)
-    if (!tableName) {
-      return related
-    }
     const table = this.tables[tableName]
     // @ts-ignore
     const primaryKey = table.primary[0]
@@ -428,7 +465,9 @@ export class ExternalRequest<T extends Operation> {
       })
       // this is the response from knex if no rows found
       const rows: Row[] =
-        !Array.isArray(response) || response?.[0].read ? [] : response
+        !Array.isArray(response) || isKnexEmptyReadResponse(response)
+          ? []
+          : response
       const storeTo = isManyToMany(field)
         ? field.throughFrom || linkPrimaryKey
         : fieldName
@@ -503,7 +542,7 @@ export class ExternalRequest<T extends Operation> {
       endpoint: getEndpoint(tableId, operation),
       // if we're doing many relationships then we're writing, only one response
       body,
-      filters: buildFilters(id, {}, linkTable),
+      filters: this.prepareFilters(id, {}, linkTable),
       meta: {
         table: linkTable,
       },
@@ -517,7 +556,7 @@ export class ExternalRequest<T extends Operation> {
     // finally cleanup anything that needs to be removed
     for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) {
       const table: Table | undefined = this.getTable(tableId)
-      // if its not the foreign key skip it, nothing to do
+      // if it's not the foreign key skip it, nothing to do
       if (
         !table ||
         (!isMany && table.primary && table.primary.indexOf(colName) !== -1)
@@ -527,8 +566,8 @@ export class ExternalRequest<T extends Operation> {
       for (let row of rows) {
         const rowId = generateIdForRow(row, table)
         const promise: Promise<any> = isMany
-          ? removeManyToManyRelationships(rowId, table, colName)
-          : removeOneToManyRelationships(rowId, table)
+          ? this.removeManyToManyRelationships(rowId, table, colName)
+          : this.removeOneToManyRelationships(rowId, table)
         if (promise) {
           promises.push(promise)
         }
@@ -551,12 +590,12 @@ export class ExternalRequest<T extends Operation> {
         rows.map(row => {
           const rowId = generateIdForRow(row, table)
           return isMany
-            ? removeManyToManyRelationships(
+            ? this.removeManyToManyRelationships(
                 rowId,
                 table,
                 relationshipColumn.fieldName
               )
-            : removeOneToManyRelationships(rowId, table)
+            : this.removeOneToManyRelationships(rowId, table)
         })
       )
     }
@@ -564,21 +603,21 @@ export class ExternalRequest<T extends Operation> {
   async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
     const { operation, tableId } = this
+    if (!tableId) {
+      throw new Error("Unable to run without a table ID")
+    }
     let { datasourceId, tableName } = breakExternalTableId(tableId)
-    if (!tableName) {
-      throw "Unable to run without a table name"
-    }
-    if (!this.datasource) {
-      this.datasource = await sdk.datasources.get(datasourceId!)
-      if (!this.datasource || !this.datasource.entities) {
-        throw "No tables found, fetch tables before query."
-      }
-      this.tables = this.datasource.entities
+    let datasource = this.datasource
+    if (!datasource) {
+      const { datasource: ds } = await this.retrieveMetadata(datasourceId)
+      datasource = ds
     }
     const table = this.tables[tableName]
-    let isSql = isSQL(this.datasource)
+    let isSql = isSQL(datasource)
     if (!table) {
-      throw `Unable to process query, table "${tableName}" not defined.`
+      throw new Error(
+        `Unable to process query, table "${tableName}" not defined.`
+      )
     }
     // look for specific components of config which may not be considered acceptable
     let { id, row, filters, sort, paginate, rows } = cleanupConfig(
@@ -601,7 +640,7 @@ export class ExternalRequest<T extends Operation> {
           break
         }
       }
-      filters = buildFilters(id, filters || {}, table)
+      filters = this.prepareFilters(id, filters || {}, table)

     const relationships = buildExternalRelationships(table, this.tables)
     const incRelationships =
@@ -649,10 +688,15 @@ export class ExternalRequest<T extends Operation> {
       body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
-        idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
+        idFilter: this.prepareFilters(
+          id || generateIdForRow(row, table),
+          {},
+          table
+        ),
       },
       meta: {
         table,
+        id: config.id,
       },
     }
@@ -662,12 +706,14 @@ export class ExternalRequest<T extends Operation> {
     }

     // aliasing can be disabled fully if desired
-    let response
-    if (env.SQL_ALIASING_DISABLE) {
-      response = await getDatasourceAndQuery(json)
-    } else {
-      const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
-      response = await aliasing.queryWithAliasing(json)
+    const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
+    let response = env.SQL_ALIASING_DISABLE
+      ? await getDatasourceAndQuery(json)
+      : await aliasing.queryWithAliasing(json, makeExternalQuery)
+    // if it's a counting operation there will be no more processing, just return the number
+    if (this.operation === Operation.COUNT) {
+      return processRowCountResponse(response) as ExternalRequestReturnType<T>
     }
     const responseRows = Array.isArray(response) ? response : []
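
For reference, the conditional return type introduced in this file resolves per operation roughly like this (Operation and Row come from @budibase/types):

    type ReadResult = ExternalRequestReturnType<Operation.READ>    // Row[]
    type CountResult = ExternalRequestReturnType<Operation.COUNT>  // number
    type WriteResult = ExternalRequestReturnType<Operation.CREATE> // { row: Row; table: Table }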

View File

@@ -136,10 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
   const id = ctx.params.rowId
   const tableId = utils.getTableId(ctx)
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const datasource: Datasource = await sdk.datasources.get(datasourceId!)
-  if (!tableName) {
-    ctx.throw(400, "Unable to find table.")
-  }
+  const datasource: Datasource = await sdk.datasources.get(datasourceId)
   if (!datasource || !datasource.entities) {
     ctx.throw(400, "Datasource has not been configured for plus API.")
   }
@@ -163,7 +160,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
     }
     const links = row[fieldName]
     const linkedTableId = field.tableId
-    const linkedTableName = breakExternalTableId(linkedTableId).tableName!
+    const linkedTableName = breakExternalTableId(linkedTableId).tableName
     const linkedTable = tables[linkedTableName]
     // don't support composite keys right now
     const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])

View File

@@ -99,7 +99,7 @@ export function basicProcessing({
         row,
         tableName: table._id!,
         fieldName: internalColumn,
-        isLinked: false,
+        isLinked,
       })
     }
   }

View File

@@ -1,5 +1,9 @@
 import {
+  DatasourcePlusQueryResponse,
+  DSPlusOperation,
   FieldType,
+  isManyToOne,
+  isOneToMany,
   ManyToManyRelationshipFieldMetadata,
   RelationshipFieldMetadata,
   RelationshipsJson,
@@ -91,12 +95,12 @@
 ): RelationshipsJson[] {
   const relationships = []
   for (let [fieldName, field] of Object.entries(table.schema)) {
-    if (field.type !== FieldType.LINK) {
+    if (field.type !== FieldType.LINK || !field.tableId) {
       continue
     }
     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
     // no table to link to, this is not a valid relationships
-    if (!linkTableName || !tables[linkTableName]) {
+    if (!tables[linkTableName]) {
       continue
     }
     const linkTable = tables[linkTableName]
@@ -108,7 +112,7 @@
       // need to specify where to put this back into
       column: fieldName,
     }
-    if (isManyToMany(field)) {
+    if (isManyToMany(field) && field.through) {
       const { tableName: throughTableName } = breakExternalTableId(
         field.through
       )
@@ -118,7 +122,7 @@
       definition.to = field.throughFrom || linkTable.primary[0]
       definition.fromPrimary = table.primary[0]
       definition.toPrimary = linkTable.primary[0]
-    } else {
+    } else if (isManyToOne(field) || isOneToMany(field)) {
       // if no foreign key specified then use the name of the field in other table
       definition.from = field.foreignKey || table.primary[0]
       definition.to = field.fieldName
@@ -178,17 +182,27 @@
   }
   let fields = extractRealFields(table)
   for (let field of Object.values(table.schema)) {
-    if (field.type !== FieldType.LINK || !opts?.relationships) {
+    if (
+      field.type !== FieldType.LINK ||
+      !opts?.relationships ||
+      !field.tableId
+    ) {
       continue
     }
     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-    if (linkTableName) {
-      const linkTable = tables[linkTableName]
-      if (linkTable) {
-        const linkedFields = extractRealFields(linkTable, fields)
-        fields = fields.concat(linkedFields)
-      }
+    const linkTable = tables[linkTableName]
+    if (linkTable) {
+      const linkedFields = extractRealFields(linkTable, fields)
+      fields = fields.concat(linkedFields)
     }
   }
   return fields
 }
+
+export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
+  return (
+    !Array.isArray(resp) ||
+    resp.length === 0 ||
+    (DSPlusOperation.READ in resp[0] && resp[0].read === true)
+  )
+}
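
The shapes this helper classifies as an "empty read" (the marker row is what knex-based reads return when nothing matches):

    isKnexEmptyReadResponse([])               // true: no rows at all
    isKnexEmptyReadResponse([{ read: true }]) // true: marker row only
    isKnexEmptyReadResponse([{ id: 1 }])      // false: a real row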

View File

@@ -14,7 +14,7 @@ import {
   processDates,
   processFormulas,
 } from "../../../../utilities/rowProcessor"
-import { updateRelationshipColumns } from "./sqlUtils"
+import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
 import {
   basicProcessing,
   generateIdForRow,
@@ -137,7 +137,7 @@ export async function sqlOutputProcessing(
   relationships: RelationshipsJson[],
   opts?: { sqs?: boolean }
 ): Promise<Row[]> {
-  if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
+  if (isKnexEmptyReadResponse(rows)) {
     return []
   }
   let finalRows: { [key: string]: Row } = {}

View File

@@ -69,6 +69,7 @@ export async function searchView(
     limit: body.limit,
     bookmark: body.bookmark,
     paginate: body.paginate,
+    countRows: body.countRows,
   }

   const result = await sdk.rows.search(searchOptions)

View File

@@ -16,14 +16,18 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"

 function getDatasourceId(table: Table) {
   if (!table) {
-    throw "No table supplied"
+    throw new Error("No table supplied")
   }
   if (table.sourceId) {
     return table.sourceId
   }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
   return breakExternalTableId(table._id).datasourceId
 }

@@ -82,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema

+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on atm in Budibase is the
+    // primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }

   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
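
Because identifier fields must match the primary key exactly, the import's update path reduces to the SQL ON CONFLICT upsert described in the comment above; a knex sketch (table and key names are hypothetical):

    await knex("persons")
      .insert(parsedRows)
      .onConflict(["id"]) // table.primary: the only unique index guaranteed
      .merge()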

View File

@@ -178,7 +178,7 @@ export async function handleDataImport(
   }

   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)

   let finalData: any = await importToRows(data, table, user)

View File

@@ -86,6 +86,7 @@ router
   router.post(
     "/api/v2/views/:viewId/search",
+    internalSearchValidator(),
     authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
     rowController.views.searchView
   )

View File

@@ -250,6 +250,67 @@ describe.each(
     expect(events.query.previewed).toHaveBeenCalledTimes(1)
   })

+  it("should update schema when column type changes from number to string", async () => {
+    const tableName = "schema_change_test"
+    await client.schema.dropTableIfExists(tableName)
+
+    await client.schema.createTable(tableName, table => {
+      table.increments("id").primary()
+      table.string("name")
+      table.integer("data")
+    })
+
+    await client(tableName).insert({
+      name: "test",
+      data: 123,
+    })
+
+    const firstPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: {},
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "number", name: "data" },
+      })
+    )
+
+    await client.schema.alterTable(tableName, table => {
+      table.string("data").alter()
+    })
+
+    await client(tableName).update({
+      data: "string value",
+    })
+
+    const secondPreview = await config.api.query.preview({
+      datasourceId: datasource._id!,
+      name: "Test Query",
+      queryVerb: "read",
+      fields: {
+        sql: `SELECT * FROM ${tableName}`,
+      },
+      parameters: [],
+      transformer: "return data",
+      schema: firstPreview.schema,
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        data: { type: "string", name: "data" },
+      })
+    )
+  })
+
   it("should work with static variables", async () => {
     await config.api.datasource.update({
       ...datasource,
@@ -734,6 +795,7 @@ describe.each(
         name: entityId,
         schema: {},
         type: "table",
+        primary: ["id"],
         sourceId: datasource._id!,
         sourceType: TableSourceType.EXTERNAL,
       },

View File

@@ -137,6 +137,67 @@ describe("/queries", () => {
     })
   })

+  it("should update schema when structure changes from object to array", async () => {
+    const name = generator.guid()
+
+    await withCollection(async collection => {
+      await collection.insertOne({ name, field: { subfield: "value" } })
+    })
+
+    const firstPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: {},
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(firstPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "json", name: "field" },
+      })
+    )
+
+    await withCollection(async collection => {
+      await collection.updateOne(
+        { name },
+        { $set: { field: ["value1", "value2"] } }
+      )
+    })
+
+    const secondPreview = await config.api.query.preview({
+      name: "Test Query",
+      datasourceId: datasource._id!,
+      fields: {
+        json: { name: { $eq: name } },
+        extra: {
+          collection,
+          actionType: "findOne",
+        },
+      },
+      schema: firstPreview.schema,
+      queryVerb: "read",
+      parameters: [],
+      transformer: "return data",
+      readable: true,
+    })
+
+    expect(secondPreview.schema).toEqual(
+      expect.objectContaining({
+        field: { type: "array", name: "field" },
+      })
+    )
+  })
+
   it("should generate a nested schema based on all of the nested items", async () => {
     const name = generator.guid()
     const item = {

View File

@ -92,6 +92,61 @@ describe("rest", () => {
expect(cached.rows[0].name).toEqual("one") expect(cached.rows[0].name).toEqual("one")
}) })
it("should update schema when structure changes from JSON to array", async () => {
const datasource = await config.api.datasource.create({
name: generator.guid(),
type: "test",
source: SourceName.REST,
config: {},
})
nock("http://www.example.com")
.get("/")
.reply(200, [{ obj: {}, id: "1" }])
const firstResponse = await config.api.query.preview({
datasourceId: datasource._id!,
name: "test query",
parameters: [],
queryVerb: "read",
transformer: "",
schema: {},
readable: true,
fields: {
path: "www.example.com",
},
})
expect(firstResponse.schema).toEqual({
obj: { type: "json", name: "obj" },
id: { type: "string", name: "id" },
})
nock.cleanAll()
nock("http://www.example.com")
.get("/")
.reply(200, [{ obj: [], id: "1" }])
const secondResponse = await config.api.query.preview({
datasourceId: datasource._id!,
name: "test query",
parameters: [],
queryVerb: "read",
transformer: "",
schema: firstResponse.schema,
readable: true,
fields: {
path: "www.example.com",
},
})
expect(secondResponse.schema).toEqual({
obj: { type: "array", name: "obj" },
id: { type: "string", name: "id" },
})
})
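Both the Mongo and REST cases above hinge on the same detection order: arrays must be checked before plain objects, because typeof [] === "object". A sketch of the distinction these tests rely on, with an illustrative function name:

// Arrays are objects too, so Array.isArray has to run first.
function detectFieldType(value: unknown): "array" | "json" | "string" {
  if (Array.isArray(value)) {
    return "array"
  }
  if (value !== null && typeof value === "object") {
    return "json"
  }
  return "string"
}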
it("should parse global and query level header mappings", async () => { it("should parse global and query level header mappings", async () => {
const datasource = await config.api.datasource.create({ const datasource = await config.api.datasource.create({
name: generator.guid(), name: generator.guid(),

View File

@ -1,4 +1,8 @@
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import {
DatabaseName,
getDatasource,
knexClient,
} from "../../../integrations/tests/utils"
import tk from "timekeeper" import tk from "timekeeper"
import emitter from "../../../../src/events" import emitter from "../../../../src/events"
@ -31,6 +35,7 @@ import {
import { generator, mocks } from "@budibase/backend-core/tests" import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash" import _, { merge } from "lodash"
import * as uuid from "uuid" import * as uuid from "uuid"
import { Knex } from "knex"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString() const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp) tk.freeze(timestamp)
@ -70,13 +75,16 @@ describe.each([
let table: Table let table: Table
let datasource: Datasource | undefined let datasource: Datasource | undefined
let client: Knex | undefined
beforeAll(async () => { beforeAll(async () => {
await config.init() await config.init()
if (dsProvider) { if (dsProvider) {
const rawDatasource = await dsProvider
datasource = await config.createDatasource({ datasource = await config.createDatasource({
datasource: await dsProvider, datasource: rawDatasource,
}) })
client = await knexClient(rawDatasource)
} }
}) })
@ -307,13 +315,13 @@ describe.each([
// as quickly as possible. // as quickly as possible.
await Promise.all( await Promise.all(
sequence.map(async () => { sequence.map(async () => {
const attempts = 20 const attempts = 30
for (let attempt = 0; attempt < attempts; attempt++) { for (let attempt = 0; attempt < attempts; attempt++) {
try { try {
await config.api.row.save(table._id!, {}) await config.api.row.save(table._id!, {})
return return
} catch (e) { } catch (e) {
await new Promise(r => setTimeout(r, Math.random() * 15)) await new Promise(r => setTimeout(r, Math.random() * 50))
} }
} }
throw new Error(`Failed to create row after ${attempts} attempts`) throw new Error(`Failed to create row after ${attempts} attempts`)
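Raising the attempt count from 20 to 30 and widening the jitter window from 15ms to 50ms makes this race-condition test less flaky on slow CI machines. The same pattern as a generic helper (a sketch; the name and default bounds are illustrative):

async function retryWithJitter<T>(
  fn: () => Promise<T>,
  attempts = 30,
  maxDelayMs = 50
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await fn()
    } catch (err) {
      lastError = err
      // random jitter spreads retries out so competing writers interleave
      await new Promise(resolve =>
        setTimeout(resolve, Math.random() * maxDelayMs)
      )
    }
  }
  throw lastError
}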
@ -598,6 +606,35 @@ describe.each([
expect(res.name).toEqual("Updated Name") expect(res.name).toEqual("Updated Name")
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) })
!isInternal &&
it("can update a row on an external table with a primary key", async () => {
const tableName = uuid.v4().substring(0, 10)
await client!.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
const res = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = res.datasource.entities![tableName]
const row = await config.api.row.save(table._id!, {
id: 1,
name: "Row 1",
})
const updatedRow = await config.api.row.save(table._id!, {
_id: row._id!,
name: "Row 1 Updated",
})
expect(updatedRow.name).toEqual("Row 1 Updated")
const rows = await config.api.row.fetch(table._id!)
expect(rows).toHaveLength(1)
})
}) })
describe("patch", () => { describe("patch", () => {
@ -667,6 +704,7 @@ describe.each([
expect(event.oldRow.description).toEqual(beforeRow.description) expect(event.oldRow.description).toEqual(beforeRow.description)
expect(event.row.description).toEqual(beforeRow.description) expect(event.row.description).toEqual(beforeRow.description)
}) })
it("should throw an error when given improper types", async () => { it("should throw an error when given improper types", async () => {
const existing = await config.api.row.save(table._id!, {}) const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
@ -758,7 +796,8 @@ describe.each([
}) })
!isInternal && !isInternal &&
// TODO: SQL is having issues creating composite keys // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
providerType !== DatabaseName.SQL_SERVER && providerType !== DatabaseName.SQL_SERVER &&
it("should support updating fields that are part of a composite key", async () => { it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({ const tableRequest = saveTableRequest({
@ -911,24 +950,12 @@ describe.each([
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
}) })
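For the MSSQL note above: writing an explicit value to an identity column on SQL Server requires toggling IDENTITY_INSERT around the statement, which Budibase does not currently do. Roughly, in knex terms (illustrative only, assuming a table with an identity id column):

const tableName = "identity_example" // illustrative
await client.raw("SET IDENTITY_INSERT ?? ON", [tableName])
await client(tableName).insert({ id: 1, name: "Row 1" })
await client.raw("SET IDENTITY_INSERT ?? OFF", [tableName])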
it("Should ignore malformed/invalid delete requests", async () => { it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
"Should ignore malformed/invalid delete request: %s",
async (request: any) => {
const rowUsage = await getRowUsage() const rowUsage = await getRowUsage()
await config.api.row.delete(table._id!, { not: "valid" } as any, { await config.api.row.delete(table._id!, request, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
await config.api.row.delete(table._id!, { rows: 123 } as any, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
await config.api.row.delete(table._id!, "invalid" as any, {
status: 400, status: 400,
body: { body: {
message: "Invalid delete rows request", message: "Invalid delete rows request",
@ -936,7 +963,8 @@ describe.each([
}) })
await assertRowUsage(rowUsage) await assertRowUsage(rowUsage)
}) }
)
}) })
describe("bulkImport", () => { describe("bulkImport", () => {
@ -1085,6 +1113,121 @@ describe.each([
expect(rows[2].name).toEqual("Row 3") expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description") expect(rows[2].description).toEqual("Row 3 description")
}) })
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4()
await client?.schema.createTable(tableName, table => {
table.integer("companyId")
table.integer("userId")
table.string("name")
table.string("description")
table.primary(["companyId", "userId"])
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 1,
name: "Row 1",
description: "Row 1 description",
})
const row2 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 2,
name: "Row 2",
description: "Row 2 description",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["companyId", "userId"],
rows: [
{
companyId: 1,
userId: row1.userId,
name: "Row 1 updated",
description: "Row 1 description updated",
},
{
companyId: 1,
userId: row2.userId,
name: "Row 2 updated",
description: "Row 2 description updated",
},
{
companyId: 1,
userId: 3,
name: "Row 3",
description: "Row 3 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1 updated")
expect(rows[0].description).toEqual("Row 1 description updated")
expect(rows[1].name).toEqual("Row 2 updated")
expect(rows[1].description).toEqual("Row 2 description updated")
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
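The MSSQL exclusion comes from the knex layer: the upsert behind bulkImport's identifierFields is expressed with knex's onConflict/merge API, which has not yet landed for SQL Server (see the linked PR). A sketch of the underlying call, assuming the standard knex API:

await client(tableName)
  .insert(rowsToImport)
  .onConflict(["companyId", "userId"])
  .merge() // update the non-key columns when the key already exists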
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isInternal &&
it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4()
await client!.schema.createTable(tableName, table => {
table.increments("userId").primary()
table.string("name")
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
name: "Clare",
})
const row2 = await config.api.row.save(table._id!, {
name: "Jeff",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["userId"],
rows: [
{
userId: row1.userId,
name: "Clare updated",
},
{
userId: row2.userId,
name: "Jeff updated",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Clare updated")
expect(rows[1].name).toEqual("Jeff updated")
})
}) })
describe("enrich", () => { describe("enrich", () => {

File diff suppressed because it is too large

View File

@ -7,6 +7,7 @@ import {
INTERNAL_TABLE_SOURCE_ID, INTERNAL_TABLE_SOURCE_ID,
PermissionLevel, PermissionLevel,
QuotaUsageType, QuotaUsageType,
Row,
SaveTableRequest, SaveTableRequest,
SearchFilterOperator, SearchFilterOperator,
SortOrder, SortOrder,
@ -17,6 +18,7 @@ import {
UpdateViewRequest, UpdateViewRequest,
ViewUIFieldMetadata, ViewUIFieldMetadata,
ViewV2, ViewV2,
SearchResponse,
} from "@budibase/types" } from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests" import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -25,17 +27,21 @@ import { quotas } from "@budibase/pro"
import { db, roles } from "@budibase/backend-core" import { db, roles } from "@budibase/backend-core"
describe.each([ describe.each([
["internal", undefined], ["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => { ])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig() const config = setup.getConfig()
const isInternal = !dsProvider const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isSqs || isLucene
let table: Table let table: Table
let datasource: Datasource let datasource: Datasource
let envCleanup: (() => void) | undefined
function saveTableRequest( function saveTableRequest(
...overrides: Partial<Omit<SaveTableRequest, "name">>[] ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@ -82,6 +88,9 @@ describe.each([
} }
beforeAll(async () => { beforeAll(async () => {
if (isSqs) {
envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
}
await config.init() await config.init()
if (dsProvider) { if (dsProvider) {
@ -94,6 +103,9 @@ describe.each([
afterAll(async () => { afterAll(async () => {
setup.afterAll() setup.afterAll()
if (envCleanup) {
envCleanup()
}
}) })
beforeEach(() => { beforeEach(() => {
@ -1252,12 +1264,13 @@ describe.each([
paginate: true, paginate: true,
limit: 4, limit: 4,
query: {}, query: {},
countRows: true,
}) })
expect(page1).toEqual({ expect(page1).toEqual({
rows: expect.arrayContaining(rows.slice(0, 4)), rows: expect.arrayContaining(rows.slice(0, 4)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
bookmark: expect.anything(), bookmark: expect.anything(),
totalRows: 10,
}) })
const page2 = await config.api.viewV2.search(view.id, { const page2 = await config.api.viewV2.search(view.id, {
@ -1265,12 +1278,13 @@ describe.each([
limit: 4, limit: 4,
bookmark: page1.bookmark, bookmark: page1.bookmark,
query: {}, query: {},
countRows: true,
}) })
expect(page2).toEqual({ expect(page2).toEqual({
rows: expect.arrayContaining(rows.slice(4, 8)), rows: expect.arrayContaining(rows.slice(4, 8)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: true, hasNextPage: true,
bookmark: expect.anything(), bookmark: expect.anything(),
totalRows: 10,
}) })
const page3 = await config.api.viewV2.search(view.id, { const page3 = await config.api.viewV2.search(view.id, {
@ -1278,13 +1292,17 @@ describe.each([
limit: 4, limit: 4,
bookmark: page2.bookmark, bookmark: page2.bookmark,
query: {}, query: {},
countRows: true,
}) })
expect(page3).toEqual({ const expectation: SearchResponse<Row> = {
rows: expect.arrayContaining(rows.slice(8)), rows: expect.arrayContaining(rows.slice(8)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: false, hasNextPage: false,
bookmark: expect.anything(), totalRows: 10,
}) }
if (isLucene) {
expectation.bookmark = expect.anything()
}
expect(page3).toEqual(expectation)
}) })
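A usage sketch of the new countRows flag exercised above: totalRows now comes back with every page regardless of datasource, so a caller can render "fetched of total" while walking bookmarks (variable names illustrative):

let bookmark: string | number | undefined = undefined
let fetched = 0
for (;;) {
  const page = await config.api.viewV2.search(view.id, {
    paginate: true,
    limit: 4,
    bookmark,
    query: {},
    countRows: true,
  })
  fetched += page.rows.length
  console.log(`${fetched} of ${page.totalRows} rows`)
  if (!page.hasNextPage) {
    break
  }
  bookmark = page.bookmark
}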
const sortTestOptions: [ const sortTestOptions: [

View File

@ -109,6 +109,7 @@ export function internalSearchValidator() {
sortOrder: OPTIONAL_STRING, sortOrder: OPTIONAL_STRING,
sortType: OPTIONAL_STRING, sortType: OPTIONAL_STRING,
paginate: Joi.boolean(), paginate: Joi.boolean(),
countRows: Joi.boolean(),
bookmark: Joi.alternatives() bookmark: Joi.alternatives()
.try(OPTIONAL_STRING, OPTIONAL_NUMBER) .try(OPTIONAL_STRING, OPTIONAL_NUMBER)
.optional(), .optional(),

View File

@ -0,0 +1,36 @@
import * as automationUtils from "./automationUtils"
type ObjValue = {
[key: string]: string | ObjValue
}
export function replaceFakeBindings(
originalStepInput: Record<string, any>,
loopStepNumber: number
) {
for (const [key, value] of Object.entries(originalStepInput)) {
originalStepInput[key] = replaceBindingsRecursive(value, loopStepNumber)
}
return originalStepInput
}
function replaceBindingsRecursive(
value: string | ObjValue,
loopStepNumber: number
) {
if (typeof value === "object") {
for (const [innerKey, innerValue] of Object.entries(value)) {
if (typeof innerValue === "string") {
value[innerKey] = automationUtils.substituteLoopStep(
innerValue,
`steps.${loopStepNumber}`
)
} else if (typeof innerValue === "object") {
value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
}
}
} else if (typeof value === "string") {
value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`)
}
return value
}
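An example of the substitution this module performs, assuming substituteLoopStep rewrites the front end's fake "loop" binding to the numbered step context:

const inputs = {
  text: "{{ loop.currentItem }}",
  row: { name: "{{ loop.currentItem.name }}" },
}
replaceFakeBindings(inputs, 2)
// => { text: "{{ steps.2.currentItem }}",
//      row: { name: "{{ steps.2.currentItem.name }}" } }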

View File

@ -73,7 +73,12 @@ export async function run({ inputs }: AutomationStepInput) {
try { try {
let { field, condition, value } = inputs let { field, condition, value } = inputs
// coerce types so that we can use them // coerce types so that we can use them
if (!isNaN(value) && !isNaN(field)) { if (
!isNaN(value) &&
!isNaN(field) &&
typeof field !== "boolean" &&
typeof value !== "boolean"
) {
value = parseFloat(value) value = parseFloat(value)
field = parseFloat(field) field = parseFloat(field)
} else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) { } else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) {
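The extra typeof guards exist because isNaN coerces its argument: booleans convert to 0/1, so they slip past the numeric check and parseFloat then mangles them. For example:

isNaN(true as any)      // false: Number(true) is 1, so booleans pass the numeric check
parseFloat(true as any) // NaN: parseFloat stringifies its argument to "true" first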

View File

@ -22,6 +22,9 @@ export async function makeExternalQuery(
) { ) {
throw new Error("Entity ID and table metadata do not align") throw new Error("Entity ID and table metadata do not align")
} }
if (!datasource) {
throw new Error("No datasource provided for external query")
}
datasource = await sdk.datasources.enrich(datasource) datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source) const Integration = await getIntegration(datasource.source)
// query is the opinionated function // query is the opinionated function

View File

@ -142,7 +142,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" })) const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({ expect(query).toEqual({
bindings: [500, 5000], bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
}) })
}) })
@ -150,7 +150,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson()) const query = sql._query(generateRelationshipJson())
expect(query).toEqual({ expect(query).toEqual({
bindings: [500, 5000], bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
}) })
}) })
@ -160,7 +160,7 @@ describe("SQL query builder", () => {
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: [500, 5000], bindings: [500, 5000],
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`,
}) })
}) })
@ -175,8 +175,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["john%", limit], bindings: ["john%", limit, 5000],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -190,8 +190,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit], bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`, sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
}) })
query = new Sql(SqlClient.ORACLE, limit)._query( query = new Sql(SqlClient.ORACLE, limit)._query(
@ -204,8 +204,8 @@ describe("SQL query builder", () => {
}) })
) )
expect(query).toEqual({ expect(query).toEqual({
bindings: [`%jo%`, limit], bindings: [`%jo%`, limit, 5000],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
}) })
}) })
}) })
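The updated expectations all gain an order by on the primary key inside the limited subquery. Without a deterministic order, LIMIT/rownum-based pagination is free to return overlapping or missing rows between pages. The pattern in knex terms (a sketch with illustrative table and column names):

const page = knex("test")
  .select("*")
  .orderBy("id", "asc") // stable order BEFORE the limit is applied
  .limit(500)
const query = knex
  .select("*")
  .from(page.as("test"))
  .orderBy("id", "asc") // and again on the outer query, after any joins
  .limit(5000)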

View File

@ -57,15 +57,14 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({ expect(query).toEqual({
bindings: [relationshipLimit, limit], bindings: [relationshipLimit, limit],
sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", sql: expect.stringContaining(
multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
"a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
"a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
"b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid" "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`)
from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a" ),
left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
order by "a"."firstname" asc nulls first limit $2`),
}) })
}) })
@ -74,13 +73,10 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({ expect(query).toEqual({
bindings: [relationshipLimit, "assembling", limit], bindings: [relationshipLimit, "assembling", limit],
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", sql: expect.stringContaining(
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", multiline(`where COALESCE("b"."taskname" = $2, FALSE)
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid" order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`)
from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" ),
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE)
order by "a"."productname" asc nulls first limit $3`),
}) })
}) })
@ -89,13 +85,10 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({ expect(query).toEqual({
bindings: [relationshipLimit, limit], bindings: [relationshipLimit, limit],
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", sql: expect.stringContaining(
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid" left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `)
from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" ),
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
order by "a"."productname" asc nulls first limit $2`),
}) })
}) })
@ -108,9 +101,9 @@ describe("Captures of real examples", () => {
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname",
"a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
"b"."productname" as "b.productname", "b"."productid" as "b.productid" "b"."productname" as "b.productname", "b"."productid" as "b.productid"
from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a"
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`),
}) })
}) })
@ -132,19 +125,11 @@ describe("Captures of real examples", () => {
equalValue, equalValue,
limit, limit,
], ],
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", sql: expect.stringContaining(
"a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", multiline(
"b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", `where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)`
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", )
"c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", ),
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
"c"."city" as "c.city", "c"."lastname" as "c.lastname"
from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE)
order by "a"."taskname" asc nulls first limit $2) as "a"
left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
left join "products" as "b" on "b"."productid" = "d"."productid"
left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`),
}) })
}) })
}) })
@ -200,8 +185,9 @@ describe("Captures of real examples", () => {
returningQuery = input returningQuery = input
}, queryJson) }, queryJson)
expect(returningQuery).toEqual({ expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]", sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2
bindings: [1, "Test", 22], THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`),
bindings: [5000, 1, "Test", 22],
}) })
}) })
}) })

View File

@ -3,12 +3,14 @@ import * as rows from "./rows"
import * as search from "./search" import * as search from "./search"
import * as utils from "./utils" import * as utils from "./utils"
import * as external from "./external" import * as external from "./external"
import * as filters from "./search/filters"
import AliasTables from "./sqlAlias" import AliasTables from "./sqlAlias"
export default { export default {
...attachments, ...attachments,
...rows, ...rows,
...search, ...search,
filters,
utils, utils,
external, external,
AliasTables, AliasTables,

View File

@ -1,14 +1,14 @@
import { import {
SortJson, IncludeRelationship,
Operation, Operation,
PaginationJson, PaginationJson,
IncludeRelationship,
Row, Row,
SearchFilters,
RowSearchParams, RowSearchParams,
SearchFilters,
SearchResponse, SearchResponse,
Table, SortJson,
SortOrder, SortOrder,
Table,
} from "@budibase/types" } from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters" import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external" import { handleRequest } from "../../../../api/controllers/row/external"
@ -18,7 +18,7 @@ import {
} from "../../../../integrations/utils" } from "../../../../integrations/utils"
import { utils } from "@budibase/shared-core" import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "./types" import { ExportRowsParams, ExportRowsResult } from "./types"
import { HTTPError, db } from "@budibase/backend-core" import { db, HTTPError } from "@budibase/backend-core"
import pick from "lodash/pick" import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../utilities/rowProcessor"
import sdk from "../../../" import sdk from "../../../"
@ -28,20 +28,26 @@ export async function search(
table: Table table: Table
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { tableId } = options const { tableId } = options
const { paginate, query, ...params } = options const { countRows, paginate, query, ...params } = options
const { limit } = params const { limit } = params
let bookmark = let bookmark =
(params.bookmark && parseInt(params.bookmark as string)) || undefined (params.bookmark && parseInt(params.bookmark as string)) || undefined
if (paginate && !bookmark) { if (paginate && !bookmark) {
bookmark = 1 bookmark = 0
} }
let paginateObj = {} let paginateObj: PaginationJson | undefined
if (paginate) { if (paginate && !limit) {
throw new Error("Cannot paginate query without a limit")
}
if (paginate && limit) {
paginateObj = { paginateObj = {
// add one so we can track if there is another page // add one so we can track if there is another page
limit: limit, limit: limit + 1,
page: bookmark, }
if (bookmark) {
paginateObj.offset = limit * bookmark
} }
} else if (params && limit) { } else if (params && limit) {
paginateObj = { paginateObj = {
@ -69,24 +75,27 @@ export async function search(
} }
try { try {
let rows = await handleRequest(Operation.READ, tableId, { const parameters = {
filters: query, filters: query,
sort, sort,
paginate: paginateObj as PaginationJson, paginate: paginateObj as PaginationJson,
includeSqlRelationships: IncludeRelationship.INCLUDE, includeSqlRelationships: IncludeRelationship.INCLUDE,
}) }
const queries: Promise<Row[] | number>[] = []
queries.push(handleRequest(Operation.READ, tableId, parameters))
if (countRows) {
queries.push(handleRequest(Operation.COUNT, tableId, parameters))
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
let hasNextPage = false let hasNextPage = false
if (paginate && rows.length === limit) { // remove the extra row if it's there
const nextRows = await handleRequest(Operation.READ, tableId, { if (paginate && limit && rows.length > limit) {
filters: query, rows.pop()
sort, hasNextPage = true
paginate: {
limit: 1,
page: bookmark! * limit + 1,
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
hasNextPage = nextRows.length > 0
} }
if (options.fields) { if (options.fields) {
@ -100,7 +109,17 @@ export async function search(
}) })
// need wrapper object for bookmarks etc when paginating // need wrapper object for bookmarks etc when paginating
return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 } const response: SearchResponse<Row> = { rows, hasNextPage }
if (hasNextPage && bookmark != null) {
response.bookmark = bookmark + 1
}
if (totalRows != null) {
response.totalRows = totalRows
}
if (paginate && !hasNextPage) {
response.hasNextPage = false
}
return response
} catch (err: any) { } catch (err: any) {
if (err.message && err.message.includes("does not exist")) { if (err.message && err.message.includes("does not exist")) {
throw new Error( throw new Error(
@ -126,6 +145,10 @@ export async function exportRows(
delimiter, delimiter,
customHeaders, customHeaders,
} = options } = options
if (!tableId) {
throw new HTTPError("No table ID for search provided.", 400)
}
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
let requestQuery: SearchFilters = {} let requestQuery: SearchFilters = {}
@ -148,7 +171,7 @@ export async function exportRows(
requestQuery = query || {} requestQuery = query || {}
} }
const datasource = await sdk.datasources.get(datasourceId!) const datasource = await sdk.datasources.get(datasourceId)
const table = await sdk.tables.getTable(tableId) const table = await sdk.tables.getTable(tableId)
if (!datasource || !datasource.entities) { if (!datasource || !datasource.entities) {
throw new HTTPError("Datasource has not been configured for plus API.", 400) throw new HTTPError("Datasource has not been configured for plus API.", 400)
@ -161,10 +184,6 @@ export async function exportRows(
let rows: Row[] = [] let rows: Row[] = []
let headers let headers
if (!tableName) {
throw new HTTPError("Could not find table name.", 400)
}
// Filter data to only specified columns if required // Filter data to only specified columns if required
if (columns && columns.length) { if (columns && columns.length) {
for (let i = 0; i < result.rows.length; i++) { for (let i = 0; i < result.rows.length; i++) {

View File

@ -0,0 +1,62 @@
import {
FieldType,
RelationshipFieldMetadata,
SearchFilters,
Table,
} from "@budibase/types"
import { isPlainObject } from "lodash"
export function getRelationshipColumns(table: Table): {
name: string
definition: RelationshipFieldMetadata
}[] {
// performing this with a for loop rather than an array filter improves
// type guarding, as no casts are required
const linkEntries: [string, RelationshipFieldMetadata][] = []
for (let entry of Object.entries(table.schema)) {
if (entry[1].type === FieldType.LINK) {
const linkColumn: RelationshipFieldMetadata = entry[1]
linkEntries.push([entry[0], linkColumn])
}
}
return linkEntries.map(entry => ({
name: entry[0],
definition: entry[1],
}))
}
export function getTableIDList(
tables: Table[]
): { name: string; id: string }[] {
return tables
.filter(table => table.originalName && table._id)
.map(table => ({ id: table._id!, name: table.originalName! }))
}
export function updateFilterKeys(
filters: SearchFilters,
updates: { original: string; updated: string }[]
): SearchFilters {
const makeFilterKeyRegex = (str: string) =>
new RegExp(`^${str}\\.|:${str}\\.`)
for (let filter of Object.values(filters)) {
if (!isPlainObject(filter)) {
continue
}
for (let [key, keyFilter] of Object.entries(filter)) {
if (keyFilter === "") {
delete filter[key]
}
const possibleKey = updates.find(({ original }) =>
key.match(makeFilterKeyRegex(original))
)
if (possibleKey && possibleKey.original !== possibleKey.updated) {
// only replace the first, not replaceAll
filter[key.replace(possibleKey.original, possibleKey.updated)] =
filter[key]
delete filter[key]
}
}
}
return filters
}
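An example of the rewrite updateFilterKeys performs: filter keys that reference a relationship or table by name, optionally behind an "N:" prefix, are switched to the corresponding table ID (the IDs below are illustrative):

const filters: SearchFilters = {
  equal: { "tasks.name": "assembling", "1:persons.year": 1990 },
}
updateFilterKeys(filters, [
  { original: "tasks", updated: "ta_tasks_id" },
  { original: "persons", updated: "ta_persons_id" },
])
// => { equal: { "ta_tasks_id.name": "assembling",
//               "1:ta_persons_id.year": 1990 } }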

View File

@ -1,4 +1,5 @@
import { import {
Datasource,
DocumentType, DocumentType,
FieldType, FieldType,
Operation, Operation,
@ -28,6 +29,12 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
import AliasTables from "../sqlAlias" import AliasTables from "../sqlAlias"
import { outputProcessing } from "../../../../utilities/rowProcessor" import { outputProcessing } from "../../../../utilities/rowProcessor"
import pick from "lodash/pick" import pick from "lodash/pick"
import { processRowCountResponse } from "../utils"
import {
updateFilterKeys,
getRelationshipColumns,
getTableIDList,
} from "./filters"
const builder = new sql.Sql(SqlClient.SQL_LITE) const builder = new sql.Sql(SqlClient.SQL_LITE)
@ -58,34 +65,31 @@ function buildInternalFieldList(
return fieldList return fieldList
} }
function tableNameInFieldRegex(tableName: string) { function cleanupFilters(
return new RegExp(`^${tableName}.|:${tableName}.`, "g") filters: SearchFilters,
} table: Table,
allTables: Table[]
function cleanupFilters(filters: SearchFilters, tables: Table[]) { ) {
for (let filter of Object.values(filters)) { // get a list of all relationship columns in the table for updating
if (typeof filter !== "object") { const relationshipColumns = getRelationshipColumns(table)
continue // get table names to ID map for relationships
} const tableNameToID = getTableIDList(allTables)
for (let [key, keyFilter] of Object.entries(filter)) { // all should be applied at once
if (keyFilter === "") { filters = updateFilterKeys(
delete filter[key] filters,
} relationshipColumns
.map(({ name, definition }) => ({
// relationship, switch to table ID original: name,
const tableRelated = tables.find( updated: definition.tableId,
table => }))
table.originalName && .concat(
key.match(tableNameInFieldRegex(table.originalName)) tableNameToID.map(({ name, id }) => ({
original: name,
updated: id,
}))
) )
if (tableRelated && tableRelated.originalName) { )
// only replace the first, not replaceAll
filter[key.replace(tableRelated.originalName, tableRelated._id!)] =
filter[key]
delete filter[key]
}
}
}
return filters return filters
} }
@ -95,14 +99,29 @@ function buildTableMap(tables: Table[]) {
// update the table name, should never query by name for SQLite // update the table name, should never query by name for SQLite
table.originalName = table.name table.originalName = table.name
table.name = table._id! table.name = table._id!
// need a primary for sorting, lookups etc
table.primary = ["_id"]
tableMap[table._id!] = table tableMap[table._id!] = table
} }
return tableMap return tableMap
} }
async function runSqlQuery(json: QueryJson, tables: Table[]) { function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
function runSqlQuery(
json: QueryJson,
tables: Table[],
opts: { countTotalRows: true }
): Promise<number>
async function runSqlQuery(
json: QueryJson,
tables: Table[],
opts?: { countTotalRows?: boolean }
) {
const alias = new AliasTables(tables.map(table => table.name)) const alias = new AliasTables(tables.map(table => table.name))
return await alias.queryWithAliasing(json, async json => { if (opts?.countTotalRows) {
json.endpoint.operation = Operation.COUNT
}
const processSQLQuery = async (_: Datasource, json: QueryJson) => {
const query = builder._query(json, { const query = builder._query(json, {
disableReturning: true, disableReturning: true,
}) })
@ -124,17 +143,27 @@ async function runSqlQuery(json: QueryJson, tables: Table[]) {
const db = context.getAppDB() const db = context.getAppDB()
return await db.sql<Row>(sql, bindings) return await db.sql<Row>(sql, bindings)
}) }
const response = await alias.queryWithAliasing(json, processSQLQuery)
if (opts?.countTotalRows) {
return processRowCountResponse(response)
} else {
return response
}
} }
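The overload signatures give callers precise return types without casting. A sketch of how they resolve, inside an async function:

const rows = await runSqlQuery(request, allTables) // typed as Row[]
const total = await runSqlQuery(request, allTables, {
  countTotalRows: true,
}) // typed as number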
export async function search( export async function search(
options: RowSearchParams, options: RowSearchParams,
table: Table table: Table
): Promise<SearchResponse<Row>> { ): Promise<SearchResponse<Row>> {
const { paginate, query, ...params } = options let { paginate, query, ...params } = options
const allTables = await sdk.tables.getAllInternalTables() const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables) const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table
if (table?._id) {
table = allTablesMap[table?._id]
}
if (!table) { if (!table) {
throw new Error("Unable to find table") throw new Error("Unable to find table")
} }
@ -149,7 +178,7 @@ export async function search(
operation: Operation.READ, operation: Operation.READ,
}, },
filters: { filters: {
...cleanupFilters(query, allTables), ...cleanupFilters(query, table, allTables),
documentType: DocumentType.ROW, documentType: DocumentType.ROW,
}, },
table, table,
@ -169,7 +198,7 @@ export async function search(
sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
request.sort = { request.sort = {
[sortField.name]: { [sortField.name]: {
direction: params.sortOrder || SortOrder.DESCENDING, direction: params.sortOrder || SortOrder.ASCENDING,
type: sortType as SortType, type: sortType as SortType,
}, },
} }
@ -180,7 +209,8 @@ export async function search(
} }
const bookmark: number = (params.bookmark as number) || 0 const bookmark: number = (params.bookmark as number) || 0
if (paginate && params.limit) { if (params.limit) {
paginate = true
request.paginate = { request.paginate = {
limit: params.limit + 1, limit: params.limit + 1,
offset: bookmark * params.limit, offset: bookmark * params.limit,
@ -188,7 +218,20 @@ export async function search(
} }
try { try {
const rows = await runSqlQuery(request, allTables) const queries: Promise<Row[] | number>[] = []
queries.push(runSqlQuery(request, allTables))
if (options.countRows) {
// get the total count of rows
queries.push(
runSqlQuery(request, allTables, {
countTotalRows: true,
})
)
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
// process from the format of tableId.column to expected format also // process from the format of tableId.column to expected format also
// make sure JSON columns corrected // make sure JSON columns corrected
@ -201,7 +244,8 @@ export async function search(
// check for pagination final row // check for pagination final row
let nextRow: Row | undefined let nextRow: Row | undefined
if (paginate && params.limit && processed.length > params.limit) { if (paginate && params.limit && rows.length > params.limit) {
// remove the extra row used to check whether there is another page
nextRow = processed.pop() nextRow = processed.pop()
} }
@ -217,21 +261,21 @@ export async function search(
finalRows = finalRows.map((r: any) => pick(r, fields)) finalRows = finalRows.map((r: any) => pick(r, fields))
} }
// check for pagination
if (paginate) {
const response: SearchResponse<Row> = { const response: SearchResponse<Row> = {
rows: finalRows, rows: finalRows,
} }
if (nextRow) { if (totalRows != null) {
response.totalRows = totalRows
}
// check for pagination
if (paginate && nextRow) {
response.hasNextPage = true response.hasNextPage = true
response.bookmark = bookmark + 1 response.bookmark = bookmark + 1
} }
if (paginate && !nextRow) {
response.hasNextPage = false
}
return response return response
} else {
return {
rows: finalRows,
}
}
} catch (err: any) { } catch (err: any) {
const msg = typeof err === "string" ? err : err.message const msg = typeof err === "string" ? err : err.message
if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) { if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {

View File

@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
import { getSQLClient } from "./utils" import { getSQLClient } from "./utils"
import { cloneDeep } from "lodash" import { cloneDeep } from "lodash"
import datasources from "../datasources" import datasources from "../datasources"
import { makeExternalQuery } from "../../../integrations/base/query" import { BudibaseInternalDB } from "../../../db/utils"
type PerformQueryFunction = (
datasource: Datasource,
json: QueryJson
) => Promise<DatasourcePlusQueryResponse>
const WRITE_OPERATIONS: Operation[] = [ const WRITE_OPERATIONS: Operation[] = [
Operation.CREATE, Operation.CREATE,
@ -65,7 +70,7 @@ export default class AliasTables {
this.charSeq = new CharSequence() this.charSeq = new CharSequence()
} }
isAliasingEnabled(json: QueryJson, datasource: Datasource) { isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
const operation = json.endpoint.operation const operation = json.endpoint.operation
const fieldLength = json.resource?.fields?.length const fieldLength = json.resource?.fields?.length
if ( if (
@ -75,6 +80,10 @@ export default class AliasTables {
) { ) {
return false return false
} }
// SQS - doesn't have a datasource
if (!datasource) {
return true
}
try { try {
const sqlClient = getSQLClient(datasource) const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(operation) const isWrite = WRITE_OPERATIONS.includes(operation)
@ -167,13 +176,14 @@ export default class AliasTables {
async queryWithAliasing( async queryWithAliasing(
json: QueryJson, json: QueryJson,
queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse> queryFn: PerformQueryFunction
): Promise<DatasourcePlusQueryResponse> { ): Promise<DatasourcePlusQueryResponse> {
const datasourceId = json.endpoint.datasourceId const datasourceId = json.endpoint.datasourceId
const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
let aliasingEnabled: boolean, datasource: Datasource | undefined let aliasingEnabled: boolean, datasource: Datasource
if (isSqs) { if (isSqs) {
aliasingEnabled = true aliasingEnabled = this.isAliasingEnabled(json)
datasource = BudibaseInternalDB
} else { } else {
datasource = await datasources.get(datasourceId) datasource = await datasources.get(datasourceId)
aliasingEnabled = this.isAliasingEnabled(json, datasource) aliasingEnabled = this.isAliasingEnabled(json, datasource)
@ -225,14 +235,7 @@ export default class AliasTables {
json.tableAliases = invertedTableAliases json.tableAliases = invertedTableAliases
} }
let response: DatasourcePlusQueryResponse let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
if (datasource && !isSqs) {
response = await makeExternalQuery(datasource, json)
} else if (queryFn) {
response = await queryFn(json)
} else {
throw new Error("No supplied method to perform aliased query")
}
if (Array.isArray(response) && aliasingEnabled) { if (Array.isArray(response) && aliasingEnabled) {
return this.reverse(response) return this.reverse(response)
} else { } else {

View File

@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
throw new Error("Unable to determine client for SQL datasource") throw new Error("Unable to determine client for SQL datasource")
} }
export function processRowCountResponse(
response: DatasourcePlusQueryResponse
): number {
if (response && response.length === 1 && "total" in response[0]) {
const total = response[0].total
return typeof total === "number" ? total : parseInt(total)
} else {
throw new Error("Unable to count rows in query - no count response")
}
}
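The count query produces a single row with a total column, which this helper unwraps while tolerating drivers that return the count as a string:

processRowCountResponse([{ total: 10 }])   // => 10
processRowCountResponse([{ total: "10" }]) // => 10 (node-postgres, for example, returns bigint counts as strings)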
export async function getDatasourceAndQuery( export async function getDatasourceAndQuery(
json: QueryJson json: QueryJson
): Promise<DatasourcePlusQueryResponse> { ): Promise<DatasourcePlusQueryResponse> {

View File

@ -90,10 +90,10 @@ export async function getExternalTable(
export async function getTable(tableId: string): Promise<Table> { export async function getTable(tableId: string): Promise<Table> {
const db = context.getAppDB() const db = context.getAppDB()
let output: Table let output: Table
if (isExternalTableID(tableId)) { if (tableId && isExternalTableID(tableId)) {
let { datasourceId, tableName } = breakExternalTableId(tableId) let { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource = await datasources.get(datasourceId!) const datasource = await datasources.get(datasourceId)
const table = await getExternalTable(datasourceId!, tableName!) const table = await getExternalTable(datasourceId, tableName)
output = { ...table, sql: isSQL(datasource) } output = { ...table, sql: isSQL(datasource) }
} else { } else {
output = await db.get<Table>(tableId) output = await db.get<Table>(tableId)

View File

@ -10,9 +10,9 @@ export async function get(viewId: string): Promise<ViewV2> {
const { tableId } = utils.extractViewInfoFromID(viewId) const { tableId } = utils.extractViewInfoFromID(viewId)
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const ds = await sdk.datasources.get(datasourceId!) const ds = await sdk.datasources.get(datasourceId)
const table = ds.entities![tableName!] const table = ds.entities![tableName]
const views = Object.values(table.views!).filter(isV2) const views = Object.values(table.views!).filter(isV2)
const found = views.find(v => v.id === viewId) const found = views.find(v => v.id === viewId)
if (!found) { if (!found) {
@ -25,9 +25,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
const { tableId } = utils.extractViewInfoFromID(viewId) const { tableId } = utils.extractViewInfoFromID(viewId)
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const ds = await sdk.datasources.get(datasourceId!) const ds = await sdk.datasources.get(datasourceId)
const table = ds.entities![tableName!] const table = ds.entities![tableName]
const views = Object.values(table.views!).filter(isV2) const views = Object.values(table.views!).filter(isV2)
const found = views.find(v => v.id === viewId) const found = views.find(v => v.id === viewId)
if (!found) { if (!found) {
@ -49,9 +49,9 @@ export async function create(
const db = context.getAppDB() const db = context.getAppDB()
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const ds = await sdk.datasources.get(datasourceId!) const ds = await sdk.datasources.get(datasourceId)
ds.entities![tableName!].views ??= {} ds.entities![tableName].views ??= {}
ds.entities![tableName!].views![view.name] = view ds.entities![tableName].views![view.name] = view
await db.put(ds) await db.put(ds)
return view return view
} }
@ -60,9 +60,9 @@ export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
const db = context.getAppDB() const db = context.getAppDB()
const { datasourceId, tableName } = breakExternalTableId(tableId) const { datasourceId, tableName } = breakExternalTableId(tableId)
const ds = await sdk.datasources.get(datasourceId!) const ds = await sdk.datasources.get(datasourceId)
ds.entities![tableName!].views ??= {} ds.entities![tableName].views ??= {}
const views = ds.entities![tableName!].views! const views = ds.entities![tableName].views!
const existingView = Object.values(views).find( const existingView = Object.values(views).find(
v => isV2(v) && v.id === view.id v => isV2(v) && v.id === view.id
@ -87,9 +87,9 @@ export async function remove(viewId: string): Promise<ViewV2> {
} }
const { datasourceId, tableName } = breakExternalTableId(view.tableId) const { datasourceId, tableName } = breakExternalTableId(view.tableId)
const ds = await sdk.datasources.get(datasourceId!) const ds = await sdk.datasources.get(datasourceId)
delete ds.entities![tableName!].views![view?.name] delete ds.entities![tableName].views![view?.name]
await db.put(ds) await db.put(ds)
return view return view
} }

View File

@ -7,6 +7,8 @@ import {
} from "../automations/utils" } from "../automations/utils"
import * as actions from "../automations/actions" import * as actions from "../automations/actions"
import * as automationUtils from "../automations/automationUtils" import * as automationUtils from "../automations/automationUtils"
import { replaceFakeBindings } from "../automations/loopUtils"
import { default as AutomationEmitter } from "../events/AutomationEmitter" import { default as AutomationEmitter } from "../events/AutomationEmitter"
import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { generateAutomationMetadataID, isProdAppID } from "../db/utils"
import { definitions as triggerDefs } from "../automations/triggerInfo" import { definitions as triggerDefs } from "../automations/triggerInfo"
@ -214,15 +216,15 @@ class Orchestrator {
} }
updateContextAndOutput( updateContextAndOutput(
loopStepNumber: number | undefined, currentLoopStepIndex: number | undefined,
step: AutomationStep, step: AutomationStep,
output: any, output: any,
result: { success: boolean; status: string } result: { success: boolean; status: string }
) { ) {
if (!loopStepNumber) { if (!currentLoopStepIndex) {
throw new Error("No loop step number provided.") throw new Error("No loop step number provided.")
} }
this.executionOutput.steps.splice(loopStepNumber, 0, { this.executionOutput.steps.splice(currentLoopStepIndex, 0, {
id: step.id, id: step.id,
stepId: step.stepId, stepId: step.stepId,
outputs: { outputs: {
@ -232,7 +234,7 @@ class Orchestrator {
}, },
inputs: step.inputs, inputs: step.inputs,
}) })
this._context.steps.splice(loopStepNumber, 0, { this._context.steps.splice(currentLoopStepIndex, 0, {
...output, ...output,
success: result.success, success: result.success,
status: result.status, status: result.status,
@ -256,7 +258,7 @@ class Orchestrator {
let loopStep: LoopStep | undefined = undefined let loopStep: LoopStep | undefined = undefined
let stepCount = 0 let stepCount = 0
let loopStepNumber: any = undefined let currentLoopStepIndex: number = 0
let loopSteps: LoopStep[] | undefined = [] let loopSteps: LoopStep[] | undefined = []
let metadata let metadata
let timeoutFlag = false let timeoutFlag = false
@ -290,7 +292,7 @@ class Orchestrator {
}, },
}) })
let input: any, let input: LoopInput | undefined,
iterations = 1, iterations = 1,
iterationCount = 0 iterationCount = 0
@@ -309,19 +311,19 @@ class Orchestrator {
         stepCount++
         if (step.stepId === LOOP_STEP_ID) {
           loopStep = step as LoopStep
-          loopStepNumber = stepCount
+          currentLoopStepIndex = stepCount
           continue
         }
         if (loopStep) {
           input = await processObject(loopStep.inputs, this._context)
-          iterations = getLoopIterations(loopStep as LoopStep)
+          iterations = getLoopIterations(loopStep)
           stepSpan?.addTags({ step: { iterations } })
         }
-        for (let index = 0; index < iterations; index++) {
+        for (let stepIndex = 0; stepIndex < iterations; stepIndex++) {
           let originalStepInput = cloneDeep(step.inputs)
-          // Handle if the user has set a max iteration count or if it reaches the max limit set by us
-          if (loopStep && input.binding) {
+          if (loopStep && input?.binding) {
             let tempOutput = {
               items: loopSteps,
               iterations: iterationCount,
@@ -332,7 +334,7 @@ class Orchestrator {
               )
             } catch (err) {
               this.updateContextAndOutput(
-                loopStepNumber,
+                currentLoopStepIndex,
                 step,
                 tempOutput,
                 {
@@ -353,55 +355,22 @@ class Orchestrator {
             } else if (Array.isArray(loopStep.inputs.binding)) {
               item = loopStep.inputs.binding
             }
-            this._context.steps[loopStepNumber] = {
-              currentItem: item[index],
+            this._context.steps[currentLoopStepIndex] = {
+              currentItem: item[stepIndex],
             }
-            // The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
-            // Pretty hacky because we need to account for the row object
-            for (let [key, value] of Object.entries(originalStepInput)) {
-              if (typeof value === "object") {
-                for (let [innerKey, innerValue] of Object.entries(
-                  originalStepInput[key]
-                )) {
-                  if (typeof innerValue === "string") {
-                    originalStepInput[key][innerKey] =
-                      automationUtils.substituteLoopStep(
-                        innerValue,
-                        `steps.${loopStepNumber}`
-                      )
-                  } else if (typeof value === "object") {
-                    for (let [innerObject, innerValue] of Object.entries(
-                      originalStepInput[key][innerKey]
-                    )) {
-                      if (typeof innerValue === "string") {
-                        originalStepInput[key][innerKey][innerObject] =
-                          automationUtils.substituteLoopStep(
-                            innerValue,
-                            `steps.${loopStepNumber}`
-                          )
-                      }
-                    }
-                  }
-                }
-              } else {
-                if (typeof value === "string") {
-                  originalStepInput[key] =
-                    automationUtils.substituteLoopStep(
-                      value,
-                      `steps.${loopStepNumber}`
-                    )
-                }
-              }
-            }
+            originalStepInput = replaceFakeBindings(
+              originalStepInput,
+              currentLoopStepIndex
+            )
             if (
-              index === env.AUTOMATION_MAX_ITERATIONS ||
+              stepIndex === env.AUTOMATION_MAX_ITERATIONS ||
               (loopStep.inputs.iterations &&
-                index === parseInt(loopStep.inputs.iterations))
+                stepIndex === parseInt(loopStep.inputs.iterations))
             ) {
               this.updateContextAndOutput(
-                loopStepNumber,
+                currentLoopStepIndex,
                 step,
                 tempOutput,
                 {
@@ -416,7 +385,7 @@ class Orchestrator {
           let isFailure = false
           const currentItem =
-            this._context.steps[loopStepNumber]?.currentItem
+            this._context.steps[currentLoopStepIndex]?.currentItem
           if (currentItem && typeof currentItem === "object") {
             isFailure = Object.keys(currentItem).some(value => {
               return currentItem[value] === loopStep?.inputs.failure
@@ -428,7 +397,7 @@ class Orchestrator {
           if (isFailure) {
             this.updateContextAndOutput(
-              loopStepNumber,
+              currentLoopStepIndex,
               step,
               tempOutput,
               {
@@ -453,7 +422,6 @@ class Orchestrator {
             continue
           }
-          // If it's a loop step, we need to manually add the bindings to the context
           let stepFn = await this.getStepFunctionality(step.stepId)
           let inputs = await processObject(originalStepInput, this._context)
           inputs = automationUtils.cleanInputValues(
@@ -502,9 +470,9 @@ class Orchestrator {
           if (loopStep) {
             iterationCount++
-            if (index === iterations - 1) {
+            if (stepIndex === iterations - 1) {
               loopStep = undefined
-              this._context.steps.splice(loopStepNumber, 1)
+              this._context.steps.splice(currentLoopStepIndex, 1)
               break
             }
           }
@@ -515,7 +483,7 @@ class Orchestrator {
         if (loopStep && iterations === 0) {
           loopStep = undefined
-          this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+          this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
             id: step.id,
             stepId: step.stepId,
             outputs: {
@@ -525,14 +493,14 @@ class Orchestrator {
             inputs: {},
           })
-          this._context.steps.splice(loopStepNumber, 1)
+          this._context.steps.splice(currentLoopStepIndex, 1)
           iterations = 1
         }
         // Delete the step after the loop step as it's irrelevant, since information is included
         // in the loop step
         if (wasLoopStep && !loopStep) {
-          this._context.steps.splice(loopStepNumber + 1, 1)
+          this._context.steps.splice(currentLoopStepIndex + 1, 1)
           wasLoopStep = false
         }
         if (loopSteps && loopSteps.length) {
@@ -541,13 +509,13 @@ class Orchestrator {
             items: loopSteps,
             iterations: iterationCount,
           }
-          this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+          this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
             id: step.id,
             stepId: step.stepId,
             outputs: tempOutput,
             inputs: step.inputs,
           })
-          this._context.steps[loopStepNumber] = tempOutput
+          this._context.steps[currentLoopStepIndex] = tempOutput
           wasLoopStep = true
           loopSteps = []
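The forty-odd deleted lines of nested Object.entries walks collapse into a single call to replaceFakeBindings. A sketch of what such a helper could look like, assuming it applies the same substituteLoopStep rewrite recursively; the real ../automations/loopUtils implementation may differ:

import * as automationUtils from "./automationUtils"

// Hypothetical sketch of ../automations/loopUtils: the front end emits
// "fake" `loop.` bindings, and rewriting them to `steps.<index>.` lets
// processObject resolve them against the real context. The recursion
// replaces the old hand-rolled two-level object walk, so arbitrarily
// nested inputs are handled uniformly.
export function replaceFakeBindings<T>(
  originalStepInput: T,
  loopStepNumber: number
): T {
  if (typeof originalStepInput === "string") {
    return automationUtils.substituteLoopStep(
      originalStepInput,
      `steps.${loopStepNumber}`
    ) as T
  }
  if (originalStepInput && typeof originalStepInput === "object") {
    const record = originalStepInput as Record<string, unknown>
    for (const key of Object.keys(record)) {
      record[key] = replaceFakeBindings(record[key], loopStepNumber)
    }
  }
  return originalStepInput
}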

View File

@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }

-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}
     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
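parse now needs the whole Table rather than just its schema so it can exempt primary-key autocolumns from the strip rule. An illustrative call, with field names and FieldType values chosen for the example rather than taken from the diff:

// Illustrative only: "id" is an autocolumn but also the primary key, so a
// user-supplied value survives parse(); "createdAt" is a plain autocolumn
// and gets stripped; "name" passes through untouched.
const table = {
  primary: ["id"],
  schema: {
    id: { type: FieldType.NUMBER, autocolumn: true },
    createdAt: { type: FieldType.DATETIME, autocolumn: true },
    name: { type: FieldType.STRING },
  },
} as unknown as Table

const [parsed] = parse([{ id: "7", createdAt: "2024-01-01", name: "a" }], table)
// parsed => { id: 7, name: "a" } — createdAt dropped, id coerced to a number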

View File

@@ -179,3 +179,5 @@ export enum BpmStatusValue {
   VERIFYING_EMAIL = "verifying_email",
   COMPLETED = "completed",
 }
+
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

View File

@@ -12,6 +12,7 @@ import {
   SortOrder,
   RowSearchParams,
   EmptyFilterOption,
+  SearchResponse,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@@ -262,15 +263,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

-export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
+export const search = (
+  docs: Record<string, any>[],
+  query: RowSearchParams
+): SearchResponse<Record<string, any>> => {
   let result = runQuery(docs, query.query)
   if (query.sort) {
     result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
   }
+  let totalRows = result.length
   if (query.limit) {
     result = limit(result, query.limit.toString())
   }
-  return result
+  const response: SearchResponse<Record<string, any>> = { rows: result }
+  if (query.countRows) {
+    response.totalRows = totalRows
+  }
+  return response
 }

 /**
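The client-side search helper now returns the same SearchResponse envelope the server uses, with totalRows captured before the limit truncates the result set. A small usage sketch (docs and query values are illustrative):

// Illustrative call: totalRows reflects the full match count, while rows
// is capped by the limit. Without countRows, totalRows stays undefined.
const { rows, totalRows } = search(
  [{ name: "a" }, { name: "b" }, { name: "c" }],
  { query: {}, limit: 2, countRows: true } as RowSearchParams
)
// rows.length === 2, totalRows === 3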

View File

@@ -25,6 +25,7 @@ export interface SearchViewRowRequest
     | "bookmark"
     | "paginate"
     | "query"
+    | "countRows"
   > {}

 export interface SearchRowResponse {

View File

@@ -42,10 +42,7 @@ export interface Account extends CreateAccount {
   verified: boolean
   verificationSent: boolean
   // licensing
-  tier: string // deprecated
   planType?: PlanType
-  /** @deprecated */
-  planTier?: number
   license?: License
   installId?: string
   installTenantId?: string

View File

@@ -144,7 +144,7 @@ interface BaseIOStructure {
   required?: string[]
 }

-interface InputOutputBlock {
+export interface InputOutputBlock {
   properties: {
     [key: string]: BaseIOStructure
   }

View File

@@ -8,6 +8,7 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  COUNT = "COUNT",
   BULK_CREATE = "BULK_CREATE",
   BULK_UPSERT = "BULK_UPSERT",
   CREATE_TABLE = "CREATE_TABLE",
@@ -188,7 +189,7 @@ export interface Schema {
 }

 // return these when an operation occurred but we got no response
-enum DSPlusOperation {
+export enum DSPlusOperation {
   CREATE = "create",
   READ = "read",
   UPDATE = "update",
@@ -198,6 +199,7 @@ enum DSPlusOperation {
 export type DatasourcePlusQueryResponse =
   | Row[]
   | Record<DSPlusOperation, boolean>[]
+  | { total: number }[]
   | void

 export interface DatasourcePlus extends IntegrationBase {
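Adding { total: number }[] to the union means consumers of a COUNT query have to narrow the response before reading the figure. A hedged sketch of that narrowing; the helper name and the default-to-zero behaviour are choices made for the example, not taken from the Budibase codebase:

// Hypothetical consumer-side narrowing for a COUNT response.
function extractTotal(response: DatasourcePlusQueryResponse): number {
  if (Array.isArray(response) && response.length > 0) {
    const first = response[0]
    if (first && typeof first === "object" && "total" in first) {
      return Number((first as { total: number }).total)
    }
  }
  return 0
}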

View File

@@ -17,6 +17,7 @@ export interface SearchParams {
   fields?: string[]
   indexer?: () => Promise<any>
   rows?: Row[]
+  countRows?: boolean
 }

 // when searching for rows we want a more extensive search type that requires certain properties

View File

@@ -10296,7 +10296,7 @@ engine.io-parser@~5.0.3:
   resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45"
   integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==

-engine.io@~6.4.1:
+engine.io@~6.4.2:
   version "6.4.2"
   resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
   integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
@@ -20160,17 +20160,25 @@ socket.io-parser@~4.2.1:
     "@socket.io/component-emitter" "~3.1.0"
     debug "~4.3.1"

-socket.io@4.6.1:
-  version "4.6.1"
-  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70"
-  integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==
+socket.io-parser@~4.2.4:
+  version "4.2.4"
+  resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83"
+  integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==
+  dependencies:
+    "@socket.io/component-emitter" "~3.1.0"
+    debug "~4.3.1"
+
+socket.io@4.6.2:
+  version "4.6.2"
+  resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786"
+  integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==
   dependencies:
     accepts "~1.3.4"
     base64id "~2.0.0"
     debug "~4.3.2"
-    engine.io "~6.4.1"
+    engine.io "~6.4.2"
     socket.io-adapter "~2.5.2"
-    socket.io-parser "~4.2.1"
+    socket.io-parser "~4.2.4"

 socks-proxy-agent@^7.0.0:
   version "7.0.0"
@@ -21102,18 +21110,6 @@ tar@6.1.11:
   mkdirp "^1.0.3"
   yallist "^4.0.0"

-tar@6.1.15:
-  version "6.1.15"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
-  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
-  dependencies:
-    chownr "^2.0.0"
-    fs-minipass "^2.0.0"
-    minipass "^5.0.0"
-    minizlib "^2.1.1"
-    mkdirp "^1.0.3"
-    yallist "^4.0.0"
-
 tar@6.2.1, tar@^6.1.11, tar@^6.1.2:
   version "6.2.1"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"