diff --git a/.eslintrc.json b/.eslintrc.json index 9dab2f1a88..f614f1ad91 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -92,7 +92,8 @@ // differs to external, but the API is broadly the same "jest/no-conditional-expect": "off", // have to turn this off to allow function overloading in typescript - "no-dupe-class-members": "off" + "no-dupe-class-members": "off", + "no-redeclare": "off" } }, { diff --git a/lerna.json b/lerna.json index abce1679c8..0efaf75283 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.28.7", + "version": "2.29.1", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/packages/account-portal b/packages/account-portal index 247f56d455..b600cca314 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit 247f56d455abbd64da17d865275ed978f577549f +Subproject commit b600cca314a5cc9971e44d46047d1a0019b46b08 diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index 2fd713119b..3085b91ef1 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -72,4 +72,4 @@ export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses" export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee" -export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" +export { DEFAULT_BB_DATASOURCE_ID } from "@budibase/shared-core" diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts index bfa7595d62..69c98fe569 100644 --- a/packages/backend-core/src/db/constants.ts +++ b/packages/backend-core/src/db/constants.ts @@ -1,14 +1,5 @@ -export const CONSTANT_INTERNAL_ROW_COLS = [ - "_id", - "_rev", - "type", - "createdAt", - "updatedAt", - "tableId", -] as const - -export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const - -export function isInternalColumnName(name: string): boolean { - return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name) -} +export { + CONSTANT_INTERNAL_ROW_COLS, + CONSTANT_EXTERNAL_ROW_COLS, + isInternalColumnName, +} from "@budibase/shared-core" diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 9bc2092b83..cdc5f3d3c8 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -1,10 +1,10 @@ import { Knex, knex } from "knex" import * as dbCore from "../db" import { - isIsoDateString, - isValidFilter, getNativeSql, isExternalTable, + isIsoDateString, + isValidFilter, } from "./utils" import { SqlStatements } from "./sqlStatements" import SqlTableQueryBuilder from "./sqlTable" @@ -12,21 +12,21 @@ import { BBReferenceFieldMetadata, FieldSchema, FieldType, + INTERNAL_TABLE_SOURCE_ID, JsonFieldMetadata, + JsonTypes, Operation, + prefixed, QueryJson, - SqlQuery, + QueryOptions, RelationshipsJson, SearchFilters, + SortOrder, + SqlClient, + SqlQuery, SqlQueryBinding, Table, TableSourceType, - INTERNAL_TABLE_SOURCE_ID, - SqlClient, - QueryOptions, - JsonTypes, - prefixed, - SortOrder, } from "@budibase/types" import environment from "../environment" import { helpers } from "@budibase/shared-core" @@ -114,7 +114,7 @@ function generateSelectStatement( ): (string | Knex.Raw)[] | "*" { const { resource, meta } = json - if (!resource) { + if (!resource || !resource.fields || resource.fields.length === 0) { return "*" } @@ -410,13 +410,32 @@ class 
InternalBuilder { return query } - addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { - let { sort, paginate } = json + addDistinctCount( + query: Knex.QueryBuilder, + json: QueryJson + ): Knex.QueryBuilder { const table = json.meta.table + const primary = table.primary + const aliases = json.tableAliases + const aliased = + table.name && aliases?.[table.name] ? aliases[table.name] : table.name + if (!primary) { + throw new Error("SQL counting requires primary key to be supplied") + } + return query.countDistinct(`${aliased}.${primary[0]} as total`) + } + + addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { + let { sort } = json + const table = json.meta.table + const primaryKey = table.primary const tableName = getTableName(table) const aliases = json.tableAliases const aliased = tableName && aliases?.[tableName] ? aliases[tableName] : table?.name + if (!Array.isArray(primaryKey)) { + throw new Error("Sorting requires primary key to be specified for table") + } if (sort && Object.keys(sort || {}).length > 0) { for (let [key, value] of Object.entries(sort)) { const direction = @@ -429,9 +448,12 @@ class InternalBuilder { query = query.orderBy(`${aliased}.${key}`, direction, nulls) } - } else if (this.client === SqlClient.MS_SQL && paginate?.limit) { - // @ts-ignore - query = query.orderBy(`${aliased}.${table?.primary[0]}`) + } + + // add sorting by the primary key if the result isn't already sorted by it, + // to make sure result is deterministic + if (!sort || sort[primaryKey[0]] === undefined) { + query = query.orderBy(`${aliased}.${primaryKey[0]}`) } return query } @@ -522,7 +544,7 @@ class InternalBuilder { }) } } - return query.limit(BASE_LIMIT) + return query } knexWithAlias( @@ -533,13 +555,12 @@ class InternalBuilder { const tableName = endpoint.entityId const tableAlias = aliases?.[tableName] - const query = knex( + return knex( this.tableNameWithSchema(tableName, { alias: tableAlias, schema: endpoint.schema, }) ) - return query } create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { @@ -571,25 +592,49 @@ class InternalBuilder { return query.insert(parsedBody) } - read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder { - let { endpoint, resource, filters, paginate, relationships, tableAliases } = - json + bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder { + const { endpoint, body } = json + let query = this.knexWithAlias(knex, endpoint) + if (!Array.isArray(body)) { + return query + } + const parsedBody = body.map(row => parseBody(row)) + if ( + this.client === SqlClient.POSTGRES || + this.client === SqlClient.SQL_LITE || + this.client === SqlClient.MY_SQL + ) { + const primary = json.meta.table.primary + if (!primary) { + throw new Error("Primary key is required for upsert") + } + const ret = query.insert(parsedBody).onConflict(primary).merge() + return ret + } else if (this.client === SqlClient.MS_SQL) { + // No upsert or onConflict support in MSSQL yet, see: + // https://github.com/knex/knex/pull/6050 + return query.insert(parsedBody) + } + return query.upsert(parsedBody) + } + + read( + knex: Knex, + json: QueryJson, + opts: { + limits?: { base: number; query: number } + } = {} + ): Knex.QueryBuilder { + let { endpoint, filters, paginate, relationships, tableAliases } = json + const { limits } = opts + const counting = endpoint.operation === Operation.COUNT const tableName = endpoint.entityId - // select all if not specified - if (!resource) { - resource = { fields: [] } - } - let 
selectStatement: string | (string | Knex.Raw)[] = "*" - // handle select - if (resource.fields && resource.fields.length > 0) { - // select the resources as the format "table.columnName" - this is what is provided - // by the resource builder further up - selectStatement = generateSelectStatement(json, knex) - } - let foundLimit = limit || BASE_LIMIT + // start building the query + let query = this.knexWithAlias(knex, endpoint, tableAliases) // handle pagination let foundOffset: number | null = null + let foundLimit = limits?.query || limits?.base if (paginate && paginate.page && paginate.limit) { // @ts-ignore const page = paginate.page <= 1 ? 0 : paginate.page - 1 @@ -602,24 +647,39 @@ class InternalBuilder { } else if (paginate && paginate.limit) { foundLimit = paginate.limit } - // start building the query - let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = query.limit(foundLimit) - if (foundOffset) { - query = query.offset(foundOffset) + // counting should not sort, limit or offset + if (!counting) { + // add the found limit if supplied + if (foundLimit != null) { + query = query.limit(foundLimit) + } + // add overall pagination + if (foundOffset != null) { + query = query.offset(foundOffset) + } + // add sorting to pre-query + // no point in sorting when counting + query = this.addSorting(query, json) } + // add filters to the query (where) query = this.addFilters(query, filters, json.meta.table, { aliases: tableAliases, }) - // add sorting to pre-query - query = this.addSorting(query, json) const alias = tableAliases?.[tableName] || tableName - let preQuery = knex({ - [alias]: query, - } as any).select(selectStatement) as any + let preQuery: Knex.QueryBuilder = knex({ + // the typescript definition for the knex constructor doesn't support this + // syntax, but it is the only way to alias a pre-query result as part of + // a query - there is an alias dictionary type, but it assumes it can only + // be a table name, not a pre-query + [alias]: query as any, + }) + // if counting, use distinct count, else select + preQuery = !counting + ? preQuery.select(generateSelectStatement(json, knex)) + : this.addDistinctCount(preQuery, json) // have to add after as well (this breaks MS-SQL) - if (this.client !== SqlClient.MS_SQL) { + if (this.client !== SqlClient.MS_SQL && !counting) { preQuery = this.addSorting(preQuery, json) } // handle joins @@ -630,6 +690,13 @@ class InternalBuilder { endpoint.schema, tableAliases ) + + // add a base limit over the whole query + // if counting we can't set this limit + if (limits?.base) { + query = query.limit(limits.base) + } + return this.addFilters(query, filters, json.meta.table, { relationship: true, aliases: tableAliases, @@ -674,6 +741,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { this.limit = limit } + private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) { + const sqlClient = this.getSqlClient() + if (opts?.disableBindings) { + return { sql: query.toString() } + } else { + let native = getNativeSql(query) + if (sqlClient === SqlClient.SQL_LITE) { + native = convertBooleans(native) + } + return native + } + } + /** * @param json The JSON query DSL which is to be converted to SQL. * @param opts extra options which are to be passed into the query builder, e.g. 
disableReturning @@ -697,7 +777,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { query = builder.create(client, json, opts) break case Operation.READ: - query = builder.read(client, json, this.limit) + query = builder.read(client, json, { + limits: { + query: this.limit, + base: BASE_LIMIT, + }, + }) + break + case Operation.COUNT: + // read without any limits to count + query = builder.read(client, json) break case Operation.UPDATE: query = builder.update(client, json, opts) @@ -708,6 +797,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { case Operation.BULK_CREATE: query = builder.bulkCreate(client, json) break + case Operation.BULK_UPSERT: + query = builder.bulkUpsert(client, json) + break case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE: @@ -716,15 +808,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { throw `Operation type is not supported by SQL query builder` } - if (opts?.disableBindings) { - return { sql: query.toString() } - } else { - let native = getNativeSql(query) - if (sqlClient === SqlClient.SQL_LITE) { - native = convertBooleans(native) - } - return native - } + return this.convertToNative(query, opts) } async getReturningRow(queryFn: QueryFunction, json: QueryJson) { @@ -800,6 +884,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { await this.getReturningRow(queryFn, this.checkLookupKeys(id, json)) ) } + if (operation === Operation.COUNT) { + return results + } if (operation !== Operation.READ) { return row } diff --git a/packages/backend-core/src/sql/sqlTable.ts b/packages/backend-core/src/sql/sqlTable.ts index 09f9908baa..bdc8a3dd69 100644 --- a/packages/backend-core/src/sql/sqlTable.ts +++ b/packages/backend-core/src/sql/sqlTable.ts @@ -109,8 +109,10 @@ function generateSchema( const { tableName } = breakExternalTableId(column.tableId) // @ts-ignore const relatedTable = tables[tableName] - if (!relatedTable) { - throw new Error("Referenced table doesn't exist") + if (!relatedTable || !relatedTable.primary) { + throw new Error( + "Referenced table doesn't exist or has no primary keys" + ) } const relatedPrimary = relatedTable.primary[0] const externalType = relatedTable.schema[relatedPrimary].externalType diff --git a/packages/backend-core/src/sql/utils.ts b/packages/backend-core/src/sql/utils.ts index 2d9b289417..45ab510948 100644 --- a/packages/backend-core/src/sql/utils.ts +++ b/packages/backend-core/src/sql/utils.ts @@ -55,10 +55,7 @@ export function buildExternalTableId(datasourceId: string, tableName: string) { return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}` } -export function breakExternalTableId(tableId: string | undefined) { - if (!tableId) { - return {} - } +export function breakExternalTableId(tableId: string) { const parts = tableId.split(DOUBLE_SEPARATOR) let datasourceId = parts.shift() // if they need joined @@ -67,6 +64,9 @@ export function breakExternalTableId(tableId: string | undefined) { if (tableName.includes(ENCODED_SPACE)) { tableName = decodeURIComponent(tableName) } + if (!datasourceId || !tableName) { + throw new Error("Unable to get datasource/table name from table ID") + } return { datasourceId, tableName } } diff --git a/packages/bbui/src/Modal/Modal.svelte b/packages/bbui/src/Modal/Modal.svelte index 4656be69d1..dec1455d0c 100644 --- a/packages/bbui/src/Modal/Modal.svelte +++ b/packages/bbui/src/Modal/Modal.svelte @@ -162,6 +162,7 @@ max-height: 100%; } .modal-inner-wrapper { + padding: 40px; flex: 1 1 auto; display: flex; flex-direction: row; @@ -176,7 +177,6 @@ 
border: 2px solid var(--spectrum-global-color-gray-200); overflow: visible; max-height: none; - margin: 40px 0; transform: none; --spectrum-dialog-confirm-border-radius: var( --spectrum-global-dimension-size-100 diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index b8b7c5ae54..57ca19ddb2 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -16,6 +16,8 @@ DatePicker, DrawerContent, Toggle, + Icon, + Divider, } from "@budibase/bbui" import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte" import { automationStore, selectedAutomation, tables } from "stores/builder" @@ -89,6 +91,8 @@ ? [hbAutocomplete([...bindingsToCompletions(bindings, codeMode)])] : [] + let testDataRowVisibility = {} + const getInputData = (testData, blockInputs) => { // Test data is not cloned for reactivity let newInputData = testData || cloneDeep(blockInputs) @@ -196,7 +200,8 @@ (automation.trigger?.event === "row:update" || automation.trigger?.event === "row:save") ) { - if (name !== "id" && name !== "revision") return `trigger.row.${name}` + let noRowKeywordBindings = ["id", "revision", "oldRow"] + if (!noRowKeywordBindings.includes(name)) return `trigger.row.${name}` } /* End special cases for generating custom schemas based on triggers */ @@ -372,7 +377,11 @@ function getFieldLabel(key, value) { const requiredSuffix = requiredProperties.includes(key) ? "*" : "" - return `${value.title || (key === "row" ? "Table" : key)} ${requiredSuffix}` + return `${value.title || (key === "row" ? "Row" : key)} ${requiredSuffix}` + } + + function toggleTestDataRowVisibility(key) { + testDataRowVisibility[key] = !testDataRowVisibility[key] } function handleAttachmentParams(keyValueObj) { @@ -607,20 +616,48 @@ on:change={e => onChange(e, key)} /> {:else if value.customType === "row"} - { - if (e.detail?.key) { - onChange(e, e.detail.key) - } else { - onChange(e, key) - } - }} - {bindings} - {isTestModal} - {isUpdateRow} - /> + {#if isTestModal} +
+ toggleTestDataRowVisibility(key)} + /> + +
+ {#if testDataRowVisibility[key]} + { + if (e.detail?.key) { + onChange(e, e.detail.key) + } else { + onChange(e, key) + } + }} + {bindings} + {isTestModal} + {isUpdateRow} + /> + {/if} + + {:else} + { + if (e.detail?.key) { + onChange(e, e.detail.key) + } else { + onChange(e, key) + } + }} + {bindings} + {isTestModal} + {isUpdateRow} + /> + {/if} {:else if value.customType === "webhookUrl"} onChange(e, key)} @@ -736,6 +773,12 @@ width: 320px; } + .align-horizontally { + display: flex; + gap: var(--spacing-s); + align-items: center; + } + .fields { display: flex; flex-direction: column; diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index 17ecd8f844..d79eedd194 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -17,6 +17,8 @@ SWITCHABLE_TYPES, ValidColumnNameRegex, helpers, + CONSTANT_INTERNAL_ROW_COLS, + CONSTANT_EXTERNAL_ROW_COLS, } from "@budibase/shared-core" import { createEventDispatcher, getContext, onMount } from "svelte" import { cloneDeep } from "lodash/fp" @@ -52,7 +54,6 @@ const DATE_TYPE = FieldType.DATETIME const dispatch = createEventDispatcher() - const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"] const { dispatch: gridDispatch, rows } = getContext("grid") export let field @@ -487,20 +488,27 @@ }) } const newError = {} + const prohibited = externalTable + ? CONSTANT_EXTERNAL_ROW_COLS + : CONSTANT_INTERNAL_ROW_COLS if (!externalTable && fieldInfo.name?.startsWith("_")) { newError.name = `Column name cannot start with an underscore.` } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) { newError.name = `Illegal character; must be alpha-numeric.` - } else if (PROHIBITED_COLUMN_NAMES.some(name => fieldInfo.name === name)) { - newError.name = `${PROHIBITED_COLUMN_NAMES.join( + } else if ( + prohibited.some( + name => fieldInfo?.name?.toLowerCase() === name.toLowerCase() + ) + ) { + newError.name = `${prohibited.join( ", " - )} are not allowed as column names` + )} are not allowed as column names - case insensitive.` } else if (inUse($tables.selected, fieldInfo.name, originalName)) { newError.name = `Column name already in use.` } if (fieldInfo.type === FieldType.AUTO && !fieldInfo.subtype) { - newError.subtype = `Auto Column requires a type` + newError.subtype = `Auto Column requires a type.` } if (fieldInfo.fieldName && fieldInfo.tableId) { diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte index de56fa8ce5..80655d1099 100644 --- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte +++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte @@ -1,9 +1,14 @@ + +
+ + + + + {/if}
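For reference, the CreateEditColumn change above swaps the builder's local PROHIBITED_COLUMN_NAMES list for the reserved-column constants that @budibase/shared-core now exports (the same constants backend-core re-exports earlier in this diff), and makes the comparison case-insensitive. A minimal sketch of the resulting check, under the assumption that it is pulled out into a standalone helper (validateColumnName is hypothetical; in the component the logic runs inline inside the validation handler):

    import {
      CONSTANT_INTERNAL_ROW_COLS,
      CONSTANT_EXTERNAL_ROW_COLS,
    } from "@budibase/shared-core"

    // Internal tables reserve _id, _rev, type, createdAt, updatedAt and
    // tableId; external (SQL) tables only reserve _id, _rev and tableId.
    // validateColumnName is a hypothetical helper for illustration only.
    function validateColumnName(
      name: string,
      externalTable: boolean
    ): string | null {
      const prohibited: readonly string[] = externalTable
        ? CONSTANT_EXTERNAL_ROW_COLS
        : CONSTANT_INTERNAL_ROW_COLS
      // the check is case-insensitive, so "TableId" is rejected
      // just like "tableId"
      const clash = prohibited.some(
        col => col.toLowerCase() === name.toLowerCase()
      )
      return clash
        ? `${prohibited.join(", ")} are not allowed as column names - case insensitive.`
        : null
    }

Deriving both the validation and the error copy from the shared constants keeps the builder and backend-core in agreement if the reserved set ever changes.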
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js index 587993377d..606ee41d02 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js @@ -21,5 +21,7 @@ export { default as ShowNotification } from "./ShowNotification.svelte" export { default as PromptUser } from "./PromptUser.svelte" export { default as OpenSidePanel } from "./OpenSidePanel.svelte" export { default as CloseSidePanel } from "./CloseSidePanel.svelte" +export { default as OpenModal } from "./OpenModal.svelte" +export { default as CloseModal } from "./CloseModal.svelte" export { default as ClearRowSelection } from "./ClearRowSelection.svelte" export { default as DownloadFile } from "./DownloadFile.svelte" diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json index 2840a0d662..4022926e7f 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json @@ -157,6 +157,18 @@ "component": "CloseSidePanel", "dependsOnFeature": "sidePanel" }, + { + "name": "Open Modal", + "type": "application", + "component": "OpenModal", + "dependsOnFeature": "modal" + }, + { + "name": "Close Modal", + "type": "application", + "component": "CloseModal", + "dependsOnFeature": "modal" + }, { "name": "Clear Row Selection", "type": "data", diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/NewComponentPanel.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/NewComponentPanel.svelte index c7c58a6e16..361e07a026 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/NewComponentPanel.svelte +++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/NewComponentPanel.svelte @@ -59,7 +59,14 @@ // Build up list of illegal children from ancestors let illegalChildren = definition.illegalChildren || [] path.forEach(ancestor => { - if (ancestor._component === `@budibase/standard-components/sidepanel`) { + // Sidepanels and modals can be nested anywhere in the component tree, but really they are always rendered at the top level. + // Because of this, it doesn't make sense to carry over any parent illegal children to them, so the array is reset here. 
+ if ( + [ + "@budibase/standard-components/sidepanel", + "@budibase/standard-components/modal", + ].includes(ancestor._component) + ) { illegalChildren = [] } const def = componentStore.getDefinition(ancestor._component) diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/componentStructure.json b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/componentStructure.json index ba6f403d81..ff58a66221 100644 --- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/componentStructure.json +++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/new/_components/componentStructure.json @@ -14,7 +14,7 @@ { "name": "Layout", "icon": "ClassicGridView", - "children": ["container", "section", "sidepanel"] + "children": ["container", "section", "sidepanel", "modal"] }, { "name": "Data", diff --git a/packages/builder/src/stores/builder/screens.js b/packages/builder/src/stores/builder/screens.js index 7339593960..b1bef10c36 100644 --- a/packages/builder/src/stores/builder/screens.js +++ b/packages/builder/src/stores/builder/screens.js @@ -125,7 +125,14 @@ export class ScreenStore extends BudiStore { return } - if (type === "@budibase/standard-components/sidepanel") { + // Sidepanels and modals can be nested anywhere in the component tree, but really they are always rendered at the top level. + // Because of this, it doesn't make sense to carry over any parent illegal children to them, so the array is reset here. + if ( + [ + "@budibase/standard-components/sidepanel", + "@budibase/standard-components/modal", + ].includes(type) + ) { illegalChildren = [] } diff --git a/packages/cli/package.json b/packages/cli/package.json index c1ba49c5e7..88d5926ae3 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -32,7 +32,7 @@ "pouchdb": "7.3.0", "pouchdb-replication-stream": "1.2.9", "randomstring": "1.1.5", - "tar": "6.1.15", + "tar": "6.2.1", "yaml": "^2.1.1" }, "devDependencies": { diff --git a/packages/client/manifest.json b/packages/client/manifest.json index 38e9bd8a87..00b503626f 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -11,6 +11,7 @@ "continueIfAction": true, "showNotificationAction": true, "sidePanel": true, + "modal": true, "skeletonLoader": true }, "typeSupportPresets": { @@ -6975,7 +6976,7 @@ "name": "Side Panel", "icon": "RailRight", "hasChildren": true, - "illegalChildren": ["section", "sidepanel"], + "illegalChildren": ["section", "sidepanel", "modal"], "showEmptyState": false, "draggable": false, "info": "Side panels are hidden by default. They will only be revealed when triggered by the 'Open Side Panel' action.", @@ -6993,6 +6994,52 @@ } ] }, + "modal": { + "name": "Modal", + "icon": "MBox", + "hasChildren": true, + "illegalChildren": ["section", "modal", "sidepanel"], + "showEmptyState": false, + "draggable": false, + "info": "Modals are hidden by default. 
They will only be revealed when triggered by the 'Open Modal' action.", + "settings": [ + { + "type": "boolean", + "key": "ignoreClicksOutside", + "label": "Ignore clicks outside", + "defaultValue": false + }, + { + "type": "event", + "key": "onClose", + "label": "On close" + }, + { + "type": "select", + "label": "Size", + "key": "size", + "defaultValue": "small", + "options": [ + { + "label": "Small", + "value": "small" + }, + { + "label": "Medium", + "value": "medium" + }, + { + "label": "Large", + "value": "large" + }, + { + "label": "Fullscreen", + "value": "fullscreen" + } + ] + } + ] + }, "rowexplorer": { "block": true, "name": "Row Explorer Block", diff --git a/packages/client/src/components/ClientApp.svelte b/packages/client/src/components/ClientApp.svelte index c1bdc92ac4..9bfb1192ea 100644 --- a/packages/client/src/components/ClientApp.svelte +++ b/packages/client/src/components/ClientApp.svelte @@ -20,6 +20,7 @@ devToolsEnabled, environmentStore, sidePanelStore, + modalStore, } from "stores" import NotificationDisplay from "components/overlay/NotificationDisplay.svelte" import ConfirmationDisplay from "components/overlay/ConfirmationDisplay.svelte" @@ -104,10 +105,15 @@ }) } const handleHashChange = () => { - const { open } = $sidePanelStore - if (open) { + const { open: sidePanelOpen } = $sidePanelStore + if (sidePanelOpen) { sidePanelStore.actions.close() } + + const { open: modalOpen } = $modalStore + if (modalOpen) { + modalStore.actions.close() + } } window.addEventListener("hashchange", handleHashChange) return () => { diff --git a/packages/client/src/components/app/Layout.svelte b/packages/client/src/components/app/Layout.svelte index 72da3e9012..af74e14aa0 100644 --- a/packages/client/src/components/app/Layout.svelte +++ b/packages/client/src/components/app/Layout.svelte @@ -12,6 +12,7 @@ linkable, builderStore, sidePanelStore, + modalStore, appStore, } = sdk const context = getContext("context") @@ -77,6 +78,7 @@ !$builderStore.inBuilder && $sidePanelStore.open && !$sidePanelStore.ignoreClicksOutside + $: screenId = $builderStore.inBuilder ? `${$builderStore.screen?._id}-screen` : "screen" @@ -198,6 +200,7 @@ const handleClickLink = () => { mobileOpen = false sidePanelStore.actions.close() + modalStore.actions.close() } diff --git a/packages/client/src/components/app/Link.svelte b/packages/client/src/components/app/Link.svelte index 7eddcc6fe5..1ddc63066d 100644 --- a/packages/client/src/components/app/Link.svelte +++ b/packages/client/src/components/app/Link.svelte @@ -1,7 +1,7 @@ + + +{#if !$builderStore.inBuilder || open} + +
+ + +
+
+{/if} + + diff --git a/packages/client/src/components/app/blocks/form/FormBlock.svelte b/packages/client/src/components/app/blocks/form/FormBlock.svelte index d249569731..e3aa20ffa6 100644 --- a/packages/client/src/components/app/blocks/form/FormBlock.svelte +++ b/packages/client/src/components/app/blocks/form/FormBlock.svelte @@ -31,41 +31,23 @@ let schema - $: formattedFields = convertOldFieldFormat(fields) - $: fieldsOrDefault = getDefaultFields(formattedFields, schema) $: fetchSchema(dataSource) $: id = $component.id - // We could simply spread $$props into the inner form and append our - // additions, but that would create svelte warnings about unused props and - // make maintenance in future more confusing as we typically always have a - // proper mapping of schema settings to component exports, without having to - // search multiple files - $: innerProps = { - dataSource, - actionUrl, - actionType, - size, - disabled, - fields: fieldsOrDefault, - title, - description, - schema, - notificationOverride, - buttons: - buttons || - Utils.buildFormBlockButtonConfig({ - _id: id, - showDeleteButton, - showSaveButton, - saveButtonLabel, - deleteButtonLabel, - notificationOverride, - actionType, - actionUrl, - dataSource, - }), - buttonPosition: buttons ? buttonPosition : "top", - } + $: formattedFields = convertOldFieldFormat(fields) + $: fieldsOrDefault = getDefaultFields(formattedFields, schema) + $: buttonsOrDefault = + buttons || + Utils.buildFormBlockButtonConfig({ + _id: id, + showDeleteButton, + showSaveButton, + saveButtonLabel, + deleteButtonLabel, + notificationOverride, + actionType, + actionUrl, + dataSource, + }) // Provide additional data context for live binding eval export const getAdditionalDataContext = () => { @@ -123,5 +105,18 @@ - + diff --git a/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte b/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte index b0733f3f4b..0227107dd2 100644 --- a/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte +++ b/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte @@ -91,15 +91,13 @@ {#if description} {/if} - {#key fields} - -
- {#each fields as field, idx} - - {/each} -
-
- {/key} + +
+ {#each fields as field, idx} + + {/each} +
+
{#if buttonPosition === "bottom"} {$confirmationStore.text} diff --git a/packages/client/src/components/preview/IndicatorSet.svelte b/packages/client/src/components/preview/IndicatorSet.svelte index 3cbd7e2464..2b941b2662 100644 --- a/packages/client/src/components/preview/IndicatorSet.svelte +++ b/packages/client/src/components/preview/IndicatorSet.svelte @@ -57,7 +57,9 @@ return } nextState.indicators[idx].visible = - nextState.indicators[idx].insideSidePanel || entries[0].isIntersecting + nextState.indicators[idx].insideModal || + nextState.indicators[idx].insideSidePanel || + entries[0].isIntersecting if (++callbackCount === observers.length) { state = nextState updating = false @@ -139,6 +141,7 @@ height: elBounds.height + 4, visible: false, insideSidePanel: !!child.closest(".side-panel"), + insideModal: !!child.closest(".modal-content"), }) }) } diff --git a/packages/client/src/sdk.js b/packages/client/src/sdk.js index 90e0f9c7dc..50d3f857d5 100644 --- a/packages/client/src/sdk.js +++ b/packages/client/src/sdk.js @@ -11,6 +11,7 @@ import { currentRole, environmentStore, sidePanelStore, + modalStore, dndIsDragging, confirmationStore, roleStore, @@ -53,6 +54,7 @@ export default { componentStore, environmentStore, sidePanelStore, + modalStore, dndIsDragging, currentRole, confirmationStore, diff --git a/packages/client/src/stores/confirmation.js b/packages/client/src/stores/confirmation.js index bb9a54386f..3fbf3d5deb 100644 --- a/packages/client/src/stores/confirmation.js +++ b/packages/client/src/stores/confirmation.js @@ -4,6 +4,8 @@ const initialState = { showConfirmation: false, title: null, text: null, + confirmButtonText: null, + cancelButtonText: null, onConfirm: null, onCancel: null, } @@ -11,11 +13,20 @@ const initialState = { const createConfirmationStore = () => { const store = writable(initialState) - const showConfirmation = (title, text, onConfirm, onCancel) => { + const showConfirmation = ( + title, + text, + onConfirm, + onCancel, + confirmButtonText, + cancelButtonText + ) => { store.set({ showConfirmation: true, title, text, + confirmButtonText, + cancelButtonText, onConfirm, onCancel, }) diff --git a/packages/client/src/stores/index.js b/packages/client/src/stores/index.js index e9b1ce4434..f2b80ed732 100644 --- a/packages/client/src/stores/index.js +++ b/packages/client/src/stores/index.js @@ -27,6 +27,7 @@ export { dndIsDragging, } from "./dnd" export { sidePanelStore } from "./sidePanel" +export { modalStore } from "./modal" export { hoverStore } from "./hover" // Context stores are layered and duplicated, so it is not a singleton diff --git a/packages/client/src/stores/modal.js b/packages/client/src/stores/modal.js new file mode 100644 index 0000000000..4d1331283d --- /dev/null +++ b/packages/client/src/stores/modal.js @@ -0,0 +1,32 @@ +import { writable } from "svelte/store" + +export const createModalStore = () => { + const initialState = { + contentId: null, + } + const store = writable(initialState) + + const open = id => { + store.update(state => { + state.contentId = id + return state + }) + } + + const close = () => { + store.update(state => { + state.contentId = null + return state + }) + } + + return { + subscribe: store.subscribe, + actions: { + open, + close, + }, + } +} + +export const modalStore = createModalStore() diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js index 482b36cdb8..8f0cb575a7 100644 --- a/packages/client/src/utils/buttonActions.js +++ b/packages/client/src/utils/buttonActions.js @@ 
-12,6 +12,7 @@ import { uploadStore, rowSelectionStore, sidePanelStore, + modalStore, } from "stores" import { API } from "api" import { ActionTypes } from "constants" @@ -436,6 +437,17 @@ const closeSidePanelHandler = () => { sidePanelStore.actions.close() } +const openModalHandler = action => { + const { id } = action.parameters + if (id) { + modalStore.actions.open(id) + } +} + +const closeModalHandler = () => { + modalStore.actions.close() +} + const downloadFileHandler = async action => { const { url, fileName } = action.parameters try { @@ -499,6 +511,8 @@ const handlerMap = { ["Prompt User"]: promptUserHandler, ["Open Side Panel"]: openSidePanelHandler, ["Close Side Panel"]: closeSidePanelHandler, + ["Open Modal"]: openModalHandler, + ["Close Modal"]: closeModalHandler, ["Download File"]: downloadFileHandler, } @@ -508,6 +522,7 @@ const confirmTextMap = { ["Execute Query"]: "Are you sure you want to execute this query?", ["Trigger Automation"]: "Are you sure you want to trigger this automation?", ["Prompt User"]: "Are you sure you want to continue?", + ["Duplicate Row"]: "Are you sure you want to duplicate this row?", } /** @@ -568,6 +583,11 @@ export const enrichButtonActions = (actions, context) => { const defaultTitleText = action["##eventHandlerType"] const customTitleText = action.parameters?.customTitleText || defaultTitleText + const cancelButtonText = + action.parameters?.cancelButtonText || "Cancel" + const confirmButtonText = + action.parameters?.confirmButtonText || "Confirm" + confirmationStore.actions.showConfirmation( customTitleText, confirmText, @@ -598,7 +618,9 @@ export const enrichButtonActions = (actions, context) => { }, () => { resolve(false) - } + }, + confirmButtonText, + cancelButtonText ) }) } diff --git a/packages/frontend-core/src/components/FilterBuilder.svelte b/packages/frontend-core/src/components/FilterBuilder.svelte index 0d254186f2..6d1e1fa502 100644 --- a/packages/frontend-core/src/components/FilterBuilder.svelte +++ b/packages/frontend-core/src/components/FilterBuilder.svelte @@ -18,7 +18,7 @@ import FilterUsers from "./FilterUsers.svelte" import { getFields } from "../utils/searchFields" - const { OperatorOptions } = Constants + const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants export let schemaFields export let filters = [] @@ -28,6 +28,23 @@ export let allowBindings = false export let filtersLabel = "Filters" + $: { + if ( + tables.find( + table => + table._id === datasource.tableId && + table.sourceId === DEFAULT_BB_DATASOURCE_ID + ) && + !schemaFields.some(field => field.name === "_id") + ) { + schemaFields = [ + ...schemaFields, + { name: "_id", type: "string" }, + { name: "_rev", type: "string" }, + ] + } + } + $: matchAny = filters?.find(filter => filter.operator === "allOr") != null $: onEmptyFilter = filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? 
"all" @@ -35,7 +52,6 @@ $: fieldFilters = filters.filter( filter => filter.operator !== "allOr" && !filter.onEmptyFilter ) - const behaviourOptions = [ { value: "and", label: "Match all filters" }, { value: "or", label: "Match any filter" }, @@ -44,7 +60,6 @@ { value: "all", label: "Return all table rows" }, { value: "none", label: "Return no rows" }, ] - const context = getContext("context") $: fieldOptions = getFields(tables, schemaFields || [], { diff --git a/packages/frontend-core/src/constants.js b/packages/frontend-core/src/constants.js index 0d6261f5f8..e5869a3b98 100644 --- a/packages/frontend-core/src/constants.js +++ b/packages/frontend-core/src/constants.js @@ -1,7 +1,11 @@ /** * Operator options for lucene queries */ -export { OperatorOptions, SqlNumberTypeRangeMap } from "@budibase/shared-core" +export { + OperatorOptions, + SqlNumberTypeRangeMap, + DEFAULT_BB_DATASOURCE_ID, +} from "@budibase/shared-core" export { Feature as Features } from "@budibase/types" import { BpmCorrelationKey } from "@budibase/shared-core" import { FieldType, BBReferenceFieldSubType } from "@budibase/types" diff --git a/packages/frontend-core/src/utils/utils.js b/packages/frontend-core/src/utils/utils.js index 65690cd535..1bee3d6c04 100644 --- a/packages/frontend-core/src/utils/utils.js +++ b/packages/frontend-core/src/utils/utils.js @@ -161,6 +161,9 @@ export const buildFormBlockButtonConfig = props => { { "##eventHandlerType": "Close Side Panel", }, + { + "##eventHandlerType": "Close Modal", + }, // Clear a create form once submitted ...(actionType !== "Create" ? [] diff --git a/packages/pro b/packages/pro index bf30f47a28..6c8d0174ca 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit bf30f47a28292d619cf0837f21d66790ff31c3a6 +Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a diff --git a/packages/server/package.json b/packages/server/package.json index 28b1e876c2..e146bd081c 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -109,7 +109,7 @@ "serialize-error": "^7.0.1", "server-destroy": "1.0.1", "snowflake-promise": "^4.5.0", - "socket.io": "4.6.1", + "socket.io": "4.6.2", "tar": "6.2.1", "to-json-schema": "0.2.5", "uuid": "^8.3.2", diff --git a/packages/server/scripts/integrations/mssql/data/Dockerfile b/packages/server/scripts/integrations/mssql/data/Dockerfile index b8c96e8419..c26af556a9 100644 --- a/packages/server/scripts/integrations/mssql/data/Dockerfile +++ b/packages/server/scripts/integrations/mssql/data/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/mssql/server:2017-latest +FROM mcr.microsoft.com/mssql/server:2022-latest ENV ACCEPT_EULA=Y ENV SA_PASSWORD=Passw0rd diff --git a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index b7ce1b7d5b..9624208deb 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 
'assembling', TRUE); -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); + +-- insert a lot of tasks for testing +WITH RECURSIVE generate_series AS ( + SELECT 1 AS n + UNION ALL + SELECT n + 1 FROM generate_series WHERE n < 6000 +), +random_data AS ( + SELECT + n, + (random() * 9 + 1)::int AS ExecutorID, + (random() * 9 + 1)::int AS QaID, + 'assembling' AS TaskName, + (random() < 0.5) AS Completed + FROM generate_series +) +INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) +SELECT ExecutorID, QaID, TaskName, Completed +FROM random_data; INSERT INTO Products (ProductName) VALUES ('Computers'); INSERT INTO Products (ProductName) VALUES ('Laptops'); INSERT INTO Products (ProductName) VALUES ('Chairs'); diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index b30c97e289..b51de46e99 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -7,6 +7,7 @@ import { FieldType, FilterType, IncludeRelationship, + isManyToOne, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, @@ -16,29 +17,33 @@ import { SortJson, SortType, Table, - isManyToOne, } from "@budibase/types" import { breakExternalTableId, breakRowIdField, convertRowId, + generateRowIdField, isRowId, isSQL, - generateRowIdField, } from "../../../integrations/utils" import { buildExternalRelationships, buildSqlFieldList, generateIdForRow, - sqlOutputProcessing, + isKnexEmptyReadResponse, isManyToMany, + sqlOutputProcessing, } from "./utils" -import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" +import { + getDatasourceAndQuery, + processRowCountResponse, +} from "../../../sdk/app/rows/utils" import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { db as dbCore } from "@budibase/backend-core" import sdk from "../../../sdk" import env from "../../../environment" +import { makeExternalQuery } from "../../../integrations/base/query" export interface ManyRelationship { tableId?: string @@ -60,91 +65,12 @@ export interface RunConfig { includeSqlRelationships?: IncludeRelationship } -function buildFilters( - id: string | undefined | string[], - filters: SearchFilters, - table: Table -) { - const primary = table.primary - // if passed in array need to copy for shifting etc - let idCopy: undefined | string | any[] = cloneDeep(id) - if (filters) { - // need to map over the filters and make sure the _id field isn't present - let prefix = 1 - for (let operator of Object.values(filters)) { - for (let 
field of Object.keys(operator || {})) { - if (dbCore.removeKeyNumbering(field) === "_id") { - if (primary) { - const parts = breakRowIdField(operator[field]) - for (let field of primary) { - operator[`${prefix}:${field}`] = parts.shift() - } - prefix++ - } - // make sure this field doesn't exist on any filter - delete operator[field] - } - } - } - } - // there is no id, just use the user provided filters - if (!idCopy || !table) { - return filters - } - // if used as URL parameter it will have been joined - if (!Array.isArray(idCopy)) { - idCopy = breakRowIdField(idCopy) - } - const equal: any = {} - if (primary && idCopy) { - for (let field of primary) { - // work through the ID and get the parts - equal[field] = idCopy.shift() - } - } - return { - equal, - } -} - -async function removeManyToManyRelationships( - rowId: string, - table: Table, - colName: string -) { - const tableId = table._id! - const filters = buildFilters(rowId, {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - return getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, Operation.DELETE), - body: { [colName]: null }, - filters, - meta: { - table, - }, - }) - } else { - return [] - } -} - -async function removeOneToManyRelationships(rowId: string, table: Table) { - const tableId = table._id! - const filters = buildFilters(rowId, {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - return getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, Operation.UPDATE), - filters, - meta: { - table, - }, - }) - } else { - return [] - } -} +export type ExternalRequestReturnType = + T extends Operation.READ + ? Row[] + : T extends Operation.COUNT + ? number + : { row: Row; table: Table } /** * This function checks the incoming parameters to make sure all the inputs are @@ -200,8 +126,8 @@ function getEndpoint(tableId: string | undefined, operation: string) { } const { datasourceId, tableName } = breakExternalTableId(tableId) return { - datasourceId: datasourceId!, - entityId: tableName!, + datasourceId: datasourceId, + entityId: tableName, operation: operation as Operation, } } @@ -223,14 +149,12 @@ function isEditableColumn(column: FieldSchema) { return !(isExternalAutoColumn || isFormula) } -export type ExternalRequestReturnType = - T extends Operation.READ ? 
Row[] : { row: Row; table: Table } - export class ExternalRequest { private readonly operation: T private readonly tableId: string private datasource?: Datasource private tables: { [key: string]: Table } = {} + private tableList: Table[] constructor(operation: T, tableId: string, datasource?: Datasource) { this.operation = operation @@ -239,22 +163,134 @@ export class ExternalRequest { if (datasource && datasource.entities) { this.tables = datasource.entities } + this.tableList = Object.values(this.tables) + } + + private prepareFilters( + id: string | undefined | string[], + filters: SearchFilters, + table: Table + ): SearchFilters { + // replace any relationship columns initially, table names and relationship column names are acceptable + const relationshipColumns = sdk.rows.filters.getRelationshipColumns(table) + filters = sdk.rows.filters.updateFilterKeys( + filters, + relationshipColumns.map(({ name, definition }) => { + const { tableName } = breakExternalTableId(definition.tableId) + return { + original: name, + updated: tableName, + } + }) + ) + const primary = table.primary + // if passed in array need to copy for shifting etc + let idCopy: undefined | string | any[] = cloneDeep(id) + if (filters) { + // need to map over the filters and make sure the _id field isn't present + let prefix = 1 + for (let operator of Object.values(filters)) { + for (let field of Object.keys(operator || {})) { + if (dbCore.removeKeyNumbering(field) === "_id") { + if (primary) { + const parts = breakRowIdField(operator[field]) + for (let field of primary) { + operator[`${prefix}:${field}`] = parts.shift() + } + prefix++ + } + // make sure this field doesn't exist on any filter + delete operator[field] + } + } + } + } + // there is no id, just use the user provided filters + if (!idCopy || !table) { + return filters + } + // if used as URL parameter it will have been joined + if (!Array.isArray(idCopy)) { + idCopy = breakRowIdField(idCopy) + } + const equal: SearchFilters["equal"] = {} + if (primary && idCopy) { + for (let field of primary) { + // work through the ID and get the parts + equal[field] = idCopy.shift() + } + } + return { + equal, + } + } + + private async removeManyToManyRelationships( + rowId: string, + table: Table, + colName: string + ) { + const tableId = table._id! + const filters = this.prepareFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.DELETE), + body: { [colName]: null }, + filters, + meta: { + table, + }, + }) + } else { + return [] + } + } + + private async removeOneToManyRelationships(rowId: string, table: Table) { + const tableId = table._id! 
+ const filters = this.prepareFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.UPDATE), + filters, + meta: { + table, + }, + }) + } else { + return [] + } } getTable(tableId: string | undefined): Table | undefined { if (!tableId) { - throw "Table ID is unknown, cannot find table" + throw new Error("Table ID is unknown, cannot find table") } const { tableName } = breakExternalTableId(tableId) - if (tableName) { - return this.tables[tableName] + return this.tables[tableName] + } + + // seeds the object with table and datasource information + async retrieveMetadata( + datasourceId: string + ): Promise<{ tables: Record; datasource: Datasource }> { + if (!this.datasource) { + this.datasource = await sdk.datasources.get(datasourceId) + if (!this.datasource || !this.datasource.entities) { + throw "No tables found, fetch tables before query." + } + this.tables = this.datasource.entities + this.tableList = Object.values(this.tables) } + return { tables: this.tables, datasource: this.datasource } } async getRow(table: Table, rowId: string): Promise { const response = await getDatasourceAndQuery({ endpoint: getEndpoint(table._id!, Operation.READ), - filters: buildFilters(rowId, {}, table), + filters: this.prepareFilters(rowId, {}, table), meta: { table, }, @@ -280,16 +316,20 @@ export class ExternalRequest { manyRelationships: ManyRelationship[] = [] for (let [key, field] of Object.entries(table.schema)) { // if set already, or not set just skip it - if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) { + if (row[key] === undefined || newRow[key]) { + continue + } + if ( + !(this.operation === Operation.BULK_UPSERT) && + !isEditableColumn(field) + ) { continue } // parse floats/numbers if (field.type === FieldType.NUMBER && !isNaN(parseFloat(row[key]))) { newRow[key] = parseFloat(row[key]) } else if (field.type === FieldType.LINK) { - const { tableName: linkTableName } = breakExternalTableId( - field?.tableId - ) + const { tableName: linkTableName } = breakExternalTableId(field.tableId) // table has to exist for many to many if (!linkTableName || !this.tables[linkTableName]) { continue @@ -370,9 +410,6 @@ export class ExternalRequest { [key: string]: { rows: Row[]; isMany: boolean; tableId: string } } = {} const { tableName } = breakExternalTableId(tableId) - if (!tableName) { - return related - } const table = this.tables[tableName] // @ts-ignore const primaryKey = table.primary[0] @@ -428,7 +465,9 @@ export class ExternalRequest { }) // this is the response from knex if no rows found const rows: Row[] = - !Array.isArray(response) || response?.[0].read ? [] : response + !Array.isArray(response) || isKnexEmptyReadResponse(response) + ? [] + : response const storeTo = isManyToMany(field) ? 
field.throughFrom || linkPrimaryKey : fieldName @@ -503,7 +542,7 @@ export class ExternalRequest { endpoint: getEndpoint(tableId, operation), // if we're doing many relationships then we're writing, only one response body, - filters: buildFilters(id, {}, linkTable), + filters: this.prepareFilters(id, {}, linkTable), meta: { table: linkTable, }, @@ -517,7 +556,7 @@ export class ExternalRequest { // finally cleanup anything that needs to be removed for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) { const table: Table | undefined = this.getTable(tableId) - // if its not the foreign key skip it, nothing to do + // if it's not the foreign key skip it, nothing to do if ( !table || (!isMany && table.primary && table.primary.indexOf(colName) !== -1) @@ -527,8 +566,8 @@ export class ExternalRequest { for (let row of rows) { const rowId = generateIdForRow(row, table) const promise: Promise = isMany - ? removeManyToManyRelationships(rowId, table, colName) - : removeOneToManyRelationships(rowId, table) + ? this.removeManyToManyRelationships(rowId, table, colName) + : this.removeOneToManyRelationships(rowId, table) if (promise) { promises.push(promise) } @@ -551,12 +590,12 @@ export class ExternalRequest { rows.map(row => { const rowId = generateIdForRow(row, table) return isMany - ? removeManyToManyRelationships( + ? this.removeManyToManyRelationships( rowId, table, relationshipColumn.fieldName ) - : removeOneToManyRelationships(rowId, table) + : this.removeOneToManyRelationships(rowId, table) }) ) } @@ -564,21 +603,21 @@ export class ExternalRequest { async run(config: RunConfig): Promise> { const { operation, tableId } = this - let { datasourceId, tableName } = breakExternalTableId(tableId) - if (!tableName) { - throw "Unable to run without a table name" + if (!tableId) { + throw new Error("Unable to run without a table ID") } - if (!this.datasource) { - this.datasource = await sdk.datasources.get(datasourceId!) - if (!this.datasource || !this.datasource.entities) { - throw "No tables found, fetch tables before query." 
- } - this.tables = this.datasource.entities + let { datasourceId, tableName } = breakExternalTableId(tableId) + let datasource = this.datasource + if (!datasource) { + const { datasource: ds } = await this.retrieveMetadata(datasourceId) + datasource = ds } const table = this.tables[tableName] - let isSql = isSQL(this.datasource) + let isSql = isSQL(datasource) if (!table) { - throw `Unable to process query, table "${tableName}" not defined.` + throw new Error( + `Unable to process query, table "${tableName}" not defined.` + ) } // look for specific components of config which may not be considered acceptable let { id, row, filters, sort, paginate, rows } = cleanupConfig( @@ -601,7 +640,7 @@ export class ExternalRequest { break } } - filters = buildFilters(id, filters || {}, table) + filters = this.prepareFilters(id, filters || {}, table) const relationships = buildExternalRelationships(table, this.tables) const incRelationships = @@ -649,10 +688,15 @@ export class ExternalRequest { body: row || rows, // pass an id filter into extra, purely for mysql/returning extra: { - idFilter: buildFilters(id || generateIdForRow(row, table), {}, table), + idFilter: this.prepareFilters( + id || generateIdForRow(row, table), + {}, + table + ), }, meta: { table, + id: config.id, }, } @@ -662,12 +706,14 @@ export class ExternalRequest { } // aliasing can be disabled fully if desired - let response - if (env.SQL_ALIASING_DISABLE) { - response = await getDatasourceAndQuery(json) - } else { - const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables)) - response = await aliasing.queryWithAliasing(json) + const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables)) + let response = env.SQL_ALIASING_DISABLE + ? await getDatasourceAndQuery(json) + : await aliasing.queryWithAliasing(json, makeExternalQuery) + + // if it's a counting operation there will be no more processing, just return the number + if (this.operation === Operation.COUNT) { + return processRowCountResponse(response) as ExternalRequestReturnType } const responseRows = Array.isArray(response) ? response : [] diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index d301155231..126b11d0c1 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -39,9 +39,10 @@ export async function handleRequest( export async function patch(ctx: UserCtx) { const tableId = utils.getTableId(ctx) - const { _id, ...rowData } = ctx.request.body + const { _id, ...rowData } = ctx.request.body const table = await sdk.tables.getTable(tableId) + const { row: dataToUpdate } = await inputProcessing( ctx.user?._id, cloneDeep(table), @@ -79,6 +80,7 @@ export async function patch(ctx: UserCtx) { ...response, row: enrichedRow, table, + oldRow: beforeRow, } } @@ -134,10 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { const id = ctx.params.rowId const tableId = utils.getTableId(ctx) const { datasourceId, tableName } = breakExternalTableId(tableId) - const datasource: Datasource = await sdk.datasources.get(datasourceId!) 
- if (!tableName) { - ctx.throw(400, "Unable to find table.") - } + const datasource: Datasource = await sdk.datasources.get(datasourceId) if (!datasource || !datasource.entities) { ctx.throw(400, "Datasource has not been configured for plus API.") } @@ -161,7 +160,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { } const links = row[fieldName] const linkedTableId = field.tableId - const linkedTableName = breakExternalTableId(linkedTableId).tableName! + const linkedTableName = breakExternalTableId(linkedTableId).tableName const linkedTable = tables[linkedTableName] // don't support composite keys right now const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0]) diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 945b7ca847..760b73f404 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -55,13 +55,13 @@ export async function patch( return save(ctx) } try { - const { row, table } = await pickApi(tableId).patch(ctx) + const { row, table, oldRow } = await pickApi(tableId).patch(ctx) if (!row) { ctx.throw(404, "Row not found") } ctx.status = 200 ctx.eventEmitter && - ctx.eventEmitter.emitRow(`row:update`, appId, row, table) + ctx.eventEmitter.emitRow(`row:update`, appId, row, table, oldRow) ctx.message = `${table.name} updated successfully.` ctx.body = row gridSocket?.emitRowUpdate(ctx, row) diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index cc903bd74a..54d9b6a536 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -85,13 +85,15 @@ export async function patch(ctx: UserCtx) { // the row has been updated, need to put it into the ctx ctx.request.body = row as any await userController.updateMetadata(ctx as any) - return { row: ctx.body as Row, table } + return { row: ctx.body as Row, table, oldRow } } - return finaliseRow(table, row, { + const result = await finaliseRow(table, row, { oldTable: dbTable, updateFormula: true, }) + + return { ...result, oldRow } } export async function find(ctx: UserCtx): Promise { diff --git a/packages/server/src/api/controllers/row/utils/basic.ts b/packages/server/src/api/controllers/row/utils/basic.ts index afb98d0255..bca2494ac3 100644 --- a/packages/server/src/api/controllers/row/utils/basic.ts +++ b/packages/server/src/api/controllers/row/utils/basic.ts @@ -99,7 +99,7 @@ export function basicProcessing({ row, tableName: table._id!, fieldName: internalColumn, - isLinked: false, + isLinked, }) } } diff --git a/packages/server/src/api/controllers/row/utils/sqlUtils.ts b/packages/server/src/api/controllers/row/utils/sqlUtils.ts index 372b8394ff..767916616c 100644 --- a/packages/server/src/api/controllers/row/utils/sqlUtils.ts +++ b/packages/server/src/api/controllers/row/utils/sqlUtils.ts @@ -1,5 +1,9 @@ import { + DatasourcePlusQueryResponse, + DSPlusOperation, FieldType, + isManyToOne, + isOneToMany, ManyToManyRelationshipFieldMetadata, RelationshipFieldMetadata, RelationshipsJson, @@ -91,12 +95,12 @@ export function buildExternalRelationships( ): RelationshipsJson[] { const relationships = [] for (let [fieldName, field] of Object.entries(table.schema)) { - if (field.type !== FieldType.LINK) { + if (field.type !== FieldType.LINK || !field.tableId) { continue } const { tableName: linkTableName } = breakExternalTableId(field.tableId) // no table to link to, this 
is not a valid relationship
-    if (!linkTableName || !tables[linkTableName]) {
+    if (!tables[linkTableName]) {
       continue
     }
     const linkTable = tables[linkTableName]
@@ -108,7 +112,7 @@
       // need to specify where to put this back into
       column: fieldName,
     }
-    if (isManyToMany(field)) {
+    if (isManyToMany(field) && field.through) {
       const { tableName: throughTableName } = breakExternalTableId(
         field.through
       )
@@ -118,7 +122,7 @@
       definition.to = field.throughFrom || linkTable.primary[0]
       definition.fromPrimary = table.primary[0]
       definition.toPrimary = linkTable.primary[0]
-    } else {
+    } else if (isManyToOne(field) || isOneToMany(field)) {
       // if no foreign key specified then use the name of the field in other table
       definition.from = field.foreignKey || table.primary[0]
       definition.to = field.fieldName
@@ -178,17 +182,27 @@
   }
   let fields = extractRealFields(table)
   for (let field of Object.values(table.schema)) {
-    if (field.type !== FieldType.LINK || !opts?.relationships) {
+    if (
+      field.type !== FieldType.LINK ||
+      !opts?.relationships ||
+      !field.tableId
+    ) {
       continue
     }
     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-    if (linkTableName) {
-      const linkTable = tables[linkTableName]
-      if (linkTable) {
-        const linkedFields = extractRealFields(linkTable, fields)
-        fields = fields.concat(linkedFields)
-      }
+    const linkTable = tables[linkTableName]
+    if (linkTable) {
+      const linkedFields = extractRealFields(linkTable, fields)
+      fields = fields.concat(linkedFields)
     }
   }
   return fields
 }
+
+export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
+  return (
+    !Array.isArray(resp) ||
+    resp.length === 0 ||
+    (DSPlusOperation.READ in resp[0] && resp[0].read === true)
+  )
+}
diff --git a/packages/server/src/api/controllers/row/utils/utils.ts b/packages/server/src/api/controllers/row/utils/utils.ts
index c2d62e0204..ae34034221 100644
--- a/packages/server/src/api/controllers/row/utils/utils.ts
+++ b/packages/server/src/api/controllers/row/utils/utils.ts
@@ -14,7 +14,7 @@ import {
   processDates,
   processFormulas,
 } from "../../../../utilities/rowProcessor"
-import { updateRelationshipColumns } from "./sqlUtils"
+import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
 import {
   basicProcessing,
   generateIdForRow,
@@ -137,7 +137,7 @@ export async function sqlOutputProcessing(
   relationships: RelationshipsJson[],
   opts?: { sqs?: boolean }
 ): Promise<Row[]> {
-  if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
+  if (isKnexEmptyReadResponse(rows)) {
     return []
   }
   let finalRows: { [key: string]: Row } = {}
diff --git a/packages/server/src/api/controllers/row/views.ts b/packages/server/src/api/controllers/row/views.ts
index 80aa97d8c0..63ce12f0ab 100644
--- a/packages/server/src/api/controllers/row/views.ts
+++ b/packages/server/src/api/controllers/row/views.ts
@@ -69,6 +69,7 @@ export async function searchView(
     limit: body.limit,
     bookmark: body.bookmark,
     paginate: body.paginate,
+    countRows: body.countRows,
   }
 
   const result = await sdk.rows.search(searchOptions)
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index bd674d7d38..c3356919c8 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -16,14 +16,18 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"
 
 function getDatasourceId(table: Table) {
   if (!table) {
-    throw "No table supplied"
+    throw new Error("No table supplied")
   }
   if (table.sourceId) {
     return table.sourceId
   }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
   return breakExternalTableId(table._id).datasourceId
 }
 
@@ -82,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema
 
+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on at the moment in Budibase
+    // is the primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+    )
+  }
+
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     ctx.throw(400, "Provided data import information is invalid.")
   }
 
   const parsedRows = []
-  for (const row of parse(rows, schema)) {
+  for (const row of parse(rows, table)) {
     const processed = await inputProcessing(ctx.user?._id, table, row, {
       noAutoRelationships: true,
     })
@@ -98,7 +117,7 @@
     table = processed.table
   }
 
-  await handleRequest(Operation.BULK_CREATE, table._id!, {
+  await handleRequest(Operation.BULK_UPSERT, table._id!, {
     rows: parsedRows,
   })
   await events.rows.imported(table, parsedRows.length)
diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts
index a42cfc43c3..0e9a32b294 100644
--- a/packages/server/src/api/controllers/table/utils.ts
+++ b/packages/server/src/api/controllers/table/utils.ts
@@ -178,7 +178,7 @@ export async function handleDataImport(
   }
 
   const db = context.getAppDB()
-  const data = parse(importRows, schema)
+  const data = parse(importRows, table)
 
   let finalData: any = await importToRows(data, table, user)
 
diff --git a/packages/server/src/api/routes/row.ts b/packages/server/src/api/routes/row.ts
index f1aa39a461..e443b2daeb 100644
--- a/packages/server/src/api/routes/row.ts
+++ b/packages/server/src/api/routes/row.ts
@@ -86,6 +86,7 @@ router
 router.post(
   "/api/v2/views/:viewId/search",
+  internalSearchValidator(),
   authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
   rowController.views.searchView
 )
diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts
index 711cfb8d4f..8cbd14d8b3 100644
--- a/packages/server/src/api/routes/tests/automation.spec.ts
+++ b/packages/server/src/api/routes/tests/automation.spec.ts
@@ -13,6 +13,7 @@ import { events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import { Automation } from "@budibase/types"
 import { mocks } from "@budibase/backend-core/tests"
+import { FilterConditions } from "../../../automations/steps/filter"
 
 const MAX_RETRIES = 4
 let {
@@ -21,6 +22,7 @@ let {
   automationTrigger,
   automationStep,
   collectAutomation,
+  filterAutomation,
 } = setup.structures
 
 describe("/automations", () => {
@@ -155,7 +157,12 @@ describe("/automations", () => {
     automation.appId = config.appId
     automation = await
config.createAutomation(automation) await setup.delay(500) - const res = await testAutomation(config, automation) + const res = await testAutomation(config, automation, { + row: { + name: "Test", + description: "TEST", + }, + }) expect(events.automation.tested).toHaveBeenCalledTimes(1) // this looks a bit mad but we don't actually have a way to wait for a response from the automation to // know that it has finished all of its actions - this is currently the best way @@ -436,4 +443,38 @@ describe("/automations", () => { expect(res).toEqual(true) }) }) + + describe("Update Row Old / New Row comparison", () => { + it.each([ + { oldCity: "asdsadsadsad", newCity: "new" }, + { oldCity: "Belfast", newCity: "Belfast" }, + ])( + "triggers an update row automation and compares new to old rows with old city '%s' and new city '%s'", + async ({ oldCity, newCity }) => { + const expectedResult = oldCity === newCity + + let table = await config.createTable() + + let automation = await filterAutomation() + automation.definition.trigger.inputs.tableId = table._id + automation.definition.steps[0].inputs = { + condition: FilterConditions.EQUAL, + field: "{{ trigger.row.City }}", + value: "{{ trigger.oldRow.City }}", + } + automation.appId = config.appId! + automation = await config.createAutomation(automation) + let triggerInputs = { + oldRow: { + City: oldCity, + }, + row: { + City: newCity, + }, + } + const res = await testAutomation(config, automation, triggerInputs) + expect(res.body.steps[1].outputs.result).toEqual(expectedResult) + } + ) + }) }) diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts index b060a099d8..e72a091688 100644 --- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts +++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts @@ -734,6 +734,7 @@ describe.each( name: entityId, schema: {}, type: "table", + primary: ["id"], sourceId: datasource._id!, sourceType: TableSourceType.EXTERNAL, }, diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index f822615a87..b6e3edf5ff 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1,6 +1,11 @@ -import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" +import { + DatabaseName, + getDatasource, + knexClient, +} from "../../../integrations/tests/utils" import tk from "timekeeper" +import emitter from "../../../../src/events" import { outputProcessing } from "../../../utilities/rowProcessor" import * as setup from "./utilities" import { context, InternalTable, tenancy } from "@budibase/backend-core" @@ -24,13 +29,38 @@ import { StaticQuotaName, Table, TableSourceType, + UpdatedRowEventEmitter, + TableSchema, } from "@budibase/types" import { generator, mocks } from "@budibase/backend-core/tests" import _, { merge } from "lodash" import * as uuid from "uuid" +import { Knex } from "knex" const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString() tk.freeze(timestamp) +interface WaitOptions { + name: string + matchFn?: (event: any) => boolean +} +async function waitForEvent( + opts: WaitOptions, + callback: () => Promise +): Promise { + const p = new Promise((resolve: any) => { + const listener = (event: any) => { + if (opts.matchFn && !opts.matchFn(event)) { + return + } + resolve(event) + emitter.off(opts.name, listener) + } + emitter.on(opts.name, listener) + }) + + await 
callback() + return await p +} describe.each([ ["internal", undefined], @@ -40,17 +70,21 @@ describe.each([ [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], ])("/rows (%s)", (providerType, dsProvider) => { const isInternal = dsProvider === undefined + const isMSSQL = providerType === DatabaseName.SQL_SERVER const config = setup.getConfig() let table: Table let datasource: Datasource | undefined + let client: Knex | undefined beforeAll(async () => { await config.init() if (dsProvider) { + const rawDatasource = await dsProvider datasource = await config.createDatasource({ - datasource: await dsProvider, + datasource: rawDatasource, }) + client = await knexClient(rawDatasource) } }) @@ -64,6 +98,23 @@ describe.each([ // the table name they're writing to. ...overrides: Partial>[] ): SaveTableRequest { + const defaultSchema: TableSchema = { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + } + + for (const override of overrides) { + if (override.primary) { + delete defaultSchema.id + } + } + const req: SaveTableRequest = { name: uuid.v4().substring(0, 10), type: "table", @@ -72,16 +123,7 @@ describe.each([ : TableSourceType.INTERNAL, sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, primary: ["id"], - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - }, + schema: defaultSchema, } return merge(req, ...overrides) } @@ -273,13 +315,13 @@ describe.each([ // as quickly as possible. await Promise.all( sequence.map(async () => { - const attempts = 20 + const attempts = 30 for (let attempt = 0; attempt < attempts; attempt++) { try { await config.api.row.save(table._id!, {}) return } catch (e) { - await new Promise(r => setTimeout(r, Math.random() * 15)) + await new Promise(r => setTimeout(r, Math.random() * 50)) } } throw new Error(`Failed to create row after ${attempts} attempts`) @@ -564,6 +606,35 @@ describe.each([ expect(res.name).toEqual("Updated Name") await assertRowUsage(rowUsage) }) + + !isInternal && + it("can update a row on an external table with a primary key", async () => { + const tableName = uuid.v4().substring(0, 10) + await client!.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = res.datasource.entities![tableName] + + const row = await config.api.row.save(table._id!, { + id: 1, + name: "Row 1", + }) + + const updatedRow = await config.api.row.save(table._id!, { + _id: row._id!, + name: "Row 1 Updated", + }) + + expect(updatedRow.name).toEqual("Row 1 Updated") + + const rows = await config.api.row.fetch(table._id!) 
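The concurrent-save sequence above retries each write with a random sleep so that competing writers back off from one another; the attempt count and jitter window were widened here, presumably to reduce flakiness under contention. Distilled into a reusable helper (hypothetical, for illustration only):

async function retryWithJitter<T>(
  action: () => Promise<T>,
  attempts = 30,
  maxJitterMs = 50
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await action()
    } catch (e) {
      lastError = e
      // random jitter spreads the retries of concurrent callers apart
      await new Promise(r => setTimeout(r, Math.random() * maxJitterMs))
    }
  }
  throw lastError
}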
+ expect(rows).toHaveLength(1) + }) }) describe("patch", () => { @@ -608,6 +679,32 @@ describe.each([ await assertRowUsage(rowUsage) }) + it("should update only the fields that are supplied and emit the correct oldRow", async () => { + let beforeRow = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + const opts = { + name: "row:update", + matchFn: (event: UpdatedRowEventEmitter) => + event.row._id === beforeRow._id, + } + const event = await waitForEvent(opts, async () => { + await config.api.row.patch(table._id!, { + _id: beforeRow._id!, + _rev: beforeRow._rev!, + tableId: table._id!, + name: "Updated Name", + }) + }) + + expect(event.oldRow).toBeDefined() + expect(event.oldRow.name).toEqual("test") + expect(event.row.name).toEqual("Updated Name") + expect(event.oldRow.description).toEqual(beforeRow.description) + expect(event.row.description).toEqual(beforeRow.description) + }) + it("should throw an error when given improper types", async () => { const existing = await config.api.row.save(table._id!, {}) const rowUsage = await getRowUsage() @@ -699,7 +796,8 @@ describe.each([ }) !isInternal && - // TODO: SQL is having issues creating composite keys + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. providerType !== DatabaseName.SQL_SERVER && it("should support updating fields that are part of a composite key", async () => { const tableRequest = saveTableRequest({ @@ -852,32 +950,21 @@ describe.each([ await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) }) - it("Should ignore malformed/invalid delete requests", async () => { - const rowUsage = await getRowUsage() + it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( + "Should ignore malformed/invalid delete request: %s", + async (request: any) => { + const rowUsage = await getRowUsage() - await config.api.row.delete(table._id!, { not: "valid" } as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) + await config.api.row.delete(table._id!, request, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) - await config.api.row.delete(table._id!, { rows: 123 } as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await config.api.row.delete(table._id!, "invalid" as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await assertRowUsage(rowUsage) - }) + await assertRowUsage(rowUsage) + } + ) }) describe("bulkImport", () => { @@ -911,6 +998,236 @@ describe.each([ row = await config.api.row.save(table._id!, {}) expect(row.autoId).toEqual(3) }) + + it("should be able to bulkImport rows", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
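The oldRow assertion above rests on the waitForEvent helper defined earlier in this file: a one-shot emitter listener whose matchFn screens out events fired for unrelated rows, unsubscribing itself once it resolves. A hedged usage sketch, where targetRow is an illustrative row saved beforehand:

// hypothetical: block until the update event for one specific row arrives
const updateEvent = await waitForEvent(
  {
    name: "row:update",
    matchFn: (event: any) => event.row._id === targetRow._id,
  },
  async () => {
    await config.api.row.patch(table._id!, {
      _id: targetRow._id!,
      _rev: targetRow._rev!,
      tableId: table._id!,
      name: "Updated Name",
    })
  }
)
expect(updateEvent.oldRow).toBeDefined()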
+ expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + + await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) + }) + + // Upserting isn't yet supported in MSSQL, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + it("should be able to update existing rows with bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["userId"], + schema: { + userId: { + type: FieldType.NUMBER, + name: "userId", + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const row1 = await config.api.row.save(table._id!, { + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isInternal && + it("should be able to update existing rows with composite primary keys with bulkImport", async () => { + const tableName = uuid.v4() + await client?.schema.createTable(tableName, table => { + table.integer("companyId") + table.integer("userId") + table.string("name") + table.string("description") + table.primary(["companyId", "userId"]) + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["companyId", "userId"], + rows: [ + { + companyId: 1, + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + companyId: 1, + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + companyId: 1, + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
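These update-via-import cases rely on database upsert semantics: ON CONFLICT only fires against a unique index, which is why identifierFields has to line up with the table's primary key. A minimal sketch of the statement shape assumed for Postgres, MySQL and SQLite, with table and column names illustrative (MSSQL is excluded above because knex has no upsert support for it yet):

import { Knex } from "knex"

async function upsertRowsSketch(
  client: Knex,
  tableName: string,
  primary: string[],
  rows: Record<string, any>[]
) {
  // insert every row; on a primary-key conflict, update the other columns
  return client(tableName).insert(rows).onConflict(primary).merge()
}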
+        expect(rows.length).toEqual(3)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Row 1 updated")
+        expect(rows[0].description).toEqual("Row 1 description updated")
+        expect(rows[1].name).toEqual("Row 2 updated")
+        expect(rows[1].description).toEqual("Row 2 description updated")
+        expect(rows[2].name).toEqual("Row 3")
+        expect(rows[2].description).toEqual("Row 3 description")
+      })
+
+    // Upserting isn't yet supported in MSSQL, see:
+    // https://github.com/knex/knex/pull/6050
+    !isMSSQL &&
+      !isInternal &&
+      it("should be able to update existing rows with an autoID primary key", async () => {
+        const tableName = uuid.v4()
+        await client!.schema.createTable(tableName, table => {
+          table.increments("userId").primary()
+          table.string("name")
+        })
+
+        const resp = await config.api.datasource.fetchSchema({
+          datasourceId: datasource!._id!,
+        })
+        const table = resp.datasource.entities![tableName]
+
+        const row1 = await config.api.row.save(table._id!, {
+          name: "Clare",
+        })
+
+        const row2 = await config.api.row.save(table._id!, {
+          name: "Jeff",
+        })
+
+        await config.api.row.bulkImport(table._id!, {
+          identifierFields: ["userId"],
+          rows: [
+            {
+              userId: row1.userId,
+              name: "Clare updated",
+            },
+            {
+              userId: row2.userId,
+              name: "Jeff updated",
+            },
+          ],
+        })
+
+        const rows = await config.api.row.fetch(table._id!)
+        expect(rows.length).toEqual(2)
+
+        rows.sort((a, b) => a.name.localeCompare(b.name))
+        expect(rows[0].name).toEqual("Clare updated")
+        expect(rows[1].name).toEqual("Jeff updated")
+      })
   })
 
   describe("enrich", () => {
diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts
index f651908c01..589f129f31 100644
--- a/packages/server/src/api/routes/tests/search.spec.ts
+++ b/packages/server/src/api/routes/tests/search.spec.ts
@@ -1,5 +1,9 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import {
+  DatabaseName,
+  getDatasource,
+  knexClient,
+} from "../../../integrations/tests/utils"
 
 import { db as dbCore, utils } from "@budibase/backend-core"
 import * as setup from "./utilities"
@@ -18,11 +22,14 @@ import {
   User,
   Row,
   RelationshipType,
+  SearchResponse,
 } from "@budibase/types"
 import _ from "lodash"
 import tk from "timekeeper"
 import { encodeJSBinding } from "@budibase/string-templates"
 import { dataFilters } from "@budibase/shared-core"
+import { Knex } from "knex"
+import { structures } from "@budibase/backend-core/tests"
 
 describe.each([
   ["in-memory", undefined],
@@ -41,6 +48,7 @@ describe.each([
   let envCleanup: (() => void) | undefined
   let datasource: Datasource | undefined
+  let client: Knex | undefined
   let table: Table
   let rows: Row[]
@@ -62,8 +70,10 @@ describe.each([
     }
 
     if (dsProvider) {
+      const rawDatasource = await dsProvider
+      client = await knexClient(rawDatasource)
       datasource = await config.createDatasource({
-        datasource: await dsProvider,
+        datasource: rawDatasource,
       })
     }
   })
@@ -75,9 +85,9 @@ describe.each([
     }
   })
 
-  async function createTable(schema: TableSchema) {
+  async function createTable(schema: TableSchema, name?: string) {
     return await config.api.table.save(
-      tableForDatasource(datasource, { schema })
+      tableForDatasource(datasource, { schema, name })
     )
   }
@@ -92,16 +102,14 @@ describe.each([
   class SearchAssertion {
     constructor(private readonly query: RowSearchParams) {}
 
-    private async performSearch(): Promise<Row[]> {
+    private async performSearch(): Promise<SearchResponse<Row>> {
       if
(isInMemory) { return dataFilters.search(_.cloneDeep(rows), this.query) } else { - return ( - await config.api.row.search(table._id!, { - ...this.query, - tableId: table._id!, - }) - ).rows + return config.api.row.search(table._id!, { + ...this.query, + tableId: table._id!, + }) } } @@ -175,7 +183,7 @@ describe.each([ // different to the one passed in will cause the assertion to fail. Extra // rows returned by the query will also cause the assertion to fail. async toMatchExactly(expectedRows: any[]) { - const foundRows = await this.performSearch() + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -191,7 +199,7 @@ describe.each([ // passed in. The order of the rows is not important, but extra rows will // cause the assertion to fail. async toContainExactly(expectedRows: any[]) { - const foundRows = await this.performSearch() + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -205,11 +213,36 @@ describe.each([ ) } + // Asserts that the query returns some property values - this cannot be used + // to check row values, however this shouldn't be important for checking properties + // typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...) + async toMatch(properties: Record) { + const response = await this.performSearch() + const keys = Object.keys(properties) as Array> + for (let key of keys) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toBeDefined() + if (properties[key]) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toEqual(properties[key]) + } + } + } + + // Asserts that the query doesn't return a property, e.g. pagination parameters. + async toNotHaveProperty(properties: (keyof SearchResponse)[]) { + const response = await this.performSearch() + for (let property of properties) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[property]).toBeUndefined() + } + } + // Asserts that the query returns rows matching the set of rows passed in. // The order of the rows is not important. Extra rows will not cause the // assertion to fail. 
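Together with toMatchExactly and toContainExactly above, the toContain assertion defined next completes a ladder of decreasing strictness. An illustrative comparison, to be read as lines inside an async test body:

// exact rows, exact order
await expectQuery({ equal: { name: "foo" } }).toMatchExactly([{ name: "foo" }])
// exact rows, any order
await expectQuery({ equal: { name: "foo" } }).toContainExactly([{ name: "foo" }])
// at least these rows, in any order - extras don't fail the assertion
await expectQuery({}).toContain([{ name: "foo" }])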
async toContain(expectedRows: any[]) { - const foundRows = await this.performSearch() + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect([...foundRows]).toEqual( @@ -226,7 +259,7 @@ describe.each([ } async toHaveLength(length: number) { - const foundRows = await this.performSearch() + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(length) @@ -250,55 +283,63 @@ describe.each([ }) describe("equal", () => { - it("successfully finds true row", () => - expectQuery({ equal: { isTrue: true } }).toMatchExactly([ + it("successfully finds true row", async () => { + await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ { isTrue: true }, - ])) + ]) + }) - it("successfully finds false row", () => - expectQuery({ equal: { isTrue: false } }).toMatchExactly([ + it("successfully finds false row", async () => { + await expectQuery({ equal: { isTrue: false } }).toMatchExactly([ { isTrue: false }, - ])) + ]) + }) }) describe("notEqual", () => { - it("successfully finds false row", () => - expectQuery({ notEqual: { isTrue: true } }).toContainExactly([ + it("successfully finds false row", async () => { + await expectQuery({ notEqual: { isTrue: true } }).toContainExactly([ { isTrue: false }, - ])) + ]) + }) - it("successfully finds true row", () => - expectQuery({ notEqual: { isTrue: false } }).toContainExactly([ + it("successfully finds true row", async () => { + await expectQuery({ notEqual: { isTrue: false } }).toContainExactly([ { isTrue: true }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds true row", () => - expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([ + it("successfully finds true row", async () => { + await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([ { isTrue: true }, - ])) + ]) + }) - it("successfully finds false row", () => - expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([ + it("successfully finds false row", async () => { + await expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([ { isTrue: false }, - ])) + ]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "isTrue", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ isTrue: false }, { isTrue: true }])) + }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "isTrue", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ isTrue: true }, { isTrue: false }])) + }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) + }) }) }) @@ -652,192 +693,269 @@ describe.each([ }) describe("misc", () => { - it("should return all if no query is passed", () => - expectSearch({} as RowSearchParams).toContainExactly([ + it("should return all if no query is passed", async () => { + await expectSearch({} as RowSearchParams).toContainExactly([ { name: "foo" }, { name: "bar" }, - ])) + ]) + }) - it("should return all if empty query is passed", () => - expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }])) + it("should return all if empty query is passed", async () => { + await expectQuery({}).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) - it("should return all if onEmptyFilter is RETURN_ALL", () => - expectQuery({ + it("should return all 
if onEmptyFilter is RETURN_ALL", async () => { + await expectQuery({ onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) - it("should return nothing if onEmptyFilter is RETURN_NONE", () => - expectQuery({ + it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { + await expectQuery({ onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toFindNothing()) + }).toFindNothing() + }) - it("should respect limit", () => - expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1)) + it("should respect limit", async () => { + await expectSearch({ + limit: 1, + paginate: true, + query: {}, + }).toHaveLength(1) + }) }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { name: "foo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { name: "foo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { name: "none" } }).toFindNothing() + }) - it("works as an or condition", () => - expectQuery({ + it("works as an or condition", async () => { + await expectQuery({ allOr: true, equal: { name: "foo" }, oneOf: { name: ["bar"] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) - it("can have multiple values for same column", () => - expectQuery({ + it("can have multiple values for same column", async () => { + await expectQuery({ allOr: true, equal: { "1:name": "foo", "2:name": "bar" }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { name: "foo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { name: "foo" } }).toContainExactly([ { name: "bar" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { name: "bar" } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { name: "bar" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing() + }) }) describe("fuzzy", () => { - it("successfully finds a row", () => - expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ fuzzy: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() + }) }) describe("string", () => { - it("successfully finds a row", () => - expectQuery({ string: { 
name: "fo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ string: { name: "fo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ string: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ string: { name: "none" } }).toFindNothing() + }) - it("is case-insensitive", () => - expectQuery({ string: { name: "FO" } }).toContainExactly([ + it("is case-insensitive", async () => { + await expectQuery({ string: { name: "FO" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) }) describe("range", () => { - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { name: { low: "a", high: "z" } }, - }).toContainExactly([{ name: "bar" }, { name: "foo" }])) + }).toContainExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { name: { low: "a", high: "c" } }, - }).toContainExactly([{ name: "bar" }])) + }).toContainExactly([{ name: "bar" }]) + }) - it("successfully finds a row with a low bound", () => - expectQuery({ + it("successfully finds a row with a low bound", async () => { + await expectQuery({ range: { name: { low: "f", high: "z" } }, - }).toContainExactly([{ name: "foo" }])) + }).toContainExactly([{ name: "foo" }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { name: { low: "g", high: "h" } }, - }).toFindNothing()) + }).toFindNothing() + }) !isLucene && - it("ignores low if it's an empty object", () => - expectQuery({ + it("ignores low if it's an empty object", async () => { + await expectQuery({ // @ts-ignore range: { name: { low: {}, high: "z" } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) !isLucene && - it("ignores high if it's an empty object", () => - expectQuery({ + it("ignores high if it's an empty object", async () => { + await expectQuery({ // @ts-ignore range: { name: { low: "a", high: {} } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("empty", () => { - it("finds no empty rows", () => - expectQuery({ empty: { name: null } }).toFindNothing()) + it("finds no empty rows", async () => { + await expectQuery({ empty: { name: null } }).toFindNothing() + }) - it("should not be affected by when filter empty behaviour", () => - expectQuery({ + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ empty: { name: null }, onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toFindNothing()) + }).toFindNothing() + }) }) describe("notEmpty", () => { - it("finds all non-empty rows", () => - expectQuery({ notEmpty: { name: null } }).toContainExactly([ + it("finds all non-empty rows", async () => { + await expectQuery({ notEmpty: { name: null } }).toContainExactly([ { name: "foo" }, { name: "bar" }, - ])) + ]) + }) - it("should not be affected by when filter empty behaviour", () => - expectQuery({ + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ notEmpty: { name: null }, onEmptyFilter: EmptyFilterOption.RETURN_NONE, - 
}).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "name", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "name", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "name", sortType: SortType.STRING, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "name", sortType: SortType.STRING, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) }) + + !isInternal && + !isInMemory && + // This test was added because we automatically add in a sort by the + // primary key, and we used to do this unconditionally which caused + // problems because it was possible for the primary key to appear twice + // in the resulting SQL ORDER BY clause, resulting in an SQL error. + // We now check first to make sure that the primary key isn't already + // in the sort before adding it. 
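Concretely: if the caller already sorts by the primary key, appending the implicit primary-key sort again could put the same column into the ORDER BY clause twice, which some engines reject. A hedged sketch of the guard described above, with all names assumed for illustration:

import { Knex } from "knex"

function withDeterministicOrder(
  query: Knex.QueryBuilder,
  sort: Record<string, unknown> | undefined,
  primaryKey: string,
  aliasedTable: string
): Knex.QueryBuilder {
  // only add the implicit primary-key sort when it isn't already present,
  // keeping results deterministic without duplicating the ORDER BY column
  if (!sort || sort[primaryKey] === undefined) {
    query = query.orderBy(`${aliasedTable}.${primaryKey}`)
  }
  return query
}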
+ describe("sort on primary key", () => { + beforeAll(async () => { + const tableName = structures.uuid().substring(0, 10) + await client!.schema.createTable(tableName, t => { + t.string("name").primary() + }) + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + + table = resp.datasource.entities![tableName] + + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + it("should be able to sort by a primary key column ascending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + + it("should be able to sort by a primary key column descending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }) }) }) @@ -850,97 +968,119 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { age: 2 } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { age: 2 } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }])) + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ + { age: 10 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }])) + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { age: 10 } }).toContainExactly([ + { age: 1 }, + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ + { age: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { age: [2] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { age: [2] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { age: { low: 1, high: 5 } }, - }).toContainExactly([{ age: 1 }])) + }).toContainExactly([{ age: 1 }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { age: { low: 1, high: 10 } }, - }).toContainExactly([{ age: 1 }, { age: 10 }])) + }).toContainExactly([{ age: 1 }, { age: 10 }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { age: { low: 5, high: 10 } }, - }).toContainExactly([{ age: 10 }])) + }).toContainExactly([{ age: 10 }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { age: { low: 5, high: 9 } }, - }).toFindNothing()) + }).toFindNothing() + }) // We never implemented half-open ranges in Lucene. 
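A half-open range supplies only one bound. A sketch of the SQL these filters are assumed to produce on the engines that do support them, with bounds inclusive, as the expectations in the surrounding tests imply:

import { Knex } from "knex"

// { range: { age: { low: 5 } } }           ->  WHERE "age" >= 5
// { range: { age: { high: 5 } } }          ->  WHERE "age" <= 5
// { range: { age: { low: 1, high: 5 } } }  ->  WHERE "age" >= 1 AND "age" <= 5
function applyRangeSketch(
  query: Knex.QueryBuilder,
  column: string,
  range: { low?: any; high?: any }
): Knex.QueryBuilder {
  if (range.low !== undefined) {
    query = query.where(column, ">=", range.low)
  }
  if (range.high !== undefined) {
    query = query.where(column, "<=", range.high)
  }
  return query
}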
!isLucene && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { age: { low: 5 } }, - }).toContainExactly([{ age: 10 }])) + }).toContainExactly([{ age: 10 }]) + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { age: { high: 5 } }, - }).toContainExactly([{ age: 1 }])) + }).toContainExactly([{ age: 1 }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "age", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }])) + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "age", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }])) + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) }) describe("sortType NUMBER", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "age", sortType: SortType.NUMBER, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }])) + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "age", sortType: SortType.NUMBER, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }])) + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) }) }) @@ -960,104 +1100,120 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([ { dob: JAN_10TH }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { dob: { 
low: JAN_1ST, high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }])) + }).toContainExactly([{ dob: JAN_1ST }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { dob: { low: JAN_1ST, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_10TH }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH, high: JAN_9TH } }, - }).toFindNothing()) + }).toFindNothing() + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_10TH }]) + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { dob: { high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }])) + }).toContainExactly([{ dob: JAN_1ST }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "dob", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "dob", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])) + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "dob", sortType: SortType.STRING, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "dob", sortType: SortType.STRING, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])) + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) }) }) }) @@ -1091,72 +1247,85 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { time: T_1000 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { time: T_1000 } }).toContainExactly([ { time: "10:00:00" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { time: UNEXISTING_TIME } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { time: UNEXISTING_TIME }, + }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a 
row", () => - expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([ { timeid: NULL_TIME__ID }, { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, { time: "00:00:00" }, - ])) + ]) + }) - it("return all when requesting non-existing", () => - expectQuery({ notEqual: { time: UNEXISTING_TIME } }).toContainExactly( - [ - { timeid: NULL_TIME__ID }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ] - )) + it("return all when requesting non-existing", async () => { + await expectQuery({ + notEqual: { time: UNEXISTING_TIME }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([ { time: "10:00:00" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { time: [UNEXISTING_TIME] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { time: [UNEXISTING_TIME] }, + }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { time: { low: T_1045, high: T_1045 } }, - }).toContainExactly([{ time: "10:45:00" }])) + }).toContainExactly([{ time: "10:45:00" }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { time: { low: T_1045, high: T_1530 } }, }).toContainExactly([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME } }, - }).toFindNothing()) + }).toFindNothing() + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "time", sortOrder: SortOrder.ASCENDING, @@ -1167,10 +1336,11 @@ describe.each([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "time", sortOrder: SortOrder.DESCENDING, @@ -1181,11 +1351,12 @@ describe.each([ { time: "10:00:00" }, { time: "00:00:00" }, { timeid: NULL_TIME__ID }, - ])) + ]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "time", sortType: SortType.STRING, @@ -1197,10 +1368,11 @@ describe.each([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "time", sortType: SortType.STRING, @@ -1212,7 +1384,8 @@ describe.each([ { time: "10:00:00" }, { time: "00:00:00" }, { timeid: NULL_TIME__ID }, - ])) + ]) + }) }) }) }) @@ -1230,66 +1403,78 @@ 
describe.each([ }) describe("contains", () => { - it("successfully finds a row", () => - expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([ { numbers: ["one", "two"] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ contains: { numbers: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ contains: { numbers: ["none"] } }).toFindNothing() + }) - it("fails to find row containing all", () => - expectQuery({ + it("fails to find row containing all", async () => { + await expectQuery({ contains: { numbers: ["one", "two", "three"] }, - }).toFindNothing()) + }).toFindNothing() + }) - it("finds all with empty list", () => - expectQuery({ contains: { numbers: [] } }).toContainExactly([ + it("finds all with empty list", async () => { + await expectQuery({ contains: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) describe("notContains", () => { - it("successfully finds a row", () => - expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([ - { numbers: ["three"] }, - ])) + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["three"] }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ + it("fails to find nonexistent row", async () => { + await expectQuery({ notContains: { numbers: ["one", "two", "three"] }, }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) // Not sure if this is correct behaviour but changing it would be a // breaking change. 
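The behaviour being pinned down here: an empty filter array is treated as no constraint at all, so it matches every row, even under negation. Distilled as an in-memory sketch whose semantics are derived from the expectations in these tests:

function arrayFilterMatchesSketch(
  rowValues: string[],
  filterValues: string[],
  negate: boolean
): boolean {
  if (filterValues.length === 0) {
    // an empty list applies no constraint - it matches even when negated
    return true
  }
  const containsAll = filterValues.every(v => rowValues.includes(v))
  return negate ? !containsAll : containsAll
}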
- it("finds all with empty list", () => - expectQuery({ notContains: { numbers: [] } }).toContainExactly([ + it("finds all with empty list", async () => { + await expectQuery({ notContains: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) describe("containsAny", () => { - it("successfully finds rows", () => - expectQuery({ + it("successfully finds rows", async () => { + await expectQuery({ containsAny: { numbers: ["one", "two", "three"] }, }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { numbers: ["none"] }, + }).toFindNothing() + }) - it("finds all with empty list", () => - expectQuery({ containsAny: { numbers: [] } }).toContainExactly([ + it("finds all with empty list", async () => { + await expectQuery({ containsAny: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) }) @@ -1308,48 +1493,56 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { num: SMALL } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { num: SMALL } }).toContainExactly([ { num: SMALL }, - ])) + ]) + }) - it("successfully finds a big value", () => - expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }])) + it("successfully finds a big value", async () => { + await expectQuery({ equal: { num: BIG } }).toContainExactly([ + { num: BIG }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { num: "2" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { num: "2" } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ { num: MEDIUM }, { num: BIG }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { num: 10 } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { num: 10 } }).toContainExactly([ { num: SMALL }, { num: MEDIUM }, { num: BIG }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ { num: SMALL }, - ])) + ]) + }) - it("successfully finds all rows", () => - expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ])) + it("successfully finds all rows", async () => { + await expectQuery({ + oneOf: { num: [SMALL, MEDIUM, BIG] }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }, { num: BIG }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { num: [2] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { num: [2] } }).toFindNothing() + }) }) // Range searches against bigints don't seem to work at all in Lucene, and I @@ -1357,35 +1550,41 @@ describe.each([ // we've decided not to spend time on it. 
!isLucene && describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { num: { low: SMALL, high: "5" } }, - }).toContainExactly([{ num: SMALL }])) + }).toContainExactly([{ num: SMALL }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { num: { low: SMALL, high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { num: { low: MEDIUM, high: BIG } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { num: { low: "5", high: "5" } }, - }).toFindNothing()) + }).toFindNothing() + }) - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { num: { low: MEDIUM } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { num: { high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) }) }) @@ -1404,16 +1603,20 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { auto: 1 } }).toContainExactly([{ auto: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ equal: { auto: 1 } }).toContainExactly([ + { auto: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { auto: 0 } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { auto: 0 } }).toFindNothing() + }) }) describe("not equal", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ { auto: 2 }, { auto: 3 }, { auto: 4 }, @@ -1423,10 +1626,11 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ { auto: 1 }, { auto: 2 }, { auto: 3 }, @@ -1437,55 +1641,66 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { auto: [1] } }).toContainExactly([{ auto: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { auto: [1] } }).toContainExactly([ + { auto: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { auto: [0] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a 
row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { auto: { low: 1, high: 1 } }, - }).toContainExactly([{ auto: 1 }])) + }).toContainExactly([{ auto: 1 }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { auto: { low: 1, high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }])) + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { auto: { low: 2, high: 2 } }, - }).toContainExactly([{ auto: 2 }])) + }).toContainExactly([{ auto: 2 }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { auto: { low: 0, high: 0 } }, - }).toFindNothing()) + }).toFindNothing() + }) isSqs && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { auto: { low: 9 } }, - }).toContainExactly([{ auto: 9 }, { auto: 10 }])) + }).toContainExactly([{ auto: 9 }, { auto: 10 }]) + }) isSqs && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { auto: { high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }])) + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) }) isSqs && describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "auto", sortOrder: SortOrder.ASCENDING, @@ -1500,10 +1715,11 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "auto", sortOrder: SortOrder.DESCENDING, @@ -1518,25 +1734,39 @@ describe.each([ { auto: 3 }, { auto: 2 }, { auto: 1 }, - ])) + ]) + }) // This is important for pagination. The order of results must always // be stable or pagination will break. We don't want the user to need // to specify an order for pagination to work. 
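The invariant in the comment above is easy to state outside the test suite. A minimal sketch of it (the `RowLike` type and `withStableOrder` helper are illustrative names, not code from this PR):

```ts
// If two searches can return equal-rank rows in different orders, bookmark
// pagination will skip or duplicate rows. Appending a unique column (the
// primary key here) as a final tie-breaker makes the order deterministic.
type RowLike = { id: number; name: string }

function withStableOrder(compareByName: (a: RowLike, b: RowLike) => number) {
  return (a: RowLike, b: RowLike) => compareByName(a, b) || a.id - b.id
}
```

The test that follows exercises exactly this: it pages through the whole table one row at a time and asserts the sequence never shifts between requests.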
it("is stable without a sort specified", async () => { - let { rows } = await config.api.row.search(table._id!, { - tableId: table._id!, - query: {}, - }) + let { rows: fullRowList } = await config.api.row.search( + table._id!, + { + tableId: table._id!, + query: {}, + } + ) - for (let i = 0; i < 10; i++) { + // repeat the search many times to check the first row is always the same + let bookmark: string | number | undefined, + hasNextPage: boolean | undefined = true, + rowCount: number = 0 + do { const response = await config.api.row.search(table._id!, { tableId: table._id!, limit: 1, + paginate: true, query: {}, + bookmark, }) - expect(response.rows).toEqual(rows) - } + bookmark = response.bookmark + hasNextPage = response.hasNextPage + expect(response.rows.length).toEqual(1) + const foundRow = response.rows[0] + expect(foundRow).toEqual(fullRowList[rowCount++]) + } while (hasNextPage) }) }) @@ -1578,13 +1808,15 @@ describe.each([ await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) }) - it("successfully finds a row", () => - expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([ { "1:name": "bar" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing() + }) }) describe("user", () => { @@ -1611,51 +1843,59 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { user: user1._id } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { user: user1._id } }).toContainExactly([ { user: { _id: user1._id } }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { user: "us_none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { user: "us_none" } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { user: user1._id } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { user: user1._id } }).toContainExactly([ { user: { _id: user2._id } }, {}, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { user: "us_none" } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { user: "us_none" } }).toContainExactly([ { user: { _id: user1._id } }, { user: { _id: user2._id } }, {}, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { user: [user1._id] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { user: [user1._id] } }).toContainExactly([ { user: { _id: user1._id } }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { user: ["us_none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { user: ["us_none"] } }).toFindNothing() + }) }) describe("empty", () => { - it("finds empty rows", () => - expectQuery({ empty: { user: null } }).toContainExactly([{}])) + it("finds empty rows", async () => { + await expectQuery({ empty: { user: null } }).toContainExactly([{}]) + }) }) describe("notEmpty", () => { - 
it("finds non-empty rows", () => - expectQuery({ notEmpty: { user: null } }).toContainExactly([ + it("finds non-empty rows", async () => { + await expectQuery({ notEmpty: { user: null } }).toContainExactly([ { user: { _id: user1._id } }, { user: { _id: user2._id } }, - ])) + ]) + }) }) }) @@ -1689,58 +1929,71 @@ describe.each([ }) describe("contains", () => { - it("successfully finds a row", () => - expectQuery({ contains: { users: [user1._id] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ + contains: { users: [user1._id] }, + }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ contains: { users: ["us_none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ contains: { users: ["us_none"] } }).toFindNothing() + }) }) describe("notContains", () => { - it("successfully finds a row", () => - expectQuery({ notContains: { users: [user1._id] } }).toContainExactly([ - { users: [{ _id: user2._id }] }, - {}, - ])) + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { users: [user1._id] }, + }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notContains: { users: ["us_none"] } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { users: ["us_none"] }, + }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user2._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, {}, - ])) + ]) + }) }) describe("containsAny", () => { - it("successfully finds rows", () => - expectQuery({ + it("successfully finds rows", async () => { + await expectQuery({ containsAny: { users: [user1._id, user2._id] }, }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user2._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ containsAny: { users: ["us_none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { users: ["us_none"] }, + }).toFindNothing() + }) }) describe("multi-column equals", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ equal: { number: 1 }, contains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }])) + }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { number: 2 }, contains: { users: [user1._id] }, - }).toFindNothing()) + }).toFindNothing() + }) }) }) @@ -1750,51 +2003,109 @@ describe.each([ // isn't available. 
!isInMemory && describe("relations", () => { - let otherTable: Table - let otherRows: Row[] + let productCategoryTable: Table, productCatRows: Row[] beforeAll(async () => { - otherTable = await createTable({ - one: { name: "one", type: FieldType.STRING }, - }) - table = await createTable({ - two: { name: "two", type: FieldType.STRING }, - other: { - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - name: "other", - fieldName: "other", - tableId: otherTable._id!, - constraints: { - type: "array", + productCategoryTable = await createTable( + { + name: { name: "name", type: FieldType.STRING }, + }, + "productCategory" + ) + table = await createTable( + { + name: { name: "name", type: FieldType.STRING }, + productCat: { + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + name: "productCat", + fieldName: "product", + tableId: productCategoryTable._id!, + constraints: { + type: "array", + }, }, }, - }) + "product" + ) - otherRows = await Promise.all([ - config.api.row.save(otherTable._id!, { one: "foo" }), - config.api.row.save(otherTable._id!, { one: "bar" }), + productCatRows = await Promise.all([ + config.api.row.save(productCategoryTable._id!, { name: "foo" }), + config.api.row.save(productCategoryTable._id!, { name: "bar" }), ]) await Promise.all([ config.api.row.save(table._id!, { - two: "foo", - other: [otherRows[0]._id], + name: "foo", + productCat: [productCatRows[0]._id], }), config.api.row.save(table._id!, { - two: "bar", - other: [otherRows[1]._id], + name: "bar", + productCat: [productCatRows[1]._id], + }), + config.api.row.save(table._id!, { + name: "baz", + productCat: [], }), ]) - - rows = await config.api.row.fetch(table._id!) }) - it("can search through relations", () => - expectQuery({ - equal: { [`${otherTable.name}.one`]: "foo" }, + it("should be able to filter by relationship using column name", async () => { + await expectQuery({ + equal: { ["productCat.name"]: "foo" }, }).toContainExactly([ - { two: "foo", other: [{ _id: otherRows[0]._id }] }, - ])) + { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, + ]) + }) + + it("should be able to filter by relationship using table name", async () => { + await expectQuery({ + equal: { ["productCategory.name"]: "foo" }, + }).toContainExactly([ + { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, + ]) + }) + + it("shouldn't return any relationship for last row", async () => { + await expectQuery({ + equal: { ["name"]: "baz" }, + }).toContainExactly([{ name: "baz", productCat: undefined }]) + }) + }) + + // lucene can't count the total rows + !isLucene && + describe("row counting", () => { + beforeAll(async () => { + table = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await createRows([{ name: "a" }, { name: "b" }]) + }) + + it("should be able to count rows when option set", async () => { + await expectSearch({ + countRows: true, + query: { + notEmpty: { + name: true, + }, + }, + }).toMatch({ totalRows: 2, rows: expect.any(Array) }) + }) + + it("shouldn't count rows when option is not set", async () => { + await expectSearch({ + countRows: false, + query: { + notEmpty: { + name: true, + }, + }, + }).toNotHaveProperty(["totalRows"]) + }) }) }) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index f23e0de6db..e75e5e23e7 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -276,6 +276,34 @@ 
describe.each([
   })
 })
 
+  isInternal &&
+    it("shouldn't allow duplicate column names", async () => {
+      const saveTableRequest: SaveTableRequest = {
+        ...basicTable(),
+      }
+      saveTableRequest.schema["Type"] = {
+        type: FieldType.STRING,
+        name: "Type",
+      }
+      await config.api.table.save(saveTableRequest, {
+        status: 400,
+        body: {
+          message:
+            'Column(s) "type" are duplicated - check for other columns with these names (case-insensitive)',
+        },
+      })
+      saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" }
+      saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" }
+
+      await config.api.table.save(saveTableRequest, {
+        status: 400,
+        body: {
+          message:
+            'Column(s) "type, foo" are duplicated - check for other columns with these names (case-insensitive)',
+        },
+      })
+    })
+
   it("should add a new column for an internal DB table", async () => {
     const saveTableRequest: SaveTableRequest = {
       ...basicTable(),
diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts
index 8a843551ac..27d8592849 100644
--- a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts
+++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts
@@ -158,15 +158,16 @@ export const getDB = () => {
   return context.getAppDB()
 }
 
-export const testAutomation = async (config: any, automation: any) => {
+export const testAutomation = async (
+  config: any,
+  automation: any,
+  triggerInputs: any
+) => {
   return runRequest(automation.appId, async () => {
     return await config.request
       .post(`/api/automations/${automation._id}/test`)
       .send({
-        row: {
-          name: "Test",
-          description: "TEST",
-        },
+        ...triggerInputs,
       })
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
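The reworked `testAutomation` helper above no longer hard-codes a `row` payload, so each test supplies whatever inputs its trigger expects. A minimal sketch of how a caller might drive it for the row-updated trigger (the payload values here are hypothetical, not taken from this PR):

```ts
// Hypothetical usage inside a test that already has `config` and a saved
// `automation` in scope; row/oldRow mirror the trigger outputs added below.
const res = await testAutomation(config, automation, {
  row: { name: "Test", description: "TEST" },
  oldRow: { name: "Test", description: "before the update" },
})
expect(res.body).toBeDefined()
```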
diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts
index 06921037dd..99ff4f8db7 100644
--- a/packages/server/src/api/routes/tests/viewV2.spec.ts
+++ b/packages/server/src/api/routes/tests/viewV2.spec.ts
@@ -7,6 +7,7 @@ import {
   INTERNAL_TABLE_SOURCE_ID,
   PermissionLevel,
   QuotaUsageType,
+  Row,
   SaveTableRequest,
   SearchFilterOperator,
   SortOrder,
@@ -17,6 +18,7 @@ import {
   UpdateViewRequest,
   ViewUIFieldMetadata,
   ViewV2,
+  SearchResponse,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
 import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@@ -25,17 +27,21 @@ import { quotas } from "@budibase/pro"
 import { db, roles } from "@budibase/backend-core"
 
 describe.each([
-  ["internal", undefined],
+  ["lucene", undefined],
+  ["sqs", undefined],
   [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/v2/views (%s)", (_, dsProvider) => {
+])("/v2/views (%s)", (name, dsProvider) => {
   const config = setup.getConfig()
-  const isInternal = !dsProvider
+  const isSqs = name === "sqs"
+  const isLucene = name === "lucene"
+  const isInternal = isSqs || isLucene
 
   let table: Table
   let datasource: Datasource
+  let envCleanup: (() => void) | undefined
 
   function saveTableRequest(
     ...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@@ -82,6 +88,9 @@
   }
 
   beforeAll(async () => {
+    if (isSqs) {
+      envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
+    }
     await config.init()
 
     if (dsProvider) {
@@ -94,6 +103,9 @@
   afterAll(async () => {
     setup.afterAll()
+    if (envCleanup) {
+      envCleanup()
+    }
   })
 
   beforeEach(() => {
@@ -1252,12 +1264,13 @@
         paginate: true,
         limit: 4,
         query: {},
+        countRows: true,
       })
       expect(page1).toEqual({
         rows: expect.arrayContaining(rows.slice(0, 4)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: true,
         bookmark: expect.anything(),
+        totalRows: 10,
       })
 
       const page2 = await config.api.viewV2.search(view.id, {
@@ -1265,12 +1278,13 @@
         limit: 4,
         bookmark: page1.bookmark,
         query: {},
+        countRows: true,
       })
       expect(page2).toEqual({
         rows: expect.arrayContaining(rows.slice(4, 8)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: true,
         bookmark: expect.anything(),
+        totalRows: 10,
       })
 
       const page3 = await config.api.viewV2.search(view.id, {
@@ -1278,13 +1292,17 @@
         limit: 4,
         bookmark: page2.bookmark,
         query: {},
+        countRows: true,
       })
-      expect(page3).toEqual({
+      const expectation: SearchResponse<Row> = {
         rows: expect.arrayContaining(rows.slice(8)),
-        totalRows: isInternal ? 10 : undefined,
         hasNextPage: false,
-        bookmark: expect.anything(),
-      })
+        totalRows: 10,
+      }
+      if (isLucene) {
+        expectation.bookmark = expect.anything()
+      }
+      expect(page3).toEqual(expectation)
     })
 
     const sortTestOptions: [
diff --git a/packages/server/src/api/routes/utils/validators.ts b/packages/server/src/api/routes/utils/validators.ts
index a63b29fe5a..671ce95038 100644
--- a/packages/server/src/api/routes/utils/validators.ts
+++ b/packages/server/src/api/routes/utils/validators.ts
@@ -109,6 +109,7 @@ export function internalSearchValidator() {
     sortOrder: OPTIONAL_STRING,
     sortType: OPTIONAL_STRING,
     paginate: Joi.boolean(),
+    countRows: Joi.boolean(),
     bookmark: Joi.alternatives()
       .try(OPTIONAL_STRING, OPTIONAL_NUMBER)
       .optional(),
diff --git a/packages/server/src/automations/triggerInfo/rowUpdated.ts b/packages/server/src/automations/triggerInfo/rowUpdated.ts
index 5e60015808..eab7c40a09 100644
--- a/packages/server/src/automations/triggerInfo/rowUpdated.ts
+++ b/packages/server/src/automations/triggerInfo/rowUpdated.ts
@@ -27,10 +27,17 @@ export const definition: AutomationTriggerSchema = {
   },
   outputs: {
     properties: {
-      row: {
+      oldRow: {
         type: AutomationIOType.OBJECT,
         customType: AutomationCustomIOType.ROW,
-        description: "The row that was updated",
+        description: "The row before it was updated",
+        title: "Old Row",
+      },
+      row: {
+        type: AutomationIOType.OBJECT,
+        customType: AutomationCustomIOType.ROW,
+        description: "The row after it was updated",
+        title: "Row",
       },
       id: {
         type: AutomationIOType.STRING,
diff --git a/packages/server/src/automations/triggers.ts b/packages/server/src/automations/triggers.ts
index 223b8d2eb6..9aa80035bd 100644
--- a/packages/server/src/automations/triggers.ts
+++ b/packages/server/src/automations/triggers.ts
@@ -8,7 +8,13 @@ import { checkTestFlag } from "../utilities/redis"
 import * as utils from "./utils"
 import env from "../environment"
 import { context, db as dbCore } from "@budibase/backend-core"
-import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
+import {
+  Automation,
+  Row,
+  AutomationData,
+  AutomationJob,
+  UpdatedRowEventEmitter,
+} from "@budibase/types"
 import { executeInThread } from "../threads/automation"
 
 export const TRIGGER_DEFINITIONS = definitions
@@ -65,7 +71,7 @@ async function queueRelevantRowAutomations(
   })
 }
 
-emitter.on("row:save", async function (event) {
+emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
   /* istanbul ignore next */
   if (!event || !event.row || !event.row.tableId) {
     return
diff --git a/packages/server/src/events/BudibaseEmitter.ts
b/packages/server/src/events/BudibaseEmitter.ts index 43871d8754..8feb36bbf5 100644 --- a/packages/server/src/events/BudibaseEmitter.ts +++ b/packages/server/src/events/BudibaseEmitter.ts @@ -13,8 +13,14 @@ import { Table, Row } from "@budibase/types" * This is specifically quite important for template strings used in automations. */ class BudibaseEmitter extends EventEmitter { - emitRow(eventName: string, appId: string, row: Row, table?: Table) { - rowEmission({ emitter: this, eventName, appId, row, table }) + emitRow( + eventName: string, + appId: string, + row: Row, + table?: Table, + oldRow?: Row + ) { + rowEmission({ emitter: this, eventName, appId, row, table, oldRow }) } emitTable(eventName: string, appId: string, table?: Table) { diff --git a/packages/server/src/events/utils.ts b/packages/server/src/events/utils.ts index 20efb453f2..b972c8e473 100644 --- a/packages/server/src/events/utils.ts +++ b/packages/server/src/events/utils.ts @@ -7,6 +7,7 @@ type BBEventOpts = { appId: string table?: Table row?: Row + oldRow?: Row metadata?: any } @@ -18,6 +19,7 @@ type BBEvent = { appId: string tableId?: string row?: Row + oldRow?: Row table?: BBEventTable id?: string revision?: string @@ -31,9 +33,11 @@ export function rowEmission({ row, table, metadata, + oldRow, }: BBEventOpts) { let event: BBEvent = { row, + oldRow, appId, tableId: row?.tableId, } diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index 371592bece..55886cd20f 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -22,6 +22,9 @@ export async function makeExternalQuery( ) { throw new Error("Entity ID and table metadata do not align") } + if (!datasource) { + throw new Error("No datasource provided for external query") + } datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index cad1b346c0..b595508093 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -142,7 +142,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`, }) }) @@ -150,7 +150,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", 
"brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`, }) }) @@ -160,7 +160,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, + sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`, }) }) @@ -175,8 +175,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: ["john%", limit], - sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, + bindings: ["john%", limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) query = new Sql(SqlClient.ORACLE, limit)._query( @@ -190,8 +190,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit], - sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`, + bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`, }) query = new Sql(SqlClient.ORACLE, limit)._query( @@ -204,8 +204,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: [`%jo%`, limit], - sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) 
where rownum <= :2) "test"`, + bindings: [`%jo%`, limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) }) }) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 67f3d1d05d..0b433896bf 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -57,15 +57,14 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", + sql: expect.stringContaining( + multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a" - left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" - order by "a"."firstname" asc nulls first limit $2`), + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`) + ), }) }) @@ -74,13 +73,10 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, "assembling", limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE) - order by "a"."productname" asc nulls first limit $3`), + sql: expect.stringContaining( + multiline(`where COALESCE("b"."taskname" = $2, FALSE) + order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`) + ), }) }) @@ -89,13 +85,10 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" - order by "a"."productname" asc nulls first limit $2`), + sql: expect.stringContaining( + multiline(`left join "products_tasks" 
as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `) + ), }) }) @@ -106,11 +99,11 @@ describe("Captures of real examples", () => { expect(query).toEqual({ bindings: [...filters, limit, limit], sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", - "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", - "b"."productname" as "b.productname", "b"."productid" as "b.productid" - from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" - left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" - left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), + "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", + "b"."productname" as "b.productname", "b"."productid" as "b.productid" + from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a" + left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" + left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`), }) }) @@ -132,19 +125,11 @@ describe("Captures of real examples", () => { equalValue, limit, ], - sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", - "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", - "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", "c"."lastname" as "c.lastname" - from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE) - order by "a"."taskname" asc nulls first limit $2) as "a" - left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" - left join "products" as "b" on "b"."productid" = "d"."productid" - left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" - where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`), + sql: expect.stringContaining( + multiline( + `where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)` + ) + ), }) }) }) @@ -200,8 +185,9 @@ describe("Captures of real examples", () => { returningQuery = input }, queryJson) expect(returningQuery).toEqual({ - sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]", - bindings: [1, "Test", 22], + sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2 + THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`), + bindings: [5000, 1, "Test", 22], }) }) }) diff --git a/packages/server/src/sdk/app/rows/index.ts b/packages/server/src/sdk/app/rows/index.ts index c117941419..fb077509a9 100644 --- a/packages/server/src/sdk/app/rows/index.ts +++ b/packages/server/src/sdk/app/rows/index.ts @@ -3,12 +3,14 @@ import * as 
rows from "./rows" import * as search from "./search" import * as utils from "./utils" import * as external from "./external" +import * as filters from "./search/filters" import AliasTables from "./sqlAlias" export default { ...attachments, ...rows, ...search, + filters, utils, external, AliasTables, diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 0430965ec7..93c46d8cc3 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -1,14 +1,14 @@ import { - SortJson, + IncludeRelationship, Operation, PaginationJson, - IncludeRelationship, Row, - SearchFilters, RowSearchParams, + SearchFilters, SearchResponse, - Table, + SortJson, SortOrder, + Table, } from "@budibase/types" import * as exporters from "../../../../api/controllers/view/exporters" import { handleRequest } from "../../../../api/controllers/row/external" @@ -18,7 +18,7 @@ import { } from "../../../../integrations/utils" import { utils } from "@budibase/shared-core" import { ExportRowsParams, ExportRowsResult } from "./types" -import { HTTPError, db } from "@budibase/backend-core" +import { db, HTTPError } from "@budibase/backend-core" import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" @@ -28,20 +28,26 @@ export async function search( table: Table ): Promise> { const { tableId } = options - const { paginate, query, ...params } = options + const { countRows, paginate, query, ...params } = options const { limit } = params let bookmark = (params.bookmark && parseInt(params.bookmark as string)) || undefined if (paginate && !bookmark) { - bookmark = 1 + bookmark = 0 } - let paginateObj = {} + let paginateObj: PaginationJson | undefined - if (paginate) { + if (paginate && !limit) { + throw new Error("Cannot paginate query without a limit") + } + + if (paginate && limit) { paginateObj = { // add one so we can track if there is another page - limit: limit, - page: bookmark, + limit: limit + 1, + } + if (bookmark) { + paginateObj.offset = limit * bookmark } } else if (params && limit) { paginateObj = { @@ -69,24 +75,27 @@ export async function search( } try { - let rows = await handleRequest(Operation.READ, tableId, { + const parameters = { filters: query, sort, paginate: paginateObj as PaginationJson, includeSqlRelationships: IncludeRelationship.INCLUDE, - }) + } + const queries: Promise[] = [] + queries.push(handleRequest(Operation.READ, tableId, parameters)) + if (countRows) { + queries.push(handleRequest(Operation.COUNT, tableId, parameters)) + } + const responses = await Promise.all(queries) + let rows = responses[0] as Row[] + const totalRows = + responses.length > 1 ? (responses[1] as number) : undefined + let hasNextPage = false - if (paginate && rows.length === limit) { - const nextRows = await handleRequest(Operation.READ, tableId, { - filters: query, - sort, - paginate: { - limit: 1, - page: bookmark! 
* limit + 1, - }, - includeSqlRelationships: IncludeRelationship.INCLUDE, - }) - hasNextPage = nextRows.length > 0 + // remove the extra row if it's there + if (paginate && limit && rows.length > limit) { + rows.pop() + hasNextPage = true } if (options.fields) { @@ -100,7 +109,17 @@ export async function search( }) // need wrapper object for bookmarks etc when paginating - return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 } + const response: SearchResponse = { rows, hasNextPage } + if (hasNextPage && bookmark != null) { + response.bookmark = bookmark + 1 + } + if (totalRows != null) { + response.totalRows = totalRows + } + if (paginate && !hasNextPage) { + response.hasNextPage = false + } + return response } catch (err: any) { if (err.message && err.message.includes("does not exist")) { throw new Error( @@ -126,6 +145,10 @@ export async function exportRows( delimiter, customHeaders, } = options + + if (!tableId) { + throw new HTTPError("No table ID for search provided.", 400) + } const { datasourceId, tableName } = breakExternalTableId(tableId) let requestQuery: SearchFilters = {} @@ -148,7 +171,7 @@ export async function exportRows( requestQuery = query || {} } - const datasource = await sdk.datasources.get(datasourceId!) + const datasource = await sdk.datasources.get(datasourceId) const table = await sdk.tables.getTable(tableId) if (!datasource || !datasource.entities) { throw new HTTPError("Datasource has not been configured for plus API.", 400) @@ -161,10 +184,6 @@ export async function exportRows( let rows: Row[] = [] let headers - if (!tableName) { - throw new HTTPError("Could not find table name.", 400) - } - // Filter data to only specified columns if required if (columns && columns.length) { for (let i = 0; i < result.rows.length; i++) { diff --git a/packages/server/src/sdk/app/rows/search/filters.ts b/packages/server/src/sdk/app/rows/search/filters.ts new file mode 100644 index 0000000000..ccce0ab86a --- /dev/null +++ b/packages/server/src/sdk/app/rows/search/filters.ts @@ -0,0 +1,62 @@ +import { + FieldType, + RelationshipFieldMetadata, + SearchFilters, + Table, +} from "@budibase/types" +import { isPlainObject } from "lodash" + +export function getRelationshipColumns(table: Table): { + name: string + definition: RelationshipFieldMetadata +}[] { + // performing this with a for loop rather than an array filter improves + // type guarding, as no casts are required + const linkEntries: [string, RelationshipFieldMetadata][] = [] + for (let entry of Object.entries(table.schema)) { + if (entry[1].type === FieldType.LINK) { + const linkColumn: RelationshipFieldMetadata = entry[1] + linkEntries.push([entry[0], linkColumn]) + } + } + return linkEntries.map(entry => ({ + name: entry[0], + definition: entry[1], + })) +} + +export function getTableIDList( + tables: Table[] +): { name: string; id: string }[] { + return tables + .filter(table => table.originalName && table._id) + .map(table => ({ id: table._id!, name: table.originalName! 
})) +} + +export function updateFilterKeys( + filters: SearchFilters, + updates: { original: string; updated: string }[] +): SearchFilters { + const makeFilterKeyRegex = (str: string) => + new RegExp(`^${str}\\.|:${str}\\.`) + for (let filter of Object.values(filters)) { + if (!isPlainObject(filter)) { + continue + } + for (let [key, keyFilter] of Object.entries(filter)) { + if (keyFilter === "") { + delete filter[key] + } + const possibleKey = updates.find(({ original }) => + key.match(makeFilterKeyRegex(original)) + ) + if (possibleKey && possibleKey.original !== possibleKey.updated) { + // only replace the first, not replaceAll + filter[key.replace(possibleKey.original, possibleKey.updated)] = + filter[key] + delete filter[key] + } + } + } + return filters +} diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 98b4053931..174ecc0e38 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -1,4 +1,5 @@ import { + Datasource, DocumentType, FieldType, Operation, @@ -28,6 +29,12 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils" import AliasTables from "../sqlAlias" import { outputProcessing } from "../../../../utilities/rowProcessor" import pick from "lodash/pick" +import { processRowCountResponse } from "../utils" +import { + updateFilterKeys, + getRelationshipColumns, + getTableIDList, +} from "./filters" const builder = new sql.Sql(SqlClient.SQL_LITE) @@ -58,34 +65,31 @@ function buildInternalFieldList( return fieldList } -function tableNameInFieldRegex(tableName: string) { - return new RegExp(`^${tableName}.|:${tableName}.`, "g") -} - -function cleanupFilters(filters: SearchFilters, tables: Table[]) { - for (let filter of Object.values(filters)) { - if (typeof filter !== "object") { - continue - } - for (let [key, keyFilter] of Object.entries(filter)) { - if (keyFilter === "") { - delete filter[key] - } - - // relationship, switch to table ID - const tableRelated = tables.find( - table => - table.originalName && - key.match(tableNameInFieldRegex(table.originalName)) +function cleanupFilters( + filters: SearchFilters, + table: Table, + allTables: Table[] +) { + // get a list of all relationship columns in the table for updating + const relationshipColumns = getRelationshipColumns(table) + // get table names to ID map for relationships + const tableNameToID = getTableIDList(allTables) + // all should be applied at once + filters = updateFilterKeys( + filters, + relationshipColumns + .map(({ name, definition }) => ({ + original: name, + updated: definition.tableId, + })) + .concat( + tableNameToID.map(({ name, id }) => ({ + original: name, + updated: id, + })) ) - if (tableRelated && tableRelated.originalName) { - // only replace the first, not replaceAll - filter[key.replace(tableRelated.originalName, tableRelated._id!)] = - filter[key] - delete filter[key] - } - } - } + ) + return filters } @@ -95,14 +99,29 @@ function buildTableMap(tables: Table[]) { // update the table name, should never query by name for SQLite table.originalName = table.name table.name = table._id! + // need a primary for sorting, lookups etc + table.primary = ["_id"] tableMap[table._id!] 
= table
   }
   return tableMap
 }
 
-async function runSqlQuery(json: QueryJson, tables: Table[]) {
+function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
+function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts: { countTotalRows: true }
+): Promise<number>
+async function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts?: { countTotalRows?: boolean }
+) {
   const alias = new AliasTables(tables.map(table => table.name))
-  return await alias.queryWithAliasing(json, async json => {
+  if (opts?.countTotalRows) {
+    json.endpoint.operation = Operation.COUNT
+  }
+  const processSQLQuery = async (_: Datasource, json: QueryJson) => {
     const query = builder._query(json, {
       disableReturning: true,
     })
@@ -124,17 +143,27 @@
     const db = context.getAppDB()
     return await db.sql(sql, bindings)
-  })
+  }
+  const response = await alias.queryWithAliasing(json, processSQLQuery)
+  if (opts?.countTotalRows) {
+    return processRowCountResponse(response)
+  } else {
+    return response
+  }
 }
 
 export async function search(
   options: RowSearchParams,
   table: Table
 ): Promise<SearchResponse<Row>> {
-  const { paginate, query, ...params } = options
+  let { paginate, query, ...params } = options
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
+  // make sure we have the mapped/latest table
+  if (table?._id) {
+    table = allTablesMap[table?._id]
+  }
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -149,7 +178,7 @@
       operation: Operation.READ,
     },
     filters: {
-      ...cleanupFilters(query, allTables),
+      ...cleanupFilters(query, table, allTables),
       documentType: DocumentType.ROW,
     },
     table,
@@ -169,7 +198,7 @@
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
     request.sort = {
       [sortField.name]: {
-        direction: params.sortOrder || SortOrder.DESCENDING,
+        direction: params.sortOrder || SortOrder.ASCENDING,
         type: sortType as SortType,
       },
     }
@@ -180,7 +209,8 @@
   }
 
   const bookmark: number = (params.bookmark as number) || 0
-  if (paginate && params.limit) {
+  if (params.limit) {
+    paginate = true
     request.paginate = {
       limit: params.limit + 1,
       offset: bookmark * params.limit,
     }
@@ -188,7 +218,20 @@
   try {
-    const rows = await runSqlQuery(request, allTables)
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(runSqlQuery(request, allTables))
+    if (options.countRows) {
+      // get the total count of rows
+      queries.push(
+        runSqlQuery(request, allTables, {
+          countTotalRows: true,
+        })
+      )
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined
 
     // process from the format of tableId.column to expected format also
     // make sure JSON columns corrected
@@ -201,7 +244,8 @@
     // check for pagination final row
     let nextRow: Row | undefined
-    if (paginate && params.limit && processed.length > params.limit) {
+    if (paginate && params.limit && rows.length > params.limit) {
+      // remove the extra row used to check whether there is another page
      nextRow = processed.pop()
     }
 
@@ -217,21 +261,21 @@
       finalRows = finalRows.map((r: any) => pick(r, fields))
     }
 
-    // check for pagination
-    if (paginate) {
-      const response: SearchResponse<Row> = {
-        rows: finalRows,
-      }
-      if (nextRow) {
-        response.hasNextPage = true
-        response.bookmark = bookmark + 1
-      }
-      return response
-    } else {
-      return {
-        rows: finalRows,
-      }
+    const response: SearchResponse<Row> = {
+      rows: finalRows,
     }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    // check for pagination
+    if (paginate && nextRow) {
+      response.hasNextPage = true
+      response.bookmark = bookmark + 1
+    }
+    if (paginate && !nextRow) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
     if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {
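Both the external and the SQS search paths now share the same counting shape: kick off the read, optionally kick off a `COUNT` of the same request, and await them together. A condensed sketch of that pattern in isolation (the `runRead`/`runCount` callbacks and local `Row` alias are illustrative, not from this PR):

```ts
type Row = Record<string, any> // stand-in for the @budibase/types Row

async function searchWithCount(
  runRead: () => Promise<Row[]>,
  runCount: () => Promise<number>,
  countRows: boolean
): Promise<{ rows: Row[]; totalRows?: number }> {
  // the count query only runs (and only costs anything) when requested
  const queries: [Promise<Row[]>, Promise<number>?] = [runRead()]
  if (countRows) {
    queries.push(runCount())
  }
  const [rows, totalRows] = (await Promise.all(queries)) as [Row[], number?]
  return totalRows !== undefined ? { rows, totalRows } : { rows }
}
```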
diff --git a/packages/server/src/sdk/app/rows/sqlAlias.ts b/packages/server/src/sdk/app/rows/sqlAlias.ts
index ab4f5d2844..bc8fc56d5e 100644
--- a/packages/server/src/sdk/app/rows/sqlAlias.ts
+++ b/packages/server/src/sdk/app/rows/sqlAlias.ts
@@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
 import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
 import datasources from "../datasources"
-import { makeExternalQuery } from "../../../integrations/base/query"
+import { BudibaseInternalDB } from "../../../db/utils"
+
+type PerformQueryFunction = (
+  datasource: Datasource,
+  json: QueryJson
+) => Promise<DatasourcePlusQueryResponse>
 
 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -65,7 +70,7 @@ export default class AliasTables {
     this.charSeq = new CharSequence()
   }
 
-  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+  isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
     const operation = json.endpoint.operation
     const fieldLength = json.resource?.fields?.length
     if (
@@ -75,6 +80,10 @@ export default class AliasTables {
     ) {
       return false
     }
+    // SQS - doesn't have a datasource
+    if (!datasource) {
+      return true
+    }
     try {
       const sqlClient = getSQLClient(datasource)
       const isWrite = WRITE_OPERATIONS.includes(operation)
@@ -167,13 +176,14 @@ export default class AliasTables {
 
   async queryWithAliasing(
     json: QueryJson,
-    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
+    queryFn: PerformQueryFunction
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
     const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
-    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    let aliasingEnabled: boolean, datasource: Datasource
     if (isSqs) {
-      aliasingEnabled = true
+      aliasingEnabled = this.isAliasingEnabled(json)
+      datasource = BudibaseInternalDB
     } else {
       datasource = await datasources.get(datasourceId)
       aliasingEnabled = this.isAliasingEnabled(json, datasource)
     }
@@ -225,14 +235,7 @@ export default class AliasTables {
       json.tableAliases = invertedTableAliases
     }
 
-    let response: DatasourcePlusQueryResponse
-    if (datasource && !isSqs) {
-      response = await makeExternalQuery(datasource, json)
-    } else if (queryFn) {
-      response = await queryFn(json)
-    } else {
-      throw new Error("No supplied method to perform aliased query")
-    }
+    let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
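`queryWithAliasing` no longer falls back to `makeExternalQuery` internally; every caller now hands in a `PerformQueryFunction` and receives the resolved datasource back. A sketch of what an external-datasource caller might look like under the new contract (`tables` and `json` are assumed to be in scope, as in the SQL layers above):

```ts
// Hypothetical caller: external and SQS searches now share one code path,
// differing only in the callback they supply.
const aliasing = new AliasTables(tables.map(table => table.name))
const response = await aliasing.queryWithAliasing(json, (datasource, aliased) =>
  makeExternalQuery(datasource, aliased)
)
```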
diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts
index bb37fd99f3..cd1b663f6a 100644
--- a/packages/server/src/sdk/app/rows/utils.ts
+++ b/packages/server/src/sdk/app/rows/utils.ts
@@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
   throw new Error("Unable to determine client for SQL datasource")
 }
 
+export function processRowCountResponse(
+  response: DatasourcePlusQueryResponse
+): number {
+  if (response && response.length === 1 && "total" in response[0]) {
+    const total = response[0].total
+    return typeof total === "number" ? total : parseInt(total)
+  } else {
+    throw new Error("Unable to count rows in query - no count response")
+  }
+}
+
 export async function getDatasourceAndQuery(
   json: QueryJson
 ): Promise<DatasourcePlusQueryResponse> {
diff --git a/packages/server/src/sdk/app/tables/getters.ts b/packages/server/src/sdk/app/tables/getters.ts
index 355493579d..738e57eff8 100644
--- a/packages/server/src/sdk/app/tables/getters.ts
+++ b/packages/server/src/sdk/app/tables/getters.ts
@@ -90,10 +90,10 @@ export async function getExternalTable(
 
 export async function getTable(tableId: string): Promise<Table> {
   const db = context.getAppDB()
   let output: Table
-  if (isExternalTableID(tableId)) {
+  if (tableId && isExternalTableID(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
-    const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId!, tableName!)
+    const datasource = await datasources.get(datasourceId)
+    const table = await getExternalTable(datasourceId, tableName)
     output = { ...table, sql: isSQL(datasource) }
   } else {
     output = await db.get<Table>(tableId)
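`processRowCountResponse` expects the COUNT variant of a query to come back as a single row carrying a `total` column. A small usage sketch (hand-rolled response value; some drivers return the count as a string, which is why the `parseInt` fallback exists):

```ts
// The COUNT branch of runSqlQuery resolves to a one-row result like this:
const countResult = [{ total: "42" }]
const totalRows = processRowCountResponse(countResult as any) // => 42
```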
diff --git a/packages/server/src/sdk/app/tables/internal/index.ts b/packages/server/src/sdk/app/tables/internal/index.ts
index ea40d2bfe9..fc32708708 100644
--- a/packages/server/src/sdk/app/tables/internal/index.ts
+++ b/packages/server/src/sdk/app/tables/internal/index.ts
@@ -17,6 +17,7 @@ import { cloneDeep } from "lodash/fp"
 import isEqual from "lodash/isEqual"
 import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
 import { context } from "@budibase/backend-core"
+import { findDuplicateInternalColumns } from "@budibase/shared-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
@@ -44,6 +45,17 @@ export async function save(
   if (hasTypeChanged(table, oldTable)) {
     throw new Error("A column type has changed.")
   }
+
+  // check for case sensitivity - we don't want to allow duplicated columns
+  const duplicateColumn = findDuplicateInternalColumns(table)
+  if (duplicateColumn.length) {
+    throw new Error(
+      `Column(s) "${duplicateColumn.join(
+        ", "
+      )}" are duplicated - check for other columns with these names (case-insensitive)`
+    )
+  }
+
   // check that subtypes have been maintained
   table = checkAutoColumns(table, oldTable)
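Editor's note: a short sketch of why the duplicate check above is case-insensitive: a user column that differs from one of the reserved internal row columns only by case would collide once lower-cased. This is a simplified stand-in for the shared-core implementation (which appears later in this diff); the `findDuplicates` name and plain-object table shape are illustrative.

```ts
const INTERNAL_ROW_COLS = [
  "_id",
  "_rev",
  "type",
  "createdAt",
  "updatedAt",
  "tableId",
] as const

function findDuplicates(schema: Record<string, unknown>): string[] {
  const names = Object.keys(schema)
    .concat(INTERNAL_ROW_COLS)
    .map(name => name.toLowerCase())
  const seen = new Set<string>()
  const duplicates = new Set<string>()
  for (const name of names) {
    if (seen.has(name)) {
      duplicates.add(name)
    }
    seen.add(name)
  }
  return [...duplicates]
}

// "Type" collides with the reserved "type" column once lower-cased
findDuplicates({ Type: {}, amount: {} }) // -> ["type"]
```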
diff --git a/packages/server/src/sdk/app/views/external.ts b/packages/server/src/sdk/app/views/external.ts
index 0f96bcc061..2b3e271597 100644
--- a/packages/server/src/sdk/app/views/external.ts
+++ b/packages/server/src/sdk/app/views/external.ts
@@ -10,9 +10,9 @@ export async function get(viewId: string): Promise<ViewV2> {
   const { tableId } = utils.extractViewInfoFromID(viewId)

   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)

-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -25,9 +25,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
   const { tableId } = utils.extractViewInfoFromID(viewId)

   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)

-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -49,9 +49,9 @@ export async function create(
   const db = context.getAppDB()

   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  ds.entities![tableName!].views![view.name] = view
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  ds.entities![tableName].views![view.name] = view
   await db.put(ds)
   return view
 }
@@ -60,9 +60,9 @@ export async function update(tableId: string, view: ViewV2): Promise<void> {
   const db = context.getAppDB()

   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  const views = ds.entities![tableName!].views!
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  const views = ds.entities![tableName].views!

   const existingView = Object.values(views).find(
     v => isV2(v) && v.id === view.id
@@ -87,9 +87,9 @@ export async function remove(viewId: string): Promise<ViewV2> {
   }

   const { datasourceId, tableName } = breakExternalTableId(view.tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)

-  delete ds.entities![tableName!].views![view?.name]
+  delete ds.entities![tableName].views![view?.name]
   await db.put(ds)
   return view
 }
diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts
index 7213cc66f1..a59719ab2c 100644
--- a/packages/server/src/tests/utilities/structures.ts
+++ b/packages/server/src/tests/utilities/structures.ts
@@ -359,6 +359,36 @@ export function collectAutomation(tableId?: string): Automation {
   return automation as Automation
 }

+export function filterAutomation(tableId?: string): Automation {
+  const automation: any = {
+    name: "looping",
+    type: "automation",
+    definition: {
+      steps: [
+        {
+          id: "b",
+          type: "ACTION",
+          internal: true,
+          stepId: AutomationActionStepId.FILTER,
+          inputs: {},
+          schema: BUILTIN_ACTION_DEFINITIONS.EXECUTE_SCRIPT.schema,
+        },
+      ],
+      trigger: {
+        id: "a",
+        type: "TRIGGER",
+        event: "row:save",
+        stepId: AutomationTriggerStepId.ROW_SAVED,
+        inputs: {
+          tableId,
+        },
+        schema: TRIGGER_DEFINITIONS.ROW_SAVED.schema,
+      },
+    },
+  }
+  return automation as Automation
+}
+
 export function basicAutomationResults(
   automationId: string
 ): AutomationResults {
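Editor's note: the dropped `!` assertions in the views code above (and in getters.ts earlier) depend on `breakExternalTableId` now guaranteeing both parts of the ID. A rough sketch of that contract, under the assumption that external table IDs encode a datasource ID and table name joined by a separator; the actual implementation lives in the server's db utilities and may differ in encoding and error handling.

```ts
// hypothetical re-implementation for illustration only
function breakExternalTableId(tableId: string): {
  datasourceId: string
  tableName: string
} {
  // assumed encoding: "<datasourceId>__<tableName>"
  const parts = tableId.split("__")
  if (parts.length < 2) {
    // throwing here is what lets callers drop their non-null assertions
    throw new Error(`Unable to break down tableId: ${tableId}`)
  }
  const [datasourceId, ...rest] = parts
  // table names may themselves contain the separator
  return { datasourceId, tableName: rest.join("__") }
}

const { datasourceId, tableName } = breakExternalTableId(
  "datasource_plus_abc123__my_table"
)
```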
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 8e6cd34c7c..e473675633 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -4,6 +4,7 @@ import {
   TableSchema,
   FieldSchema,
   Row,
+  Table,
 } from "@budibase/types"
 import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
@@ -118,16 +119,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }

-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}

     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
diff --git a/packages/shared-core/src/constants/index.ts b/packages/shared-core/src/constants/index.ts
index afb7e659e1..c9d1a8fc8f 100644
--- a/packages/shared-core/src/constants/index.ts
+++ b/packages/shared-core/src/constants/index.ts
@@ -1,5 +1,6 @@
 export * from "./api"
 export * from "./fields"
+export * from "./rows"

 export const OperatorOptions = {
   Equals: {
@@ -179,3 +180,5 @@ export enum BpmStatusValue {
   VERIFYING_EMAIL = "verifying_email",
   COMPLETED = "completed",
 }
+
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
diff --git a/packages/shared-core/src/constants/rows.ts b/packages/shared-core/src/constants/rows.ts
new file mode 100644
index 0000000000..bfa7595d62
--- /dev/null
+++ b/packages/shared-core/src/constants/rows.ts
@@ -0,0 +1,14 @@
+export const CONSTANT_INTERNAL_ROW_COLS = [
+  "_id",
+  "_rev",
+  "type",
+  "createdAt",
+  "updatedAt",
+  "tableId",
+] as const
+
+export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
+
+export function isInternalColumnName(name: string): boolean {
+  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
+}
diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts
index 52ab3ed626..bd75406e26 100644
--- a/packages/shared-core/src/filters.ts
+++ b/packages/shared-core/src/filters.ts
@@ -12,6 +12,7 @@ import {
   SortOrder,
   RowSearchParams,
   EmptyFilterOption,
+  SearchResponse,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@@ -262,15 +263,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }

-export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
+export const search = (
+  docs: Record<string, any>[],
+  query: RowSearchParams
+): SearchResponse<Record<string, any>> => {
   let result = runQuery(docs, query.query)
   if (query.sort) {
     result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
   }
+  let totalRows = result.length
   if (query.limit) {
     result = limit(result, query.limit.toString())
   }
-  return result
+  const response: SearchResponse<Record<string, any>> = { rows: result }
+  if (query.countRows) {
+    response.totalRows = totalRows
+  }
+  return response
 }

 /**
diff --git a/packages/shared-core/src/table.ts b/packages/shared-core/src/table.ts
index 7706b78037..4b578a2aef 100644
--- a/packages/shared-core/src/table.ts
+++ b/packages/shared-core/src/table.ts
@@ -1,4 +1,5 @@
-import { FieldType } from "@budibase/types"
+import { FieldType, Table } from "@budibase/types"
+import { CONSTANT_INTERNAL_ROW_COLS } from "./constants"

 const allowDisplayColumnByType: Record<FieldType, boolean> = {
   [FieldType.STRING]: true,
@@ -51,3 +52,22 @@ export function canBeDisplayColumn(type: FieldType): boolean {
 export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }
+
+export function findDuplicateInternalColumns(table: Table): string[] {
+  // get the column names
+  const columnNames = Object.keys(table.schema)
+    .concat(CONSTANT_INTERNAL_ROW_COLS)
+    .map(colName => colName.toLowerCase())
+  // if the set is smaller than the list, there are duplicates
+  const set = new Set(columnNames)
+  let duplicates: string[] = []
+  if (set.size !== columnNames.length) {
+    for (let key of set.keys()) {
+      const count = columnNames.filter(name => name === key).length
+      if (count > 1) {
+        duplicates.push(key)
+      }
+    }
+  }
+  return duplicates
+}
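Editor's note: a usage sketch for the upgraded shared-core `search` above, which now wraps its results in a `SearchResponse` and reports `totalRows` only when the caller opts in via `countRows`. Filtering and sorting are elided to keep the sketch self-contained; `searchSketch` and `SketchResponse` are illustrative names, not the real exports.

```ts
type Doc = Record<string, any>

interface SketchResponse {
  rows: Doc[]
  totalRows?: number
}

function searchSketch(
  docs: Doc[],
  query: { limit?: number; countRows?: boolean }
): SketchResponse {
  let result = docs // runQuery/sort elided; see the diff above
  // capture the count before the limit is applied, so pagination UIs
  // can still show "x of y" totals
  const totalRows = result.length
  if (query.limit) {
    result = result.slice(0, query.limit)
  }
  const response: SketchResponse = { rows: result }
  if (query.countRows) {
    response.totalRows = totalRows
  }
  return response
}

const docs = [{ id: 1 }, { id: 2 }, { id: 3 }]
searchSketch(docs, { limit: 2 })                  // rows only
searchSketch(docs, { limit: 2, countRows: true }) // rows + totalRows: 3
```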
diff --git a/packages/types/src/api/web/app/rows.ts b/packages/types/src/api/web/app/rows.ts
index 5d49f01bfc..c120af0628 100644
--- a/packages/types/src/api/web/app/rows.ts
+++ b/packages/types/src/api/web/app/rows.ts
@@ -25,6 +25,7 @@ export interface SearchViewRowRequest
   | "bookmark"
   | "paginate"
   | "query"
+  | "countRows"
 > {}

 export interface SearchRowResponse {
diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation.ts
index 63291fa3bb..5954a47151 100644
--- a/packages/types/src/documents/app/automation.ts
+++ b/packages/types/src/documents/app/automation.ts
@@ -2,6 +2,8 @@ import { Document } from "../document"
 import { EventEmitter } from "events"
 import { User } from "../global"
 import { ReadStream } from "fs"
+import { Row } from "./row"
+import { Table } from "./table"

 export enum AutomationIOType {
   OBJECT = "object",
@@ -252,3 +254,10 @@ export type BucketedContent = AutomationAttachmentContent & {
   bucket: string
   path: string
 }
+
+export type UpdatedRowEventEmitter = {
+  row: Row
+  oldRow: Row
+  table: Table
+  appId: string
+}
diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts
index 3a788cbac6..bdad9c4825 100644
--- a/packages/types/src/sdk/datasources.ts
+++ b/packages/types/src/sdk/datasources.ts
@@ -8,7 +8,9 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  COUNT = "COUNT",
   BULK_CREATE = "BULK_CREATE",
+  BULK_UPSERT = "BULK_UPSERT",
   CREATE_TABLE = "CREATE_TABLE",
   UPDATE_TABLE = "UPDATE_TABLE",
   DELETE_TABLE = "DELETE_TABLE",
@@ -20,6 +22,7 @@ export const RowOperations = [
   Operation.UPDATE,
   Operation.DELETE,
   Operation.BULK_CREATE,
+  Operation.BULK_UPSERT,
 ]

 export enum QueryType {
@@ -186,7 +189,7 @@ export interface Schema {
 }

 // return these when an operation occurred but we got no response
-enum DSPlusOperation {
+export enum DSPlusOperation {
   CREATE = "create",
   READ = "read",
   UPDATE = "update",
@@ -196,6 +199,7 @@ enum DSPlusOperation {
 export type DatasourcePlusQueryResponse =
   | Row[]
   | Record<string, any>[]
+  | { total: number }[]
   | void

 export interface DatasourcePlus extends IntegrationBase {
diff --git a/packages/types/src/sdk/row.ts b/packages/types/src/sdk/row.ts
index 7f3fc1f391..b0b137034b 100644
--- a/packages/types/src/sdk/row.ts
+++ b/packages/types/src/sdk/row.ts
@@ -17,6 +17,7 @@ export interface SearchParams {
   fields?: string[]
   indexer?: () => Promise<any>
   rows?: Row[]
+  countRows?: boolean
 }

 // when searching for rows we want a more extensive search type that requires certain properties
diff --git a/yarn.lock b/yarn.lock
index d71dd4da78..9914c334df 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2193,9 +2193,9 @@
   "@bull-board/api" "5.10.2"

 "@camunda8/sdk@^8.5.3":
-  version "8.6.2"
-  resolved
sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g== +"@esbuild/aix-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" + integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== "@esbuild/android-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz#984b4f9c8d0377443cc2dfcef266d02244593622" integrity sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ== -"@esbuild/android-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz#db1c9202a5bc92ea04c7b6840f1bbe09ebf9e6b9" - integrity sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg== +"@esbuild/android-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" + integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== "@esbuild/android-arm@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.18.20.tgz#fedb265bc3a589c84cc11f810804f234947c3682" integrity sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw== -"@esbuild/android-arm@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.20.2.tgz#3b488c49aee9d491c2c8f98a909b785870d6e995" - integrity sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w== +"@esbuild/android-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" + integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== "@esbuild/android-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.18.20.tgz#35cf419c4cfc8babe8893d296cd990e9e9f756f2" integrity sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg== -"@esbuild/android-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.20.2.tgz#3b1628029e5576249d2b2d766696e50768449f98" - integrity sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg== +"@esbuild/android-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" + integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== "@esbuild/darwin-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1" integrity sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA== -"@esbuild/darwin-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz#6e8517a045ddd86ae30c6608c8475ebc0c4000bb" - integrity sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA== 
+"@esbuild/darwin-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" + integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== "@esbuild/darwin-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz#d70d5790d8bf475556b67d0f8b7c5bdff053d85d" integrity sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ== -"@esbuild/darwin-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz#90ed098e1f9dd8a9381695b207e1cff45540a0d0" - integrity sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA== +"@esbuild/darwin-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" + integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== "@esbuild/freebsd-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz#98755cd12707f93f210e2494d6a4b51b96977f54" integrity sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw== -"@esbuild/freebsd-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz#d71502d1ee89a1130327e890364666c760a2a911" - integrity sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw== +"@esbuild/freebsd-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" + integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== "@esbuild/freebsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz#c1eb2bff03915f87c29cece4c1a7fa1f423b066e" integrity sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ== -"@esbuild/freebsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz#aa5ea58d9c1dd9af688b8b6f63ef0d3d60cea53c" - integrity sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw== +"@esbuild/freebsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" + integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== "@esbuild/linux-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz#bad4238bd8f4fc25b5a021280c770ab5fc3a02a0" integrity sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA== -"@esbuild/linux-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz#055b63725df678379b0f6db9d0fa85463755b2e5" - integrity sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A== +"@esbuild/linux-arm64@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" + integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== "@esbuild/linux-arm@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz#3e617c61f33508a27150ee417543c8ab5acc73b0" integrity sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg== -"@esbuild/linux-arm@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz#76b3b98cb1f87936fbc37f073efabad49dcd889c" - integrity sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg== +"@esbuild/linux-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" + integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== "@esbuild/linux-ia32@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz#699391cccba9aee6019b7f9892eb99219f1570a7" integrity sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA== -"@esbuild/linux-ia32@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz#c0e5e787c285264e5dfc7a79f04b8b4eefdad7fa" - integrity sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig== +"@esbuild/linux-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" + integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== "@esbuild/linux-loong64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz#e6fccb7aac178dd2ffb9860465ac89d7f23b977d" integrity sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg== -"@esbuild/linux-loong64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz#a6184e62bd7cdc63e0c0448b83801001653219c5" - integrity sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ== +"@esbuild/linux-loong64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" + integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== "@esbuild/linux-mips64el@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz#eeff3a937de9c2310de30622a957ad1bd9183231" integrity sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ== -"@esbuild/linux-mips64el@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz#d08e39ce86f45ef8fc88549d29c62b8acf5649aa" - integrity sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA== +"@esbuild/linux-mips64el@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" + integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== "@esbuild/linux-ppc64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz#2f7156bde20b01527993e6881435ad79ba9599fb" integrity sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA== -"@esbuild/linux-ppc64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz#8d252f0b7756ffd6d1cbde5ea67ff8fd20437f20" - integrity sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg== +"@esbuild/linux-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" + integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== "@esbuild/linux-riscv64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz#6628389f210123d8b4743045af8caa7d4ddfc7a6" integrity sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A== -"@esbuild/linux-riscv64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz#19f6dcdb14409dae607f66ca1181dd4e9db81300" - integrity sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg== +"@esbuild/linux-riscv64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" + integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== "@esbuild/linux-s390x@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz#255e81fb289b101026131858ab99fba63dcf0071" integrity sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ== -"@esbuild/linux-s390x@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz#3c830c90f1a5d7dd1473d5595ea4ebb920988685" - integrity sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ== +"@esbuild/linux-s390x@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" + integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== "@esbuild/linux-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz#c7690b3417af318a9b6f96df3031a8865176d338" integrity sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w== -"@esbuild/linux-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz#86eca35203afc0d9de0694c64ec0ab0a378f6fff" - integrity sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw== +"@esbuild/linux-x64@0.21.5": + version "0.21.5" + resolved 
"https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" + integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== "@esbuild/netbsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz#30e8cd8a3dded63975e2df2438ca109601ebe0d1" integrity sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A== -"@esbuild/netbsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz#e771c8eb0e0f6e1877ffd4220036b98aed5915e6" - integrity sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ== +"@esbuild/netbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" + integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== "@esbuild/openbsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz#7812af31b205055874c8082ea9cf9ab0da6217ae" integrity sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg== -"@esbuild/openbsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz#9a795ae4b4e37e674f0f4d716f3e226dd7c39baf" - integrity sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ== +"@esbuild/openbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" + integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== "@esbuild/sunos-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz#d5c275c3b4e73c9b0ecd38d1ca62c020f887ab9d" integrity sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ== -"@esbuild/sunos-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz#7df23b61a497b8ac189def6e25a95673caedb03f" - integrity sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w== +"@esbuild/sunos-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" + integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== "@esbuild/win32-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz#73bc7f5a9f8a77805f357fab97f290d0e4820ac9" integrity sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg== -"@esbuild/win32-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz#f1ae5abf9ca052ae11c1bc806fb4c0f519bacf90" - integrity sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ== +"@esbuild/win32-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" + integrity 
sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== "@esbuild/win32-ia32@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz#ec93cbf0ef1085cc12e71e0d661d20569ff42102" integrity sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g== -"@esbuild/win32-ia32@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz#241fe62c34d8e8461cd708277813e1d0ba55ce23" - integrity sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ== +"@esbuild/win32-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" + integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== "@esbuild/win32-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d" integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ== -"@esbuild/win32-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz#9c907b21e30a52db959ba4f80bb01a0cc403d5cc" - integrity sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ== +"@esbuild/win32-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" + integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" @@ -5941,9 +5941,9 @@ integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg== "@types/node@>=8.1.0": - version "20.14.2" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.2.tgz#a5f4d2bcb4b6a87bffcaa717718c5a0f208f4a18" - integrity sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q== + version "20.14.5" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.5.tgz#fe35e3022ebe58b8f201580eb24e1fcfc0f2487d" + integrity sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA== dependencies: undici-types "~5.26.4" @@ -6881,11 +6881,18 @@ acorn-walk@^8.0.2, acorn-walk@^8.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn-walk@^8.2.0, acorn-walk@^8.3.2: +acorn-walk@^8.2.0: version "8.3.2" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== +acorn-walk@^8.3.2: + version "8.3.3" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e" + integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw== + dependencies: + acorn "^8.11.0" + acorn@^5.2.1, acorn@^5.7.3: version "5.7.4" resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e" @@ -6901,6 +6908,11 @@ acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8. resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b" integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w== +acorn@^8.11.0: + version "8.12.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.0.tgz#1627bfa2e058148036133b8d9b51a700663c294c" + integrity sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw== + acorn@^8.11.3, acorn@^8.8.1: version "8.11.3" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" @@ -10284,7 +10296,7 @@ engine.io-parser@~5.0.3: resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45" integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw== -engine.io@~6.4.1: +engine.io@~6.4.2: version "6.4.2" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f" integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg== @@ -10542,34 +10554,34 @@ esbuild@^0.18.10, esbuild@^0.18.17: "@esbuild/win32-ia32" "0.18.20" "@esbuild/win32-x64" "0.18.20" -esbuild@^0.20.1: - version "0.20.2" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.20.2.tgz#9d6b2386561766ee6b5a55196c6d766d28c87ea1" - integrity sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g== +esbuild@^0.21.3: + version "0.21.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" + integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== optionalDependencies: - "@esbuild/aix-ppc64" "0.20.2" - "@esbuild/android-arm" "0.20.2" - "@esbuild/android-arm64" "0.20.2" - "@esbuild/android-x64" "0.20.2" - "@esbuild/darwin-arm64" "0.20.2" - "@esbuild/darwin-x64" "0.20.2" - "@esbuild/freebsd-arm64" "0.20.2" - "@esbuild/freebsd-x64" "0.20.2" - "@esbuild/linux-arm" "0.20.2" - "@esbuild/linux-arm64" "0.20.2" - "@esbuild/linux-ia32" "0.20.2" - "@esbuild/linux-loong64" "0.20.2" - "@esbuild/linux-mips64el" "0.20.2" - "@esbuild/linux-ppc64" "0.20.2" - "@esbuild/linux-riscv64" "0.20.2" - "@esbuild/linux-s390x" "0.20.2" - "@esbuild/linux-x64" "0.20.2" - "@esbuild/netbsd-x64" "0.20.2" - "@esbuild/openbsd-x64" "0.20.2" - "@esbuild/sunos-x64" "0.20.2" - "@esbuild/win32-arm64" "0.20.2" - "@esbuild/win32-ia32" "0.20.2" - "@esbuild/win32-x64" "0.20.2" + "@esbuild/aix-ppc64" "0.21.5" + "@esbuild/android-arm" "0.21.5" + "@esbuild/android-arm64" "0.21.5" + "@esbuild/android-x64" "0.21.5" + "@esbuild/darwin-arm64" "0.21.5" + "@esbuild/darwin-x64" "0.21.5" + "@esbuild/freebsd-arm64" "0.21.5" + "@esbuild/freebsd-x64" "0.21.5" + "@esbuild/linux-arm" "0.21.5" + "@esbuild/linux-arm64" "0.21.5" + "@esbuild/linux-ia32" "0.21.5" + "@esbuild/linux-loong64" "0.21.5" + "@esbuild/linux-mips64el" "0.21.5" + "@esbuild/linux-ppc64" "0.21.5" + "@esbuild/linux-riscv64" "0.21.5" + "@esbuild/linux-s390x" "0.21.5" + "@esbuild/linux-x64" "0.21.5" + "@esbuild/netbsd-x64" "0.21.5" + "@esbuild/openbsd-x64" "0.21.5" + "@esbuild/sunos-x64" "0.21.5" + "@esbuild/win32-arm64" "0.21.5" + "@esbuild/win32-ia32" 
"0.21.5" + "@esbuild/win32-x64" "0.21.5" escalade@^3.1.1: version "3.1.1" @@ -18139,9 +18151,9 @@ posthog-js@^1.118.0: preact "^10.19.3" posthog-js@^1.13.4: - version "1.139.1" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.139.1.tgz#25db31d94ce218357a2be43be4a55cfbb940f295" - integrity sha512-+JDu2S7z6sh9Q5kj0oh/W8PZJMQ1gSigWi7gbY4NwwCq2M3t0wNFjxlfHbAo1GncRWDxen+IC+3J7oJ8TJGnkA== + version "1.139.2" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.139.2.tgz#f8de29edf2770da47fcccb7838902d1e89d6b43d" + integrity sha512-myyuOADqZvYwgqmriwlKDEUDwLhscivFLh67UWBj4Wt9kOlmklvJb36W0ES2GAS6IdojbnGZGH5lF3heqreLWQ== dependencies: fflate "^0.4.8" preact "^10.19.3" @@ -20148,17 +20160,25 @@ socket.io-parser@~4.2.1: "@socket.io/component-emitter" "~3.1.0" debug "~4.3.1" -socket.io@4.6.1: - version "4.6.1" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70" - integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA== +socket.io-parser@~4.2.4: + version "4.2.4" + resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83" + integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== + dependencies: + "@socket.io/component-emitter" "~3.1.0" + debug "~4.3.1" + +socket.io@4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786" + integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ== dependencies: accepts "~1.3.4" base64id "~2.0.0" debug "~4.3.2" - engine.io "~6.4.1" + engine.io "~6.4.2" socket.io-adapter "~2.5.2" - socket.io-parser "~4.2.1" + socket.io-parser "~4.2.4" socks-proxy-agent@^7.0.0: version "7.0.0" @@ -21090,19 +21110,7 @@ tar@6.1.11: mkdirp "^1.0.3" yallist "^4.0.0" -tar@6.1.15: - version "6.1.15" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69" - integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^5.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - -tar@^6.1.11, tar@^6.1.2: +tar@6.2.1, tar@^6.1.11, tar@^6.1.2: version "6.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== @@ -22195,11 +22203,11 @@ vite@^4.5.0: fsevents "~2.3.2" vite@^5.0.0: - version "5.2.13" - resolved "https://registry.yarnpkg.com/vite/-/vite-5.2.13.tgz#945ababcbe3d837ae2479c29f661cd20bc5e1a80" - integrity sha512-SSq1noJfY9pR3I1TUENL3rQYDQCFqgD+lM6fTRAM8Nv6Lsg5hDLaXkjETVeBt+7vZBCMoibD+6IWnT2mJ+Zb/A== + version "5.3.1" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.1.tgz#bb2ca6b5fd7483249d3e86b25026e27ba8a663e6" + integrity sha512-XBmSKRLXLxiaPYamLv3/hnP/KXDai1NDexN0FpkTaZXTfycHvkRHoenpgl/fvuK/kPbB6xAgoyiryAhQNxYmAQ== dependencies: - esbuild "^0.20.1" + esbuild "^0.21.3" postcss "^8.4.38" rollup "^4.13.0" optionalDependencies: @@ -22640,14 +22648,14 @@ write-stream@~0.4.3: readable-stream "~0.0.2" ws@^7.4.6: - version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity 
sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + version "7.5.10" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== ws@^8.13.0: - version "8.13.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" - integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== + version "8.17.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== ws@~8.11.0: version "8.11.0"