Merge branch 'master' into grow-472-remove-use-of-types-package-in-account-portal

This commit is contained in:
melohagan 2024-06-24 15:00:51 +01:00 committed by GitHub
commit f14ffc4609
88 changed files with 2474 additions and 1332 deletions

View File

@ -92,7 +92,8 @@
// differs to external, but the API is broadly the same
"jest/no-conditional-expect": "off",
// have to turn this off to allow function overloading in typescript
"no-dupe-class-members": "off"
"no-dupe-class-members": "off",
"no-redeclare": "off"
}
},
{

View File

@ -1,5 +1,5 @@
{
"version": "2.28.7",
"version": "2.29.1",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit 247f56d455abbd64da17d865275ed978f577549f
Subproject commit b600cca314a5cc9971e44d46047d1a0019b46b08

View File

@ -72,4 +72,4 @@ export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs"
export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory"
export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses"
export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee"
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
export { DEFAULT_BB_DATASOURCE_ID } from "@budibase/shared-core"

View File

@ -1,14 +1,5 @@
export const CONSTANT_INTERNAL_ROW_COLS = [
"_id",
"_rev",
"type",
"createdAt",
"updatedAt",
"tableId",
] as const
export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
export {
CONSTANT_INTERNAL_ROW_COLS,
CONSTANT_EXTERNAL_ROW_COLS,
isInternalColumnName,
} from "@budibase/shared-core"

View File

@ -1,10 +1,10 @@
import { Knex, knex } from "knex"
import * as dbCore from "../db"
import {
isIsoDateString,
isValidFilter,
getNativeSql,
isExternalTable,
isIsoDateString,
isValidFilter,
} from "./utils"
import { SqlStatements } from "./sqlStatements"
import SqlTableQueryBuilder from "./sqlTable"
@ -12,21 +12,21 @@ import {
BBReferenceFieldMetadata,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
JsonFieldMetadata,
JsonTypes,
Operation,
prefixed,
QueryJson,
SqlQuery,
QueryOptions,
RelationshipsJson,
SearchFilters,
SortOrder,
SqlClient,
SqlQuery,
SqlQueryBinding,
Table,
TableSourceType,
INTERNAL_TABLE_SOURCE_ID,
SqlClient,
QueryOptions,
JsonTypes,
prefixed,
SortOrder,
} from "@budibase/types"
import environment from "../environment"
import { helpers } from "@budibase/shared-core"
@ -114,7 +114,7 @@ function generateSelectStatement(
): (string | Knex.Raw)[] | "*" {
const { resource, meta } = json
if (!resource) {
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
@ -410,13 +410,32 @@ class InternalBuilder {
return query
}
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort, paginate } = json
addDistinctCount(
query: Knex.QueryBuilder,
json: QueryJson
): Knex.QueryBuilder {
const table = json.meta.table
const primary = table.primary
const aliases = json.tableAliases
const aliased =
table.name && aliases?.[table.name] ? aliases[table.name] : table.name
if (!primary) {
throw new Error("SQL counting requires primary key to be supplied")
}
return query.countDistinct(`${aliased}.${primary[0]} as total`)
}
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort } = json
const table = json.meta.table
const primaryKey = table.primary
const tableName = getTableName(table)
const aliases = json.tableAliases
const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table")
}
if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) {
const direction =
@ -429,10 +448,9 @@ class InternalBuilder {
query = query.orderBy(`${aliased}.${key}`, direction, nulls)
}
} else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
// @ts-ignore
query = query.orderBy(`${aliased}.${table?.primary[0]}`)
}
// always add sorting by the primary key - make sure result is deterministic
query = query.orderBy(`${aliased}.${primaryKey[0]}`)
return query
}
@ -522,7 +540,7 @@ class InternalBuilder {
})
}
}
return query.limit(BASE_LIMIT)
return query
}
knexWithAlias(
@ -533,13 +551,12 @@ class InternalBuilder {
const tableName = endpoint.entityId
const tableAlias = aliases?.[tableName]
const query = knex(
return knex(
this.tableNameWithSchema(tableName, {
alias: tableAlias,
schema: endpoint.schema,
})
)
return query
}
create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
@ -571,52 +588,93 @@ class InternalBuilder {
return query.insert(parsedBody)
}
read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
let { endpoint, resource, filters, paginate, relationships, tableAliases } =
json
bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder {
const { endpoint, body } = json
let query = this.knexWithAlias(knex, endpoint)
if (!Array.isArray(body)) {
return query
}
const parsedBody = body.map(row => parseBody(row))
if (
this.client === SqlClient.POSTGRES ||
this.client === SqlClient.SQL_LITE ||
this.client === SqlClient.MY_SQL
) {
const primary = json.meta.table.primary
if (!primary) {
throw new Error("Primary key is required for upsert")
}
return query.insert(parsedBody).onConflict(primary).merge()
} else if (this.client === SqlClient.MS_SQL) {
// No upsert or onConflict support in MSSQL yet, see:
// https://github.com/knex/knex/pull/6050
return query.insert(parsedBody)
}
return query.upsert(parsedBody)
}
read(
knex: Knex,
json: QueryJson,
opts: {
limits?: { base: number; query: number }
} = {}
): Knex.QueryBuilder {
let { endpoint, filters, paginate, relationships, tableAliases } = json
const { limits } = opts
const counting = endpoint.operation === Operation.COUNT
const tableName = endpoint.entityId
// select all if not specified
if (!resource) {
resource = { fields: [] }
}
let selectStatement: string | (string | Knex.Raw)[] = "*"
// handle select
if (resource.fields && resource.fields.length > 0) {
// select the resources as the format "table.columnName" - this is what is provided
// by the resource builder further up
selectStatement = generateSelectStatement(json, knex)
}
let foundLimit = limit || BASE_LIMIT
// start building the query
let query = this.knexWithAlias(knex, endpoint, tableAliases)
// handle pagination
let foundOffset: number | null = null
let foundLimit = limits?.query || limits?.base
if (paginate && paginate.page && paginate.limit) {
// @ts-ignore
const page = paginate.page <= 1 ? 0 : paginate.page - 1
const offset = page * paginate.limit
foundLimit = paginate.limit
foundOffset = offset
} else if (paginate && paginate.offset && paginate.limit) {
foundLimit = paginate.limit
foundOffset = paginate.offset
} else if (paginate && paginate.limit) {
foundLimit = paginate.limit
}
// start building the query
let query = this.knexWithAlias(knex, endpoint, tableAliases)
query = query.limit(foundLimit)
if (foundOffset) {
query = query.offset(foundOffset)
// counting should not sort, limit or offset
if (!counting) {
// add the found limit if supplied
if (foundLimit != null) {
query = query.limit(foundLimit)
}
// add overall pagination
if (foundOffset != null) {
query = query.offset(foundOffset)
}
// add sorting to pre-query
// no point in sorting when counting
query = this.addSorting(query, json)
}
// add filters to the query (where)
query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases,
})
// add sorting to pre-query
query = this.addSorting(query, json)
const alias = tableAliases?.[tableName] || tableName
let preQuery = knex({
[alias]: query,
} as any).select(selectStatement) as any
let preQuery: Knex.QueryBuilder = knex({
// the typescript definition for the knex constructor doesn't support this
// syntax, but it is the only way to alias a pre-query result as part of
// a query - there is an alias dictionary type, but it assumes it can only
// be a table name, not a pre-query
[alias]: query as any,
})
// if counting, use distinct count, else select
preQuery = !counting
? preQuery.select(generateSelectStatement(json, knex))
: this.addDistinctCount(preQuery, json)
// have to add after as well (this breaks MS-SQL)
if (this.client !== SqlClient.MS_SQL) {
if (this.client !== SqlClient.MS_SQL && !counting) {
preQuery = this.addSorting(preQuery, json)
}
// handle joins
@ -627,6 +685,13 @@ class InternalBuilder {
endpoint.schema,
tableAliases
)
// add a base limit over the whole query
// if counting we can't set this limit
if (limits?.base) {
query = query.limit(limits.base)
}
return this.addFilters(query, filters, json.meta.table, {
relationship: true,
aliases: tableAliases,
@ -671,6 +736,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
this.limit = limit
}
private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) {
const sqlClient = this.getSqlClient()
if (opts?.disableBindings) {
return { sql: query.toString() }
} else {
let native = getNativeSql(query)
if (sqlClient === SqlClient.SQL_LITE) {
native = convertBooleans(native)
}
return native
}
}
/**
* @param json The JSON query DSL which is to be converted to SQL.
* @param opts extra options which are to be passed into the query builder, e.g. disableReturning
@ -694,7 +772,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
query = builder.create(client, json, opts)
break
case Operation.READ:
query = builder.read(client, json, this.limit)
query = builder.read(client, json, {
limits: {
query: this.limit,
base: BASE_LIMIT,
},
})
break
case Operation.COUNT:
// read without any limits to count
query = builder.read(client, json)
break
case Operation.UPDATE:
query = builder.update(client, json, opts)
@ -705,6 +792,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
case Operation.BULK_CREATE:
query = builder.bulkCreate(client, json)
break
case Operation.BULK_UPSERT:
query = builder.bulkUpsert(client, json)
break
case Operation.CREATE_TABLE:
case Operation.UPDATE_TABLE:
case Operation.DELETE_TABLE:
@ -713,15 +803,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
throw `Operation type is not supported by SQL query builder`
}
if (opts?.disableBindings) {
return { sql: query.toString() }
} else {
let native = getNativeSql(query)
if (sqlClient === SqlClient.SQL_LITE) {
native = convertBooleans(native)
}
return native
}
return this.convertToNative(query, opts)
}
async getReturningRow(queryFn: QueryFunction, json: QueryJson) {
@ -797,6 +879,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
await this.getReturningRow(queryFn, this.checkLookupKeys(id, json))
)
}
if (operation === Operation.COUNT) {
return results
}
if (operation !== Operation.READ) {
return row
}
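The new bulkUpsert above leans on Knex's insert().onConflict().merge() chain for the Postgres, MySQL and SQLite clients, with MSSQL falling back to a plain insert. A minimal standalone sketch of that pattern, assuming a Postgres connection; the table and column names here are illustrative placeholders, not from this PR:

import { knex } from "knex"

const client = knex({ client: "pg", connection: process.env.DATABASE_URL })

async function bulkUpsert(rows: { id: number; name: string }[]) {
  // insert the batch; on a primary-key conflict, merge the incoming
  // values into the existing row instead of failing
  return client("my_table").insert(rows).onConflict("id").merge()
}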

View File

@ -162,6 +162,7 @@
max-height: 100%;
}
.modal-inner-wrapper {
padding: 40px;
flex: 1 1 auto;
display: flex;
flex-direction: row;
@ -176,7 +177,6 @@
border: 2px solid var(--spectrum-global-color-gray-200);
overflow: visible;
max-height: none;
margin: 40px 0;
transform: none;
--spectrum-dialog-confirm-border-radius: var(
--spectrum-global-dimension-size-100

View File

@ -16,6 +16,8 @@
DatePicker,
DrawerContent,
Toggle,
Icon,
Divider,
} from "@budibase/bbui"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
import { automationStore, selectedAutomation, tables } from "stores/builder"
@ -89,6 +91,8 @@
? [hbAutocomplete([...bindingsToCompletions(bindings, codeMode)])]
: []
let testDataRowVisibility = {}
const getInputData = (testData, blockInputs) => {
// Test data is not cloned for reactivity
let newInputData = testData || cloneDeep(blockInputs)
@ -196,7 +200,8 @@
(automation.trigger?.event === "row:update" ||
automation.trigger?.event === "row:save")
) {
if (name !== "id" && name !== "revision") return `trigger.row.${name}`
let noRowKeywordBindings = ["id", "revision", "oldRow"]
if (!noRowKeywordBindings.includes(name)) return `trigger.row.${name}`
}
/* End special cases for generating custom schemas based on triggers */
@ -372,7 +377,11 @@
function getFieldLabel(key, value) {
const requiredSuffix = requiredProperties.includes(key) ? "*" : ""
return `${value.title || (key === "row" ? "Table" : key)} ${requiredSuffix}`
return `${value.title || (key === "row" ? "Row" : key)} ${requiredSuffix}`
}
function toggleTestDataRowVisibility(key) {
testDataRowVisibility[key] = !testDataRowVisibility[key]
}
function handleAttachmentParams(keyValueObj) {
@ -607,20 +616,48 @@
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "row"}
<RowSelector
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{#if isTestModal}
<div class="align-horizontally">
<Icon
name={testDataRowVisibility[key] ? "Remove" : "Add"}
hoverable
on:click={() => toggleTestDataRowVisibility(key)}
/>
<Label size="XL">{label}</Label>
</div>
{#if testDataRowVisibility[key]}
<RowSelector
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{/if}
<Divider />
{:else}
<RowSelector
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{/if}
{:else if value.customType === "webhookUrl"}
<WebhookDisplay
on:change={e => onChange(e, key)}
@ -736,6 +773,12 @@
width: 320px;
}
.align-horizontally {
display: flex;
gap: var(--spacing-s);
align-items: center;
}
.fields {
display: flex;
flex-direction: column;

View File

@ -17,6 +17,8 @@
SWITCHABLE_TYPES,
ValidColumnNameRegex,
helpers,
CONSTANT_INTERNAL_ROW_COLS,
CONSTANT_EXTERNAL_ROW_COLS,
} from "@budibase/shared-core"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
@ -52,7 +54,6 @@
const DATE_TYPE = FieldType.DATETIME
const dispatch = createEventDispatcher()
const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
const { dispatch: gridDispatch, rows } = getContext("grid")
export let field
@ -487,20 +488,27 @@
})
}
const newError = {}
const prohibited = externalTable
? CONSTANT_EXTERNAL_ROW_COLS
: CONSTANT_INTERNAL_ROW_COLS
if (!externalTable && fieldInfo.name?.startsWith("_")) {
newError.name = `Column name cannot start with an underscore.`
} else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) {
newError.name = `Illegal character; must be alpha-numeric.`
} else if (PROHIBITED_COLUMN_NAMES.some(name => fieldInfo.name === name)) {
newError.name = `${PROHIBITED_COLUMN_NAMES.join(
} else if (
prohibited.some(
name => fieldInfo?.name?.toLowerCase() === name.toLowerCase()
)
) {
newError.name = `${prohibited.join(
", "
)} are not allowed as column names`
)} are not allowed as column names - case insensitive.`
} else if (inUse($tables.selected, fieldInfo.name, originalName)) {
newError.name = `Column name already in use.`
}
if (fieldInfo.type === FieldType.AUTO && !fieldInfo.subtype) {
newError.subtype = `Auto Column requires a type`
newError.subtype = `Auto Column requires a type.`
}
if (fieldInfo.fieldName && fieldInfo.tableId) {

View File

@ -0,0 +1,8 @@
<div class="root">This action doesn't require any settings.</div>
<style>
.root {
max-width: 400px;
margin: 0 auto;
}
</style>

View File

@ -53,6 +53,12 @@
placeholder="Are you sure you want to delete?"
bind:value={parameters.confirmText}
/>
<Label small>Confirm Text</Label>
<Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
<Label small>Cancel Text</Label>
<Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
{/if}
</div>
</div>

View File

@ -83,6 +83,12 @@
placeholder="Are you sure you want to duplicate this row?"
bind:value={parameters.confirmText}
/>
<Label small>Confirm Text</Label>
<Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
<Label small>Cancel Text</Label>
<Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
{/if}
</div>

View File

@ -74,6 +74,18 @@
placeholder="Are you sure you want to execute this query?"
bind:value={parameters.confirmText}
/>
<Input
label="Confirm Text"
placeholder="Confirm"
bind:value={parameters.confirmButtonText}
/>
<Input
label="Cancel Text"
placeholder="Cancel"
bind:value={parameters.cancelButtonText}
/>
{/if}
{#if query?.parameters?.length > 0}

View File

@ -0,0 +1,36 @@
<script>
import { Select, Label } from "@budibase/bbui"
import { selectedScreen } from "stores/builder"
import { findAllMatchingComponents } from "helpers/components"
export let parameters
$: modalOptions = getModalOptions($selectedScreen)
const getModalOptions = screen => {
const modalComponents = findAllMatchingComponents(screen.props, component =>
component._component.endsWith("/modal")
)
return modalComponents.map(modal => ({
label: modal._instanceName,
value: modal._id,
}))
}
</script>
<div class="root">
<Label small>Modal</Label>
<Select bind:value={parameters.id} options={modalOptions} />
</div>
<style>
.root {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
grid-template-columns: 60px 1fr;
align-items: center;
max-width: 400px;
margin: 0 auto;
}
</style>

View File

@ -80,6 +80,12 @@
placeholder="Are you sure you want to save this row?"
bind:value={parameters.confirmText}
/>
<Label small>Confirm Text</Label>
<Input placeholder="Confirm" bind:value={parameters.confirmButtonText} />
<Label small>Cancel Text</Label>
<Input placeholder="Cancel" bind:value={parameters.cancelButtonText} />
{/if}
</div>

View File

@ -21,5 +21,7 @@ export { default as ShowNotification } from "./ShowNotification.svelte"
export { default as PromptUser } from "./PromptUser.svelte"
export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
export { default as CloseSidePanel } from "./CloseSidePanel.svelte"
export { default as OpenModal } from "./OpenModal.svelte"
export { default as CloseModal } from "./CloseModal.svelte"
export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
export { default as DownloadFile } from "./DownloadFile.svelte"

View File

@ -157,6 +157,18 @@
"component": "CloseSidePanel",
"dependsOnFeature": "sidePanel"
},
{
"name": "Open Modal",
"type": "application",
"component": "OpenModal",
"dependsOnFeature": "modal"
},
{
"name": "Close Modal",
"type": "application",
"component": "CloseModal",
"dependsOnFeature": "modal"
},
{
"name": "Clear Row Selection",
"type": "data",

View File

@ -59,7 +59,14 @@
// Build up list of illegal children from ancestors
let illegalChildren = definition.illegalChildren || []
path.forEach(ancestor => {
if (ancestor._component === `@budibase/standard-components/sidepanel`) {
// Sidepanels and modals can be nested anywhere in the component tree, but really they are always rendered at the top level.
// Because of this, it doesn't make sense to carry over any parent illegal children to them, so the array is reset here.
if (
[
"@budibase/standard-components/sidepanel",
"@budibase/standard-components/modal",
].includes(ancestor._component)
) {
illegalChildren = []
}
const def = componentStore.getDefinition(ancestor._component)

View File

@ -14,7 +14,7 @@
{
"name": "Layout",
"icon": "ClassicGridView",
"children": ["container", "section", "sidepanel"]
"children": ["container", "section", "sidepanel", "modal"]
},
{
"name": "Data",

View File

@ -125,7 +125,14 @@ export class ScreenStore extends BudiStore {
return
}
if (type === "@budibase/standard-components/sidepanel") {
// Sidepanels and modals can be nested anywhere in the component tree, but really they are always rendered at the top level.
// Because of this, it doesn't make sense to carry over any parent illegal children to them, so the array is reset here.
if (
[
"@budibase/standard-components/sidepanel",
"@budibase/standard-components/modal",
].includes(type)
) {
illegalChildren = []
}

View File

@ -32,7 +32,7 @@
"pouchdb": "7.3.0",
"pouchdb-replication-stream": "1.2.9",
"randomstring": "1.1.5",
"tar": "6.1.15",
"tar": "6.2.1",
"yaml": "^2.1.1"
},
"devDependencies": {

View File

@ -11,6 +11,7 @@
"continueIfAction": true,
"showNotificationAction": true,
"sidePanel": true,
"modal": true,
"skeletonLoader": true
},
"typeSupportPresets": {
@ -6975,7 +6976,7 @@
"name": "Side Panel",
"icon": "RailRight",
"hasChildren": true,
"illegalChildren": ["section", "sidepanel"],
"illegalChildren": ["section", "sidepanel", "modal"],
"showEmptyState": false,
"draggable": false,
"info": "Side panels are hidden by default. They will only be revealed when triggered by the 'Open Side Panel' action.",
@ -6993,6 +6994,52 @@
}
]
},
"modal": {
"name": "Modal",
"icon": "MBox",
"hasChildren": true,
"illegalChildren": ["section", "modal", "sidepanel"],
"showEmptyState": false,
"draggable": false,
"info": "Modals are hidden by default. They will only be revealed when triggered by the 'Open Modal' action.",
"settings": [
{
"type": "boolean",
"key": "ignoreClicksOutside",
"label": "Ignore clicks outside",
"defaultValue": false
},
{
"type": "event",
"key": "onClose",
"label": "On close"
},
{
"type": "select",
"label": "Size",
"key": "size",
"defaultValue": "small",
"options": [
{
"label": "Small",
"value": "small"
},
{
"label": "Medium",
"value": "medium"
},
{
"label": "Large",
"value": "large"
},
{
"label": "Fullscreen",
"value": "fullscreen"
}
]
}
]
},
"rowexplorer": {
"block": true,
"name": "Row Explorer Block",

View File

@ -20,6 +20,7 @@
devToolsEnabled,
environmentStore,
sidePanelStore,
modalStore,
} from "stores"
import NotificationDisplay from "components/overlay/NotificationDisplay.svelte"
import ConfirmationDisplay from "components/overlay/ConfirmationDisplay.svelte"
@ -104,10 +105,15 @@
})
}
const handleHashChange = () => {
const { open } = $sidePanelStore
if (open) {
const { open: sidePanelOpen } = $sidePanelStore
if (sidePanelOpen) {
sidePanelStore.actions.close()
}
const { open: modalOpen } = $modalStore
if (modalOpen) {
modalStore.actions.close()
}
}
window.addEventListener("hashchange", handleHashChange)
return () => {

View File

@ -12,6 +12,7 @@
linkable,
builderStore,
sidePanelStore,
modalStore,
appStore,
} = sdk
const context = getContext("context")
@ -77,6 +78,7 @@
!$builderStore.inBuilder &&
$sidePanelStore.open &&
!$sidePanelStore.ignoreClicksOutside
$: screenId = $builderStore.inBuilder
? `${$builderStore.screen?._id}-screen`
: "screen"
@ -198,6 +200,7 @@
const handleClickLink = () => {
mobileOpen = false
sidePanelStore.actions.close()
modalStore.actions.close()
}
</script>

View File

@ -1,7 +1,7 @@
<script>
import { getContext } from "svelte"
const { linkable, styleable, builderStore, sidePanelStore } =
const { linkable, styleable, builderStore, sidePanelStore, modalStore } =
getContext("sdk")
const component = getContext("component")
@ -29,6 +29,11 @@
// overrides the color when it's passed as inline style.
$: styles = enrichStyles($component.styles, color)
const handleUrlChange = () => {
sidePanelStore.actions.close()
modalStore.actions.close()
}
const getSanitizedUrl = (url, externalLink, newTab) => {
if (!url) {
return externalLink || newTab ? "#/" : "/"
@ -109,7 +114,7 @@
class:italic
class:underline
class="align--{align || 'left'} size--{size || 'M'}"
on:click={sidePanelStore.actions.close}
on:click={handleUrlChange}
>
{componentText}
</a>

View File

@ -0,0 +1,141 @@
<script>
import { getContext } from "svelte"
import { Modal, Icon } from "@budibase/bbui"
const component = getContext("component")
const { styleable, modalStore, builderStore, dndIsDragging } =
getContext("sdk")
export let onClose
export let ignoreClicksOutside
export let size
let modal
// Open modal automatically in builder
$: {
if ($builderStore.inBuilder) {
if (
$component.inSelectedPath &&
$modalStore.contentId !== $component.id
) {
modalStore.actions.open($component.id)
} else if (
!$component.inSelectedPath &&
$modalStore.contentId === $component.id &&
!$dndIsDragging
) {
modalStore.actions.close()
}
}
}
$: open = $modalStore.contentId === $component.id
const handleModalClose = async () => {
if (onClose) {
await onClose()
}
modalStore.actions.close()
}
const handleOpen = (open, modal) => {
if (!modal) return
if (open) {
modal.show()
} else {
modal.hide()
}
}
$: handleOpen(open, modal)
</script>
<!-- Conditional displaying in the builder is necessary otherwise previews don't update properly upon component deletion -->
{#if !$builderStore.inBuilder || open}
<Modal
on:cancel={handleModalClose}
bind:this={modal}
disableCancel={$builderStore.inBuilder}
zIndex={2}
>
<div use:styleable={$component.styles} class={`modal-content ${size}`}>
<div class="modal-header">
<Icon
color="var(--spectrum-global-color-gray-800)"
name="Close"
hoverable
on:click={handleModalClose}
/>
</div>
<div class="modal-main">
<div class="modal-main-inner">
<slot />
</div>
</div>
</div>
</Modal>
{/if}
<style>
.modal-content {
display: flex;
flex-direction: column;
max-width: 100%;
box-sizing: border-box;
padding: 12px 0px 40px;
}
.small {
width: 400px;
min-height: 200px;
}
.medium {
width: 600px;
min-height: 400px;
}
.large {
width: 800px;
min-height: 600px;
}
.fullscreen {
width: calc(100vw - 80px);
min-height: calc(100vh - 80px);
}
.modal-header {
display: flex;
flex-direction: row;
justify-content: flex-end;
flex-shrink: 0;
flex-grow: 0;
padding: 0 12px 12px;
box-sizing: border-box;
}
.modal-main {
padding: 0 40px;
flex-grow: 1;
display: flex;
flex-direction: column;
}
.modal-main :global(.component > *) {
max-width: 100%;
}
.modal-main-inner {
flex-grow: 1;
display: flex;
flex-direction: column;
word-break: break-word;
}
.modal-main-inner:empty {
border-radius: 3px;
border: 2px dashed var(--spectrum-global-color-gray-400);
}
</style>

View File

@ -31,41 +31,23 @@
let schema
$: formattedFields = convertOldFieldFormat(fields)
$: fieldsOrDefault = getDefaultFields(formattedFields, schema)
$: fetchSchema(dataSource)
$: id = $component.id
// We could simply spread $$props into the inner form and append our
// additions, but that would create svelte warnings about unused props and
// make maintenance in future more confusing as we typically always have a
// proper mapping of schema settings to component exports, without having to
// search multiple files
$: innerProps = {
dataSource,
actionUrl,
actionType,
size,
disabled,
fields: fieldsOrDefault,
title,
description,
schema,
notificationOverride,
buttons:
buttons ||
Utils.buildFormBlockButtonConfig({
_id: id,
showDeleteButton,
showSaveButton,
saveButtonLabel,
deleteButtonLabel,
notificationOverride,
actionType,
actionUrl,
dataSource,
}),
buttonPosition: buttons ? buttonPosition : "top",
}
$: formattedFields = convertOldFieldFormat(fields)
$: fieldsOrDefault = getDefaultFields(formattedFields, schema)
$: buttonsOrDefault =
buttons ||
Utils.buildFormBlockButtonConfig({
_id: id,
showDeleteButton,
showSaveButton,
saveButtonLabel,
deleteButtonLabel,
notificationOverride,
actionType,
actionUrl,
dataSource,
})
// Provide additional data context for live binding eval
export const getAdditionalDataContext = () => {
@ -123,5 +105,18 @@
</script>
<FormBlockWrapper {actionType} {dataSource} {rowId} {noRowsMessage}>
<InnerFormBlock {...innerProps} />
<InnerFormBlock
{dataSource}
{actionUrl}
{actionType}
{size}
{disabled}
fields={fieldsOrDefault}
{title}
{description}
{schema}
{notificationOverride}
buttons={buttonsOrDefault}
buttonPosition={buttons ? buttonPosition : "top"}
/>
</FormBlockWrapper>

View File

@ -91,15 +91,13 @@
{#if description}
<BlockComponent type="text" props={{ text: description }} order={1} />
{/if}
{#key fields}
<BlockComponent type="container">
<div class="form-block fields" class:mobile={$context.device.mobile}>
{#each fields as field, idx}
<FormBlockComponent {field} {schema} order={idx} />
{/each}
</div>
</BlockComponent>
{/key}
<BlockComponent type="container">
<div class="form-block fields" class:mobile={$context.device.mobile}>
{#each fields as field, idx}
<FormBlockComponent {field} {schema} order={idx} />
{/each}
</div>
</BlockComponent>
</BlockComponent>
{#if buttonPosition === "bottom"}
<BlockComponent

View File

@ -37,6 +37,7 @@ export { default as markdownviewer } from "./MarkdownViewer.svelte"
export { default as embeddedmap } from "./embedded-map/EmbeddedMap.svelte"
export { default as grid } from "./Grid.svelte"
export { default as sidepanel } from "./SidePanel.svelte"
export { default as modal } from "./Modal.svelte"
export { default as gridblock } from "./GridBlock.svelte"
export * from "./charts"
export * from "./forms"

View File

@ -8,6 +8,8 @@
<ModalContent
title={$confirmationStore.title}
onConfirm={confirmationStore.actions.confirm}
confirmText={$confirmationStore.confirmButtonText}
cancelText={$confirmationStore.cancelButtonText}
>
{$confirmationStore.text}
</ModalContent>

View File

@ -57,7 +57,9 @@
return
}
nextState.indicators[idx].visible =
nextState.indicators[idx].insideSidePanel || entries[0].isIntersecting
nextState.indicators[idx].insideModal ||
nextState.indicators[idx].insideSidePanel ||
entries[0].isIntersecting
if (++callbackCount === observers.length) {
state = nextState
updating = false
@ -139,6 +141,7 @@
height: elBounds.height + 4,
visible: false,
insideSidePanel: !!child.closest(".side-panel"),
insideModal: !!child.closest(".modal-content"),
})
})
}

View File

@ -11,6 +11,7 @@ import {
currentRole,
environmentStore,
sidePanelStore,
modalStore,
dndIsDragging,
confirmationStore,
roleStore,
@ -53,6 +54,7 @@ export default {
componentStore,
environmentStore,
sidePanelStore,
modalStore,
dndIsDragging,
currentRole,
confirmationStore,

View File

@ -4,6 +4,8 @@ const initialState = {
showConfirmation: false,
title: null,
text: null,
confirmButtonText: null,
cancelButtonText: null,
onConfirm: null,
onCancel: null,
}
@ -11,11 +13,20 @@ const initialState = {
const createConfirmationStore = () => {
const store = writable(initialState)
const showConfirmation = (title, text, onConfirm, onCancel) => {
const showConfirmation = (
title,
text,
onConfirm,
onCancel,
confirmButtonText,
cancelButtonText
) => {
store.set({
showConfirmation: true,
title,
text,
confirmButtonText,
cancelButtonText,
onConfirm,
onCancel,
})

View File

@ -27,6 +27,7 @@ export {
dndIsDragging,
} from "./dnd"
export { sidePanelStore } from "./sidePanel"
export { modalStore } from "./modal"
export { hoverStore } from "./hover"
// Context stores are layered and duplicated, so it is not a singleton

View File

@ -0,0 +1,32 @@
import { writable } from "svelte/store"
export const createModalStore = () => {
const initialState = {
contentId: null,
}
const store = writable(initialState)
const open = id => {
store.update(state => {
state.contentId = id
return state
})
}
const close = () => {
store.update(state => {
state.contentId = null
return state
})
}
return {
subscribe: store.subscribe,
actions: {
open,
close,
},
}
}
export const modalStore = createModalStore()
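For orientation, a short sketch of how this store is consumed (the import alias matches the "stores" path used elsewhere in this diff; the id value is hypothetical):

import { modalStore } from "stores"

// open a modal by the id of its component
modalStore.actions.open("my-modal-component-id")

// react to open/close state
const unsubscribe = modalStore.subscribe(state => {
  console.log("open modal:", state.contentId) // null when closed
})

modalStore.actions.close()
unsubscribe()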

View File

@ -12,6 +12,7 @@ import {
uploadStore,
rowSelectionStore,
sidePanelStore,
modalStore,
} from "stores"
import { API } from "api"
import { ActionTypes } from "constants"
@ -436,6 +437,17 @@ const closeSidePanelHandler = () => {
sidePanelStore.actions.close()
}
const openModalHandler = action => {
const { id } = action.parameters
if (id) {
modalStore.actions.open(id)
}
}
const closeModalHandler = () => {
modalStore.actions.close()
}
const downloadFileHandler = async action => {
const { url, fileName } = action.parameters
try {
@ -499,6 +511,8 @@ const handlerMap = {
["Prompt User"]: promptUserHandler,
["Open Side Panel"]: openSidePanelHandler,
["Close Side Panel"]: closeSidePanelHandler,
["Open Modal"]: openModalHandler,
["Close Modal"]: closeModalHandler,
["Download File"]: downloadFileHandler,
}
@ -508,6 +522,7 @@ const confirmTextMap = {
["Execute Query"]: "Are you sure you want to execute this query?",
["Trigger Automation"]: "Are you sure you want to trigger this automation?",
["Prompt User"]: "Are you sure you want to continue?",
["Duplicate Row"]: "Are you sure you want to duplicate this row?",
}
/**
@ -568,6 +583,11 @@ export const enrichButtonActions = (actions, context) => {
const defaultTitleText = action["##eventHandlerType"]
const customTitleText =
action.parameters?.customTitleText || defaultTitleText
const cancelButtonText =
action.parameters?.cancelButtonText || "Cancel"
const confirmButtonText =
action.parameters?.confirmButtonText || "Confirm"
confirmationStore.actions.showConfirmation(
customTitleText,
confirmText,
@ -598,7 +618,9 @@ export const enrichButtonActions = (actions, context) => {
},
() => {
resolve(false)
}
},
confirmButtonText,
cancelButtonText
)
})
}
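Putting the extended signature together, a sketch of a direct call (argument values are illustrative; the parameter order follows the showConfirmation definition earlier in this diff):

confirmationStore.actions.showConfirmation(
  "Delete Row",                        // title
  "Are you sure you want to delete?",  // text
  async () => { /* run the confirmed action */ },
  () => { /* user cancelled */ },
  "Confirm",                           // confirmButtonText (new)
  "Cancel"                             // cancelButtonText (new)
)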

View File

@ -18,7 +18,7 @@
import FilterUsers from "./FilterUsers.svelte"
import { getFields } from "../utils/searchFields"
const { OperatorOptions } = Constants
const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants
export let schemaFields
export let filters = []
@ -28,6 +28,23 @@
export let allowBindings = false
export let filtersLabel = "Filters"
$: {
if (
tables.find(
table =>
table._id === datasource.tableId &&
table.sourceId === DEFAULT_BB_DATASOURCE_ID
) &&
!schemaFields.some(field => field.name === "_id")
) {
schemaFields = [
...schemaFields,
{ name: "_id", type: "string" },
{ name: "_rev", type: "string" },
]
}
}
$: matchAny = filters?.find(filter => filter.operator === "allOr") != null
$: onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
@ -35,7 +52,6 @@
$: fieldFilters = filters.filter(
filter => filter.operator !== "allOr" && !filter.onEmptyFilter
)
const behaviourOptions = [
{ value: "and", label: "Match all filters" },
{ value: "or", label: "Match any filter" },
@ -44,7 +60,6 @@
{ value: "all", label: "Return all table rows" },
{ value: "none", label: "Return no rows" },
]
const context = getContext("context")
$: fieldOptions = getFields(tables, schemaFields || [], {

View File

@ -1,7 +1,11 @@
/**
* Operator options for lucene queries
*/
export { OperatorOptions, SqlNumberTypeRangeMap } from "@budibase/shared-core"
export {
OperatorOptions,
SqlNumberTypeRangeMap,
DEFAULT_BB_DATASOURCE_ID,
} from "@budibase/shared-core"
export { Feature as Features } from "@budibase/types"
import { BpmCorrelationKey } from "@budibase/shared-core"
import { FieldType, BBReferenceFieldSubType } from "@budibase/types"

View File

@ -161,6 +161,9 @@ export const buildFormBlockButtonConfig = props => {
{
"##eventHandlerType": "Close Side Panel",
},
{
"##eventHandlerType": "Close Modal",
},
// Clear a create form once submitted
...(actionType !== "Create"
? []

View File

@ -109,8 +109,8 @@
"serialize-error": "^7.0.1",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "4.6.1",
"tar": "6.1.15",
"socket.io": "4.6.2",
"tar": "6.2.1",
"to-json-schema": "0.2.5",
"uuid": "^8.3.2",
"validate.js": "0.13.1",

View File

@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support');
INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE);
INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993);
INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993);
-- insert a lot of tasks for testing
WITH RECURSIVE generate_series AS (
SELECT 1 AS n
UNION ALL
SELECT n + 1 FROM generate_series WHERE n < 6000
),
random_data AS (
SELECT
n,
(random() * 9 + 1)::int AS ExecutorID,
(random() * 9 + 1)::int AS QaID,
'assembling' AS TaskName,
(random() < 0.5) AS Completed
FROM generate_series
)
INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed)
SELECT ExecutorID, QaID, TaskName, Completed
FROM random_data;
INSERT INTO Products (ProductName) VALUES ('Computers');
INSERT INTO Products (ProductName) VALUES ('Laptops');
INSERT INTO Products (ProductName) VALUES ('Chairs');

View File

@ -7,6 +7,7 @@ import {
FieldType,
FilterType,
IncludeRelationship,
isManyToOne,
OneToManyRelationshipFieldMetadata,
Operation,
PaginationJson,
@ -16,29 +17,33 @@ import {
SortJson,
SortType,
Table,
isManyToOne,
} from "@budibase/types"
import {
breakExternalTableId,
breakRowIdField,
convertRowId,
generateRowIdField,
isRowId,
isSQL,
generateRowIdField,
} from "../../../integrations/utils"
import {
buildExternalRelationships,
buildSqlFieldList,
generateIdForRow,
sqlOutputProcessing,
isKnexEmptyReadResponse,
isManyToMany,
sqlOutputProcessing,
} from "./utils"
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import {
getDatasourceAndQuery,
processRowCountResponse,
} from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core"
import sdk from "../../../sdk"
import env from "../../../environment"
import { makeExternalQuery } from "../../../integrations/base/query"
export interface ManyRelationship {
tableId?: string
@ -60,6 +65,13 @@ export interface RunConfig {
includeSqlRelationships?: IncludeRelationship
}
export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ
? Row[]
: T extends Operation.COUNT
? number
: { row: Row; table: Table }
function buildFilters(
id: string | undefined | string[],
filters: SearchFilters,
@ -223,9 +235,6 @@ function isEditableColumn(column: FieldSchema) {
return !(isExternalAutoColumn || isFormula)
}
export type ExternalRequestReturnType<T extends Operation> =
T extends Operation.READ ? Row[] : { row: Row; table: Table }
export class ExternalRequest<T extends Operation> {
private readonly operation: T
private readonly tableId: string
@ -428,7 +437,9 @@ export class ExternalRequest<T extends Operation> {
})
// this is the response from knex if no rows found
const rows: Row[] =
!Array.isArray(response) || response?.[0].read ? [] : response
!Array.isArray(response) || isKnexEmptyReadResponse(response)
? []
: response
const storeTo = isManyToMany(field)
? field.throughFrom || linkPrimaryKey
: fieldName
@ -517,7 +528,7 @@ export class ExternalRequest<T extends Operation> {
// finally cleanup anything that needs to be removed
for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) {
const table: Table | undefined = this.getTable(tableId)
// if its not the foreign key skip it, nothing to do
// if it's not the foreign key skip it, nothing to do
if (
!table ||
(!isMany && table.primary && table.primary.indexOf(colName) !== -1)
@ -662,12 +673,14 @@ export class ExternalRequest<T extends Operation> {
}
// aliasing can be disabled fully if desired
let response
if (env.SQL_ALIASING_DISABLE) {
response = await getDatasourceAndQuery(json)
} else {
const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
response = await aliasing.queryWithAliasing(json)
const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
let response = env.SQL_ALIASING_DISABLE
? await getDatasourceAndQuery(json)
: await aliasing.queryWithAliasing(json, makeExternalQuery)
// if it's a counting operation there will be no more processing, just return the number
if (this.operation === Operation.COUNT) {
return processRowCountResponse(response) as ExternalRequestReturnType<T>
}
const responseRows = Array.isArray(response) ? response : []

View File

@ -39,9 +39,10 @@ export async function handleRequest<T extends Operation>(
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const tableId = utils.getTableId(ctx)
const { _id, ...rowData } = ctx.request.body
const table = await sdk.tables.getTable(tableId)
const { row: dataToUpdate } = await inputProcessing(
ctx.user?._id,
cloneDeep(table),
@ -79,6 +80,7 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
...response,
row: enrichedRow,
table,
oldRow: beforeRow,
}
}

View File

@ -55,13 +55,13 @@ export async function patch(
return save(ctx)
}
try {
const { row, table } = await pickApi(tableId).patch(ctx)
const { row, table, oldRow } = await pickApi(tableId).patch(ctx)
if (!row) {
ctx.throw(404, "Row not found")
}
ctx.status = 200
ctx.eventEmitter &&
ctx.eventEmitter.emitRow(`row:update`, appId, row, table)
ctx.eventEmitter.emitRow(`row:update`, appId, row, table, oldRow)
ctx.message = `${table.name} updated successfully.`
ctx.body = row
gridSocket?.emitRowUpdate(ctx, row)
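Since row:update now carries the previous row, downstream listeners can diff old against new values. A sketch of such a listener (the emitter import path is an assumption; the event shape follows the row tests later in this diff):

import emitter from "../../events" // assumed path

emitter.on("row:update", (event: any) => {
  // event.row holds the updated values, event.oldRow the values before the patch
  console.log(`name changed: ${event.oldRow?.name} -> ${event.row?.name}`)
})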

View File

@ -85,13 +85,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// the row has been updated, need to put it into the ctx
ctx.request.body = row as any
await userController.updateMetadata(ctx as any)
return { row: ctx.body as Row, table }
return { row: ctx.body as Row, table, oldRow }
}
return finaliseRow(table, row, {
const result = await finaliseRow(table, row, {
oldTable: dbTable,
updateFormula: true,
})
return { ...result, oldRow }
}
export async function find(ctx: UserCtx): Promise<Row> {

View File

@ -1,4 +1,6 @@
import {
DatasourcePlusQueryResponse,
DSPlusOperation,
FieldType,
ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
@ -192,3 +194,11 @@ export function buildSqlFieldList(
}
return fields
}
export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
return (
!Array.isArray(resp) ||
resp.length === 0 ||
(DSPlusOperation.READ in resp[0] && resp[0].read === true)
)
}

View File

@ -14,7 +14,7 @@ import {
processDates,
processFormulas,
} from "../../../../utilities/rowProcessor"
import { updateRelationshipColumns } from "./sqlUtils"
import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils"
import {
basicProcessing,
generateIdForRow,
@ -137,7 +137,7 @@ export async function sqlOutputProcessing(
relationships: RelationshipsJson[],
opts?: { sqs?: boolean }
): Promise<Row[]> {
if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) {
if (isKnexEmptyReadResponse(rows)) {
return []
}
let finalRows: { [key: string]: Row } = {}

View File

@ -69,6 +69,7 @@ export async function searchView(
limit: body.limit,
bookmark: body.bookmark,
paginate: body.paginate,
countRows: body.countRows,
}
const result = await sdk.rows.search(searchOptions)
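The new countRows flag flows straight from the request body into sdk.rows.search. A sketch of a matching request payload (route shape taken from the v2 view search endpoint registered later in this diff; values illustrative):

// POST /api/v2/views/:viewId/search
const body = {
  paginate: true,
  limit: 4,
  query: {},
  countRows: true, // ask the search to also return totalRows
}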

View File

@ -98,7 +98,7 @@ export async function bulkImport(
table = processed.table
}
await handleRequest(Operation.BULK_CREATE, table._id!, {
await handleRequest(Operation.BULK_UPSERT, table._id!, {
rows: parsedRows,
})
await events.rows.imported(table, parsedRows.length)

View File

@ -86,6 +86,7 @@ router
router.post(
"/api/v2/views/:viewId/search",
internalSearchValidator(),
authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"),
rowController.views.searchView
)

View File

@ -13,6 +13,7 @@ import { events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import { Automation } from "@budibase/types"
import { mocks } from "@budibase/backend-core/tests"
import { FilterConditions } from "../../../automations/steps/filter"
const MAX_RETRIES = 4
let {
@ -21,6 +22,7 @@ let {
automationTrigger,
automationStep,
collectAutomation,
filterAutomation,
} = setup.structures
describe("/automations", () => {
@ -155,7 +157,12 @@ describe("/automations", () => {
automation.appId = config.appId
automation = await config.createAutomation(automation)
await setup.delay(500)
const res = await testAutomation(config, automation)
const res = await testAutomation(config, automation, {
row: {
name: "Test",
description: "TEST",
},
})
expect(events.automation.tested).toHaveBeenCalledTimes(1)
// this looks a bit mad but we don't actually have a way to wait for a response from the automation to
// know that it has finished all of its actions - this is currently the best way
@ -436,4 +443,38 @@ describe("/automations", () => {
expect(res).toEqual(true)
})
})
describe("Update Row Old / New Row comparison", () => {
it.each([
{ oldCity: "asdsadsadsad", newCity: "new" },
{ oldCity: "Belfast", newCity: "Belfast" },
])(
"triggers an update row automation and compares new to old rows with old city '%s' and new city '%s'",
async ({ oldCity, newCity }) => {
const expectedResult = oldCity === newCity
let table = await config.createTable()
let automation = await filterAutomation()
automation.definition.trigger.inputs.tableId = table._id
automation.definition.steps[0].inputs = {
condition: FilterConditions.EQUAL,
field: "{{ trigger.row.City }}",
value: "{{ trigger.oldRow.City }}",
}
automation.appId = config.appId!
automation = await config.createAutomation(automation)
let triggerInputs = {
oldRow: {
City: oldCity,
},
row: {
City: newCity,
},
}
const res = await testAutomation(config, automation, triggerInputs)
expect(res.body.steps[1].outputs.result).toEqual(expectedResult)
}
)
})
})

View File

@ -10,37 +10,11 @@ import * as setup from "../utilities"
import {
DatabaseName,
getDatasource,
rawQuery,
knexClient,
} from "../../../../integrations/tests/utils"
import { Expectations } from "src/tests/utilities/api/base"
import { events } from "@budibase/backend-core"
const createTableSQL: Record<string, string> = {
[SourceName.POSTGRES]: `
CREATE TABLE test_table (
id serial PRIMARY KEY,
name VARCHAR ( 50 ) NOT NULL,
birthday TIMESTAMP,
number INT
);`,
[SourceName.MYSQL]: `
CREATE TABLE test_table (
id INT AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(50) NOT NULL,
birthday TIMESTAMP,
number INT
);`,
[SourceName.SQL_SERVER]: `
CREATE TABLE test_table (
id INT IDENTITY(1,1) PRIMARY KEY,
name NVARCHAR(50) NOT NULL,
birthday DATETIME,
number INT
);`,
}
const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
const dropTableSQL = `DROP TABLE test_table;`
import { Knex } from "knex"
describe.each(
[
@ -53,6 +27,7 @@ describe.each(
const config = setup.getConfig()
let rawDatasource: Datasource
let datasource: Datasource
let client: Knex
async function createQuery(
query: Partial<Query>,
@ -82,21 +57,34 @@ describe.each(
rawDatasource = await dsProvider
datasource = await config.api.datasource.create(rawDatasource)
// The Datasource API does not return the password, but we need
// it later to connect to the underlying database, so we fill it
// back in here.
// The Datasource API does not return the password, but we need it later to
// connect to the underlying database, so we fill it back in here.
datasource.config!.password = rawDatasource.config!.password
await rawQuery(datasource, createTableSQL[datasource.source])
await rawQuery(datasource, insertSQL)
client = await knexClient(rawDatasource)
await client.schema.dropTableIfExists("test_table")
await client.schema.createTable("test_table", table => {
table.increments("id").primary()
table.string("name")
table.timestamp("birthday")
table.integer("number")
})
await client("test_table").insert([
{ name: "one" },
{ name: "two" },
{ name: "three" },
{ name: "four" },
{ name: "five" },
])
jest.clearAllMocks()
})
afterEach(async () => {
const ds = await config.api.datasource.get(datasource._id!)
config.api.datasource.delete(ds)
await rawQuery(datasource, dropTableSQL)
await config.api.datasource.delete(ds)
})
afterAll(async () => {
@ -207,7 +195,7 @@ describe.each(
},
})
await config.publish()
await config.api.application.publish(config.getAppId())
const prodQuery = await config.api.query.getProd(query._id!)
expect(prodQuery._id).toEqual(query._id)
@ -429,11 +417,11 @@ describe.each(
},
])
const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE name = 'baz'"
)
const rows = await client("test_table").where({ name: "baz" }).select()
expect(rows).toHaveLength(1)
for (const row of rows) {
expect(row).toMatchObject({ name: "baz" })
}
})
it("should not allow handlebars as parameters", async () => {
@ -490,11 +478,14 @@ describe.each(
expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery(
datasource,
`SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
)
const rows = await client("test_table")
.where({ birthday: datetimeStr })
.select()
expect(rows).toHaveLength(1)
for (const row of rows) {
expect(new Date(row.birthday)).toEqual(date)
}
}
)
@ -522,10 +513,9 @@ describe.each(
expect(result.data).toEqual([{ created: true }])
const rows = await rawQuery(
datasource,
`SELECT * FROM test_table WHERE name = '${notDateStr}'`
)
const rows = await client("test_table")
.where({ name: notDateStr })
.select()
expect(rows).toHaveLength(1)
}
)
@ -660,10 +650,7 @@ describe.each(
},
])
const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE id = 1"
)
const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toEqual([
{ id: 1, name: "foo", birthday: null, number: null },
])
@ -731,10 +718,7 @@ describe.each(
},
])
const rows = await rawQuery(
datasource,
"SELECT * FROM test_table WHERE id = 1"
)
const rows = await client("test_table").where({ id: 1 }).select()
expect(rows).toHaveLength(0)
})
})
@ -750,6 +734,7 @@ describe.each(
name: entityId,
schema: {},
type: "table",
primary: ["id"],
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},

View File

@ -1,6 +1,7 @@
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import tk from "timekeeper"
import emitter from "../../../../src/events"
import { outputProcessing } from "../../../utilities/rowProcessor"
import * as setup from "./utilities"
import { context, InternalTable, tenancy } from "@budibase/backend-core"
@ -24,6 +25,8 @@ import {
StaticQuotaName,
Table,
TableSourceType,
UpdatedRowEventEmitter,
TableSchema,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
@ -31,6 +34,28 @@ import * as uuid from "uuid"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
interface WaitOptions {
name: string
matchFn?: (event: any) => boolean
}
async function waitForEvent(
opts: WaitOptions,
callback: () => Promise<void>
): Promise<any> {
const p = new Promise((resolve: any) => {
const listener = (event: any) => {
if (opts.matchFn && !opts.matchFn(event)) {
return
}
resolve(event)
emitter.off(opts.name, listener)
}
emitter.on(opts.name, listener)
})
await callback()
return await p
}
describe.each([
["internal", undefined],
@ -40,6 +65,7 @@ describe.each([
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/rows (%s)", (providerType, dsProvider) => {
const isInternal = dsProvider === undefined
const isMSSQL = providerType === DatabaseName.SQL_SERVER
const config = setup.getConfig()
let table: Table
@ -64,6 +90,23 @@ describe.each([
// the table name they're writing to.
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest {
const defaultSchema: TableSchema = {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
constraints: {
presence: true,
},
},
}
for (const override of overrides) {
if (override.primary) {
delete defaultSchema.id
}
}
const req: SaveTableRequest = {
name: uuid.v4().substring(0, 10),
type: "table",
@ -72,16 +115,7 @@ describe.each([
: TableSourceType.INTERNAL,
sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID,
primary: ["id"],
schema: {
id: {
type: FieldType.AUTO,
name: "id",
autocolumn: true,
constraints: {
presence: true,
},
},
},
schema: defaultSchema,
}
return merge(req, ...overrides)
}
@ -608,6 +642,31 @@ describe.each([
await assertRowUsage(rowUsage)
})
it("should update only the fields that are supplied and emit the correct oldRow", async () => {
let beforeRow = await config.api.row.save(table._id!, {
name: "test",
description: "test",
})
const opts = {
name: "row:update",
matchFn: (event: UpdatedRowEventEmitter) =>
event.row._id === beforeRow._id,
}
const event = await waitForEvent(opts, async () => {
await config.api.row.patch(table._id!, {
_id: beforeRow._id!,
_rev: beforeRow._rev!,
tableId: table._id!,
name: "Updated Name",
})
})
expect(event.oldRow).toBeDefined()
expect(event.oldRow.name).toEqual("test")
expect(event.row.name).toEqual("Updated Name")
expect(event.oldRow.description).toEqual(beforeRow.description)
expect(event.row.description).toEqual(beforeRow.description)
})
it("should throw an error when given improper types", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
@ -911,6 +970,121 @@ describe.each([
row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(3)
})
it("should be able to bulkImport rows", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{
name: "Row 2",
description: "Row 2 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
})
// Upserting isn't yet supported in MSSQL, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
it("should be able to update existing rows with bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
primary: ["userId"],
schema: {
userId: {
type: FieldType.NUMBER,
name: "userId",
constraints: {
presence: true,
},
},
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const row1 = await config.api.row.save(table._id!, {
userId: 1,
name: "Row 1",
description: "Row 1 description",
})
const row2 = await config.api.row.save(table._id!, {
userId: 2,
name: "Row 2",
description: "Row 2 description",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["userId"],
rows: [
{
userId: row1.userId,
name: "Row 1 updated",
description: "Row 1 description updated",
},
{
userId: row2.userId,
name: "Row 2 updated",
description: "Row 2 description updated",
},
{
userId: 3,
name: "Row 3",
description: "Row 3 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1 updated")
expect(rows[0].description).toEqual("Row 1 description updated")
expect(rows[1].name).toEqual("Row 2 updated")
expect(rows[1].description).toEqual("Row 2 description updated")
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
})
describe("enrich", () => {

File diff suppressed because it is too large

View File

@ -276,6 +276,34 @@ describe.each([
})
})
isInternal &&
it("shouldn't allow duplicate column names", async () => {
const saveTableRequest: SaveTableRequest = {
...basicTable(),
}
saveTableRequest.schema["Type"] = {
type: FieldType.STRING,
name: "Type",
}
await config.api.table.save(saveTableRequest, {
status: 400,
body: {
message:
'Column(s) "type" are duplicated - check for other columns with these name (case in-sensitive)',
},
})
saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" }
saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" }
await config.api.table.save(saveTableRequest, {
status: 400,
body: {
message:
'Column(s) "type, foo" are duplicated - check for other columns with these name (case in-sensitive)',
},
})
})
it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
...basicTable(),

View File

@ -158,15 +158,16 @@ export const getDB = () => {
return context.getAppDB()
}
export const testAutomation = async (config: any, automation: any) => {
export const testAutomation = async (
config: any,
automation: any,
triggerInputs: any
) => {
return runRequest(automation.appId, async () => {
return await config.request
.post(`/api/automations/${automation._id}/test`)
.send({
row: {
name: "Test",
description: "TEST",
},
...triggerInputs,
})
.set(config.defaultHeaders())
.expect("Content-Type", /json/)

View File

@ -7,6 +7,7 @@ import {
INTERNAL_TABLE_SOURCE_ID,
PermissionLevel,
QuotaUsageType,
Row,
SaveTableRequest,
SearchFilterOperator,
SortOrder,
@ -17,6 +18,7 @@ import {
UpdateViewRequest,
ViewUIFieldMetadata,
ViewV2,
SearchResponse,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
@ -25,17 +27,21 @@ import { quotas } from "@budibase/pro"
import { db, roles } from "@budibase/backend-core"
describe.each([
["internal", undefined],
["lucene", undefined],
["sqs", undefined],
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/v2/views (%s)", (_, dsProvider) => {
])("/v2/views (%s)", (name, dsProvider) => {
const config = setup.getConfig()
const isInternal = !dsProvider
const isSqs = name === "sqs"
const isLucene = name === "lucene"
const isInternal = isSqs || isLucene
let table: Table
let datasource: Datasource
let envCleanup: (() => void) | undefined
function saveTableRequest(
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
@ -82,6 +88,9 @@ describe.each([
}
beforeAll(async () => {
if (isSqs) {
envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" })
}
await config.init()
if (dsProvider) {
@ -94,6 +103,9 @@ describe.each([
afterAll(async () => {
setup.afterAll()
if (envCleanup) {
envCleanup()
}
})
beforeEach(() => {
@ -1252,12 +1264,13 @@ describe.each([
paginate: true,
limit: 4,
query: {},
countRows: true,
})
expect(page1).toEqual({
rows: expect.arrayContaining(rows.slice(0, 4)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: true,
bookmark: expect.anything(),
totalRows: 10,
})
const page2 = await config.api.viewV2.search(view.id, {
@ -1265,12 +1278,13 @@ describe.each([
limit: 4,
bookmark: page1.bookmark,
query: {},
countRows: true,
})
expect(page2).toEqual({
rows: expect.arrayContaining(rows.slice(4, 8)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: true,
bookmark: expect.anything(),
totalRows: 10,
})
const page3 = await config.api.viewV2.search(view.id, {
@ -1278,13 +1292,17 @@ describe.each([
limit: 4,
bookmark: page2.bookmark,
query: {},
countRows: true,
})
expect(page3).toEqual({
const expectation: SearchResponse<Row> = {
rows: expect.arrayContaining(rows.slice(8)),
totalRows: isInternal ? 10 : undefined,
hasNextPage: false,
bookmark: expect.anything(),
})
totalRows: 10,
}
if (isLucene) {
expectation.bookmark = expect.anything()
}
expect(page3).toEqual(expectation)
})
const sortTestOptions: [

View File

@ -109,6 +109,7 @@ export function internalSearchValidator() {
sortOrder: OPTIONAL_STRING,
sortType: OPTIONAL_STRING,
paginate: Joi.boolean(),
countRows: Joi.boolean(),
bookmark: Joi.alternatives()
.try(OPTIONAL_STRING, OPTIONAL_NUMBER)
.optional(),

View File

@ -27,10 +27,17 @@ export const definition: AutomationTriggerSchema = {
},
outputs: {
properties: {
row: {
oldRow: {
type: AutomationIOType.OBJECT,
customType: AutomationCustomIOType.ROW,
description: "The row that was updated",
title: "Old Row",
},
row: {
type: AutomationIOType.OBJECT,
customType: AutomationCustomIOType.ROW,
description: "The row before it was updated",
title: "Row",
},
id: {
type: AutomationIOType.STRING,

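// A hypothetical sketch (not part of this diff) of the payload a step bound to
// this trigger now receives, given the output schema above: `row` carries the
// updated state and `oldRow` the state before the update. Values illustrative.
const exampleTriggerOutputs = {
  row: { _id: "ro_1", tableId: "ta_users", name: "New name" },
  oldRow: { _id: "ro_1", tableId: "ta_users", name: "Old name" },
  id: "ro_1",
}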
View File

@ -8,7 +8,13 @@ import { checkTestFlag } from "../utilities/redis"
import * as utils from "./utils"
import env from "../environment"
import { context, db as dbCore } from "@budibase/backend-core"
import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
import {
Automation,
Row,
AutomationData,
AutomationJob,
UpdatedRowEventEmitter,
} from "@budibase/types"
import { executeInThread } from "../threads/automation"
export const TRIGGER_DEFINITIONS = definitions
@ -65,7 +71,7 @@ async function queueRelevantRowAutomations(
})
}
emitter.on("row:save", async function (event) {
emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
/* istanbul ignore next */
if (!event || !event.row || !event.row.tableId) {
return

View File

@ -13,8 +13,14 @@ import { Table, Row } from "@budibase/types"
* This is particularly important for template strings used in automations.
*/
class BudibaseEmitter extends EventEmitter {
emitRow(eventName: string, appId: string, row: Row, table?: Table) {
rowEmission({ emitter: this, eventName, appId, row, table })
emitRow(
eventName: string,
appId: string,
row: Row,
table?: Table,
oldRow?: Row
) {
rowEmission({ emitter: this, eventName, appId, row, table, oldRow })
}
emitTable(eventName: string, appId: string, table?: Table) {

View File

@ -7,6 +7,7 @@ type BBEventOpts = {
appId: string
table?: Table
row?: Row
oldRow?: Row
metadata?: any
}
@ -18,6 +19,7 @@ type BBEvent = {
appId: string
tableId?: string
row?: Row
oldRow?: Row
table?: BBEventTable
id?: string
revision?: string
@ -31,9 +33,11 @@ export function rowEmission({
row,
table,
metadata,
oldRow,
}: BBEventOpts) {
let event: BBEvent = {
row,
oldRow,
appId,
tableId: row?.tableId,
}
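// Hypothetical usage sketch, assuming appId, table and both row versions are in
// scope: emitRow threads the previous state through to listeners, which can
// narrow the event with the UpdatedRowEventEmitter type from @budibase/types.
const emitter = new BudibaseEmitter()
emitter.on("row:update", (event: UpdatedRowEventEmitter) => {
  // event.row is the new state, event.oldRow the state before the update
  console.log(event.row._id, event.oldRow.name)
})
emitter.emitRow("row:update", appId, updatedRow, table, previousRow)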

View File

@ -1,19 +1,12 @@
import fetch from "node-fetch"
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import * as setup from "../api/routes/tests/utilities"
import { Datasource, FieldType } from "@budibase/types"
import {
DatabaseName,
getDatasource,
rawQuery,
knexClient,
} from "../integrations/tests/utils"
import { generator } from "@budibase/backend-core/tests"
import { tableForDatasource } from "../../src/tests/utilities/structures"
// @ts-ignore
fetch.mockSearch()
import { Knex } from "knex"
function uniqueTableName(length?: number): string {
return generator
@ -24,129 +17,74 @@ function uniqueTableName(length?: number): string {
const config = setup.getConfig()!
jest.mock("../websockets", () => ({
clientAppSocket: jest.fn(),
gridAppSocket: jest.fn(),
initialise: jest.fn(),
builderSocket: {
emitTableUpdate: jest.fn(),
emitTableDeletion: jest.fn(),
emitDatasourceUpdate: jest.fn(),
emitDatasourceDeletion: jest.fn(),
emitScreenUpdate: jest.fn(),
emitAppMetadataUpdate: jest.fn(),
emitAppPublish: jest.fn(),
},
}))
describe("mysql integrations", () => {
let makeRequest: MakeRequestResponse,
rawDatasource: Datasource,
datasource: Datasource
let datasource: Datasource
let client: Knex
beforeAll(async () => {
await config.init()
const apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, true)
rawDatasource = await getDatasource(DatabaseName.MYSQL)
const rawDatasource = await getDatasource(DatabaseName.MYSQL)
datasource = await config.api.datasource.create(rawDatasource)
client = await knexClient(rawDatasource)
})
afterAll(config.end)
it("validate table schema", async () => {
// Creating a table so that `entities` is populated.
await config.api.table.save(tableForDatasource(datasource))
const res = await makeRequest("get", `/api/datasources/${datasource._id}`)
expect(res.status).toBe(200)
expect(res.body).toEqual({
config: {
database: expect.any(String),
host: datasource.config!.host,
password: "--secret-value--",
port: datasource.config!.port,
user: "root",
},
plus: true,
source: "MYSQL",
type: "datasource_plus",
isSQL: true,
_id: expect.any(String),
_rev: expect.any(String),
createdAt: expect.any(String),
updatedAt: expect.any(String),
entities: expect.any(Object),
})
})
describe("Integration compatibility with mysql search_path", () => {
let datasource: Datasource, rawDatasource: Datasource
let datasource: Datasource
let rawDatasource: Datasource
let client: Knex
const database = generator.guid()
const database2 = generator.guid()
beforeAll(async () => {
rawDatasource = await getDatasource(DatabaseName.MYSQL)
client = await knexClient(rawDatasource)
await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`)
await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`)
await client.raw(`CREATE DATABASE \`${database}\`;`)
await client.raw(`CREATE DATABASE \`${database2}\`;`)
const pathConfig: any = {
...rawDatasource,
config: {
...rawDatasource.config!,
database,
},
}
datasource = await config.api.datasource.create(pathConfig)
rawDatasource.config!.database = database
datasource = await config.api.datasource.create(rawDatasource)
})
afterAll(async () => {
await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`)
await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`)
await client.raw(`DROP DATABASE \`${database}\`;`)
await client.raw(`DROP DATABASE \`${database2}\`;`)
})
it("discovers tables from any schema in search path", async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: datasource,
await client.schema.createTable(`${database}.table1`, table => {
table.increments("id1").primary()
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
expect.arrayContaining(["table1"])
)
const res = await config.api.datasource.info(datasource)
expect(res.tableNames).toBeDefined()
expect(res.tableNames).toEqual(expect.arrayContaining(["table1"]))
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await rawQuery(
rawDatasource,
`CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
)
await rawQuery(
rawDatasource,
`CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
)
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`,
{
tablesFilter: [repeated_table_name],
await client.schema.createTable(
`${database}.${repeated_table_name}`,
table => {
table.increments("id").primary()
table.string("val1")
}
)
expect(response.status).toBe(200)
expect(
response.body.datasource.entities[repeated_table_name].schema
).toBeDefined()
const schema =
response.body.datasource.entities[repeated_table_name].schema
await client.schema.createTable(
`${database2}.${repeated_table_name}`,
table => {
table.increments("id2").primary()
table.string("val2")
}
)
const res = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
tablesFilter: [repeated_table_name],
})
expect(res.datasource.entities![repeated_table_name].schema).toBeDefined()
const schema = res.datasource.entities![repeated_table_name].schema
expect(Object.keys(schema).sort()).toEqual(["id", "val1"])
})
})
@ -159,28 +97,27 @@ describe("mysql integrations", () => {
})
afterEach(async () => {
await rawQuery(rawDatasource, `DROP TABLE IF EXISTS \`${tableName}\``)
await client.schema.dropTableIfExists(tableName)
})
it("recognises enum columns as options", async () => {
const enumColumnName = "status"
const createTableQuery = `
CREATE TABLE \`${tableName}\` (
\`order_id\` INT AUTO_INCREMENT PRIMARY KEY,
\`customer_name\` VARCHAR(100) NOT NULL,
\`${enumColumnName}\` ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled')
);
`
await client.schema.createTable(tableName, table => {
table.increments("order_id").primary()
table.string("customer_name", 100).notNullable()
table.enum(
enumColumnName,
["pending", "processing", "shipped", "delivered", "cancelled"],
{ useNative: true, enumName: `${tableName}_${enumColumnName}` }
)
})
await rawQuery(rawDatasource, createTableQuery)
const res = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const response = await makeRequest(
"post",
`/api/datasources/${datasource._id}/schema`
)
const table = response.body.datasource.entities[tableName]
const table = res.datasource.entities![tableName]
expect(table).toBeDefined()
expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)

View File

@ -1,9 +1,3 @@
import fetch from "node-fetch"
import {
generateMakeRequest,
MakeRequestResponse,
} from "../api/routes/public/tests/utils"
import * as setup from "../api/routes/tests/utilities"
import { Datasource, FieldType } from "@budibase/types"
import _ from "lodash"
@ -11,29 +5,21 @@ import { generator } from "@budibase/backend-core/tests"
import {
DatabaseName,
getDatasource,
rawQuery,
knexClient,
} from "../integrations/tests/utils"
// @ts-ignore
fetch.mockSearch()
import { Knex } from "knex"
const config = setup.getConfig()!
jest.mock("../websockets")
describe("postgres integrations", () => {
let makeRequest: MakeRequestResponse,
rawDatasource: Datasource,
datasource: Datasource
let datasource: Datasource
let client: Knex
beforeAll(async () => {
await config.init()
const apiKey = await config.generateApiKey()
makeRequest = generateMakeRequest(apiKey, true)
rawDatasource = await getDatasource(DatabaseName.POSTGRES)
const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
datasource = await config.api.datasource.create(rawDatasource)
client = await knexClient(rawDatasource)
})
afterAll(config.end)
@ -46,11 +32,13 @@ describe("postgres integrations", () => {
})
afterEach(async () => {
await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
await client.schema.dropTableIfExists(tableName)
})
it("recognises when a table has no primary key", async () => {
await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
await client.schema.createTable(tableName, table => {
table.increments("id", { primaryKey: false })
})
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
@ -62,10 +50,9 @@ describe("postgres integrations", () => {
})
it("recognises when a table is using a reserved column name", async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) `
)
await client.schema.createTable(tableName, table => {
table.increments("_id").primary()
})
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
@ -81,20 +68,15 @@ describe("postgres integrations", () => {
.guid()
.replaceAll("-", "")
.substring(0, 6)}`
const enumColumnName = "status"
await rawQuery(
rawDatasource,
`
CREATE TYPE order_status AS ENUM ('pending', 'processing', 'shipped', 'delivered', 'cancelled');
CREATE TABLE ${tableName} (
order_id SERIAL PRIMARY KEY,
customer_name VARCHAR(100) NOT NULL,
${enumColumnName} order_status
);
`
)
await client.schema.createTable(tableName, table => {
table.increments("order_id").primary()
table.string("customer_name").notNullable()
table.enum("status", ["pending", "processing", "shipped"], {
useNative: true,
enumName: `${tableName}_status`,
})
})
const response = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
@ -103,69 +85,70 @@ describe("postgres integrations", () => {
const table = response.datasource.entities?.[tableName]
expect(table).toBeDefined()
expect(table?.schema[enumColumnName].type).toEqual(FieldType.OPTIONS)
expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS)
})
})
describe("Integration compatibility with postgres search_path", () => {
let rawDatasource: Datasource,
datasource: Datasource,
schema1: string,
schema2: string
let datasource: Datasource
let client: Knex
let schema1: string
let schema2: string
beforeEach(async () => {
schema1 = generator.guid().replaceAll("-", "")
schema2 = generator.guid().replaceAll("-", "")
rawDatasource = await getDatasource(DatabaseName.POSTGRES)
const dbConfig = rawDatasource.config!
const rawDatasource = await getDatasource(DatabaseName.POSTGRES)
client = await knexClient(rawDatasource)
await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`)
await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`)
await client.schema.createSchema(schema1)
await client.schema.createSchema(schema2)
const pathConfig: any = {
...rawDatasource,
config: {
...dbConfig,
schema: `${schema1}, ${schema2}`,
},
}
datasource = await config.api.datasource.create(pathConfig)
rawDatasource.config!.schema = `${schema1}, ${schema2}`
client = await knexClient(rawDatasource)
datasource = await config.api.datasource.create(rawDatasource)
})
afterEach(async () => {
await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`)
await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`)
await client.schema.dropSchema(schema1, true)
await client.schema.dropSchema(schema2, true)
})
it("discovers tables from any schema in search path", async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);`
)
await rawQuery(
rawDatasource,
`CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);`
)
const response = await makeRequest("post", "/api/datasources/info", {
datasource: datasource,
await client.schema.createTable(`${schema1}.table1`, table => {
table.increments("id1").primary()
})
expect(response.status).toBe(200)
expect(response.body.tableNames).toBeDefined()
expect(response.body.tableNames).toEqual(
await client.schema.createTable(`${schema2}.table2`, table => {
table.increments("id2").primary()
})
const response = await config.api.datasource.info(datasource)
expect(response.tableNames).toBeDefined()
expect(response.tableNames).toEqual(
expect.arrayContaining(["table1", "table2"])
)
})
it("does not mix columns from different tables", async () => {
const repeated_table_name = "table_same_name"
await rawQuery(
rawDatasource,
`CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);`
await client.schema.createTable(
`${schema1}.${repeated_table_name}`,
table => {
table.increments("id").primary()
table.string("val1")
}
)
await rawQuery(
rawDatasource,
`CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);`
await client.schema.createTable(
`${schema2}.${repeated_table_name}`,
table => {
table.increments("id2").primary()
table.string("val2")
}
)
const response = await config.api.datasource.fetchSchema({
@ -182,15 +165,11 @@ describe("postgres integrations", () => {
describe("check custom column types", () => {
beforeAll(async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE binaryTable (
id BYTEA PRIMARY KEY,
column1 TEXT,
column2 INT
);
`
)
await client.schema.createTable("binaryTable", table => {
table.binary("id").primary()
table.string("column1")
table.integer("column2")
})
})
it("should handle binary columns", async () => {
@ -198,7 +177,7 @@ describe("postgres integrations", () => {
datasourceId: datasource._id!,
})
expect(response.datasource.entities).toBeDefined()
const table = response.datasource.entities?.["binarytable"]
const table = response.datasource.entities?.["binaryTable"]
expect(table).toBeDefined()
expect(table?.schema.id.externalType).toBe("bytea")
const row = await config.api.row.save(table?._id!, {
@ -214,14 +193,10 @@ describe("postgres integrations", () => {
describe("check fetching null/not null table", () => {
beforeAll(async () => {
await rawQuery(
rawDatasource,
`CREATE TABLE nullableTable (
order_id SERIAL PRIMARY KEY,
order_number INT NOT NULL
);
`
)
await client.schema.createTable("nullableTable", table => {
table.increments("order_id").primary()
table.integer("order_number").notNullable()
})
})
it("should be able to change the table to allow nullable and refetch this", async () => {
@ -230,25 +205,24 @@ describe("postgres integrations", () => {
})
const entities = response.datasource.entities
expect(entities).toBeDefined()
const nullableTable = entities?.["nullabletable"]
const nullableTable = entities?.["nullableTable"]
expect(nullableTable).toBeDefined()
expect(
nullableTable?.schema["order_number"].constraints?.presence
).toEqual(true)
// perform these calls raw against the DB so that its external state differs from what Budibase
// is aware of - then we can fetch and make sure Budibase updates correctly
await rawQuery(
rawDatasource,
`ALTER TABLE nullableTable
ALTER COLUMN order_number DROP NOT NULL;
`
)
await client.schema.alterTable("nullableTable", table => {
table.setNullable("order_number")
})
const responseAfter = await config.api.datasource.fetchSchema({
datasourceId: datasource._id!,
})
const entitiesAfter = responseAfter.datasource.entities
expect(entitiesAfter).toBeDefined()
const nullableTableAfter = entitiesAfter?.["nullabletable"]
const nullableTableAfter = entitiesAfter?.["nullableTable"]
expect(nullableTableAfter).toBeDefined()
expect(
nullableTableAfter?.schema["order_number"].constraints?.presence

View File

@ -22,6 +22,9 @@ export async function makeExternalQuery(
) {
throw new Error("Entity ID and table metadata do not align")
}
if (!datasource) {
throw new Error("No datasource provided for external query")
}
datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source)
// query is the opinionated function

View File

@ -142,7 +142,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson({ schema: "production" }))
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
})
})
@ -150,7 +150,7 @@ describe("SQL query builder", () => {
const query = sql._query(generateRelationshipJson())
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`,
sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`,
})
})
@ -160,7 +160,7 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: [500, 5000],
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`,
sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`,
})
})
@ -175,8 +175,8 @@ describe("SQL query builder", () => {
})
)
expect(query).toEqual({
bindings: ["john%", limit],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`,
bindings: ["john%", limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
@ -190,8 +190,8 @@ describe("SQL query builder", () => {
})
)
expect(query).toEqual({
bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit],
sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`,
bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`,
})
query = new Sql(SqlClient.ORACLE, limit)._query(
@ -204,8 +204,8 @@ describe("SQL query builder", () => {
})
)
expect(query).toEqual({
bindings: [`%jo%`, limit],
sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`,
bindings: [`%jo%`, limit, 5000],
sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`,
})
})
})

View File

@ -57,15 +57,14 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({
bindings: [relationshipLimit, limit],
sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
sql: expect.stringContaining(
multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid",
"a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city",
"a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname",
"b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a"
left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid"
order by "a"."firstname" asc nulls first limit $2`),
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`)
),
})
})
@ -74,13 +73,10 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({
bindings: [relationshipLimit, "assembling", limit],
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE)
order by "a"."productname" asc nulls first limit $3`),
sql: expect.stringContaining(
multiline(`where COALESCE("b"."taskname" = $2, FALSE)
order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`)
),
})
})
@ -89,13 +85,10 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
expect(query).toEqual({
bindings: [relationshipLimit, limit],
sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid",
"b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid",
"b"."completed" as "b.completed", "b"."qaid" as "b.qaid"
from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a"
left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid"
order by "a"."productname" asc nulls first limit $2`),
sql: expect.stringContaining(
multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid"
left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `)
),
})
})
@ -106,11 +99,11 @@ describe("Captures of real examples", () => {
expect(query).toEqual({
bindings: [...filters, limit, limit],
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname",
"a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
"b"."productname" as "b.productname", "b"."productid" as "b.productid"
from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a"
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`),
"a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid",
"b"."productname" as "b.productname", "b"."productid" as "b.productid"
from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a"
left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid"
left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`),
})
})
@ -132,19 +125,11 @@ describe("Captures of real examples", () => {
equalValue,
limit,
],
sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid",
"a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname",
"b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
"c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname",
"c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type",
"c"."city" as "c.city", "c"."lastname" as "c.lastname"
from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE)
order by "a"."taskname" asc nulls first limit $2) as "a"
left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid"
left join "products" as "b" on "b"."productid" = "d"."productid"
left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid"
where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`),
sql: expect.stringContaining(
multiline(
`where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)`
)
),
})
})
})
@ -200,8 +185,9 @@ describe("Captures of real examples", () => {
returningQuery = input
}, queryJson)
expect(returningQuery).toEqual({
sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]",
bindings: [1, "Test", 22],
sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where CASE WHEN [people].[name] = @p2
THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`),
bindings: [5000, 1, "Test", 22],
})
})
})

View File

@ -48,16 +48,16 @@ export async function getDatasources(
return Promise.all(sourceNames.map(sourceName => providers[sourceName]()))
}
export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
export async function knexClient(ds: Datasource) {
switch (ds.source) {
case SourceName.POSTGRES: {
return postgres.rawQuery(ds, sql)
return postgres.knexClient(ds)
}
case SourceName.MYSQL: {
return mysql.rawQuery(ds, sql)
return mysql.knexClient(ds)
}
case SourceName.SQL_SERVER: {
return mssql.rawQuery(ds, sql)
return mssql.knexClient(ds)
}
default: {
throw new Error(`Unsupported source: ${ds.source}`)

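// A minimal usage sketch, assuming the helpers above are imported: the knex
// client returned here replaces the old rawQuery helper, so tests can run raw
// SQL and use the schema builder against the same datasource.
const ds = await getDatasource(DatabaseName.POSTGRES)
const client = await knexClient(ds)
await client.raw("select 1")
await client.schema.createTable("example", table => {
  table.increments("id").primary()
})
await client.destroy()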
View File

@ -1,9 +1,9 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import { rawQuery } from "./mysql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import { knexClient } from "./mysql"
let ports: Promise<testContainerUtils.Port[]>
@ -55,7 +55,8 @@ export async function getDatasource(): Promise<Datasource> {
}
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
const client = await knexClient(datasource)
await client.raw(`CREATE DATABASE \`${database}\``)
datasource.config.database = database
return datasource
}

View File

@ -1,8 +1,8 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import mssql from "mssql"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
let ports: Promise<testContainerUtils.Port[]>
@ -49,26 +49,23 @@ export async function getDatasource(): Promise<Datasource> {
}
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE "${database}"`)
const client = await knexClient(datasource)
await client.raw(`CREATE DATABASE "${database}"`)
datasource.config!.database = database
return datasource
}
export async function rawQuery(ds: Datasource, sql: string) {
export async function knexClient(ds: Datasource) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
if (ds.source !== SourceName.SQL_SERVER) {
throw new Error("Datasource source is not SQL Server")
throw new Error("Datasource source is not MSSQL")
}
const pool = new mssql.ConnectionPool(ds.config! as mssql.config)
const client = await pool.connect()
try {
const { recordset } = await client.query(sql)
return recordset
} finally {
await pool.close()
}
return knex({
client: "mssql",
connection: ds.config,
})
}

View File

@ -1,9 +1,9 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
import mysql from "mysql2/promise"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
let ports: Promise<testContainerUtils.Port[]>
@ -56,12 +56,13 @@ export async function getDatasource(): Promise<Datasource> {
}
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE \`${database}\``)
const client = await knexClient(datasource)
await client.raw(`CREATE DATABASE \`${database}\``)
datasource.config!.database = database
return datasource
}
export async function rawQuery(ds: Datasource, sql: string) {
export async function knexClient(ds: Datasource) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -69,11 +70,8 @@ export async function rawQuery(ds: Datasource, sql: string) {
throw new Error("Datasource source is not MySQL")
}
const connection = await mysql.createConnection(ds.config)
try {
const [rows] = await connection.query(sql)
return rows
} finally {
connection.end()
}
return knex({
client: "mysql2",
connection: ds.config,
})
}

View File

@ -1,8 +1,8 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import pg from "pg"
import { generator, testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import knex from "knex"
let ports: Promise<testContainerUtils.Port[]>
@ -43,13 +43,14 @@ export async function getDatasource(): Promise<Datasource> {
}
const database = generator.guid().replaceAll("-", "")
await rawQuery(datasource, `CREATE DATABASE "${database}"`)
const client = await knexClient(datasource)
await client.raw(`CREATE DATABASE "${database}"`)
datasource.config!.database = database
return datasource
}
export async function rawQuery(ds: Datasource, sql: string) {
export async function knexClient(ds: Datasource) {
if (!ds.config) {
throw new Error("Datasource config is missing")
}
@ -57,12 +58,8 @@ export async function rawQuery(ds: Datasource, sql: string) {
throw new Error("Datasource source is not Postgres")
}
const client = new pg.Client(ds.config)
await client.connect()
try {
const { rows } = await client.query(sql)
return rows
} finally {
await client.end()
}
return knex({
client: "pg",
connection: ds.config,
})
}

View File

@ -1,14 +1,14 @@
import {
SortJson,
IncludeRelationship,
Operation,
PaginationJson,
IncludeRelationship,
Row,
SearchFilters,
RowSearchParams,
SearchFilters,
SearchResponse,
Table,
SortJson,
SortOrder,
Table,
} from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external"
@ -18,7 +18,7 @@ import {
} from "../../../../integrations/utils"
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "./types"
import { HTTPError, db } from "@budibase/backend-core"
import { db, HTTPError } from "@budibase/backend-core"
import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import sdk from "../../../"
@ -28,20 +28,26 @@ export async function search(
table: Table
): Promise<SearchResponse<Row>> {
const { tableId } = options
const { paginate, query, ...params } = options
const { countRows, paginate, query, ...params } = options
const { limit } = params
let bookmark =
(params.bookmark && parseInt(params.bookmark as string)) || undefined
if (paginate && !bookmark) {
bookmark = 1
bookmark = 0
}
let paginateObj = {}
let paginateObj: PaginationJson | undefined
if (paginate) {
if (paginate && !limit) {
throw new Error("Cannot paginate query without a limit")
}
if (paginate && limit) {
paginateObj = {
// add one so we can track if there is another page
limit: limit,
page: bookmark,
limit: limit + 1,
}
if (bookmark) {
paginateObj.offset = limit * bookmark
}
} else if (params && limit) {
paginateObj = {
@ -69,24 +75,27 @@ export async function search(
}
try {
let rows = await handleRequest(Operation.READ, tableId, {
const parameters = {
filters: query,
sort,
paginate: paginateObj as PaginationJson,
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
}
const queries: Promise<Row[] | number>[] = []
queries.push(handleRequest(Operation.READ, tableId, parameters))
if (countRows) {
queries.push(handleRequest(Operation.COUNT, tableId, parameters))
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
let hasNextPage = false
if (paginate && rows.length === limit) {
const nextRows = await handleRequest(Operation.READ, tableId, {
filters: query,
sort,
paginate: {
limit: 1,
page: bookmark! * limit + 1,
},
includeSqlRelationships: IncludeRelationship.INCLUDE,
})
hasNextPage = nextRows.length > 0
// remove the extra row if it's there
if (paginate && limit && rows.length > limit) {
rows.pop()
hasNextPage = true
}
if (options.fields) {
@ -100,7 +109,17 @@ export async function search(
})
// need wrapper object for bookmarks etc when paginating
return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }
const response: SearchResponse<Row> = { rows, hasNextPage }
if (hasNextPage && bookmark != null) {
response.bookmark = bookmark + 1
}
if (totalRows != null) {
response.totalRows = totalRows
}
if (paginate && !hasNextPage) {
response.hasNextPage = false
}
return response
} catch (err: any) {
if (err.message && err.message.includes("does not exist")) {
throw new Error(

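// Worked sketch of the bookkeeping above (numbers illustrative): with limit = 4
// the query requests limit + 1 = 5 rows at offset = limit * bookmark, so
// bookmark 0 reads from offset 0, bookmark 1 from offset 4, and so on. If the
// extra fifth row comes back it is popped and hasNextPage/bookmark are set;
// otherwise this page is the last one.
const limit = 4
const bookmark = 1
const paginateObj = { limit: limit + 1, offset: limit * bookmark }
// fetchedRows stands in for the rows returned by handleRequest
const hasNextPage = fetchedRows.length > limit // pop the extra row when true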
View File

@ -12,6 +12,7 @@ import {
SortType,
SqlClient,
Table,
Datasource,
} from "@budibase/types"
import {
buildInternalRelationships,
@ -28,6 +29,7 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
import AliasTables from "../sqlAlias"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import pick from "lodash/pick"
import { processRowCountResponse } from "../utils"
const builder = new sql.Sql(SqlClient.SQL_LITE)
@ -95,14 +97,29 @@ function buildTableMap(tables: Table[]) {
// update the table name, should never query by name for SQLite
table.originalName = table.name
table.name = table._id!
// need a primary for sorting, lookups etc
table.primary = ["_id"]
tableMap[table._id!] = table
}
return tableMap
}
async function runSqlQuery(json: QueryJson, tables: Table[]) {
function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
function runSqlQuery(
json: QueryJson,
tables: Table[],
opts: { countTotalRows: true }
): Promise<number>
async function runSqlQuery(
json: QueryJson,
tables: Table[],
opts?: { countTotalRows?: boolean }
) {
const alias = new AliasTables(tables.map(table => table.name))
return await alias.queryWithAliasing(json, async json => {
if (opts?.countTotalRows) {
json.endpoint.operation = Operation.COUNT
}
const processSQLQuery = async (_: Datasource, json: QueryJson) => {
const query = builder._query(json, {
disableReturning: true,
})
@ -124,17 +141,27 @@ async function runSqlQuery(json: QueryJson, tables: Table[]) {
const db = context.getAppDB()
return await db.sql<Row>(sql, bindings)
})
}
const response = await alias.queryWithAliasing(json, processSQLQuery)
if (opts?.countTotalRows) {
return processRowCountResponse(response)
} else {
return response
}
}
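// Sketch of how the overloads above resolve for callers (json and allTables are
// assumed to be in scope, as in the search function below):
const rows: Row[] = await runSqlQuery(json, allTables)
const total: number = await runSqlQuery(json, allTables, {
  countTotalRows: true,
})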
export async function search(
options: RowSearchParams,
table: Table
): Promise<SearchResponse<Row>> {
const { paginate, query, ...params } = options
let { paginate, query, ...params } = options
const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables)
// make sure we have the mapped/latest table
if (table?._id) {
table = allTablesMap[table?._id]
}
if (!table) {
throw new Error("Unable to find table")
}
@ -167,13 +194,9 @@ export async function search(
const sortField = table.schema[params.sort]
const sortType =
sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
const sortDirection =
params.sortOrder === SortOrder.ASCENDING
? SortOrder.ASCENDING
: SortOrder.DESCENDING
request.sort = {
[sortField.name]: {
direction: sortDirection,
direction: params.sortOrder || SortOrder.ASCENDING,
type: sortType as SortType,
},
}
@ -182,16 +205,31 @@ export async function search(
if (params.bookmark && typeof params.bookmark !== "number") {
throw new Error("Unable to paginate with string based bookmarks")
}
const bookmark: number = (params.bookmark as number) || 1
const limit = params.limit
if (paginate && params.limit) {
const bookmark: number = (params.bookmark as number) || 0
if (params.limit) {
paginate = true
request.paginate = {
limit: params.limit + 1,
page: bookmark,
offset: bookmark * params.limit,
}
}
try {
const rows = await runSqlQuery(request, allTables)
const queries: Promise<Row[] | number>[] = []
queries.push(runSqlQuery(request, allTables))
if (options.countRows) {
// get the total count of rows
queries.push(
runSqlQuery(request, allTables, {
countTotalRows: true,
})
)
}
const responses = await Promise.all(queries)
let rows = responses[0] as Row[]
const totalRows =
responses.length > 1 ? (responses[1] as number) : undefined
// process from the tableId.column format to the expected format, and
// make sure JSON columns are corrected
@ -204,7 +242,8 @@ export async function search(
// check for pagination final row
let nextRow: Row | undefined
if (paginate && params.limit && processed.length > params.limit) {
if (paginate && params.limit && rows.length > params.limit) {
// remove the extra row that confirmed there is another page
nextRow = processed.pop()
}
@ -220,27 +259,21 @@ export async function search(
finalRows = finalRows.map((r: any) => pick(r, fields))
}
// check for pagination
if (paginate && limit) {
const response: SearchResponse<Row> = {
rows: finalRows,
}
const prevLimit = request.paginate!.limit
request.paginate = {
limit: 1,
page: bookmark * prevLimit + 1,
}
const hasNextPage = !!nextRow
response.hasNextPage = hasNextPage
if (hasNextPage) {
response.bookmark = bookmark + 1
}
return response
} else {
return {
rows: finalRows,
}
const response: SearchResponse<Row> = {
rows: finalRows,
}
if (totalRows != null) {
response.totalRows = totalRows
}
// check for pagination
if (paginate && nextRow) {
response.hasNextPage = true
response.bookmark = bookmark + 1
}
if (paginate && !nextRow) {
response.hasNextPage = false
}
return response
} catch (err: any) {
const msg = typeof err === "string" ? err : err.message
if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {

View File

@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
import { getSQLClient } from "./utils"
import { cloneDeep } from "lodash"
import datasources from "../datasources"
import { makeExternalQuery } from "../../../integrations/base/query"
import { BudibaseInternalDB } from "../../../db/utils"
type PerformQueryFunction = (
datasource: Datasource,
json: QueryJson
) => Promise<DatasourcePlusQueryResponse>
const WRITE_OPERATIONS: Operation[] = [
Operation.CREATE,
@ -65,7 +70,7 @@ export default class AliasTables {
this.charSeq = new CharSequence()
}
isAliasingEnabled(json: QueryJson, datasource: Datasource) {
isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
const operation = json.endpoint.operation
const fieldLength = json.resource?.fields?.length
if (
@ -75,6 +80,10 @@ export default class AliasTables {
) {
return false
}
// SQS - doesn't have a datasource
if (!datasource) {
return true
}
try {
const sqlClient = getSQLClient(datasource)
const isWrite = WRITE_OPERATIONS.includes(operation)
@ -167,13 +176,14 @@ export default class AliasTables {
async queryWithAliasing(
json: QueryJson,
queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
queryFn: PerformQueryFunction
): Promise<DatasourcePlusQueryResponse> {
const datasourceId = json.endpoint.datasourceId
const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
let aliasingEnabled: boolean, datasource: Datasource | undefined
let aliasingEnabled: boolean, datasource: Datasource
if (isSqs) {
aliasingEnabled = true
aliasingEnabled = this.isAliasingEnabled(json)
datasource = BudibaseInternalDB
} else {
datasource = await datasources.get(datasourceId)
aliasingEnabled = this.isAliasingEnabled(json, datasource)
@ -225,14 +235,7 @@ export default class AliasTables {
json.tableAliases = invertedTableAliases
}
let response: DatasourcePlusQueryResponse
if (datasource && !isSqs) {
response = await makeExternalQuery(datasource, json)
} else if (queryFn) {
response = await queryFn(json)
} else {
throw new Error("No supplied method to perform aliased query")
}
let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
if (Array.isArray(response) && aliasingEnabled) {
return this.reverse(response)
} else {

View File

@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
throw new Error("Unable to determine client for SQL datasource")
}
export function processRowCountResponse(
response: DatasourcePlusQueryResponse
): number {
if (response && response.length === 1 && "total" in response[0]) {
const total = response[0].total
return typeof total === "number" ? total : parseInt(total)
} else {
throw new Error("Unable to count rows in query - no count response")
}
}
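// Usage sketch (values illustrative): a COUNT operation is expected to return a
// single row shaped like { total }, which this helper unwraps.
processRowCountResponse([{ total: 42 }]) // -> 42
processRowCountResponse([{ total: "42" }]) // -> 42, string totals are parsed
processRowCountResponse([]) // throws - no count response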
export async function getDatasourceAndQuery(
json: QueryJson
): Promise<DatasourcePlusQueryResponse> {

View File

@ -17,6 +17,7 @@ import { cloneDeep } from "lodash/fp"
import isEqual from "lodash/isEqual"
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
import { context } from "@budibase/backend-core"
import { findDuplicateInternalColumns } from "@budibase/shared-core"
import { getTable } from "../getters"
import { checkAutoColumns } from "./utils"
import * as viewsSdk from "../../views"
@ -44,6 +45,17 @@ export async function save(
if (hasTypeChanged(table, oldTable)) {
throw new Error("A column type has changed.")
}
// check for duplicate columns (case-insensitive) - we don't want to allow them
const duplicateColumn = findDuplicateInternalColumns(table)
if (duplicateColumn.length) {
throw new Error(
`Column(s) "${duplicateColumn.join(
", "
)}" are duplicated - check for other columns with these name (case in-sensitive)`
)
}
// check that subtypes have been maintained
table = checkAutoColumns(table, oldTable)

View File

@ -359,6 +359,36 @@ export function collectAutomation(tableId?: string): Automation {
return automation as Automation
}
export function filterAutomation(tableId?: string): Automation {
const automation: any = {
name: "looping",
type: "automation",
definition: {
steps: [
{
id: "b",
type: "ACTION",
internal: true,
stepId: AutomationActionStepId.FILTER,
inputs: {},
schema: BUILTIN_ACTION_DEFINITIONS.EXECUTE_SCRIPT.schema,
},
],
trigger: {
id: "a",
type: "TRIGGER",
event: "row:save",
stepId: AutomationTriggerStepId.ROW_SAVED,
inputs: {
tableId,
},
schema: TRIGGER_DEFINITIONS.ROW_SAVED.schema,
},
},
}
return automation as Automation
}
export function basicAutomationResults(
automationId: string
): AutomationResults {

View File

@ -1,5 +1,6 @@
export * from "./api"
export * from "./fields"
export * from "./rows"
export const OperatorOptions = {
Equals: {
@ -179,3 +180,5 @@ export enum BpmStatusValue {
VERIFYING_EMAIL = "verifying_email",
COMPLETED = "completed",
}
export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"

View File

@ -0,0 +1,14 @@
export const CONSTANT_INTERNAL_ROW_COLS = [
"_id",
"_rev",
"type",
"createdAt",
"updatedAt",
"tableId",
] as const
export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}

View File

@ -12,6 +12,7 @@ import {
SortOrder,
RowSearchParams,
EmptyFilterOption,
SearchResponse,
} from "@budibase/types"
import dayjs from "dayjs"
import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
@ -262,15 +263,23 @@ export const buildQuery = (filter: SearchFilter[]) => {
return query
}
export const search = (docs: Record<string, any>[], query: RowSearchParams) => {
export const search = (
docs: Record<string, any>[],
query: RowSearchParams
): SearchResponse<Record<string, any>> => {
let result = runQuery(docs, query.query)
if (query.sort) {
result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
}
let totalRows = result.length
if (query.limit) {
result = limit(result, query.limit.toString())
}
return result
const response: SearchResponse<Record<string, any>> = { rows: result }
if (query.countRows) {
response.totalRows = totalRows
}
return response
}
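// Usage sketch (illustrative data, and a partial params object cast for
// brevity): countRows surfaces the pre-limit total alongside the limited rows.
const { rows, totalRows } = search(
  [{ name: "a" }, { name: "b" }, { name: "c" }],
  { query: {}, limit: 2, countRows: true } as RowSearchParams
)
// rows.length === 2, totalRows === 3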
/**

View File

@ -1,4 +1,5 @@
import { FieldType } from "@budibase/types"
import { FieldType, Table } from "@budibase/types"
import { CONSTANT_INTERNAL_ROW_COLS } from "./constants"
const allowDisplayColumnByType: Record<FieldType, boolean> = {
[FieldType.STRING]: true,
@ -51,3 +52,22 @@ export function canBeDisplayColumn(type: FieldType): boolean {
export function canBeSortColumn(type: FieldType): boolean {
return !!allowSortColumnByType[type]
}
export function findDuplicateInternalColumns(table: Table): string[] {
// get the column names
const columnNames = Object.keys(table.schema)
.concat(CONSTANT_INTERNAL_ROW_COLS)
.map(colName => colName.toLowerCase())
// if the set is smaller than the list, there are duplicates
const set = new Set(columnNames)
let duplicates: string[] = []
if (set.size !== columnNames.length) {
for (let key of set.keys()) {
const count = columnNames.filter(name => name === key).length
if (count > 1) {
duplicates.push(key)
}
}
}
return duplicates
}
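// Usage sketch: a user-defined "Type" column collides case-insensitively with
// the constant internal "type" column, so it is reported. The table literal is
// trimmed down for illustration.
findDuplicateInternalColumns({
  schema: { Type: { type: FieldType.STRING, name: "Type" } },
} as Table) // -> ["type"]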

View File

@ -25,6 +25,7 @@ export interface SearchViewRowRequest
| "bookmark"
| "paginate"
| "query"
| "countRows"
> {}
export interface SearchRowResponse {

View File

@ -2,6 +2,8 @@ import { Document } from "../document"
import { EventEmitter } from "events"
import { User } from "../global"
import { ReadStream } from "fs"
import { Row } from "./row"
import { Table } from "./table"
export enum AutomationIOType {
OBJECT = "object",
@ -252,3 +254,10 @@ export type BucketedContent = AutomationAttachmentContent & {
bucket: string
path: string
}
export type UpdatedRowEventEmitter = {
row: Row
oldRow: Row
table: Table
appId: string
}

View File

@ -8,7 +8,9 @@ export enum Operation {
READ = "READ",
UPDATE = "UPDATE",
DELETE = "DELETE",
COUNT = "COUNT",
BULK_CREATE = "BULK_CREATE",
BULK_UPSERT = "BULK_UPSERT",
CREATE_TABLE = "CREATE_TABLE",
UPDATE_TABLE = "UPDATE_TABLE",
DELETE_TABLE = "DELETE_TABLE",
@ -20,6 +22,7 @@ export const RowOperations = [
Operation.UPDATE,
Operation.DELETE,
Operation.BULK_CREATE,
Operation.BULK_UPSERT,
]
export enum QueryType {
@ -186,7 +189,7 @@ export interface Schema {
}
// return these when an operation occurred but we got no response
enum DSPlusOperation {
export enum DSPlusOperation {
CREATE = "create",
READ = "read",
UPDATE = "update",
@ -196,6 +199,7 @@ enum DSPlusOperation {
export type DatasourcePlusQueryResponse =
| Row[]
| Record<DSPlusOperation, boolean>[]
| { total: number }[]
| void
export interface DatasourcePlus extends IntegrationBase {

View File

@ -17,6 +17,7 @@ export interface SearchParams {
fields?: string[]
indexer?: () => Promise<any>
rows?: Row[]
countRows?: boolean
}
// when searching for rows we want a more extensive search type that requires certain properties

View File

@ -85,6 +85,7 @@ export interface SortJson {
export interface PaginationJson {
limit: number
page?: string | number
offset?: number
}
export interface RenameColumn {

326
yarn.lock
View File

@ -2193,9 +2193,9 @@
"@bull-board/api" "5.10.2"
"@camunda8/sdk@^8.5.3":
version "8.6.2"
resolved "https://registry.yarnpkg.com/@camunda8/sdk/-/sdk-8.6.2.tgz#7f1ed90dfb5ad50ac22e5f984e92739c4e54f216"
integrity sha512-QdpuU3qsbJVKYDuIIYIgryl9HbnOoUqmeUcCU4YZPBhoWVkbCjnP0GD4Q3485SE3WzpbbAMoLtYCHi7hJwnAcA==
version "8.6.6"
resolved "https://registry.yarnpkg.com/@camunda8/sdk/-/sdk-8.6.6.tgz#39f894f89b485df7c2a803e590d4175fbfb6fb8f"
integrity sha512-u0A1Q0Fwh6W33i9ky2nfA6DJUKgLES8FAhp7k3L7L8ldNM5NgLRLiz1eZgUWK5CT4D78aFoSkm3VobFo+V42yQ==
dependencies:
"@grpc/grpc-js" "1.10.9"
"@grpc/proto-loader" "0.7.13"
@ -2401,230 +2401,230 @@
find-up "^5.0.0"
strip-json-comments "^3.1.1"
"@esbuild/aix-ppc64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz#a70f4ac11c6a1dfc18b8bbb13284155d933b9537"
integrity sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==
"@esbuild/aix-ppc64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f"
integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==
"@esbuild/android-arm64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz#984b4f9c8d0377443cc2dfcef266d02244593622"
integrity sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==
"@esbuild/android-arm64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz#db1c9202a5bc92ea04c7b6840f1bbe09ebf9e6b9"
integrity sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==
"@esbuild/android-arm64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052"
integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==
"@esbuild/android-arm@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.18.20.tgz#fedb265bc3a589c84cc11f810804f234947c3682"
integrity sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==
"@esbuild/android-arm@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.20.2.tgz#3b488c49aee9d491c2c8f98a909b785870d6e995"
integrity sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==
"@esbuild/android-arm@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28"
integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==
"@esbuild/android-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.18.20.tgz#35cf419c4cfc8babe8893d296cd990e9e9f756f2"
integrity sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==
"@esbuild/android-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.20.2.tgz#3b1628029e5576249d2b2d766696e50768449f98"
integrity sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==
"@esbuild/android-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e"
integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==
"@esbuild/darwin-arm64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1"
integrity sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==
"@esbuild/darwin-arm64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz#6e8517a045ddd86ae30c6608c8475ebc0c4000bb"
integrity sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==
"@esbuild/darwin-arm64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a"
integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==
"@esbuild/darwin-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz#d70d5790d8bf475556b67d0f8b7c5bdff053d85d"
integrity sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==
"@esbuild/darwin-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz#90ed098e1f9dd8a9381695b207e1cff45540a0d0"
integrity sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==
"@esbuild/darwin-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22"
integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==
"@esbuild/freebsd-arm64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz#98755cd12707f93f210e2494d6a4b51b96977f54"
integrity sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==
"@esbuild/freebsd-arm64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz#d71502d1ee89a1130327e890364666c760a2a911"
integrity sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==
"@esbuild/freebsd-arm64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e"
integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==
"@esbuild/freebsd-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz#c1eb2bff03915f87c29cece4c1a7fa1f423b066e"
integrity sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==
"@esbuild/freebsd-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz#aa5ea58d9c1dd9af688b8b6f63ef0d3d60cea53c"
integrity sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==
"@esbuild/freebsd-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261"
integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==
"@esbuild/linux-arm64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz#bad4238bd8f4fc25b5a021280c770ab5fc3a02a0"
integrity sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==
"@esbuild/linux-arm64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz#055b63725df678379b0f6db9d0fa85463755b2e5"
integrity sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==
"@esbuild/linux-arm64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b"
integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==
"@esbuild/linux-arm@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz#3e617c61f33508a27150ee417543c8ab5acc73b0"
integrity sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==
"@esbuild/linux-arm@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz#76b3b98cb1f87936fbc37f073efabad49dcd889c"
integrity sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==
"@esbuild/linux-arm@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9"
integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==
"@esbuild/linux-ia32@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz#699391cccba9aee6019b7f9892eb99219f1570a7"
integrity sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==
"@esbuild/linux-ia32@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz#c0e5e787c285264e5dfc7a79f04b8b4eefdad7fa"
integrity sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==
"@esbuild/linux-ia32@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2"
integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==
"@esbuild/linux-loong64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz#e6fccb7aac178dd2ffb9860465ac89d7f23b977d"
integrity sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==
"@esbuild/linux-loong64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz#a6184e62bd7cdc63e0c0448b83801001653219c5"
integrity sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==
"@esbuild/linux-loong64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df"
integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==
"@esbuild/linux-mips64el@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz#eeff3a937de9c2310de30622a957ad1bd9183231"
integrity sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==
"@esbuild/linux-mips64el@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz#d08e39ce86f45ef8fc88549d29c62b8acf5649aa"
integrity sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==
"@esbuild/linux-mips64el@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe"
integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==
"@esbuild/linux-ppc64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz#2f7156bde20b01527993e6881435ad79ba9599fb"
integrity sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==
"@esbuild/linux-ppc64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz#8d252f0b7756ffd6d1cbde5ea67ff8fd20437f20"
integrity sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==
"@esbuild/linux-ppc64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4"
integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==
"@esbuild/linux-riscv64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz#6628389f210123d8b4743045af8caa7d4ddfc7a6"
integrity sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==
"@esbuild/linux-riscv64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz#19f6dcdb14409dae607f66ca1181dd4e9db81300"
integrity sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==
"@esbuild/linux-riscv64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc"
integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==
"@esbuild/linux-s390x@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz#255e81fb289b101026131858ab99fba63dcf0071"
integrity sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==
"@esbuild/linux-s390x@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz#3c830c90f1a5d7dd1473d5595ea4ebb920988685"
integrity sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==
"@esbuild/linux-s390x@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de"
integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==
"@esbuild/linux-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz#c7690b3417af318a9b6f96df3031a8865176d338"
integrity sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==
"@esbuild/linux-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz#86eca35203afc0d9de0694c64ec0ab0a378f6fff"
integrity sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==
"@esbuild/linux-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0"
integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==
"@esbuild/netbsd-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz#30e8cd8a3dded63975e2df2438ca109601ebe0d1"
integrity sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==
"@esbuild/netbsd-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz#e771c8eb0e0f6e1877ffd4220036b98aed5915e6"
integrity sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==
"@esbuild/netbsd-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047"
integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==
"@esbuild/openbsd-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz#7812af31b205055874c8082ea9cf9ab0da6217ae"
integrity sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==
"@esbuild/openbsd-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz#9a795ae4b4e37e674f0f4d716f3e226dd7c39baf"
integrity sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==
"@esbuild/openbsd-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70"
integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==
"@esbuild/sunos-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz#d5c275c3b4e73c9b0ecd38d1ca62c020f887ab9d"
integrity sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==
"@esbuild/sunos-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz#7df23b61a497b8ac189def6e25a95673caedb03f"
integrity sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==
"@esbuild/sunos-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b"
integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==
"@esbuild/win32-arm64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz#73bc7f5a9f8a77805f357fab97f290d0e4820ac9"
integrity sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==
"@esbuild/win32-arm64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz#f1ae5abf9ca052ae11c1bc806fb4c0f519bacf90"
integrity sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==
"@esbuild/win32-arm64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d"
integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==
"@esbuild/win32-ia32@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz#ec93cbf0ef1085cc12e71e0d661d20569ff42102"
integrity sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==
"@esbuild/win32-ia32@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz#241fe62c34d8e8461cd708277813e1d0ba55ce23"
integrity sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==
"@esbuild/win32-ia32@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b"
integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==
"@esbuild/win32-x64@0.18.20":
version "0.18.20"
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d"
integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==
"@esbuild/win32-x64@0.20.2":
version "0.20.2"
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz#9c907b21e30a52db959ba4f80bb01a0cc403d5cc"
integrity sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==
"@esbuild/win32-x64@0.21.5":
version "0.21.5"
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c"
integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==
"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0":
version "4.4.0"
@ -5941,9 +5941,9 @@
integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==
"@types/node@>=8.1.0":
version "20.14.2"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.2.tgz#a5f4d2bcb4b6a87bffcaa717718c5a0f208f4a18"
integrity sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==
version "20.14.5"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.14.5.tgz#fe35e3022ebe58b8f201580eb24e1fcfc0f2487d"
integrity sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA==
dependencies:
undici-types "~5.26.4"
@ -6881,11 +6881,18 @@ acorn-walk@^8.0.2, acorn-walk@^8.1.1:
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
acorn-walk@^8.2.0, acorn-walk@^8.3.2:
acorn-walk@^8.2.0:
version "8.3.2"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa"
integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==
acorn-walk@^8.3.2:
version "8.3.3"
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e"
integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==
dependencies:
acorn "^8.11.0"
acorn@^5.2.1, acorn@^5.7.3:
version "5.7.4"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e"
@ -6901,6 +6908,11 @@ acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b"
integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==
acorn@^8.11.0:
version "8.12.0"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.0.tgz#1627bfa2e058148036133b8d9b51a700663c294c"
integrity sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==
acorn@^8.11.3, acorn@^8.8.1:
version "8.11.3"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
@ -10284,7 +10296,7 @@ engine.io-parser@~5.0.3:
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45"
integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw==
engine.io@~6.4.1:
engine.io@~6.4.2:
version "6.4.2"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
@ -10542,34 +10554,34 @@ esbuild@^0.18.10, esbuild@^0.18.17:
"@esbuild/win32-ia32" "0.18.20"
"@esbuild/win32-x64" "0.18.20"
esbuild@^0.20.1:
version "0.20.2"
resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.20.2.tgz#9d6b2386561766ee6b5a55196c6d766d28c87ea1"
integrity sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==
esbuild@^0.21.3:
version "0.21.5"
resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d"
integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==
optionalDependencies:
"@esbuild/aix-ppc64" "0.20.2"
"@esbuild/android-arm" "0.20.2"
"@esbuild/android-arm64" "0.20.2"
"@esbuild/android-x64" "0.20.2"
"@esbuild/darwin-arm64" "0.20.2"
"@esbuild/darwin-x64" "0.20.2"
"@esbuild/freebsd-arm64" "0.20.2"
"@esbuild/freebsd-x64" "0.20.2"
"@esbuild/linux-arm" "0.20.2"
"@esbuild/linux-arm64" "0.20.2"
"@esbuild/linux-ia32" "0.20.2"
"@esbuild/linux-loong64" "0.20.2"
"@esbuild/linux-mips64el" "0.20.2"
"@esbuild/linux-ppc64" "0.20.2"
"@esbuild/linux-riscv64" "0.20.2"
"@esbuild/linux-s390x" "0.20.2"
"@esbuild/linux-x64" "0.20.2"
"@esbuild/netbsd-x64" "0.20.2"
"@esbuild/openbsd-x64" "0.20.2"
"@esbuild/sunos-x64" "0.20.2"
"@esbuild/win32-arm64" "0.20.2"
"@esbuild/win32-ia32" "0.20.2"
"@esbuild/win32-x64" "0.20.2"
"@esbuild/aix-ppc64" "0.21.5"
"@esbuild/android-arm" "0.21.5"
"@esbuild/android-arm64" "0.21.5"
"@esbuild/android-x64" "0.21.5"
"@esbuild/darwin-arm64" "0.21.5"
"@esbuild/darwin-x64" "0.21.5"
"@esbuild/freebsd-arm64" "0.21.5"
"@esbuild/freebsd-x64" "0.21.5"
"@esbuild/linux-arm" "0.21.5"
"@esbuild/linux-arm64" "0.21.5"
"@esbuild/linux-ia32" "0.21.5"
"@esbuild/linux-loong64" "0.21.5"
"@esbuild/linux-mips64el" "0.21.5"
"@esbuild/linux-ppc64" "0.21.5"
"@esbuild/linux-riscv64" "0.21.5"
"@esbuild/linux-s390x" "0.21.5"
"@esbuild/linux-x64" "0.21.5"
"@esbuild/netbsd-x64" "0.21.5"
"@esbuild/openbsd-x64" "0.21.5"
"@esbuild/sunos-x64" "0.21.5"
"@esbuild/win32-arm64" "0.21.5"
"@esbuild/win32-ia32" "0.21.5"
"@esbuild/win32-x64" "0.21.5"
escalade@^3.1.1:
version "3.1.1"
@ -18139,9 +18151,9 @@ posthog-js@^1.118.0:
preact "^10.19.3"
posthog-js@^1.13.4:
version "1.139.1"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.139.1.tgz#25db31d94ce218357a2be43be4a55cfbb940f295"
integrity sha512-+JDu2S7z6sh9Q5kj0oh/W8PZJMQ1gSigWi7gbY4NwwCq2M3t0wNFjxlfHbAo1GncRWDxen+IC+3J7oJ8TJGnkA==
version "1.139.2"
resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.139.2.tgz#f8de29edf2770da47fcccb7838902d1e89d6b43d"
integrity sha512-myyuOADqZvYwgqmriwlKDEUDwLhscivFLh67UWBj4Wt9kOlmklvJb36W0ES2GAS6IdojbnGZGH5lF3heqreLWQ==
dependencies:
fflate "^0.4.8"
preact "^10.19.3"
@ -20148,17 +20160,25 @@ socket.io-parser@~4.2.1:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
socket.io@4.6.1:
version "4.6.1"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70"
integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA==
socket.io-parser@~4.2.4:
version "4.2.4"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83"
integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
socket.io@4.6.2:
version "4.6.2"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786"
integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==
dependencies:
accepts "~1.3.4"
base64id "~2.0.0"
debug "~4.3.2"
engine.io "~6.4.1"
engine.io "~6.4.2"
socket.io-adapter "~2.5.2"
socket.io-parser "~4.2.1"
socket.io-parser "~4.2.4"
socks-proxy-agent@^7.0.0:
version "7.0.0"
@ -21090,19 +21110,7 @@ tar@6.1.11:
mkdirp "^1.0.3"
yallist "^4.0.0"
tar@6.1.15:
version "6.1.15"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
dependencies:
chownr "^2.0.0"
fs-minipass "^2.0.0"
minipass "^5.0.0"
minizlib "^2.1.1"
mkdirp "^1.0.3"
yallist "^4.0.0"
tar@^6.1.11, tar@^6.1.2:
tar@6.2.1, tar@^6.1.11, tar@^6.1.2:
version "6.2.1"
resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a"
integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==
@ -22195,11 +22203,11 @@ vite@^4.5.0:
fsevents "~2.3.2"
vite@^5.0.0:
version "5.2.13"
resolved "https://registry.yarnpkg.com/vite/-/vite-5.2.13.tgz#945ababcbe3d837ae2479c29f661cd20bc5e1a80"
integrity sha512-SSq1noJfY9pR3I1TUENL3rQYDQCFqgD+lM6fTRAM8Nv6Lsg5hDLaXkjETVeBt+7vZBCMoibD+6IWnT2mJ+Zb/A==
version "5.3.1"
resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.1.tgz#bb2ca6b5fd7483249d3e86b25026e27ba8a663e6"
integrity sha512-XBmSKRLXLxiaPYamLv3/hnP/KXDai1NDexN0FpkTaZXTfycHvkRHoenpgl/fvuK/kPbB6xAgoyiryAhQNxYmAQ==
dependencies:
esbuild "^0.20.1"
esbuild "^0.21.3"
postcss "^8.4.38"
rollup "^4.13.0"
optionalDependencies:
@ -22640,14 +22648,14 @@ write-stream@~0.4.3:
readable-stream "~0.0.2"
ws@^7.4.6:
version "7.5.9"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591"
integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==
version "7.5.10"
resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9"
integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==
ws@^8.13.0:
version "8.13.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0"
integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==
version "8.17.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
ws@~8.11.0:
version "8.11.0"