Merge branch 'master' into demock-dynamodb
commit d930621bd1

@@ -88,6 +88,16 @@ export default async function setup() {
 content: `
 [log]
 level = warn
+
+[httpd]
+socket_options = [{nodelay, true}]
+
+[couchdb]
+single_node = true
+
+[cluster]
+n = 1
+q = 1
 `,
 target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
 },
@@ -1,6 +1,6 @@
 {
 "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-"version": "3.4.22",
+"version": "3.4.24",
 "npmClient": "yarn",
 "concurrency": 20,
 "command": {
@@ -3,7 +3,6 @@ import { newid } from "../utils"
 import { Queue, QueueOptions, JobOptions } from "./queue"
 import { helpers } from "@budibase/shared-core"
 import { Job, JobId, JobInformation } from "bull"
-import { cloneDeep } from "lodash"
 
 function jobToJobInformation(job: Job): JobInformation {
 let cron = ""
@@ -88,9 +87,7 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
 */
 async process(concurrencyOrFunc: number | any, func?: any) {
 func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
-this._emitter.on("message", async msg => {
-const message = cloneDeep(msg)
-
+this._emitter.on("message", async message => {
 // For the purpose of testing, don't trigger cron jobs immediately.
 // Require the test to trigger them manually with timestamps.
 if (!message.manualTrigger && message.opts?.repeat != null) {
@@ -165,6 +162,9 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
 opts,
 }
 this._messages.push(message)
+if (this._messages.length > 1000) {
+this._messages.shift()
+}
 this._addCount++
 this._emitter.emit("message", message)
 }
@@ -1,22 +1,26 @@
-<script>
+<script lang="ts" context="module">
+type Option = any
+</script>
+
+<script lang="ts">
 import Picker from "./Picker.svelte"
 import { createEventDispatcher } from "svelte"
 
-export let value = []
-export let id = null
-export let placeholder = null
-export let disabled = false
-export let options = []
-export let getOptionLabel = option => option
-export let getOptionValue = option => option
-export let readonly = false
-export let autocomplete = false
-export let sort = false
-export let autoWidth = false
-export let searchTerm = null
-export let customPopoverHeight = undefined
-export let open = false
-export let loading
+export let value: string[] = []
+export let id: string | undefined = undefined
+export let placeholder: string | null = null
+export let disabled: boolean = false
+export let options: Option[] = []
+export let getOptionLabel = (option: Option, _index?: number) => option
+export let getOptionValue = (option: Option, _index?: number) => option
+export let readonly: boolean = false
+export let autocomplete: boolean = false
+export let sort: boolean = false
+export let autoWidth: boolean = false
+export let searchTerm: string | null = null
+export let customPopoverHeight: string | undefined = undefined
+export let open: boolean = false
+export let loading: boolean
 export let onOptionMouseenter = () => {}
 export let onOptionMouseleave = () => {}
 
@@ -27,10 +31,15 @@
 $: optionLookupMap = getOptionLookupMap(options)
 
 $: fieldText = getFieldText(arrayValue, optionLookupMap, placeholder)
-$: isOptionSelected = optionValue => selectedLookupMap[optionValue] === true
+$: isOptionSelected = (optionValue: string) =>
+selectedLookupMap[optionValue] === true
 $: toggleOption = makeToggleOption(selectedLookupMap, arrayValue)
 
-const getFieldText = (value, map, placeholder) => {
+const getFieldText = (
+value: string[],
+map: Record<string, any> | null,
+placeholder: string | null
+) => {
 if (Array.isArray(value) && value.length > 0) {
 if (!map) {
 return ""
@@ -42,8 +51,8 @@
 }
 }
 
-const getSelectedLookupMap = value => {
-let map = {}
+const getSelectedLookupMap = (value: string[]) => {
+const map: Record<string, boolean> = {}
 if (Array.isArray(value) && value.length > 0) {
 value.forEach(option => {
 if (option) {
@@ -54,22 +63,23 @@
 return map
 }
 
-const getOptionLookupMap = options => {
-let map = null
-if (options?.length) {
-map = {}
-options.forEach((option, idx) => {
-const optionValue = getOptionValue(option, idx)
-if (optionValue != null) {
-map[optionValue] = getOptionLabel(option, idx) || ""
-}
-})
+const getOptionLookupMap = (options: Option[]) => {
+if (!options?.length) {
+return null
 }
+
+const map: Record<string, any> = {}
+options.forEach((option, idx) => {
+const optionValue = getOptionValue(option, idx)
+if (optionValue != null) {
+map[optionValue] = getOptionLabel(option, idx) || ""
+}
+})
 return map
 }
 
-const makeToggleOption = (map, value) => {
-return optionValue => {
+const makeToggleOption = (map: Record<string, boolean>, value: string[]) => {
+return (optionValue: string) => {
 if (map[optionValue]) {
 const filtered = value.filter(option => option !== optionValue)
 dispatch("change", filtered)
@@ -1,9 +1,12 @@
 import posthog from "posthog-js"
-import { Events } from "./constants"
 
 export default class PosthogClient {
-constructor(token) {
+token: string
+initialised: boolean
+
+constructor(token: string) {
 this.token = token
+this.initialised = false
 }
 
 init() {
@@ -12,6 +15,8 @@ export default class PosthogClient {
 posthog.init(this.token, {
 autocapture: false,
 capture_pageview: false,
+// disable by default
+disable_session_recording: true,
 })
 posthog.set_config({ persistence: "cookie" })
 
@@ -22,7 +27,7 @@ export default class PosthogClient {
 * Set the posthog context to the current user
 * @param {String} id - unique user id
 */
-identify(id) {
+identify(id: string) {
 if (!this.initialised) return
 
 posthog.identify(id)
@@ -32,7 +37,7 @@ export default class PosthogClient {
 * Update user metadata associated with current user in posthog
 * @param {Object} meta - user fields
 */
-updateUser(meta) {
+updateUser(meta: Record<string, any>) {
 if (!this.initialised) return
 
 posthog.people.set(meta)
@@ -43,28 +48,22 @@ export default class PosthogClient {
 * @param {String} event - event identifier
 * @param {Object} props - properties for the event
 */
-captureEvent(eventName, props) {
-if (!this.initialised) return
-props.sourceApp = "builder"
-posthog.capture(eventName, props)
-}
-
-/**
-* Submit NPS feedback to posthog.
-* @param {Object} values - NPS Values
-*/
-npsFeedback(values) {
-if (!this.initialised) return
-
-localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
-
-const prefixedFeedback = {}
-for (let key in values) {
-prefixedFeedback[`feedback_${key}`] = values[key]
+captureEvent(event: string, props: Record<string, any>) {
+if (!this.initialised) {
+return
 }
 
-posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
+props.sourceApp = "builder"
+posthog.capture(event, props)
+}
+
+enableSessionRecording() {
+if (!this.initialised) {
+return
+}
+posthog.set_config({
+disable_session_recording: false,
+})
 }
 
 /**
@@ -31,6 +31,10 @@ class AnalyticsHub {
 posthog.captureEvent(eventName, props)
 }
 
+enableSessionRecording() {
+posthog.enableSessionRecording()
+}
+
 async logout() {
 posthog.logout()
 }
@@ -145,9 +145,11 @@
 return
 }
 popoverAnchor = target
+
+const doc = new DOMParser().parseFromString(helper.description, "text/html")
 hoverTarget = {
 type: "helper",
-description: helper.description,
+description: doc.body.textContent || "",
 code: getHelperExample(helper, mode === BindingMode.JavaScript),
 }
 popover.show()
@@ -241,20 +243,19 @@
 >
 {#if hoverTarget.description}
 <div>
-<!-- eslint-disable-next-line svelte/no-at-html-tags-->
-{@html hoverTarget.description}
+{hoverTarget.description}
 </div>
 {/if}
 {#if hoverTarget.code}
-{#if mode === BindingMode.JavaScript}
+{#if mode === BindingMode.Text || (mode === BindingMode.JavaScript && hoverTarget.type === "binding")}
+<!-- eslint-disable-next-line svelte/no-at-html-tags-->
+<pre>{@html hoverTarget.code}</pre>
+{:else}
 <CodeEditor
 value={hoverTarget.code?.trim()}
 mode={EditorModes.JS}
 readonly
 />
-{:else if mode === BindingMode.Text}
-<!-- eslint-disable-next-line svelte/no-at-html-tags-->
-<pre>{@html hoverTarget.code}</pre>
 {/if}
 {/if}
 </div>
@@ -8,6 +8,7 @@ import {
 SystemStatusResponse,
 } from "@budibase/types"
 import { BudiStore } from "../BudiStore"
+import Analytics from "../../analytics"
 
 interface AdminState extends GetEnvironmentResponse {
 loaded: boolean
@@ -33,6 +34,8 @@ export class AdminStore extends BudiStore<AdminState> {
 await this.getEnvironment()
 // enable system status checks in the cloud
 if (get(this.store).cloud) {
+// in cloud allow this
+Analytics.enableSessionRecording()
 await this.getSystemStatus()
 this.checkStatus()
 }
@@ -5,11 +5,12 @@
 import { memo } from "@budibase/frontend-core"
 import Placeholder from "../Placeholder.svelte"
 import InnerForm from "./InnerForm.svelte"
+import type { FieldApi } from "."
 
 export let label: string | undefined = undefined
 export let field: string | undefined = undefined
 export let fieldState: any
-export let fieldApi: any
+export let fieldApi: FieldApi
 export let fieldSchema: any
 export let defaultValue: string | undefined = undefined
 export let type: any
@@ -1,6 +1,6 @@
 <script lang="ts">
 import { CoreSelect, CoreMultiselect } from "@budibase/bbui"
-import { FieldType, InternalTable } from "@budibase/types"
+import { BasicOperator, FieldType, InternalTable } from "@budibase/types"
 import { fetchData, Utils } from "@budibase/frontend-core"
 import { getContext } from "svelte"
 import Field from "./Field.svelte"
@@ -9,10 +9,11 @@
 RelationshipFieldMetadata,
 Row,
 } from "@budibase/types"
+import type { FieldApi, FieldState } from "."
 
 export let field: string | undefined = undefined
 export let label: string | undefined = undefined
-export let placeholder: any = undefined
+export let placeholder: string | undefined = undefined
 export let disabled: boolean = false
 export let readonly: boolean = false
 export let validation: any
@@ -35,12 +36,13 @@
 const { API } = getContext("sdk")
 
 // Field state
-let fieldState: any
-let fieldApi: any
+let fieldState: FieldState<string | string[]> | undefined
+let fieldApi: FieldApi
 let fieldSchema: RelationshipFieldMetadata | undefined
 
 // Local UI state
-let searchTerm: any
+let searchTerm: string
 let open: boolean = false
 
 // Options state
@@ -106,17 +108,14 @@
 filter: SearchFilter[],
 linkedTableId?: string
 ) => {
-if (!linkedTableId) {
-return undefined
-}
 const datasource =
-datasourceType === "table"
+dsType === "table"
 ? {
-type: datasourceType,
-tableId: fieldSchema?.tableId!,
+type: dsType,
+tableId: linkedTableId!,
 }
 : {
-type: datasourceType,
+type: dsType,
 tableId: InternalTable.USER_METADATA,
 }
 return fetchData({
@@ -306,14 +305,14 @@
 }
 
 // Ensure we match all filters, rather than any
-let newFilter: any = filter
+let newFilter = filter
 if (searchTerm) {
 // @ts-expect-error this doesn't fit types, but don't want to change it yet
 newFilter = (newFilter || []).filter(x => x.operator !== "allOr")
 newFilter.push({
 // Use a big numeric prefix to avoid clashing with an existing filter
 field: `999:${primaryDisplay}`,
-operator: "string",
+operator: BasicOperator.STRING,
 value: searchTerm,
 })
 }
@@ -19,3 +19,15 @@ export { default as codescanner } from "./CodeScannerField.svelte"
 export { default as signaturesinglefield } from "./SignatureField.svelte"
 export { default as bbreferencefield } from "./BBReferenceField.svelte"
 export { default as bbreferencesinglefield } from "./BBReferenceSingleField.svelte"
+
+export interface FieldApi {
+setValue(value: any): boolean
+deregister(): void
+}
+
+export interface FieldState<T> {
+value: T
+fieldId: string
+disabled: boolean
+readonly: boolean
+}
@@ -290,8 +290,7 @@ describe("/automations", () => {
 await setup.delay(500)
 let elements = await getAllTableRows(config)
 // don't test it unless there are values to test
-if (elements.length > 1) {
-expect(elements.length).toBeGreaterThanOrEqual(MAX_RETRIES)
+if (elements.length >= 1) {
 expect(elements[0].name).toEqual("Test")
 expect(elements[0].description).toEqual("TEST")
 return
@@ -166,18 +166,6 @@ if (descriptions.length) {
 )
 }
 
-const resetRowUsage = async () => {
-await config.doInContext(
-undefined,
-async () =>
-await quotas.setUsage(
-0,
-StaticQuotaName.ROWS,
-QuotaUsageType.STATIC
-)
-)
-}
-
 const getRowUsage = async () => {
 const { total } = await config.doInContext(undefined, () =>
 quotas.getCurrentUsageValues(
@@ -188,19 +176,27 @@ if (descriptions.length) {
 return total
 }
 
-const assertRowUsage = async (expected: number) => {
-const usage = await getRowUsage()
+async function expectRowUsage(expected: number, f: () => Promise<void>) {
+const before = await getRowUsage()
+await f()
+const after = await getRowUsage()
+const usage = after - before
 
 // Because our quota tracking is not perfect, we allow a 10% margin of
-// error. This is to account for the fact that parallel writes can result
-// in some quota updates getting lost. We don't have any need to solve this
-// right now, so we just allow for some error.
+// error. This is to account for the fact that parallel writes can
+// result in some quota updates getting lost. We don't have any need
+// to solve this right now, so we just allow for some error.
 if (expected === 0) {
 expect(usage).toEqual(0)
 return
 }
-expect(usage).toBeGreaterThan(expected * 0.9)
-expect(usage).toBeLessThan(expected * 1.1)
+if (usage < 0) {
+expect(usage).toBeGreaterThan(expected * 1.1)
+expect(usage).toBeLessThan(expected * 0.9)
+} else {
+expect(usage).toBeGreaterThan(expected * 0.9)
+expect(usage).toBeLessThan(expected * 1.1)
+}
 }
 
 const defaultRowFields = isInternal
@@ -215,91 +211,86 @@ if (descriptions.length) {
 table = await config.api.table.save(defaultTable())
 })
 
-beforeEach(async () => {
-await resetRowUsage()
-})
-
 describe("create", () => {
 it("creates a new row successfully", async () => {
-const rowUsage = await getRowUsage()
+await expectRowUsage(isInternal ? 1 : 0, async () => {
 const row = await config.api.row.save(table._id!, {
 name: "Test Contact",
+})
+expect(row.name).toEqual("Test Contact")
+expect(row._rev).toBeDefined()
 })
-expect(row.name).toEqual("Test Contact")
-expect(row._rev).toBeDefined()
-await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
 })
 
 it("fails to create a row for a table that does not exist", async () => {
-const rowUsage = await getRowUsage()
+await expectRowUsage(0, async () => {
 await config.api.row.save("1234567", {}, { status: 404 })
-await assertRowUsage(rowUsage)
+})
 })
 
 it("fails to create a row if required fields are missing", async () => {
-const rowUsage = await getRowUsage()
+await expectRowUsage(0, async () => {
 const table = await config.api.table.save(
-saveTableRequest({
-schema: {
-required: {
-type: FieldType.STRING,
-name: "required",
-constraints: {
-type: "string",
-presence: true,
-},
-},
-},
-})
-)
-await config.api.row.save(
-table._id!,
-{},
-{
-status: 500,
-body: {
-validationErrors: {
-required: ["can't be blank"],
-},
-},
-}
-)
-await assertRowUsage(rowUsage)
-})
-
-isInternal &&
-it("increment row autoId per create row request", async () => {
-const rowUsage = await getRowUsage()
-
-const newTable = await config.api.table.save(
 saveTableRequest({
 schema: {
-"Row ID": {
-name: "Row ID",
-type: FieldType.NUMBER,
-subtype: AutoFieldSubType.AUTO_ID,
-icon: "ri-magic-line",
-autocolumn: true,
+required: {
+type: FieldType.STRING,
+name: "required",
 constraints: {
-type: "number",
+type: "string",
 presence: true,
-numericality: {
-greaterThanOrEqualTo: "",
-lessThanOrEqualTo: "",
-},
 },
 },
 },
 })
 )
+await config.api.row.save(
+table._id!,
+{},
+{
+status: 500,
+body: {
+validationErrors: {
+required: ["can't be blank"],
+},
+},
+}
+)
+})
+})
+
-let previousId = 0
-for (let i = 0; i < 10; i++) {
-const row = await config.api.row.save(newTable._id!, {})
-expect(row["Row ID"]).toBeGreaterThan(previousId)
-previousId = row["Row ID"]
-}
-await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
+isInternal &&
+it("increment row autoId per create row request", async () => {
+await expectRowUsage(isInternal ? 10 : 0, async () => {
+const newTable = await config.api.table.save(
+saveTableRequest({
+schema: {
+"Row ID": {
+name: "Row ID",
+type: FieldType.NUMBER,
+subtype: AutoFieldSubType.AUTO_ID,
+icon: "ri-magic-line",
+autocolumn: true,
+constraints: {
+type: "number",
+presence: true,
+numericality: {
+greaterThanOrEqualTo: "",
+lessThanOrEqualTo: "",
+},
+},
+},
+},
+})
+)
+
+let previousId = 0
+for (let i = 0; i < 10; i++) {
+const row = await config.api.row.save(newTable._id!, {})
+expect(row["Row ID"]).toBeGreaterThan(previousId)
+previousId = row["Row ID"]
+}
+})
 })
 
 isInternal &&
@@ -985,16 +976,16 @@ if (descriptions.length) {
 describe("update", () => {
 it("updates an existing row successfully", async () => {
 const existing = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.save(table._id!, {
-_id: existing._id,
-_rev: existing._rev,
-name: "Updated Name",
+await expectRowUsage(0, async () => {
+const res = await config.api.row.save(table._id!, {
+_id: existing._id,
+_rev: existing._rev,
+name: "Updated Name",
+})
+
+expect(res.name).toEqual("Updated Name")
 })
-
-expect(res.name).toEqual("Updated Name")
-await assertRowUsage(rowUsage)
 })
 
 !isInternal &&
@@ -1177,23 +1168,22 @@ if (descriptions.length) {
 it("should update only the fields that are supplied", async () => {
 const existing = await config.api.row.save(table._id!, {})
 
-const rowUsage = await getRowUsage()
-const row = await config.api.row.patch(table._id!, {
-_id: existing._id!,
-_rev: existing._rev!,
-tableId: table._id!,
-name: "Updated Name",
+await expectRowUsage(0, async () => {
+const row = await config.api.row.patch(table._id!, {
+_id: existing._id!,
+_rev: existing._rev!,
+tableId: table._id!,
+name: "Updated Name",
+})
+
+expect(row.name).toEqual("Updated Name")
+expect(row.description).toEqual(existing.description)
+const savedRow = await config.api.row.get(table._id!, row._id!)
+expect(savedRow.description).toEqual(existing.description)
+expect(savedRow.name).toEqual("Updated Name")
 })
 
-expect(row.name).toEqual("Updated Name")
-expect(row.description).toEqual(existing.description)
-
-const savedRow = await config.api.row.get(table._id!, row._id!)
-
-expect(savedRow.description).toEqual(existing.description)
-expect(savedRow.name).toEqual("Updated Name")
-await assertRowUsage(rowUsage)
 })
 
 it("should update only the fields that are supplied and emit the correct oldRow", async () => {
@@ -1224,20 +1214,19 @@ if (descriptions.length) {
 
 it("should throw an error when given improper types", async () => {
 const existing = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-await config.api.row.patch(
-table._id!,
-{
-_id: existing._id!,
-_rev: existing._rev!,
-tableId: table._id!,
-name: 1,
-},
-{ status: 400 }
-)
-await assertRowUsage(rowUsage)
+await expectRowUsage(0, async () => {
+await config.api.row.patch(
+table._id!,
+{
+_id: existing._id!,
+_rev: existing._rev!,
+tableId: table._id!,
+name: 1,
+},
+{ status: 400 }
+)
+})
 })
 
 it("should not overwrite links if those links are not set", async () => {
@@ -1452,25 +1441,25 @@ if (descriptions.length) {
 
 it("should be able to delete a row", async () => {
 const createdRow = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.bulkDelete(table._id!, {
-rows: [createdRow],
+await expectRowUsage(isInternal ? -1 : 0, async () => {
+const res = await config.api.row.bulkDelete(table._id!, {
+rows: [createdRow],
+})
+expect(res[0]._id).toEqual(createdRow._id)
 })
-expect(res[0]._id).toEqual(createdRow._id)
-await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
 })
 
 it("should be able to delete a row with ID only", async () => {
 const createdRow = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.bulkDelete(table._id!, {
-rows: [createdRow._id!],
+await expectRowUsage(isInternal ? -1 : 0, async () => {
+const res = await config.api.row.bulkDelete(table._id!, {
+rows: [createdRow._id!],
+})
+expect(res[0]._id).toEqual(createdRow._id)
+expect(res[0].tableId).toEqual(table._id!)
 })
-expect(res[0]._id).toEqual(createdRow._id)
-expect(res[0].tableId).toEqual(table._id!)
-await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
 })
 
 it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
@@ -1560,31 +1549,29 @@ if (descriptions.length) {
 })
 
 it("should return no errors on valid row", async () => {
-const rowUsage = await getRowUsage()
-const res = await config.api.row.validate(table._id!, {
-name: "ivan",
+await expectRowUsage(0, async () => {
+const res = await config.api.row.validate(table._id!, {
+name: "ivan",
+})
+
+expect(res.valid).toBe(true)
+expect(Object.keys(res.errors)).toEqual([])
 })
 
-expect(res.valid).toBe(true)
-expect(Object.keys(res.errors)).toEqual([])
-await assertRowUsage(rowUsage)
 })
 
 it("should errors on invalid row", async () => {
-const rowUsage = await getRowUsage()
-const res = await config.api.row.validate(table._id!, { name: 1 })
-if (isInternal) {
-expect(res.valid).toBe(false)
-expect(Object.keys(res.errors)).toEqual(["name"])
-} else {
-// Validation for external is not implemented, so it will always return valid
-expect(res.valid).toBe(true)
-expect(Object.keys(res.errors)).toEqual([])
-}
-await assertRowUsage(rowUsage)
+await expectRowUsage(0, async () => {
+const res = await config.api.row.validate(table._id!, { name: 1 })
+
+if (isInternal) {
+expect(res.valid).toBe(false)
+expect(Object.keys(res.errors)).toEqual(["name"])
+} else {
+// Validation for external is not implemented, so it will always return valid
+expect(res.valid).toBe(true)
+expect(Object.keys(res.errors)).toEqual([])
+}
+})
 })
 })
 
@@ -1596,15 +1583,15 @@ if (descriptions.length) {
 it("should be able to delete a bulk set of rows", async () => {
 const row1 = await config.api.row.save(table._id!, {})
 const row2 = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.bulkDelete(table._id!, {
-rows: [row1, row2],
+await expectRowUsage(isInternal ? -2 : 0, async () => {
+const res = await config.api.row.bulkDelete(table._id!, {
+rows: [row1, row2],
+})
+
+expect(res.length).toEqual(2)
+await config.api.row.get(table._id!, row1._id!, { status: 404 })
 })
 
-expect(res.length).toEqual(2)
-await config.api.row.get(table._id!, row1._id!, { status: 404 })
-await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
 })
 
 it("should be able to delete a variety of row set types", async () => {
@@ -1613,41 +1600,42 @@ if (descriptions.length) {
 config.api.row.save(table._id!, {}),
 config.api.row.save(table._id!, {}),
 ])
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.bulkDelete(table._id!, {
-rows: [row1, row2._id!, { _id: row3._id }],
+await expectRowUsage(isInternal ? -3 : 0, async () => {
+const res = await config.api.row.bulkDelete(table._id!, {
+rows: [row1, row2._id!, { _id: row3._id }],
+})
+
+expect(res.length).toEqual(3)
+await config.api.row.get(table._id!, row1._id!, { status: 404 })
 })
 
-expect(res.length).toEqual(3)
-await config.api.row.get(table._id!, row1._id!, { status: 404 })
-await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
 })
 
 it("should accept a valid row object and delete the row", async () => {
 const row1 = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
 
-const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
+await expectRowUsage(isInternal ? -1 : 0, async () => {
+const res = await config.api.row.delete(
+table._id!,
+row1 as DeleteRow
+)
+
 expect(res.id).toEqual(row1._id)
 await config.api.row.get(table._id!, row1._id!, { status: 404 })
-await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+})
 })
 
 it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
 "should ignore malformed/invalid delete request: %s",
 async (request: any) => {
-const rowUsage = await getRowUsage()
-await config.api.row.delete(table._id!, request, {
-status: 400,
-body: {
-message: "Invalid delete rows request",
-},
+await expectRowUsage(0, async () => {
+await config.api.row.delete(table._id!, request, {
+status: 400,
+body: {
+message: "Invalid delete rows request",
+},
+})
 })
 
-await assertRowUsage(rowUsage)
 }
 )
 })
@@ -1733,31 +1721,29 @@ if (descriptions.length) {
 })
 )
 
-const rowUsage = await getRowUsage()
-await config.api.row.bulkImport(table._id!, {
-rows: [
-{
-name: "Row 1",
-description: "Row 1 description",
-},
-{
-name: "Row 2",
-description: "Row 2 description",
-},
-],
+await expectRowUsage(isInternal ? 2 : 0, async () => {
+await config.api.row.bulkImport(table._id!, {
+rows: [
+{
+name: "Row 1",
+description: "Row 1 description",
+},
+{
+name: "Row 2",
+description: "Row 2 description",
+},
+],
+})
+
+const rows = await config.api.row.fetch(table._id!)
+expect(rows.length).toEqual(2)
+rows.sort((a, b) => a.name.localeCompare(b.name))
+expect(rows[0].name).toEqual("Row 1")
+expect(rows[0].description).toEqual("Row 1 description")
+expect(rows[1].name).toEqual("Row 2")
+expect(rows[1].description).toEqual("Row 2 description")
 })
 
-const rows = await config.api.row.fetch(table._id!)
-expect(rows.length).toEqual(2)
-
-rows.sort((a, b) => a.name.localeCompare(b.name))
-expect(rows[0].name).toEqual("Row 1")
-expect(rows[0].description).toEqual("Row 1 description")
-expect(rows[1].name).toEqual("Row 2")
-expect(rows[1].description).toEqual("Row 2 description")
-
-await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
 })
 
 isInternal &&
@@ -1782,35 +1768,33 @@ if (descriptions.length) {
 description: "Existing description",
 })
 
-const rowUsage = await getRowUsage()
-await config.api.row.bulkImport(table._id!, {
-rows: [
-{
-name: "Row 1",
-description: "Row 1 description",
-},
-{ ...existingRow, name: "Updated existing row" },
-{
-name: "Row 2",
-description: "Row 2 description",
-},
-],
-identifierFields: ["_id"],
+await expectRowUsage(2, async () => {
+await config.api.row.bulkImport(table._id!, {
+rows: [
+{
+name: "Row 1",
+description: "Row 1 description",
+},
+{ ...existingRow, name: "Updated existing row" },
+{
+name: "Row 2",
+description: "Row 2 description",
+},
+],
+identifierFields: ["_id"],
+})
+
+const rows = await config.api.row.fetch(table._id!)
+expect(rows.length).toEqual(3)
+rows.sort((a, b) => a.name.localeCompare(b.name))
+expect(rows[0].name).toEqual("Row 1")
+expect(rows[0].description).toEqual("Row 1 description")
+expect(rows[1].name).toEqual("Row 2")
+expect(rows[1].description).toEqual("Row 2 description")
+expect(rows[2].name).toEqual("Updated existing row")
+expect(rows[2].description).toEqual("Existing description")
 })
 
-const rows = await config.api.row.fetch(table._id!)
-expect(rows.length).toEqual(3)
-
-rows.sort((a, b) => a.name.localeCompare(b.name))
-expect(rows[0].name).toEqual("Row 1")
-expect(rows[0].description).toEqual("Row 1 description")
-expect(rows[1].name).toEqual("Row 2")
-expect(rows[1].description).toEqual("Row 2 description")
-expect(rows[2].name).toEqual("Updated existing row")
-expect(rows[2].description).toEqual("Existing description")
-
-await assertRowUsage(rowUsage + 2)
 })
 
 isInternal &&
@@ -1835,36 +1819,34 @@ if (descriptions.length) {
 description: "Existing description",
 })
 
-const rowUsage = await getRowUsage()
-await config.api.row.bulkImport(table._id!, {
-rows: [
-{
-name: "Row 1",
-description: "Row 1 description",
-},
-{ ...existingRow, name: "Updated existing row" },
-{
-name: "Row 2",
-description: "Row 2 description",
-},
-],
+await expectRowUsage(3, async () => {
+await config.api.row.bulkImport(table._id!, {
+rows: [
+{
+name: "Row 1",
+description: "Row 1 description",
+},
+{ ...existingRow, name: "Updated existing row" },
+{
+name: "Row 2",
+description: "Row 2 description",
+},
+],
+})
+
+const rows = await config.api.row.fetch(table._id!)
+expect(rows.length).toEqual(4)
+rows.sort((a, b) => a.name.localeCompare(b.name))
+expect(rows[0].name).toEqual("Existing row")
+expect(rows[0].description).toEqual("Existing description")
+expect(rows[1].name).toEqual("Row 1")
+expect(rows[1].description).toEqual("Row 1 description")
+expect(rows[2].name).toEqual("Row 2")
+expect(rows[2].description).toEqual("Row 2 description")
+expect(rows[3].name).toEqual("Updated existing row")
+expect(rows[3].description).toEqual("Existing description")
 })
 
-const rows = await config.api.row.fetch(table._id!)
-expect(rows.length).toEqual(4)
-
-rows.sort((a, b) => a.name.localeCompare(b.name))
-expect(rows[0].name).toEqual("Existing row")
-expect(rows[0].description).toEqual("Existing description")
-expect(rows[1].name).toEqual("Row 1")
-expect(rows[1].description).toEqual("Row 1 description")
-expect(rows[2].name).toEqual("Row 2")
-expect(rows[2].description).toEqual("Row 2 description")
-expect(rows[3].name).toEqual("Updated existing row")
-expect(rows[3].description).toEqual("Existing description")
-
-await assertRowUsage(rowUsage + 3)
 })
 
 // Upserting isn't yet supported in MSSQL / Oracle, see:
@@ -2187,29 +2169,29 @@ if (descriptions.length) {
 return { linkedTable, firstRow, secondRow }
 }
 )
-const rowUsage = await getRowUsage()
 
-// test basic enrichment
-const resBasic = await config.api.row.get(
-linkedTable._id!,
-secondRow._id!
-)
-expect(resBasic.link.length).toBe(1)
-expect(resBasic.link[0]).toEqual({
-_id: firstRow._id,
-primaryDisplay: firstRow.name,
+await expectRowUsage(0, async () => {
+// test basic enrichment
+const resBasic = await config.api.row.get(
+linkedTable._id!,
+secondRow._id!
+)
+expect(resBasic.link.length).toBe(1)
+expect(resBasic.link[0]).toEqual({
+_id: firstRow._id,
+primaryDisplay: firstRow.name,
+})
+
+// test full enrichment
+const resEnriched = await config.api.row.getEnriched(
+linkedTable._id!,
+secondRow._id!
+)
+expect(resEnriched.link.length).toBe(1)
+expect(resEnriched.link[0]._id).toBe(firstRow._id)
+expect(resEnriched.link[0].name).toBe("Test Contact")
+expect(resEnriched.link[0].description).toBe("original description")
 })
 
-// test full enrichment
-const resEnriched = await config.api.row.getEnriched(
-linkedTable._id!,
-secondRow._id!
-)
-expect(resEnriched.link.length).toBe(1)
-expect(resEnriched.link[0]._id).toBe(firstRow._id)
-expect(resEnriched.link[0].name).toBe("Test Contact")
-expect(resEnriched.link[0].description).toBe("original description")
-await assertRowUsage(rowUsage)
 })
 })
 
@@ -2826,34 +2826,44 @@ if (descriptions.length) {
 return total
 }
 
-const assertRowUsage = async (expected: number) => {
-const usage = await getRowUsage()
+async function expectRowUsage<T>(
+expected: number,
+f: () => Promise<T>
+): Promise<T> {
+const before = await getRowUsage()
+const result = await f()
+const after = await getRowUsage()
+const usage = after - before
 expect(usage).toBe(expected)
+return result
 }
 
 it("should be able to delete a row", async () => {
-const createdRow = await config.api.row.save(table._id!, {})
-const rowUsage = await getRowUsage()
-await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
-await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+const createdRow = await expectRowUsage(isInternal ? 1 : 0, () =>
+config.api.row.save(table._id!, {})
+)
+await expectRowUsage(isInternal ? -1 : 0, () =>
+config.api.row.bulkDelete(view.id, { rows: [createdRow] })
+)
 await config.api.row.get(table._id!, createdRow._id!, {
 status: 404,
 })
 })
 
 it("should be able to delete multiple rows", async () => {
-const rows = await Promise.all([
-config.api.row.save(table._id!, {}),
-config.api.row.save(table._id!, {}),
-config.api.row.save(table._id!, {}),
-])
-const rowUsage = await getRowUsage()
-
-await config.api.row.bulkDelete(view.id, {
-rows: [rows[0], rows[2]],
+const rows = await expectRowUsage(isInternal ? 3 : 0, async () => {
+return [
+await config.api.row.save(table._id!, {}),
+await config.api.row.save(table._id!, {}),
+await config.api.row.save(table._id!, {}),
+]
 })
 
-await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
+await expectRowUsage(isInternal ? -2 : 0, async () => {
+await config.api.row.bulkDelete(view.id, {
+rows: [rows[0], rows[2]],
+})
+})
 
 await config.api.row.get(table._id!, rows[0]._id!, {
 status: 404,
@@ -195,7 +195,34 @@ describe("Attempt to run a basic loop automation", () => {
 .serverLog({ text: "{{steps.1.iterations}}" })
 .test({ fields: {} })
 
+expect(results.steps[0].outputs.status).toBe(
+AutomationStepStatus.MAX_ITERATIONS
+)
 expect(results.steps[0].outputs.iterations).toBe(2)
+expect(results.steps[0].outputs.items).toHaveLength(2)
+expect(results.steps[0].outputs.items[0].message).toEndWith("test")
+expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
+})
+
+it("should stop when a failure condition is hit", async () => {
+const results = await createAutomationBuilder(config)
+.onAppAction()
+.loop({
+option: LoopStepType.ARRAY,
+binding: ["test", "test2", "test3"],
+failure: "test3",
+})
+.serverLog({ text: "{{loop.currentItem}}" })
+.serverLog({ text: "{{steps.1.iterations}}" })
+.test({ fields: {} })
+
+expect(results.steps[0].outputs.status).toBe(
+AutomationStepStatus.FAILURE_CONDITION
+)
+expect(results.steps[0].outputs.iterations).toBe(2)
+expect(results.steps[0].outputs.items).toHaveLength(2)
+expect(results.steps[0].outputs.items[0].message).toEndWith("test")
+expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
 })
 
 it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {
@@ -20,9 +20,12 @@ export interface TriggerOutput {
 
 export interface AutomationContext {
 trigger: AutomationTriggerResultOutputs
-steps: [AutomationTriggerResultOutputs, ...AutomationStepResultOutputs[]]
-stepsById: Record<string, AutomationStepResultOutputs>
+steps: Record<
+string,
+AutomationStepResultOutputs | AutomationTriggerResultOutputs
+>
 stepsByName: Record<string, AutomationStepResultOutputs>
+stepsById: Record<string, AutomationStepResultOutputs>
 env?: Record<string, string>
 user?: UserBindings
 settings?: {
@@ -31,4 +34,6 @@ export interface AutomationContext {
 company?: string
 }
 loop?: { currentItem: any }
+_stepIndex: number
+_error: boolean
 }
@@ -32,6 +32,8 @@ class AutomationEmitter implements ContextEmitter {
 
 if (chainAutomations === true) {
 return MAX_AUTOMATIONS_ALLOWED
+} else if (env.isTest()) {
+return 0
 } else if (chainAutomations === undefined && env.SELF_HOSTED) {
 return MAX_AUTOMATIONS_ALLOWED
 } else {
@@ -23,6 +23,6 @@ nock.enableNetConnect(host => {
 
 testContainerUtils.setupEnv(env, coreEnv)
 
-afterAll(() => {
+afterAll(async () => {
 timers.cleanup()
 })
@@ -146,8 +146,9 @@ export abstract class TestAPI {
 }
 }
 
+let resp: Response | undefined = undefined
 try {
-return await req
+resp = await req
 } catch (e: any) {
 // We've found that occasionally the connection between supertest and the
 // server supertest starts gets reset. Not sure why, but retrying it
@@ -161,6 +162,7 @@ export abstract class TestAPI {
 }
 throw e
 }
+return resp
 }
 
 protected async getHeaders(
@@ -143,7 +143,6 @@ async function branchMatches(
 branch: Readonly<Branch>
 ): Promise<boolean> {
 const toFilter: Record<string, any> = {}
-const preparedCtx = prepareContext(ctx)
 
 // Because we allow bindings on both the left and right of each condition in
 // automation branches, we can't pass the BranchSearchFilters directly to
@@ -160,9 +159,9 @@ async function branchMatches(
 filter.conditions = filter.conditions.map(evaluateBindings)
 } else {
 for (const [field, value] of Object.entries(filter)) {
-toFilter[field] = processStringSync(field, preparedCtx)
+toFilter[field] = processStringSync(field, ctx)
 if (typeof value === "string" && findHBSBlocks(value).length > 0) {
-filter[field] = processStringSync(value, preparedCtx)
+filter[field] = processStringSync(value, ctx)
 }
 }
 }
@ -178,17 +177,6 @@ async function branchMatches(
|
||||||
return result.length > 0
|
return result.length > 0
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareContext(context: AutomationContext) {
|
|
||||||
return {
|
|
||||||
...context,
|
|
||||||
steps: {
|
|
||||||
...context.steps,
|
|
||||||
...context.stepsById,
|
|
||||||
...context.stepsByName,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function enrichBaseContext(context: AutomationContext) {
|
async function enrichBaseContext(context: AutomationContext) {
|
||||||
context.env = await sdkUtils.getEnvironmentVariables()
|
context.env = await sdkUtils.getEnvironmentVariables()
|
||||||
|
|
||||||
|
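With prepareContext gone, branch conditions are templated directly against the automation context, because the context itself now carries step outputs under index, id, and name keys (see the Orchestrator hunks below). A rough illustration of that resolution step follows; the context shape is invented for the example, and the import assumes the usual @budibase/string-templates exports used elsewhere in this diff.

// Editor's illustrative sketch - not part of the commit diff.
import { processStringSync, findHBSBlocks } from "@budibase/string-templates"

// Invented context: step outputs are reachable straight off ctx.steps,
// so no merged "prepared" copy is needed before resolving bindings.
const ctx = {
  trigger: { row: { name: "Alice" } },
  steps: { "1": { row: { name: "Alice" } } },
}

const condition = "{{ steps.1.row.name }}"
if (findHBSBlocks(condition).length > 0) {
  // Resolves to "Alice" against the raw context.
  console.log(processStringSync(condition, ctx))
}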
@@ -304,41 +292,37 @@ class Orchestrator {
   }
 
   hasErrored(context: AutomationContext): boolean {
-    const [_trigger, ...steps] = context.steps
-    for (const step of steps) {
-      if (step.success === false) {
-        return true
-      }
-    }
-    return false
+    return context._error === true
   }
 
   async execute(): Promise<AutomationResults> {
     return await tracer.trace("execute", async span => {
       span.addTags({ appId: this.appId, automationId: this.automation._id })
 
-      const job = cloneDeep(this.job)
-      delete job.data.event.appId
-      delete job.data.event.metadata
+      const data = cloneDeep(this.job.data)
+      delete data.event.appId
+      delete data.event.metadata
 
-      if (this.isCron() && !job.data.event.timestamp) {
-        job.data.event.timestamp = Date.now()
+      if (this.isCron() && !data.event.timestamp) {
+        data.event.timestamp = Date.now()
       }
 
       const trigger: AutomationTriggerResult = {
-        id: job.data.automation.definition.trigger.id,
-        stepId: job.data.automation.definition.trigger.stepId,
+        id: data.automation.definition.trigger.id,
+        stepId: data.automation.definition.trigger.stepId,
         inputs: null,
-        outputs: job.data.event,
+        outputs: data.event,
       }
       const result: AutomationResults = { trigger, steps: [trigger] }
 
       const ctx: AutomationContext = {
         trigger: trigger.outputs,
-        steps: [trigger.outputs],
-        stepsById: {},
+        steps: { "0": trigger.outputs },
         stepsByName: {},
+        stepsById: {},
         user: trigger.outputs.user,
+        _error: false,
+        _stepIndex: 1,
       }
       await enrichBaseContext(ctx)
 
@@ -348,7 +332,7 @@ class Orchestrator {
       try {
         await helpers.withTimeout(timeout, async () => {
           const [stepOutputs, executionTime] = await utils.time(() =>
-            this.executeSteps(ctx, job.data.automation.definition.steps)
+            this.executeSteps(ctx, data.automation.definition.steps)
           )
 
           result.steps.push(...stepOutputs)
@@ -400,9 +384,20 @@ class Orchestrator {
     step: AutomationStep,
     result: AutomationStepResult
   ) {
-    ctx.steps.push(result.outputs)
+    ctx.steps[step.id] = result.outputs
+    ctx.steps[step.name || step.id] = result.outputs
 
     ctx.stepsById[step.id] = result.outputs
     ctx.stepsByName[step.name || step.id] = result.outputs
 
+    ctx._stepIndex ||= 0
+    ctx.steps[ctx._stepIndex] = result.outputs
+    ctx._stepIndex++
+
+    if (result.outputs.success === false) {
+      ctx._error = true
+    }
+
     results.push(result)
   }
 
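To summarise the new bookkeeping in the hunk above: each step's outputs are registered under its numeric index, its id, and its name, and a single _error flag replaces the old scan over context.steps. A self-contained sketch with assumed, simplified types (the real AutomationContext is richer than this):

// Editor's illustrative sketch - not part of the commit diff.
type StepOutputs = { success?: boolean; [key: string]: any }

interface SketchContext {
  steps: Record<string | number, StepOutputs>
  stepsById: Record<string, StepOutputs>
  stepsByName: Record<string, StepOutputs>
  _stepIndex: number
  _error: boolean
}

// Mirrors the diff: one outputs object, reachable three ways, plus an error flag.
function recordStepResult(
  ctx: SketchContext,
  step: { id: string; name?: string },
  outputs: StepOutputs
) {
  ctx.steps[step.id] = outputs
  ctx.steps[step.name || step.id] = outputs
  ctx.stepsById[step.id] = outputs
  ctx.stepsByName[step.name || step.id] = outputs
  ctx.steps[ctx._stepIndex++] = outputs
  if (outputs.success === false) {
    ctx._error = true
  }
}

// hasErrored() becomes a constant-time check instead of iterating every step.
function hasErrored(ctx: SketchContext): boolean {
  return ctx._error === true
}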
@@ -449,7 +444,7 @@ class Orchestrator {
     stepToLoop: AutomationStep
   ): Promise<AutomationStepResult> {
     return await tracer.trace("executeLoopStep", async span => {
-      await processObject(step.inputs, prepareContext(ctx))
+      await processObject(step.inputs, ctx)
 
       const maxIterations = getLoopMaxIterations(step)
       const items: Record<string, any>[] = []
@@ -478,6 +473,7 @@ class Orchestrator {
           return stepFailure(stepToLoop, {
             status: AutomationStepStatus.MAX_ITERATIONS,
             iterations,
+            items,
           })
         }
 
@@ -488,6 +484,8 @@ class Orchestrator {
           })
           return stepFailure(stepToLoop, {
             status: AutomationStepStatus.FAILURE_CONDITION,
+            iterations,
+            items,
           })
         }
 
@@ -558,7 +556,7 @@ class Orchestrator {
       }
 
       const inputs = automationUtils.cleanInputValues(
-        await processObject(cloneDeep(step.inputs), prepareContext(ctx)),
+        await processObject(cloneDeep(step.inputs), ctx),
        step.schema.inputs.properties
      )
 
@@ -566,7 +564,7 @@ class Orchestrator {
         inputs,
         appId: this.appId,
         emitter: this.emitter,
-        context: prepareContext(ctx),
+        context: ctx,
       })
 
       if (
@@ -117,7 +117,8 @@ export function isSupportedUserSearch(
     { op: BasicOperator.EQUAL, key: "_id" },
     { op: ArrayOperator.ONE_OF, key: "_id" },
   ]
-  for (const [key, operation] of Object.entries(query)) {
+  const { allOr, onEmptyFilter, ...filters } = query
+  for (const [key, operation] of Object.entries(filters)) {
     if (typeof operation !== "object") {
       return false
     }
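The change above splits the query-level modifiers off before validating individual entries, so allOr and onEmptyFilter are no longer mistaken for per-field operators. A hedged sketch of that shape follows; the types are simplified stand-ins, not the real search-filter definitions.

// Editor's illustrative sketch - not part of the commit diff.
type SimpleUserSearch = {
  allOr?: boolean
  onEmptyFilter?: "all" | "none"
  [operator: string]: any
}

// Only the remaining keys should be operator objects like { equal: { ... } }.
function onlyOperatorObjects(query: SimpleUserSearch): boolean {
  const { allOr, onEmptyFilter, ...filters } = query
  return Object.values(filters).every(op => typeof op === "object")
}

// Example: a query-level allOr no longer causes the check to fail.
console.log(onlyOperatorObjects({ allOr: true, equal: { _id: "user_1" } })) // true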
@@ -36,6 +36,7 @@ export const HelperFunctionNames = {
   ALL: "all",
   LITERAL: "literal",
   JS: "js",
+  DECODE_ID: "decodeId",
 }
 
 export const LITERAL_MARKER = "%LITERAL%"
@@ -25,13 +25,29 @@ function isObject(value: string | any[]) {
   )
 }
 
-const HELPERS = [
+export const HELPERS = [
   // external helpers
   new Helper(HelperFunctionNames.OBJECT, (value: any) => {
     return new Handlebars.SafeString(JSON.stringify(value))
   }),
   // javascript helper
   new Helper(HelperFunctionNames.JS, processJS, false),
+  new Helper(HelperFunctionNames.DECODE_ID, (_id: string | { _id: string }) => {
+    if (!_id) {
+      return []
+    }
+    // have to replace on the way back as we swapped out the double quotes
+    // when encoding, but JSON can't handle the single quotes
+    const id = typeof _id === "string" ? _id : _id._id
+    const decoded: string = decodeURIComponent(id).replace(/'/g, '"')
+    try {
+      const parsed = JSON.parse(decoded)
+      return Array.isArray(parsed) ? parsed : [parsed]
+    } catch (err) {
+      // wasn't json - likely was handlebars for a many to many
+      return [_id]
+    }
+  }),
   // this help is applied to all statements
   new Helper(
     HelperFunctionNames.ALL,
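As a quick illustration of the new helper's behaviour, the standalone copy of its logic below shows how encoded relationship ids decode: single quotes were swapped in at encode time because JSON needs double quotes, so decoding reverses both transformations before parsing. Example inputs and outputs are the editor's, not part of the diff; the test hunk that follows exercises the same cases through processString.

// Editor's illustrative sketch - not part of the commit diff.
function decodeId(_id: string | { _id: string }): any[] {
  if (!_id) {
    return []
  }
  const id = typeof _id === "string" ? _id : _id._id
  // Undo the quote swap performed at encode time, then URI-decode.
  const decoded = decodeURIComponent(id).replace(/'/g, '"')
  try {
    const parsed = JSON.parse(decoded)
    return Array.isArray(parsed) ? parsed : [parsed]
  } catch {
    // Not JSON - return the original value wrapped in an array.
    return [_id]
  }
}

console.log(decodeId(encodeURIComponent("[42]"))) // [ 42 ]
console.log(decodeId(encodeURIComponent("[1,2,3]"))) // [ 1, 2, 3 ]
console.log(decodeId("forty-two")) // [ "forty-two" ]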
@@ -517,3 +517,44 @@ describe("helper overlap", () => {
     expect(output).toEqual("a")
   })
 })
+
+describe("Test the decodeId helper", () => {
+  it("should decode a valid encoded ID", async () => {
+    const encodedId = encodeURIComponent("[42]") // "%5B42%5D"
+    const output = await processString("{{ decodeId id }}", { id: encodedId })
+    expect(output).toBe("42")
+  })
+
+  it("Should return an unchanged string if the string isn't encoded", async () => {
+    const unencodedId = "forty-two"
+    const output = await processString("{{ decodeId id }}", { id: unencodedId })
+    expect(output).toBe("forty-two")
+  })
+
+  it("Should return a string of comma-separated IDs when passed multiple IDs in a URI encoded array", async () => {
+    const encodedIds = encodeURIComponent("[1,2,3]") // "%5B1%2C2%2C3%5D"
+    const output = await processString("{{ decodeId id }}", { id: encodedIds })
+    expect(output).toBe("1,2,3")
+  })
+
+  it("Handles empty array gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: [],
+    })
+    expect(output).toBe("[[]]")
+  })
+
+  it("Handles undefined gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: undefined,
+    })
+    expect(output).toBe("")
+  })
+
+  it("Handles null gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: null,
+    })
+    expect(output).toBe("")
+  })
+})