Merge branch 'master' into googlesheets-mock-fix
This commit is contained in:
commit 78aab3f12d
@@ -165,6 +165,7 @@ jobs:
           oracle,
           sqs,
           elasticsearch,
+          dynamodb,
           none,
         ]
     steps:

@@ -205,6 +206,8 @@ jobs:
             docker pull postgres:9.5.25
           elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
            docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
+          elif [ "${{ matrix.datasource }}" == "dynamodb" ]; then
+           docker pull amazon/dynamodb-local@${{ steps.dotenv.outputs.DYNAMODB_SHA }}
          fi
          docker pull minio/minio &
          docker pull redis &

@@ -88,6 +88,16 @@ export default async function setup() {
         content: `
           [log]
           level = warn
+
+          [httpd]
+          socket_options = [{nodelay, true}]
+
+          [couchdb]
+          single_node = true
+
+          [cluster]
+          n = 1
+          q = 1
         `,
         target: "/opt/couchdb/etc/local.d/test-couchdb.ini",
       },

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.4.22",
+  "version": "3.4.24",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -3,7 +3,6 @@ import { newid } from "../utils"
 import { Queue, QueueOptions, JobOptions } from "./queue"
 import { helpers } from "@budibase/shared-core"
 import { Job, JobId, JobInformation } from "bull"
-import { cloneDeep } from "lodash"

 function jobToJobInformation(job: Job): JobInformation {
   let cron = ""

@@ -88,9 +87,7 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
    */
   async process(concurrencyOrFunc: number | any, func?: any) {
     func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
-    this._emitter.on("message", async msg => {
-      const message = cloneDeep(msg)
-
+    this._emitter.on("message", async message => {
       // For the purpose of testing, don't trigger cron jobs immediately.
       // Require the test to trigger them manually with timestamps.
       if (!message.manualTrigger && message.opts?.repeat != null) {

@@ -165,6 +162,9 @@ export class InMemoryQueue<T = any> implements Partial<Queue<T>> {
       opts,
     }
     this._messages.push(message)
+    if (this._messages.length > 1000) {
+      this._messages.shift()
+    }
     this._addCount++
     this._emitter.emit("message", message)
   }

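Note: the queue change above bounds the in-memory message history at 1000 entries. A minimal sketch of the same pattern in isolation — names here are illustrative, not the actual Budibase internals:

    // Keep at most `limit` items; evict the oldest on overflow.
    class BoundedHistory<T> {
      private items: T[] = []
      constructor(private readonly limit = 1000) {}
      push(item: T) {
        this.items.push(item)
        if (this.items.length > this.limit) {
          this.items.shift() // drop the oldest entry so memory stays bounded
        }
      }
      get length() {
        return this.items.length
      }
    }
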
@@ -1,9 +1,12 @@
 import posthog from "posthog-js"
 import { Events } from "./constants"

 export default class PosthogClient {
-  constructor(token) {
+  token: string
+  initialised: boolean
+
+  constructor(token: string) {
     this.token = token
     this.initialised = false
   }

   init() {

@@ -12,6 +15,8 @@ export default class PosthogClient {
     posthog.init(this.token, {
       autocapture: false,
       capture_pageview: false,
+      // disable by default
+      disable_session_recording: true,
     })
     posthog.set_config({ persistence: "cookie" })

@@ -22,7 +27,7 @@ export default class PosthogClient {
   * Set the posthog context to the current user
   * @param {String} id - unique user id
   */
-  identify(id) {
+  identify(id: string) {
    if (!this.initialised) return

    posthog.identify(id)

@@ -32,7 +37,7 @@ export default class PosthogClient {
   * Update user metadata associated with current user in posthog
   * @param {Object} meta - user fields
   */
-  updateUser(meta) {
+  updateUser(meta: Record<string, any>) {
    if (!this.initialised) return

    posthog.people.set(meta)

@@ -43,28 +48,22 @@ export default class PosthogClient {
   * @param {String} event - event identifier
   * @param {Object} props - properties for the event
   */
-  captureEvent(eventName, props) {
-    if (!this.initialised) return
+  captureEvent(event: string, props: Record<string, any>) {
+    if (!this.initialised) {
+      return
+    }

    props.sourceApp = "builder"
-    posthog.capture(eventName, props)
+    posthog.capture(event, props)
   }

-  /**
-   * Submit NPS feedback to posthog.
-   * @param {Object} values - NPS Values
-   */
-  npsFeedback(values) {
-    if (!this.initialised) return
-
-    localStorage.setItem(Events.NPS.SUBMITTED, Date.now())
-
-    const prefixedFeedback = {}
-    for (let key in values) {
-      prefixedFeedback[`feedback_${key}`] = values[key]
+  enableSessionRecording() {
+    if (!this.initialised) {
+      return
     }

-    posthog.capture(Events.NPS.SUBMITTED, prefixedFeedback)
+    posthog.set_config({
+      disable_session_recording: false,
+    })
   }

   /**

@@ -31,6 +31,10 @@ class AnalyticsHub {
     posthog.captureEvent(eventName, props)
   }

+  enableSessionRecording() {
+    posthog.enableSessionRecording()
+  }
+
   async logout() {
     posthog.logout()
   }

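Note: taken together, the two analytics changes above mean session recording now starts disabled and is only switched on explicitly. A hypothetical usage sketch — the token and user id are placeholders:

    const client = new PosthogClient("phc_placeholder_token")
    client.init() // disable_session_recording: true by default
    client.identify("user-1")
    // Later, e.g. once the admin store confirms a cloud environment:
    client.enableSessionRecording() // set_config({ disable_session_recording: false })
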
@@ -145,9 +145,11 @@
       return
     }
     popoverAnchor = target
+
+    const doc = new DOMParser().parseFromString(helper.description, "text/html")
     hoverTarget = {
       type: "helper",
-      description: helper.description,
+      description: doc.body.textContent || "",
       code: getHelperExample(helper, mode === BindingMode.JavaScript),
     }
     popover.show()

@@ -241,20 +243,19 @@
 >
   {#if hoverTarget.description}
     <div>
-      <!-- eslint-disable-next-line svelte/no-at-html-tags-->
-      {@html hoverTarget.description}
+      {hoverTarget.description}
     </div>
   {/if}
   {#if hoverTarget.code}
-    {#if mode === BindingMode.JavaScript}
+    {#if mode === BindingMode.Text || (mode === BindingMode.JavaScript && hoverTarget.type === "binding")}
+      <!-- eslint-disable-next-line svelte/no-at-html-tags-->
+      <pre>{@html hoverTarget.code}</pre>
+    {:else}
       <CodeEditor
         value={hoverTarget.code?.trim()}
         mode={EditorModes.JS}
         readonly
       />
-    {:else if mode === BindingMode.Text}
-      <!-- eslint-disable-next-line svelte/no-at-html-tags-->
-      <pre>{@html hoverTarget.code}</pre>
     {/if}
   {/if}
 </div>

@@ -8,6 +8,7 @@ import {
   SystemStatusResponse,
 } from "@budibase/types"
 import { BudiStore } from "../BudiStore"
+import Analytics from "../../analytics"

 interface AdminState extends GetEnvironmentResponse {
   loaded: boolean

@@ -33,6 +34,8 @@ export class AdminStore extends BudiStore<AdminState> {
     await this.getEnvironment()
     // enable system status checks in the cloud
     if (get(this.store).cloud) {
+      // in cloud allow this
+      Analytics.enableSessionRecording()
       await this.getSystemStatus()
       this.checkStatus()
     }

@@ -108,17 +108,14 @@
     filter: SearchFilter[],
     linkedTableId?: string
   ) => {
     if (!linkedTableId) {
       return undefined
     }
     const datasource =
-      datasourceType === "table"
+      dsType === "table"
         ? {
-            type: datasourceType,
-            tableId: fieldSchema?.tableId!,
+            type: dsType,
+            tableId: linkedTableId!,
           }
         : {
-            type: datasourceType,
+            type: dsType,
             tableId: InternalTable.USER_METADATA,
           }
     return fetchData({

@@ -4,3 +4,4 @@ POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053
 MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
 MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8
 ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0
+DYNAMODB_SHA=sha256:cf8cebd061f988628c02daff10fdb950a54478feff9c52f6ddf84710fe3c3906

@@ -290,8 +290,7 @@ describe("/automations", () => {
       await setup.delay(500)
       let elements = await getAllTableRows(config)
       // don't test it unless there are values to test
-      if (elements.length > 1) {
-        expect(elements.length).toBeGreaterThanOrEqual(MAX_RETRIES)
+      if (elements.length >= 1) {
         expect(elements[0].name).toEqual("Test")
         expect(elements[0].description).toEqual("TEST")
         return

@@ -166,18 +166,6 @@ if (descriptions.length) {
       )
     }

-    const resetRowUsage = async () => {
-      await config.doInContext(
-        undefined,
-        async () =>
-          await quotas.setUsage(
-            0,
-            StaticQuotaName.ROWS,
-            QuotaUsageType.STATIC
-          )
-      )
-    }
-
     const getRowUsage = async () => {
       const { total } = await config.doInContext(undefined, () =>
         quotas.getCurrentUsageValues(

@@ -188,20 +176,28 @@ if (descriptions.length) {
       return total
     }

-    const assertRowUsage = async (expected: number) => {
-      const usage = await getRowUsage()
+    async function expectRowUsage(expected: number, f: () => Promise<void>) {
+      const before = await getRowUsage()
+      await f()
+      const after = await getRowUsage()
+      const usage = after - before
+
       // Because our quota tracking is not perfect, we allow a 10% margin of
-      // error. This is to account for the fact that parallel writes can result
-      // in some quota updates getting lost. We don't have any need to solve this
-      // right now, so we just allow for some error.
+      // error. This is to account for the fact that parallel writes can
+      // result in some quota updates getting lost. We don't have any need
+      // to solve this right now, so we just allow for some error.
       if (expected === 0) {
         expect(usage).toEqual(0)
         return
       }
+      if (usage < 0) {
+        expect(usage).toBeGreaterThan(expected * 1.1)
+        expect(usage).toBeLessThan(expected * 0.9)
+      } else {
         expect(usage).toBeGreaterThan(expected * 0.9)
         expect(usage).toBeLessThan(expected * 1.1)
+      }
     }

     const defaultRowFields = isInternal
       ? {

@@ -215,29 +211,25 @@ if (descriptions.length) {
       table = await config.api.table.save(defaultTable())
     })

-    beforeEach(async () => {
-      await resetRowUsage()
-    })
-
     describe("create", () => {
       it("creates a new row successfully", async () => {
-        const rowUsage = await getRowUsage()
+        await expectRowUsage(isInternal ? 1 : 0, async () => {
           const row = await config.api.row.save(table._id!, {
             name: "Test Contact",
           })
           expect(row.name).toEqual("Test Contact")
           expect(row._rev).toBeDefined()
-        await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
+        })
       })

       it("fails to create a row for a table that does not exist", async () => {
-        const rowUsage = await getRowUsage()
+        await expectRowUsage(0, async () => {
           await config.api.row.save("1234567", {}, { status: 404 })
-        await assertRowUsage(rowUsage)
+        })
       })

       it("fails to create a row if required fields are missing", async () => {
-        const rowUsage = await getRowUsage()
+        await expectRowUsage(0, async () => {
           const table = await config.api.table.save(
             saveTableRequest({
               schema: {

@@ -264,13 +256,12 @@ if (descriptions.length) {
             },
           }
         )
-        await assertRowUsage(rowUsage)
+        })
       })

       isInternal &&
         it("increment row autoId per create row request", async () => {
-          const rowUsage = await getRowUsage()
-
+          await expectRowUsage(isInternal ? 10 : 0, async () => {
             const newTable = await config.api.table.save(
               saveTableRequest({
                 schema: {

@@ -299,7 +290,7 @@ if (descriptions.length) {
               expect(row["Row ID"]).toBeGreaterThan(previousId)
               previousId = row["Row ID"]
             }
-          await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
+          })
         })

       isInternal &&

@@ -985,8 +976,8 @@ if (descriptions.length) {
     describe("update", () => {
       it("updates an existing row successfully", async () => {
         const existing = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(0, async () => {
           const res = await config.api.row.save(table._id!, {
             _id: existing._id,
             _rev: existing._rev,

@@ -994,7 +985,7 @@ if (descriptions.length) {
           })

           expect(res.name).toEqual("Updated Name")
-        await assertRowUsage(rowUsage)
+        })
       })

       !isInternal &&

@@ -1177,8 +1168,7 @@ if (descriptions.length) {
       it("should update only the fields that are supplied", async () => {
         const existing = await config.api.row.save(table._id!, {})

-        const rowUsage = await getRowUsage()
-
+        await expectRowUsage(0, async () => {
           const row = await config.api.row.patch(table._id!, {
             _id: existing._id!,
             _rev: existing._rev!,

@@ -1193,7 +1183,7 @@ if (descriptions.length) {

           expect(savedRow.description).toEqual(existing.description)
           expect(savedRow.name).toEqual("Updated Name")
-        await assertRowUsage(rowUsage)
+        })
       })

       it("should update only the fields that are supplied and emit the correct oldRow", async () => {

@@ -1224,8 +1214,8 @@ if (descriptions.length) {

       it("should throw an error when given improper types", async () => {
         const existing = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(0, async () => {
           await config.api.row.patch(
             table._id!,
             {

@@ -1236,8 +1226,7 @@ if (descriptions.length) {
             },
             { status: 400 }
           )
-
-        await assertRowUsage(rowUsage)
+        })
       })

       it("should not overwrite links if those links are not set", async () => {

@@ -1452,25 +1441,25 @@ if (descriptions.length) {

       it("should be able to delete a row", async () => {
         const createdRow = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(isInternal ? -1 : 0, async () => {
           const res = await config.api.row.bulkDelete(table._id!, {
             rows: [createdRow],
           })
           expect(res[0]._id).toEqual(createdRow._id)
-        await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+        })
       })

       it("should be able to delete a row with ID only", async () => {
         const createdRow = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(isInternal ? -1 : 0, async () => {
           const res = await config.api.row.bulkDelete(table._id!, {
             rows: [createdRow._id!],
           })
           expect(res[0]._id).toEqual(createdRow._id)
           expect(res[0].tableId).toEqual(table._id!)
-        await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+        })
       })

       it("should be able to bulk delete rows, including a row that doesn't exist", async () => {

@@ -1560,20 +1549,18 @@ if (descriptions.length) {
       })

       it("should return no errors on valid row", async () => {
-        const rowUsage = await getRowUsage()
-
+        await expectRowUsage(0, async () => {
           const res = await config.api.row.validate(table._id!, {
             name: "ivan",
           })

           expect(res.valid).toBe(true)
           expect(Object.keys(res.errors)).toEqual([])
-        await assertRowUsage(rowUsage)
+        })
       })

       it("should errors on invalid row", async () => {
-        const rowUsage = await getRowUsage()
-
+        await expectRowUsage(0, async () => {
           const res = await config.api.row.validate(table._id!, { name: 1 })

           if (isInternal) {

@@ -1584,7 +1571,7 @@ if (descriptions.length) {
             expect(res.valid).toBe(true)
             expect(Object.keys(res.errors)).toEqual([])
           }
-        await assertRowUsage(rowUsage)
+        })
       })
     })

@@ -1596,15 +1583,15 @@ if (descriptions.length) {
       it("should be able to delete a bulk set of rows", async () => {
         const row1 = await config.api.row.save(table._id!, {})
         const row2 = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(isInternal ? -2 : 0, async () => {
           const res = await config.api.row.bulkDelete(table._id!, {
             rows: [row1, row2],
           })

           expect(res.length).toEqual(2)
           await config.api.row.get(table._id!, row1._id!, { status: 404 })
-        await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
+        })
       })

       it("should be able to delete a variety of row set types", async () => {

@@ -1613,41 +1600,42 @@ if (descriptions.length) {
           config.api.row.save(table._id!, {}),
           config.api.row.save(table._id!, {}),
         ])
-        const rowUsage = await getRowUsage()

+        await expectRowUsage(isInternal ? -3 : 0, async () => {
           const res = await config.api.row.bulkDelete(table._id!, {
             rows: [row1, row2._id!, { _id: row3._id }],
           })

           expect(res.length).toEqual(3)
           await config.api.row.get(table._id!, row1._id!, { status: 404 })
-        await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
+        })
       })

       it("should accept a valid row object and delete the row", async () => {
         const row1 = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()

-        const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
+        await expectRowUsage(isInternal ? -1 : 0, async () => {
+          const res = await config.api.row.delete(
+            table._id!,
+            row1 as DeleteRow
+          )

           expect(res.id).toEqual(row1._id)
           await config.api.row.get(table._id!, row1._id!, { status: 404 })
-        await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+        })
       })

       it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
         "should ignore malformed/invalid delete request: %s",
         async (request: any) => {
-          const rowUsage = await getRowUsage()

+          await expectRowUsage(0, async () => {
             await config.api.row.delete(table._id!, request, {
               status: 400,
               body: {
                 message: "Invalid delete rows request",
               },
             })

-          await assertRowUsage(rowUsage)
+          })
         }
       )
     })

@@ -1733,8 +1721,7 @@ if (descriptions.length) {
           })
         )

-        const rowUsage = await getRowUsage()
-
+        await expectRowUsage(isInternal ? 2 : 0, async () => {
           await config.api.row.bulkImport(table._id!, {
             rows: [
               {

@@ -1756,8 +1743,7 @@ if (descriptions.length) {
           expect(rows[0].description).toEqual("Row 1 description")
           expect(rows[1].name).toEqual("Row 2")
           expect(rows[1].description).toEqual("Row 2 description")
-
-        await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
+        })
       })

       isInternal &&

@@ -1782,8 +1768,7 @@ if (descriptions.length) {
             description: "Existing description",
           })

-          const rowUsage = await getRowUsage()
-
+          await expectRowUsage(2, async () => {
             await config.api.row.bulkImport(table._id!, {
               rows: [
                 {

@@ -1809,8 +1794,7 @@ if (descriptions.length) {
             expect(rows[1].description).toEqual("Row 2 description")
             expect(rows[2].name).toEqual("Updated existing row")
             expect(rows[2].description).toEqual("Existing description")
-
-          await assertRowUsage(rowUsage + 2)
+          })
         })

       isInternal &&

@@ -1835,8 +1819,7 @@ if (descriptions.length) {
             description: "Existing description",
           })

-          const rowUsage = await getRowUsage()
-
+          await expectRowUsage(3, async () => {
             await config.api.row.bulkImport(table._id!, {
               rows: [
                 {

@@ -1863,8 +1846,7 @@ if (descriptions.length) {
             expect(rows[2].description).toEqual("Row 2 description")
             expect(rows[3].name).toEqual("Updated existing row")
             expect(rows[3].description).toEqual("Existing description")
-
-          await assertRowUsage(rowUsage + 3)
+          })
        })

       // Upserting isn't yet supported in MSSQL / Oracle, see:

@@ -2187,8 +2169,8 @@ if (descriptions.length) {
           return { linkedTable, firstRow, secondRow }
         }
       )
-      const rowUsage = await getRowUsage()

+      await expectRowUsage(0, async () => {
         // test basic enrichment
         const resBasic = await config.api.row.get(
           linkedTable._id!,

@@ -2209,7 +2191,7 @@ if (descriptions.length) {
         expect(resEnriched.link[0]._id).toBe(firstRow._id)
         expect(resEnriched.link[0].name).toBe("Test Contact")
         expect(resEnriched.link[0].description).toBe("original description")
-      await assertRowUsage(rowUsage)
+      })
     })
   })

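Note: the recurring edit in this spec replaces sample-before/assert-after pairs with a wrapper that owns the usage delta. The shape of the refactor, simplified — the real helper also applies the 10% margin shown in the hunk above:

    async function expectUsageDelta(
      expected: number,
      getUsage: () => Promise<number>,
      action: () => Promise<void>
    ) {
      const before = await getUsage()
      await action()
      const after = await getUsage()
      // The delta is asserted in one place instead of at every call site.
      expect(after - before).toBe(expected)
    }
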
@@ -2826,34 +2826,44 @@ if (descriptions.length) {
         return total
       }

-      const assertRowUsage = async (expected: number) => {
-        const usage = await getRowUsage()
+      async function expectRowUsage<T>(
+        expected: number,
+        f: () => Promise<T>
+      ): Promise<T> {
+        const before = await getRowUsage()
+        const result = await f()
+        const after = await getRowUsage()
+        const usage = after - before
         expect(usage).toBe(expected)
+        return result
       }

       it("should be able to delete a row", async () => {
-        const createdRow = await config.api.row.save(table._id!, {})
-        const rowUsage = await getRowUsage()
-        await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
-        await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
+        const createdRow = await expectRowUsage(isInternal ? 1 : 0, () =>
+          config.api.row.save(table._id!, {})
+        )
+        await expectRowUsage(isInternal ? -1 : 0, () =>
+          config.api.row.bulkDelete(view.id, { rows: [createdRow] })
+        )
+        await config.api.row.get(table._id!, createdRow._id!, {
+          status: 404,
+        })
       })

       it("should be able to delete multiple rows", async () => {
-        const rows = await Promise.all([
-          config.api.row.save(table._id!, {}),
-          config.api.row.save(table._id!, {}),
-          config.api.row.save(table._id!, {}),
-        ])
-        const rowUsage = await getRowUsage()
+        const rows = await expectRowUsage(isInternal ? 3 : 0, async () => {
+          return [
+            await config.api.row.save(table._id!, {}),
+            await config.api.row.save(table._id!, {}),
+            await config.api.row.save(table._id!, {}),
+          ]
+        })

+        await expectRowUsage(isInternal ? -2 : 0, async () => {
           await config.api.row.bulkDelete(view.id, {
             rows: [rows[0], rows[2]],
           })
-
-        await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
+        })

         await config.api.row.get(table._id!, rows[0]._id!, {
           status: 404,

@@ -195,7 +195,34 @@ describe("Attempt to run a basic loop automation", () => {
       .serverLog({ text: "{{steps.1.iterations}}" })
       .test({ fields: {} })

     expect(results.steps[0].outputs.status).toBe(
       AutomationStepStatus.MAX_ITERATIONS
     )
+    expect(results.steps[0].outputs.iterations).toBe(2)
+    expect(results.steps[0].outputs.items).toHaveLength(2)
+    expect(results.steps[0].outputs.items[0].message).toEndWith("test")
+    expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
   })
+
+  it("should stop when a failure condition is hit", async () => {
+    const results = await createAutomationBuilder(config)
+      .onAppAction()
+      .loop({
+        option: LoopStepType.ARRAY,
+        binding: ["test", "test2", "test3"],
+        failure: "test3",
+      })
+      .serverLog({ text: "{{loop.currentItem}}" })
+      .serverLog({ text: "{{steps.1.iterations}}" })
+      .test({ fields: {} })
+
+    expect(results.steps[0].outputs.status).toBe(
+      AutomationStepStatus.FAILURE_CONDITION
+    )
+    expect(results.steps[0].outputs.iterations).toBe(2)
+    expect(results.steps[0].outputs.items).toHaveLength(2)
+    expect(results.steps[0].outputs.items[0].message).toEndWith("test")
+    expect(results.steps[0].outputs.items[1].message).toEndWith("test2")
+  })

   it("should run an automation with loop and max iterations to ensure context correctness further down the tree", async () => {

@@ -20,9 +20,12 @@ export interface TriggerOutput {

 export interface AutomationContext {
   trigger: AutomationTriggerResultOutputs
-  steps: [AutomationTriggerResultOutputs, ...AutomationStepResultOutputs[]]
-  stepsById: Record<string, AutomationStepResultOutputs>
+  steps: Record<
+    string,
+    AutomationStepResultOutputs | AutomationTriggerResultOutputs
+  >
   stepsByName: Record<string, AutomationStepResultOutputs>
+  stepsById: Record<string, AutomationStepResultOutputs>
   env?: Record<string, string>
   user?: UserBindings
   settings?: {

@@ -31,4 +34,6 @@ export interface AutomationContext {
     company?: string
   }
   loop?: { currentItem: any }
+  _stepIndex: number
+  _error: boolean
 }

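Note: with `steps` now a record rather than a tuple, the orchestrator (see the hunks further down) registers the same outputs under several keys. Illustrative lookups:

    ctx.steps["step-id-123"] // by step id
    ctx.steps["My Step"]     // by step name (step.name || step.id)
    ctx.steps[1]             // by execution index; "0" holds the trigger outputs
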
@@ -32,6 +32,8 @@ class AutomationEmitter implements ContextEmitter {

     if (chainAutomations === true) {
       return MAX_AUTOMATIONS_ALLOWED
+    } else if (env.isTest()) {
+      return 0
     } else if (chainAutomations === undefined && env.SELF_HOSTED) {
       return MAX_AUTOMATIONS_ALLOWED
     } else {

@@ -14,15 +14,14 @@ import {
   UpdateCommandInput,
   DeleteCommandInput,
 } from "@aws-sdk/lib-dynamodb"
-import { DynamoDB } from "@aws-sdk/client-dynamodb"
+import { DynamoDB, DynamoDBClientConfig } from "@aws-sdk/client-dynamodb"
 import { AWS_REGION } from "../constants"

-interface DynamoDBConfig {
+export interface DynamoDBConfig {
   region: string
   accessKeyId: string
   secretAccessKey: string
   endpoint?: string
-  currentClockSkew?: boolean
 }

 const SCHEMA: Integration = {

@@ -138,22 +137,16 @@ const SCHEMA: Integration = {
   },
 }

-class DynamoDBIntegration implements IntegrationBase {
-  private config: DynamoDBConfig
-  private client
+export class DynamoDBIntegration implements IntegrationBase {
+  private config: DynamoDBClientConfig
+  private client: DynamoDBDocument

   constructor(config: DynamoDBConfig) {
-    this.config = config
-
-    // User is using a local dynamoDB endpoint, don't auth with remote
-    if (this.config?.endpoint?.includes("localhost")) {
-      // @ts-ignore
-      this.config = {}
-    }
-
     this.config = {
-      ...this.config,
       currentClockSkew: true,
+      credentials: {
+        accessKeyId: config.accessKeyId,
+        secretAccessKey: config.secretAccessKey,
+      },
+      region: config.region || AWS_REGION,
+      endpoint: config.endpoint || undefined,
     }

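Note: the constructor no longer blanks the config for localhost endpoints; credentials are always passed through. A hypothetical construction against DynamoDB Local, mirroring the test utilities later in this diff:

    const integration = new DynamoDBIntegration({
      region: "us-east-1",
      accessKeyId: "test", // DynamoDB Local accepts any credentials
      secretAccessKey: "test",
      endpoint: "http://127.0.0.1:8000",
    })
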
@@ -1,167 +1,108 @@
-jest.mock("@aws-sdk/lib-dynamodb", () => ({
-  DynamoDBDocument: {
-    from: jest.fn(() => ({
-      update: jest.fn(),
-      put: jest.fn(),
-      query: jest.fn(() => ({
-        Items: [],
-      })),
-      scan: jest.fn(() => ({
-        Items: [],
-      })),
-      delete: jest.fn(),
-      get: jest.fn(),
-    })),
-  },
-}))
-jest.mock("@aws-sdk/client-dynamodb")
-import { default as DynamoDBIntegration } from "../dynamodb"
+import { Datasource } from "@budibase/types"
+import { DynamoDBConfig, DynamoDBIntegration } from "../dynamodb"
+import { DatabaseName, datasourceDescribe } from "./utils"
+import {
+  CreateTableCommandInput,
+  DynamoDB,
+  DynamoDBClientConfig,
+} from "@aws-sdk/client-dynamodb"

-class TestConfiguration {
-  integration: any
+const describes = datasourceDescribe({ only: [DatabaseName.DYNAMODB] })

-  constructor(config: any = {}) {
-    this.integration = new DynamoDBIntegration.integration(config)
-  }
-}
+async function createTable(client: DynamoDB, req: CreateTableCommandInput) {
+  try {
+    await client.deleteTable({ TableName: req.TableName })
+  } catch (e: any) {
+    if (e.name !== "ResourceNotFoundException") {
+      throw e
+    }
+  }

-describe("DynamoDB Integration", () => {
-  let config: any
-  let tableName = "Users"
+  return await client.createTable(req)
+}

-  beforeEach(() => {
-    config = new TestConfiguration()
-  })
+if (describes.length > 0) {
+  describe.each(describes)("DynamoDB Integration", ({ dsProvider }) => {
+    let table = "Users"
+    let rawDatasource: Datasource
+    let dynamodb: DynamoDBIntegration

-  it("calls the create method with the correct params", async () => {
-    await config.integration.create({
-      table: tableName,
-      json: {
-        Name: "John",
-      },
-    })
-    expect(config.integration.client.put).toHaveBeenCalledWith({
-      TableName: tableName,
-      Name: "John",
-    })
-  })
+    function item(json: Record<string, any>) {
+      return { table, json: { Item: json } }
+    }

-  it("calls the read method with the correct params", async () => {
-    const indexName = "Test"
+    function key(json: Record<string, any>) {
+      return { table, json: { Key: json } }
+    }

-    const response = await config.integration.read({
-      table: tableName,
-      index: indexName,
-      json: {},
-    })
-    expect(config.integration.client.query).toHaveBeenCalledWith({
-      TableName: tableName,
-      IndexName: indexName,
-    })
-    expect(response).toEqual([])
-  })
+    beforeEach(async () => {
+      const ds = await dsProvider()
+      rawDatasource = ds.rawDatasource!
+      dynamodb = new DynamoDBIntegration(
+        rawDatasource.config! as DynamoDBConfig
+      )

-  it("calls the scan method with the correct params", async () => {
-    const indexName = "Test"
-
-    const response = await config.integration.scan({
-      table: tableName,
-      index: indexName,
-      json: {},
-    })
-    expect(config.integration.client.scan).toHaveBeenCalledWith({
-      TableName: tableName,
-      IndexName: indexName,
-    })
-    expect(response).toEqual([])
-  })
-
-  it("calls the get method with the correct params", async () => {
-    await config.integration.get({
-      table: tableName,
-      json: {
-        Id: 123,
-      },
-    })
-
-    expect(config.integration.client.get).toHaveBeenCalledWith({
-      TableName: tableName,
-      Id: 123,
-    })
-  })
-
-  it("calls the update method with the correct params", async () => {
-    await config.integration.update({
-      table: tableName,
-      json: {
-        Name: "John",
-      },
-    })
-    expect(config.integration.client.update).toHaveBeenCalledWith({
-      TableName: tableName,
-      Name: "John",
-    })
-  })
-
-  it("calls the delete method with the correct params", async () => {
-    await config.integration.delete({
-      table: tableName,
-      json: {
-        Name: "John",
-      },
-    })
-    expect(config.integration.client.delete).toHaveBeenCalledWith({
-      TableName: tableName,
-      Name: "John",
-    })
-  })
-
-  it("configures the dynamoDB constructor based on an empty endpoint parameter", async () => {
-    const config = {
-      region: "us-east-1",
+      const config: DynamoDBClientConfig = {
+        credentials: {
           accessKeyId: "test",
           secretAccessKey: "test",
+        },
+        region: "us-east-1",
+        endpoint: rawDatasource.config!.endpoint,
       }

-    const integration: any = new DynamoDBIntegration.integration(config)
-
-    expect(integration.config).toEqual({
-      currentClockSkew: true,
-      ...config,
+      const client = new DynamoDB(config)
+      await createTable(client, {
+        TableName: table,
+        KeySchema: [{ AttributeName: "Id", KeyType: "HASH" }],
+        AttributeDefinitions: [{ AttributeName: "Id", AttributeType: "N" }],
+        ProvisionedThroughput: { ReadCapacityUnits: 1, WriteCapacityUnits: 1 },
+      })
     })
-  })

-  it("configures the dynamoDB constructor based on a localhost endpoint parameter", async () => {
-    const config = {
-      region: "us-east-1",
-      accessKeyId: "test",
-      secretAccessKey: "test",
-      endpoint: "localhost:8080",
-    }
-
-    const integration: any = new DynamoDBIntegration.integration(config)
-
-    expect(integration.config).toEqual({
-      region: "us-east-1",
-      currentClockSkew: true,
-      endpoint: "localhost:8080",
-    })
-  })
-
-  it("configures the dynamoDB constructor based on a remote endpoint parameter", async () => {
-    const config = {
-      region: "us-east-1",
-      accessKeyId: "test",
-      secretAccessKey: "test",
-      endpoint: "dynamodb.aws.foo.net",
-    }
-
-    const integration = new DynamoDBIntegration.integration(config)
-
-    // @ts-ignore
-    expect(integration.config).toEqual({
-      currentClockSkew: true,
-      ...config,
-    })
-  })
-})
+    it("can create and read a record", async () => {
+      await dynamodb.create(item({ Id: 1, Name: "John" }))
+
+      const resp = await dynamodb.get(key({ Id: 1 }))
+      expect(resp.Item).toEqual({ Id: 1, Name: "John" })
+    })
+
+    it("can scan", async () => {
+      await dynamodb.create(item({ Id: 1, Name: "John" }))
+      await dynamodb.create(item({ Id: 2, Name: "Jane" }))
+      await dynamodb.create(item({ Id: 3, Name: "Jack" }))
+
+      const resp = await dynamodb.scan({ table, json: {}, index: null })
+      expect(resp).toEqual(
+        expect.arrayContaining([
+          { Id: 1, Name: "John" },
+          { Id: 2, Name: "Jane" },
+          { Id: 3, Name: "Jack" },
+        ])
+      )
+    })
+
+    it("can update", async () => {
+      await dynamodb.create(item({ Id: 1, Foo: "John" }))
+      await dynamodb.update({
+        table,
+        json: {
+          Key: { Id: 1 },
+          UpdateExpression: "SET Foo = :foo",
+          ExpressionAttributeValues: { ":foo": "Jane" },
+        },
+      })
+
+      const updatedRecord = await dynamodb.get(key({ Id: 1 }))
+      expect(updatedRecord.Item).toEqual({ Id: 1, Foo: "Jane" })
+    })
+
+    it("can delete", async () => {
+      await dynamodb.create(item({ Id: 1, Name: "John" }))
+      await dynamodb.delete(key({ Id: 1 }))
+
+      const deletedRecord = await dynamodb.get(key({ Id: 1 }))
+      expect(deletedRecord.Item).toBeUndefined()
+    })
+  })
+}

@@ -0,0 +1,41 @@
+import { Datasource, SourceName } from "@budibase/types"
+import { GenericContainer, Wait } from "testcontainers"
+import { testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
+import { DYNAMODB_IMAGE } from "./images"
+import { DynamoDBConfig } from "../../dynamodb"
+
+let ports: Promise<testContainerUtils.Port[]>
+
+export async function getDatasource(): Promise<Datasource> {
+  if (!ports) {
+    ports = startContainer(
+      new GenericContainer(DYNAMODB_IMAGE)
+        .withExposedPorts(8000)
+        .withWaitStrategy(
+          Wait.forSuccessfulCommand(
+            // https://stackoverflow.com/a/77373799
+            `if [ "$(curl -s -o /dev/null -I -w ''%{http_code}'' http://localhost:8000)" == "400" ]; then exit 0; else exit 1; fi`
+          ).withStartupTimeout(60000)
+        )
+    )
+  }
+
+  const port = (await ports).find(x => x.container === 8000)?.host
+  if (!port) {
+    throw new Error("DynamoDB port not found")
+  }
+
+  const config: DynamoDBConfig = {
+    accessKeyId: "test",
+    secretAccessKey: "test",
+    region: "us-east-1",
+    endpoint: `http://127.0.0.1:${port}`,
+  }
+
+  return {
+    type: "datasource",
+    source: SourceName.DYNAMODB,
+    config,
+  }
+}

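Note: the wait strategy above leans on a quirk of DynamoDB Local — a bare unauthenticated HTTP request is answered with status 400, so a 400 means the service is up. An equivalent readiness poll in TypeScript, as a hedged sketch (assumes Node 18+ for global fetch):

    async function waitForDynamo(url: string, attempts = 30): Promise<void> {
      for (let i = 0; i < attempts; i++) {
        try {
          const res = await fetch(url)
          if (res.status === 400) return // accepting connections
        } catch {
          // connection refused - container still starting
        }
        await new Promise(resolve => setTimeout(resolve, 1000))
      }
      throw new Error(`DynamoDB Local not ready at ${url}`)
    }
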
@@ -13,3 +13,4 @@ export const POSTGRES_LEGACY_IMAGE = `postgres:9.5.25`
 export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
 export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`
 export const ELASTICSEARCH_IMAGE = `elasticsearch@${process.env.ELASTICSEARCH_SHA}`
+export const DYNAMODB_IMAGE = `amazon/dynamodb-local@${process.env.DYNAMODB_SHA}`

@@ -7,6 +7,7 @@ import * as mssql from "./mssql"
 import * as mariadb from "./mariadb"
 import * as oracle from "./oracle"
 import * as elasticsearch from "./elasticsearch"
+import * as dynamodb from "./dynamodb"
 import { testContainerUtils } from "@budibase/backend-core/tests"
 import { Knex } from "knex"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"

@@ -25,6 +26,7 @@ export enum DatabaseName {
   ORACLE = "oracle",
   SQS = "sqs",
   ELASTICSEARCH = "elasticsearch",
+  DYNAMODB = "dynamodb",
 }

 const DATASOURCE_PLUS = [

@@ -50,6 +52,7 @@ const providers: Record<DatabaseName, DatasourceProvider> = {
   // rest
   [DatabaseName.ELASTICSEARCH]: elasticsearch.getDatasource,
   [DatabaseName.MONGODB]: mongodb.getDatasource,
+  [DatabaseName.DYNAMODB]: dynamodb.getDatasource,
 }

 export interface DatasourceDescribeReturnPromise {

@@ -23,6 +23,6 @@ nock.enableNetConnect(host => {

 testContainerUtils.setupEnv(env, coreEnv)

-afterAll(() => {
+afterAll(async () => {
   timers.cleanup()
 })

@@ -146,8 +146,9 @@ export abstract class TestAPI {
       }
     }

+    let resp: Response | undefined = undefined
     try {
-      return await req
+      resp = await req
     } catch (e: any) {
       // We've found that occasionally the connection between supertest and the
       // server supertest starts gets reset. Not sure why, but retrying it

@@ -161,6 +162,7 @@ export abstract class TestAPI {
       }
       throw e
     }
+    return resp
   }

   protected async getHeaders(

@@ -143,7 +143,6 @@ async function branchMatches(
   branch: Readonly<Branch>
 ): Promise<boolean> {
   const toFilter: Record<string, any> = {}
-  const preparedCtx = prepareContext(ctx)

   // Because we allow bindings on both the left and right of each condition in
   // automation branches, we can't pass the BranchSearchFilters directly to

@@ -160,9 +159,9 @@ async function branchMatches(
     filter.conditions = filter.conditions.map(evaluateBindings)
   } else {
     for (const [field, value] of Object.entries(filter)) {
-      toFilter[field] = processStringSync(field, preparedCtx)
+      toFilter[field] = processStringSync(field, ctx)
       if (typeof value === "string" && findHBSBlocks(value).length > 0) {
-        filter[field] = processStringSync(value, preparedCtx)
+        filter[field] = processStringSync(value, ctx)
       }
     }
   }

@@ -178,17 +177,6 @@ async function branchMatches(
   return result.length > 0
 }

-function prepareContext(context: AutomationContext) {
-  return {
-    ...context,
-    steps: {
-      ...context.steps,
-      ...context.stepsById,
-      ...context.stepsByName,
-    },
-  }
-}
-
 async function enrichBaseContext(context: AutomationContext) {
   context.env = await sdkUtils.getEnvironmentVariables()

@@ -304,41 +292,37 @@ class Orchestrator {
   }

   hasErrored(context: AutomationContext): boolean {
-    const [_trigger, ...steps] = context.steps
-    for (const step of steps) {
-      if (step.success === false) {
-        return true
-      }
-    }
-    return false
+    return context._error === true
   }

   async execute(): Promise<AutomationResults> {
     return await tracer.trace("execute", async span => {
       span.addTags({ appId: this.appId, automationId: this.automation._id })

-      const job = cloneDeep(this.job)
-      delete job.data.event.appId
-      delete job.data.event.metadata
+      const data = cloneDeep(this.job.data)
+      delete data.event.appId
+      delete data.event.metadata

-      if (this.isCron() && !job.data.event.timestamp) {
-        job.data.event.timestamp = Date.now()
+      if (this.isCron() && !data.event.timestamp) {
+        data.event.timestamp = Date.now()
       }

       const trigger: AutomationTriggerResult = {
-        id: job.data.automation.definition.trigger.id,
-        stepId: job.data.automation.definition.trigger.stepId,
+        id: data.automation.definition.trigger.id,
+        stepId: data.automation.definition.trigger.stepId,
         inputs: null,
-        outputs: job.data.event,
+        outputs: data.event,
       }
       const result: AutomationResults = { trigger, steps: [trigger] }

       const ctx: AutomationContext = {
         trigger: trigger.outputs,
-        steps: [trigger.outputs],
-        stepsById: {},
+        steps: { "0": trigger.outputs },
         stepsByName: {},
+        stepsById: {},
         user: trigger.outputs.user,
+        _error: false,
+        _stepIndex: 1,
       }
       await enrichBaseContext(ctx)

@@ -348,7 +332,7 @@ class Orchestrator {
       try {
         await helpers.withTimeout(timeout, async () => {
           const [stepOutputs, executionTime] = await utils.time(() =>
-            this.executeSteps(ctx, job.data.automation.definition.steps)
+            this.executeSteps(ctx, data.automation.definition.steps)
           )

           result.steps.push(...stepOutputs)

@@ -400,9 +384,20 @@ class Orchestrator {
     step: AutomationStep,
     result: AutomationStepResult
   ) {
-    ctx.steps.push(result.outputs)
+    ctx.steps[step.id] = result.outputs
+    ctx.steps[step.name || step.id] = result.outputs
+
     ctx.stepsById[step.id] = result.outputs
     ctx.stepsByName[step.name || step.id] = result.outputs

+    ctx._stepIndex ||= 0
+    ctx.steps[ctx._stepIndex] = result.outputs
+    ctx._stepIndex++
+
+    if (result.outputs.success === false) {
+      ctx._error = true
+    }
+
     results.push(result)
   }

@@ -449,7 +444,7 @@ class Orchestrator {
     stepToLoop: AutomationStep
   ): Promise<AutomationStepResult> {
     return await tracer.trace("executeLoopStep", async span => {
-      await processObject(step.inputs, prepareContext(ctx))
+      await processObject(step.inputs, ctx)

       const maxIterations = getLoopMaxIterations(step)
       const items: Record<string, any>[] = []

@@ -478,6 +473,7 @@ class Orchestrator {
           return stepFailure(stepToLoop, {
             status: AutomationStepStatus.MAX_ITERATIONS,
             iterations,
+            items,
           })
         }

@@ -488,6 +484,8 @@ class Orchestrator {
           })
           return stepFailure(stepToLoop, {
             status: AutomationStepStatus.FAILURE_CONDITION,
+            iterations,
+            items,
           })
         }

@@ -558,7 +556,7 @@ class Orchestrator {
     }

     const inputs = automationUtils.cleanInputValues(
-      await processObject(cloneDeep(step.inputs), prepareContext(ctx)),
+      await processObject(cloneDeep(step.inputs), ctx),
       step.schema.inputs.properties
     )

@@ -566,7 +564,7 @@ class Orchestrator {
       inputs,
       appId: this.appId,
       emitter: this.emitter,
-      context: prepareContext(ctx),
+      context: ctx,
     })

     if (

@@ -117,7 +117,8 @@ export function isSupportedUserSearch(
     { op: BasicOperator.EQUAL, key: "_id" },
     { op: ArrayOperator.ONE_OF, key: "_id" },
   ]
-  for (const [key, operation] of Object.entries(query)) {
+  const { allOr, onEmptyFilter, ...filters } = query
+  for (const [key, operation] of Object.entries(filters)) {
     if (typeof operation !== "object") {
       return false
     }

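Note: the fix destructures the query-level modifiers out before iterating, so `allOr` and `onEmptyFilter` are no longer mistaken for per-field operators. Illustrative:

    const query: Record<string, any> = { allOr: true, equal: { _id: "user_1" } }
    const { allOr, onEmptyFilter, ...filters } = query
    Object.entries(filters) // [["equal", { _id: "user_1" }]] - modifiers excluded
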
@@ -36,6 +36,7 @@ export const HelperFunctionNames = {
   ALL: "all",
   LITERAL: "literal",
   JS: "js",
+  DECODE_ID: "decodeId",
 }

 export const LITERAL_MARKER = "%LITERAL%"

@@ -25,13 +25,29 @@ function isObject(value: string | any[]) {
   )
 }

-const HELPERS = [
+export const HELPERS = [
   // external helpers
   new Helper(HelperFunctionNames.OBJECT, (value: any) => {
     return new Handlebars.SafeString(JSON.stringify(value))
   }),
   // javascript helper
   new Helper(HelperFunctionNames.JS, processJS, false),
+  new Helper(HelperFunctionNames.DECODE_ID, (_id: string | { _id: string }) => {
+    if (!_id) {
+      return []
+    }
+    // have to replace on the way back as we swapped out the double quotes
+    // when encoding, but JSON can't handle the single quotes
+    const id = typeof _id === "string" ? _id : _id._id
+    const decoded: string = decodeURIComponent(id).replace(/'/g, '"')
+    try {
+      const parsed = JSON.parse(decoded)
+      return Array.isArray(parsed) ? parsed : [parsed]
+    } catch (err) {
+      // wasn't json - likely was handlebars for a many to many
+      return [_id]
+    }
+  }),
   // this help is applied to all statements
   new Helper(
     HelperFunctionNames.ALL,

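Note: a round-trip example for the new helper — values illustrative; the test suite below exercises the same paths:

    const encoded = encodeURIComponent("[1,2,3]") // "%5B1%2C2%2C3%5D"
    // {{ decodeId id }} decodes, parses the JSON and returns [1, 2, 3],
    // which Handlebars renders as "1,2,3"; non-JSON input falls back to [input]
    JSON.parse(decodeURIComponent(encoded)) // [1, 2, 3]
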
@@ -517,3 +517,44 @@ describe("helper overlap", () => {
     expect(output).toEqual("a")
   })
 })
+
+describe("Test the decodeId helper", () => {
+  it("should decode a valid encoded ID", async () => {
+    const encodedId = encodeURIComponent("[42]") // "%5B42%5D"
+    const output = await processString("{{ decodeId id }}", { id: encodedId })
+    expect(output).toBe("42")
+  })
+
+  it("Should return an unchanged string if the string isn't encoded", async () => {
+    const unencodedId = "forty-two"
+    const output = await processString("{{ decodeId id }}", { id: unencodedId })
+    expect(output).toBe("forty-two")
+  })
+
+  it("Should return a string of comma-separated IDs when passed multiple IDs in a URI encoded array", async () => {
+    const encodedIds = encodeURIComponent("[1,2,3]") // "%5B1%2C2%2C3%5D"
+    const output = await processString("{{ decodeId id }}", { id: encodedIds })
+    expect(output).toBe("1,2,3")
+  })
+
+  it("Handles empty array gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: [],
+    })
+    expect(output).toBe("[[]]")
+  })
+
+  it("Handles undefined gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: undefined,
+    })
+    expect(output).toBe("")
+  })
+
+  it("Handles null gracefully", async () => {
+    const output = await processString("{{ decodeId value }}", {
+      value: undefined,
+    })
+    expect(output).toBe("")
+  })
+})

@@ -12,8 +12,7 @@ nock.enableNetConnect(host => {
   return (
     host.includes("localhost") ||
     host.includes("127.0.0.1") ||
-    host.includes("::1") ||
-    host.includes("ethereal.email") // used in realEmail.spec.ts
+    host.includes("::1")
   )
 })