diff --git a/lerna.json b/lerna.json index a9c50ea4b5..d695869907 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "2.32.4", + "version": "2.32.6", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 6bef6efeb3..2ab8c550cc 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -143,6 +143,7 @@ const environment = { POSTHOG_TOKEN: process.env.POSTHOG_TOKEN, POSTHOG_PERSONAL_TOKEN: process.env.POSTHOG_PERSONAL_TOKEN, POSTHOG_API_HOST: process.env.POSTHOG_API_HOST || "https://us.i.posthog.com", + POSTHOG_FEATURE_FLAGS_ENABLED: process.env.POSTHOG_FEATURE_FLAGS_ENABLED, ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS, CLOUDFRONT_CDN: process.env.CLOUDFRONT_CDN, diff --git a/packages/backend-core/src/features/index.ts b/packages/backend-core/src/features/index.ts index 5b6ea4ca92..0765d09036 100644 --- a/packages/backend-core/src/features/index.ts +++ b/packages/backend-core/src/features/index.ts @@ -6,7 +6,12 @@ import tracer from "dd-trace" let posthog: PostHog | undefined export function init(opts?: PostHogOptions) { - if (env.POSTHOG_TOKEN && env.POSTHOG_API_HOST && !env.SELF_HOSTED) { + if ( + env.POSTHOG_TOKEN && + env.POSTHOG_API_HOST && + !env.SELF_HOSTED && + env.POSTHOG_FEATURE_FLAGS_ENABLED + ) { console.log("initializing posthog client...") posthog = new PostHog(env.POSTHOG_TOKEN, { host: env.POSTHOG_API_HOST, diff --git a/packages/backend-core/src/features/tests/features.spec.ts b/packages/backend-core/src/features/tests/features.spec.ts index d092585cc6..01c9bfa3c6 100644 --- a/packages/backend-core/src/features/tests/features.spec.ts +++ b/packages/backend-core/src/features/tests/features.spec.ts @@ -148,6 +148,7 @@ describe("feature flags", () => { const env: Partial = { TENANT_FEATURE_FLAGS: environmentFlags, SELF_HOSTED: false, + POSTHOG_FEATURE_FLAGS_ENABLED: "true", } if (posthogFlags) { diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 2b5243f856..55f71d76b0 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -40,7 +40,6 @@ import { dataFilters, helpers } from "@budibase/shared-core" import { cloneDeep } from "lodash" type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any -const MAX_SQS_RELATIONSHIP_FIELDS = 63 function getBaseLimit() { const envLimit = environment.SQL_MAX_ROWS @@ -56,6 +55,20 @@ function getRelationshipLimit() { return envLimit || 500 } +function prioritisedArraySort(toSort: string[], priorities: string[]) { + return toSort.sort((a, b) => { + const aPriority = priorities.find(field => field && a.endsWith(field)) + const bPriority = priorities.find(field => field && b.endsWith(field)) + if (aPriority && !bPriority) { + return -1 + } + if (!aPriority && bPriority) { + return 1 + } + return a.localeCompare(b) + }) +} + function getTableName(table?: Table): string | undefined { // SQS uses the table ID rather than the table name if ( @@ -877,6 +890,22 @@ class InternalBuilder { return `'${unaliased}'${separator}${tableField}` } + maxFunctionParameters() { + // functions like say json_build_object() in SQL have a limit as to how many can be performed + // before a limit is met, this limit exists in Postgres/SQLite. 
This can be very important, such as + // for JSON column building as part of relationships. We also have a default limit to avoid very complex + // functions being built - it is likely this is not necessary or the best way to do it. + switch (this.client) { + case SqlClient.SQL_LITE: + return 127 + case SqlClient.POSTGRES: + return 100 + // other DBs don't have a limit, but set some sort of limit + default: + return 200 + } + } + addJsonRelationships( query: Knex.QueryBuilder, fromTable: string, @@ -884,7 +913,7 @@ class InternalBuilder { ): Knex.QueryBuilder { const sqlClient = this.client const knex = this.knex - const { resource, tableAliases: aliases, endpoint } = this.query + const { resource, tableAliases: aliases, endpoint, meta } = this.query const fields = resource?.fields || [] for (let relationship of relationships) { const { @@ -899,21 +928,27 @@ class InternalBuilder { if (!toTable || !fromTable) { continue } + const relatedTable = meta.tables?.[toTable] const toAlias = aliases?.[toTable] || toTable, fromAlias = aliases?.[fromTable] || fromTable let toTableWithSchema = this.tableNameWithSchema(toTable, { alias: toAlias, schema: endpoint.schema, }) - let relationshipFields = fields.filter( - field => field.split(".")[0] === toAlias + const requiredFields = [ + ...(relatedTable?.primary || []), + relatedTable?.primaryDisplay, + ].filter(field => field) as string[] + // sort the required fields to first in the list, so they don't get sliced out + let relationshipFields = prioritisedArraySort( + fields.filter(field => field.split(".")[0] === toAlias), + requiredFields + ) + + relationshipFields = relationshipFields.slice( + 0, + Math.floor(this.maxFunctionParameters() / 2) ) - if (this.client === SqlClient.SQL_LITE) { - relationshipFields = relationshipFields.slice( - 0, - MAX_SQS_RELATIONSHIP_FIELDS - ) - } const fieldList: string = relationshipFields .map(field => this.buildJsonField(field)) .join(",") diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index 6c4865b539..af67ae8d22 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -595,9 +595,13 @@ let loopBlockCount = 0 const addBinding = (name, value, icon, idx, isLoopBlock, bindingName) => { if (!name) return - const runtimeBinding = determineRuntimeBinding(name, idx, isLoopBlock) + const runtimeBinding = determineRuntimeBinding( + name, + idx, + isLoopBlock, + bindingName + ) const categoryName = determineCategoryName(idx, isLoopBlock, bindingName) - bindings.push( createBindingObject( name, @@ -613,7 +617,7 @@ ) } - const determineRuntimeBinding = (name, idx, isLoopBlock) => { + const determineRuntimeBinding = (name, idx, isLoopBlock, bindingName) => { let runtimeName /* Begin special cases for generating custom schemas based on triggers */ @@ -634,12 +638,17 @@ } /* End special cases for generating custom schemas based on triggers */ + let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id] if (isLoopBlock) { runtimeName = `loop.${name}` } else if (block.name.startsWith("JS")) { - runtimeName = `steps[${idx - loopBlockCount}].${name}` + runtimeName = hasUserDefinedName + ? 
`stepsByName[${bindingName}].${name}` + : `steps[${idx - loopBlockCount}].${name}` } else { - runtimeName = `steps.${idx - loopBlockCount}.${name}` + runtimeName = hasUserDefinedName + ? `stepsByName.${bindingName}.${name}` + : `steps.${idx - loopBlockCount}.${name}` } return idx === 0 ? `trigger.${name}` : runtimeName } @@ -666,11 +675,11 @@ const field = Object.values(FIELDS).find( field => field.type === value.type && field.subtype === value.subtype ) - return { - readableBinding: bindingName - ? `${bindingName}.${name}` - : runtimeBinding, + readableBinding: + bindingName && !isLoopBlock + ? `steps.${bindingName}.${name}` + : runtimeBinding, runtimeBinding, type: value.type, description: value.description, @@ -690,8 +699,12 @@ allSteps[idx]?.stepId === ActionStepID.LOOP && allSteps.some(x => x.blockToLoop === block.id) let schema = cloneDeep(allSteps[idx]?.schema?.outputs?.properties) ?? {} + if (allSteps[idx]?.name.includes("Looping")) { + isLoopBlock = true + loopBlockCount++ + } let bindingName = - automation.stepNames?.[allSteps[idx - loopBlockCount].id] + automation.stepNames?.[allSteps[idx].id] || allSteps[idx].name if (isLoopBlock) { schema = { @@ -740,13 +753,12 @@ if (wasLoopBlock) { loopBlockCount++ - continue + schema = cloneDeep(allSteps[idx - 1]?.schema?.outputs?.properties) } - Object.entries(schema).forEach(([name, value]) => + Object.entries(schema).forEach(([name, value]) => { addBinding(name, value, icon, idx, isLoopBlock, bindingName) - ) + }) } - return bindings } diff --git a/packages/builder/src/helpers/automations/nameHelpers.js b/packages/builder/src/helpers/automations/nameHelpers.js new file mode 100644 index 0000000000..ad18213ab0 --- /dev/null +++ b/packages/builder/src/helpers/automations/nameHelpers.js @@ -0,0 +1,117 @@ +import { AutomationActionStepId } from "@budibase/types" + +export const updateBindingsInInputs = (inputs, oldName, newName, stepIndex) => { + if (typeof inputs === "string") { + return inputs + .replace( + new RegExp(`stepsByName\\.${oldName}\\.`, "g"), + `stepsByName.${newName}.` + ) + .replace( + new RegExp(`steps\\.${stepIndex}\\.`, "g"), + `stepsByName.${newName}.` + ) + } + + if (Array.isArray(inputs)) { + return inputs.map(item => + updateBindingsInInputs(item, oldName, newName, stepIndex) + ) + } + + if (typeof inputs === "object" && inputs !== null) { + const updatedInputs = {} + for (const [key, value] of Object.entries(inputs)) { + const updatedKey = updateBindingsInInputs( + key, + oldName, + newName, + stepIndex + ) + updatedInputs[updatedKey] = updateBindingsInInputs( + value, + oldName, + newName, + stepIndex + ) + } + return updatedInputs + } + return inputs +} + +export const updateBindingsInSteps = ( + steps, + oldName, + newName, + changedStepIndex +) => { + return steps.map(step => { + const updatedStep = { + ...step, + inputs: updateBindingsInInputs( + step.inputs, + oldName, + newName, + changedStepIndex + ), + } + + if ("branches" in updatedStep.inputs) { + updatedStep.inputs.branches = updatedStep.inputs.branches.map(branch => ({ + ...branch, + condition: updateBindingsInInputs( + branch.condition, + oldName, + newName, + changedStepIndex + ), + })) + + if (updatedStep.inputs.children) { + for (const [key, childSteps] of Object.entries( + updatedStep.inputs.children + )) { + updatedStep.inputs.children[key] = updateBindingsInSteps( + childSteps, + oldName, + newName, + changedStepIndex + ) + } + } + } + + return updatedStep + }) +} +export const getNewStepName = (automation, step) => { + const baseName = step.name + 
+ const countExistingSteps = steps => { + return steps.reduce((count, currentStep) => { + if (currentStep.name && currentStep.name.startsWith(baseName)) { + count++ + } + if ( + currentStep.stepId === AutomationActionStepId.BRANCH && + currentStep.inputs && + currentStep.inputs.children + ) { + Object.values(currentStep.inputs.children).forEach(branchSteps => { + count += countExistingSteps(branchSteps) + }) + } + return count + }, 0) + } + let existingCount = 0 + if (automation?.definition) { + existingCount = countExistingSteps(automation.definition.steps) + } + if (existingCount === 0) { + return baseName + } + + return `${baseName} ${existingCount + 1}` +} diff --git a/packages/builder/src/helpers/tests/nameHelpers.spec.js b/packages/builder/src/helpers/tests/nameHelpers.spec.js new file mode 100644 index 0000000000..1ce2d1987a --- /dev/null +++ b/packages/builder/src/helpers/tests/nameHelpers.spec.js @@ -0,0 +1,177 @@ +import { cloneDeep } from "lodash" +import { + updateBindingsInInputs, + updateBindingsInSteps, +} from "../automations/nameHelpers" +describe("Automation Binding Update Functions", () => { + const sampleAutomation = { + definition: { + steps: [ + { + name: "First Step", + inputs: { + text: "Starting automation", + }, + id: "step1", + }, + { + name: "Second Step", + inputs: { + text: "{{ steps.0.success }} and {{ stepsByName.First Step.message }}", + }, + id: "step2", + }, + { + name: "Branch", + inputs: { + branches: [ + { + name: "branch1", + condition: { + equal: { + "steps.1.success": true, + }, + }, + }, + ], + children: { + branch1: [ + { + name: "Nested Step", + inputs: { + text: "{{ stepsByName.Second Step.message }} and {{ steps.1.success }}", + }, + id: "nestedStep", + }, + ], + }, + }, + id: "branchStep", + }, + ], + stepNames: { + step1: "First Step", + step2: "Second Step", + branchStep: "Branch", + }, + }, + } + + it("updateBindingsInInputs updates string bindings correctly", () => { + const input = "{{ stepsByName.oldName.success }} and {{ steps.1.message }}" + const result = updateBindingsInInputs(input, "oldName", "newName", 1) + expect(result).toBe( + "{{ stepsByName.newName.success }} and {{ stepsByName.newName.message }}" + ) + }) + + it("updateBindingsInInputs handles nested objects", () => { + const input = { + text: "{{ stepsByName.oldName.success }}", + nested: { + value: "{{ steps.1.message }}", + }, + } + const result = updateBindingsInInputs(input, "oldName", "newName", 1) + expect(result).toEqual({ + text: "{{ stepsByName.newName.success }}", + nested: { + value: "{{ stepsByName.newName.message }}", + }, + }) + }) + + it("updateBindingsInSteps updates bindings in all steps", () => { + const steps = cloneDeep(sampleAutomation.definition.steps) + const result = updateBindingsInSteps( + steps, + "Second Step", + "Renamed Step", + 1 + ) + + expect(result[1].name).toBe("Second Step") + + expect(result[2].inputs.branches[0].condition.equal).toEqual({ + "stepsByName.Renamed Step.success": true, + }) + + const nestedStepText = result[2].inputs.children.branch1[0].inputs.text + expect(nestedStepText).toBe( + "{{ stepsByName.Renamed Step.message }} and {{ stepsByName.Renamed Step.success }}" + ) + }) + + it("updateBindingsInSteps handles steps with no bindings", () => { + const steps = [ + { + name: "No Binding Step", + inputs: { + text: "Plain text", + }, + id: "noBindingStep", + }, + ] + const result = updateBindingsInSteps(steps, "Old Name", "New Name", 0) + expect(result).toEqual(steps) + }) + + it("updateBindingsInSteps updates bindings in deeply 
nested branches", () => { + const deeplyNestedStep = { + name: "Deep Branch", + inputs: { + branches: [ + { + name: "deepBranch", + condition: { + equal: { + "stepsByName.Second Step.success": true, + }, + }, + }, + ], + children: { + deepBranch: [ + { + name: "Deep Log", + inputs: { + text: "{{ steps.1.message }}", + }, + }, + ], + }, + }, + } + + const steps = [...sampleAutomation.definition.steps, deeplyNestedStep] + const result = updateBindingsInSteps( + steps, + "Second Step", + "Renamed Step", + 1 + ) + + expect( + result[3].inputs.branches[0].condition.equal[ + "stepsByName.Renamed Step.success" + ] + ).toBe(true) + expect(result[3].inputs.children.deepBranch[0].inputs.text).toBe( + "{{ stepsByName.Renamed Step.message }}" + ) + }) + + it("updateBindingsInSteps does not affect unrelated bindings", () => { + const steps = cloneDeep(sampleAutomation.definition.steps) + const result = updateBindingsInSteps( + steps, + "Second Step", + "Renamed Step", + 1 + ) + + expect(result[1].inputs.text).toBe( + "{{ steps.0.success }} and {{ stepsByName.First Step.message }}" + ) + }) +}) diff --git a/packages/builder/src/stores/builder/automations.js b/packages/builder/src/stores/builder/automations.js index fdb0991911..6627c67080 100644 --- a/packages/builder/src/stores/builder/automations.js +++ b/packages/builder/src/stores/builder/automations.js @@ -6,6 +6,10 @@ import { createHistoryStore } from "stores/builder/history" import { notifications } from "@budibase/bbui" import { updateReferencesInObject } from "dataBinding" import { AutomationTriggerStepId } from "@budibase/types" +import { + updateBindingsInSteps, + getNewStepName, +} from "helpers/automations/nameHelpers" const initialAutomationState = { automations: [], @@ -275,13 +279,17 @@ const automationActions = store => ({ await store.actions.save(newAutomation) }, constructBlock(type, stepId, blockDefinition) { - return { + let newName + const newStep = { ...blockDefinition, inputs: blockDefinition.inputs || {}, stepId, type, id: generate(), } + newName = getNewStepName(get(selectedAutomation), newStep) + newStep.name = newName + return newStep }, addBlockToAutomation: async (block, blockIdx) => { const automation = get(selectedAutomation) @@ -301,15 +309,34 @@ const automationActions = store => ({ saveAutomationName: async (blockId, name) => { const automation = get(selectedAutomation) let newAutomation = cloneDeep(automation) - if (!automation) { + if (!newAutomation) { return } - newAutomation.definition.stepNames = { - ...newAutomation.definition.stepNames, - [blockId]: name.trim(), - } - await store.actions.save(newAutomation) + const stepIndex = newAutomation.definition.steps.findIndex( + step => step.id === blockId + ) + + if (stepIndex !== -1) { + const oldName = newAutomation.definition.steps[stepIndex].name + const newName = name.trim() + + newAutomation.definition.stepNames = { + ...newAutomation.definition.stepNames, + [blockId]: newName, + } + + newAutomation.definition.steps[stepIndex].name = newName + + newAutomation.definition.steps = updateBindingsInSteps( + newAutomation.definition.steps, + oldName, + newName, + stepIndex + ) + + await store.actions.save(newAutomation) + } }, deleteAutomationName: async blockId => { const automation = get(selectedAutomation) diff --git a/packages/client/src/utils/conditions.js b/packages/client/src/utils/conditions.js index a23d879e45..b68afa4ab3 100644 --- a/packages/client/src/utils/conditions.js +++ b/packages/client/src/utils/conditions.js @@ -1,4 +1,5 @@ import { QueryUtils } 
from "@budibase/frontend-core" +import { EmptyFilterOption } from "@budibase/types" export const getActiveConditions = conditions => { if (!conditions?.length) { @@ -33,7 +34,8 @@ export const getActiveConditions = conditions => { value: condition.referenceValue, } - const query = QueryUtils.buildQuery([luceneCondition]) + let query = QueryUtils.buildQuery([luceneCondition]) + query.onEmptyFilter = EmptyFilterOption.RETURN_NONE const result = QueryUtils.runQuery([luceneCondition], query) return result.length > 0 }) diff --git a/packages/server/package.json b/packages/server/package.json index 6dfd528963..0b36d49c2e 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -80,7 +80,7 @@ "dotenv": "8.2.0", "form-data": "4.0.0", "global-agent": "3.0.0", - "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.3", + "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5", "ioredis": "5.3.2", "isolated-vm": "^4.7.2", "jimp": "0.22.12", diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index ac2d1e8c39..9c9bd0b284 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -12,6 +12,7 @@ import { OneToManyRelationshipFieldMetadata, Operation, PaginationJson, + QueryJson, RelationshipFieldMetadata, Row, SearchFilters, @@ -161,7 +162,6 @@ export class ExternalRequest { private readonly tableId: string private datasource?: Datasource private tables: { [key: string]: Table } = {} - private tableList: Table[] constructor(operation: T, tableId: string, datasource?: Datasource) { this.operation = operation @@ -170,7 +170,6 @@ export class ExternalRequest { if (datasource && datasource.entities) { this.tables = datasource.entities } - this.tableList = Object.values(this.tables) } private prepareFilters( @@ -301,7 +300,6 @@ export class ExternalRequest { throw "No tables found, fetch tables before query." 
} this.tables = this.datasource.entities - this.tableList = Object.values(this.tables) } return { tables: this.tables, datasource: this.datasource } } @@ -463,7 +461,7 @@ export class ExternalRequest { breakExternalTableId(relatedTableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] - if (!lookupField || !row[lookupField]) { + if (!lookupField || !row?.[lookupField] == null) { continue } const endpoint = getEndpoint(relatedTableId, Operation.READ) @@ -631,7 +629,8 @@ export class ExternalRequest { const { datasource: ds } = await this.retrieveMetadata(datasourceId) datasource = ds } - const table = this.tables[tableName] + const tables = this.tables + const table = tables[tableName] let isSql = isSQL(datasource) if (!table) { throw new Error( @@ -686,7 +685,7 @@ export class ExternalRequest { ) { throw "Deletion must be filtered" } - let json = { + let json: QueryJson = { endpoint: { datasourceId: datasourceId!, entityId: tableName, @@ -715,7 +714,7 @@ export class ExternalRequest { }, meta: { table, - id: config.id, + tables: tables, }, } diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index cd85f57982..2e5785157d 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -138,7 +138,7 @@ async function processDeleteRowsRequest(ctx: UserCtx) { const { tableId } = utils.getSourceId(ctx) const processedRows = request.rows.map(row => { - let processedRow: Row = typeof row == "string" ? { _id: row } : row + let processedRow: Row = typeof row == "string" ? { _id: row, tableId } : row return !processedRow._rev ? addRev(fixRow(processedRow, ctx.params), tableId) : fixRow(processedRow, ctx.params) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 9790703806..dc03a21d6d 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1138,6 +1138,18 @@ describe.each([ await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) }) + it("should be able to delete a row with ID only", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow._id!], + }) + expect(res[0]._id).toEqual(createdRow._id) + expect(res[0].tableId).toEqual(table._id!) + await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) + }) + it("should be able to bulk delete rows, including a row that doesn't exist", async () => { const createdRow = await config.api.row.save(table._id!, {}) const createdRow2 = await config.api.row.save(table._id!, {}) diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index c9e7f4c3c5..0b0802bab2 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -3080,4 +3080,46 @@ describe.each([ }).toHaveLength(4) }) }) + + isSql && + describe("max related columns", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedSchema: TableSchema = {} + const row: Row = {} + for (let i = 0; i < 100; i++) { + const name = `column${i}` + relatedSchema[name] = { name, type: FieldType.NUMBER } + row[name] = i + } + const relatedTable = await createTable(relatedSchema) + table = await createTable({ + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTable._id!, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }) + relatedRows = await Promise.all([ + config.api.row.save(relatedTable._id!, row), + ]) + await config.api.row.save(table._id!, { + name: "foo", + related1: [relatedRows[0]._id], + }) + }) + + it("retrieve the row with relationships", async () => { + await expectQuery({}).toContainExactly([ + { + name: "foo", + related1: [{ _id: relatedRows[0]._id }], + }, + ]) + }) + }) }) diff --git a/packages/server/src/automations/tests/scenarios/looping.spec.ts b/packages/server/src/automations/tests/scenarios/looping.spec.ts index 9bc382a187..6dde05ddb8 100644 --- a/packages/server/src/automations/tests/scenarios/looping.spec.ts +++ b/packages/server/src/automations/tests/scenarios/looping.spec.ts @@ -242,4 +242,31 @@ describe("Loop automations", () => { expect(results.steps[1].outputs.message).toContain("- 3") expect(results.steps[3].outputs.message).toContain("- 3") }) + + it("should use automation names to loop with", async () => { + const builder = createAutomationBuilder({ + name: "Test Trigger with Loop and Create Row", + }) + + const results = await builder + .appAction({ fields: {} }) + .loop( + { + option: LoopStepType.ARRAY, + binding: [1, 2, 3], + }, + { stepName: "FirstLoopStep" } + ) + .serverLog( + { text: "Message {{loop.currentItem}}" }, + { stepName: "FirstLoopLog" } + ) + .serverLog( + { text: "{{steps.FirstLoopLog.iterations}}" }, + { stepName: "FirstLoopIterationLog" } + ) + .run() + + expect(results.steps[1].outputs.message).toContain("- 3") + }) }) diff --git a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts index 40d6094525..49c05984fe 100644 --- a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts +++ b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts @@ -49,151 +49,188 @@ describe("Automation Scenarios", () => { }, }) }) - }) - it("should trigger an automation which querys the database", async () => { - const table = await config.createTable() - const row = { - name: "Test Row", - description: "original description", - tableId: table._id, - } - await config.createRow(row) - await config.createRow(row) - const builder = createAutomationBuilder({ - name: "Test Row Save and Create", + it("should trigger an automation which querys the database", async () => { + const table = await 
config.createTable() + const row = { + name: "Test Row", + description: "original description", + tableId: table._id, + } + await config.createRow(row) + await config.createRow(row) + const builder = createAutomationBuilder({ + name: "Test Row Save and Create", + }) + + const results = await builder + .appAction({ fields: {} }) + .queryRows({ + tableId: table._id!, + }) + .run() + + expect(results.steps).toHaveLength(1) + expect(results.steps[0].outputs.rows).toHaveLength(2) }) - const results = await builder - .appAction({ fields: {} }) - .queryRows({ - tableId: table._id!, + it("should trigger an automation which querys the database then deletes a row", async () => { + const table = await config.createTable() + const row = { + name: "DFN", + description: "original description", + tableId: table._id, + } + await config.createRow(row) + await config.createRow(row) + const builder = createAutomationBuilder({ + name: "Test Row Save and Create", }) - .run() - expect(results.steps).toHaveLength(1) - expect(results.steps[0].outputs.rows).toHaveLength(2) - }) + const results = await builder + .appAction({ fields: {} }) + .queryRows({ + tableId: table._id!, + }) + .deleteRow({ + tableId: table._id!, + id: "{{ steps.1.rows.0._id }}", + }) + .queryRows({ + tableId: table._id!, + }) + .run() - it("should trigger an automation which querys the database then deletes a row", async () => { - const table = await config.createTable() - const row = { - name: "DFN", - description: "original description", - tableId: table._id, - } - await config.createRow(row) - await config.createRow(row) - const builder = createAutomationBuilder({ - name: "Test Row Save and Create", + expect(results.steps).toHaveLength(3) + expect(results.steps[1].outputs.success).toBeTruthy() + expect(results.steps[2].outputs.rows).toHaveLength(1) }) - const results = await builder - .appAction({ fields: {} }) - .queryRows({ - tableId: table._id!, - }) - .deleteRow({ - tableId: table._id!, - id: "{{ steps.1.rows.0._id }}", - }) - .queryRows({ - tableId: table._id!, - }) - .run() - - expect(results.steps).toHaveLength(3) - expect(results.steps[1].outputs.success).toBeTruthy() - expect(results.steps[2].outputs.rows).toHaveLength(1) - }) - - it("should query an external database for some data then insert than into an internal table", async () => { - const { datasource, client } = await setup.setupTestDatasource( - config, - DatabaseName.MYSQL - ) - - const newTable = await config.createTable({ - name: "table", - type: "table", - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - presence: true, - }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - constraints: { - presence: true, - }, - }, - }, - }) - - const tableName = await setup.createTestTable(client, { - name: { type: "string" }, - age: { type: "number" }, - }) - - const rows = [ - { name: "Joe", age: 20 }, - { name: "Bob", age: 25 }, - { name: "Paul", age: 30 }, - ] - - await setup.insertTestData(client, tableName, rows) - - const query = await setup.saveTestQuery( - config, - client, - tableName, - datasource - ) - - const builder = createAutomationBuilder({ - name: "Test external query and save", - }) - - const results = await builder - .appAction({ - fields: {}, - }) - .executeQuery({ - query: { - queryId: query._id!, - }, - }) - .loop({ - option: LoopStepType.ARRAY, - binding: "{{ steps.1.response }}", - }) - .createRow({ - row: { - name: "{{ loop.currentItem.name }}", - age: "{{ loop.currentItem.age }}", - tableId: newTable._id!, - }, - }) - 
.queryRows({ - tableId: newTable._id!, - }) - .run() - - expect(results.steps).toHaveLength(3) - - expect(results.steps[1].outputs.iterations).toBe(3) - expect(results.steps[1].outputs.items).toHaveLength(3) - - expect(results.steps[2].outputs.rows).toHaveLength(3) - - rows.forEach(expectedRow => { - expect(results.steps[2].outputs.rows).toEqual( - expect.arrayContaining([expect.objectContaining(expectedRow)]) + it("should query an external database for some data then insert than into an internal table", async () => { + const { datasource, client } = await setup.setupTestDatasource( + config, + DatabaseName.MYSQL ) + + const newTable = await config.createTable({ + name: "table", + type: "table", + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + constraints: { + presence: true, + }, + }, + }, + }) + + const tableName = await setup.createTestTable(client, { + name: { type: "string" }, + age: { type: "number" }, + }) + + const rows = [ + { name: "Joe", age: 20 }, + { name: "Bob", age: 25 }, + { name: "Paul", age: 30 }, + ] + + await setup.insertTestData(client, tableName, rows) + + const query = await setup.saveTestQuery( + config, + client, + tableName, + datasource + ) + + const builder = createAutomationBuilder({ + name: "Test external query and save", + }) + + const results = await builder + .appAction({ + fields: {}, + }) + .executeQuery({ + query: { + queryId: query._id!, + }, + }) + .loop({ + option: LoopStepType.ARRAY, + binding: "{{ steps.1.response }}", + }) + .createRow({ + row: { + name: "{{ loop.currentItem.name }}", + age: "{{ loop.currentItem.age }}", + tableId: newTable._id!, + }, + }) + .queryRows({ + tableId: newTable._id!, + }) + .run() + + expect(results.steps).toHaveLength(3) + + expect(results.steps[1].outputs.iterations).toBe(3) + expect(results.steps[1].outputs.items).toHaveLength(3) + + expect(results.steps[2].outputs.rows).toHaveLength(3) + + rows.forEach(expectedRow => { + expect(results.steps[2].outputs.rows).toEqual( + expect.arrayContaining([expect.objectContaining(expectedRow)]) + ) + }) + }) + }) + + describe("Name Based Automations", () => { + it("should fetch and delete a rpw using automation naming", async () => { + const table = await config.createTable() + const row = { + name: "DFN", + description: "original description", + tableId: table._id, + } + await config.createRow(row) + await config.createRow(row) + const builder = createAutomationBuilder({ + name: "Test Query and Delete Row", + }) + + const results = await builder + .appAction({ fields: {} }) + .queryRows( + { + tableId: table._id!, + }, + { stepName: "InitialQueryStep" } + ) + .deleteRow({ + tableId: table._id!, + id: "{{ steps.InitialQueryStep.rows.0._id }}", + }) + .queryRows({ + tableId: table._id!, + }) + .run() + + expect(results.steps).toHaveLength(3) + expect(results.steps[1].outputs.success).toBeTruthy() + expect(results.steps[2].outputs.rows).toHaveLength(1) }) }) }) diff --git a/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts b/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts index f477efabe4..7528ea0f2e 100644 --- a/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts +++ b/packages/server/src/automations/tests/utilities/AutomationTestBuilder.ts @@ -57,21 +57,27 @@ type BranchConfig = { class BaseStepBuilder { protected steps: AutomationStep[] = [] + protected stepNames: { [key: string]: string } = {} 
protected step( stepId: TStep, stepSchema: Omit, - inputs: AutomationStepInputs + inputs: AutomationStepInputs, + stepName?: string ): this { + const id = uuidv4() this.steps.push({ ...stepSchema, inputs: inputs as any, - id: uuidv4(), + id, stepId, + name: stepName || stepSchema.name, }) + if (stepName) { + this.stepNames[id] = stepName + } return this } - protected addBranchStep(branchConfig: BranchConfig): void { const branchStepInputs: BranchStepInputs = { branches: [] as Branch[], @@ -99,66 +105,80 @@ class BaseStepBuilder { } // STEPS - createRow(inputs: CreateRowStepInputs): this { + createRow(inputs: CreateRowStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.CREATE_ROW, BUILTIN_ACTION_DEFINITIONS.CREATE_ROW, - inputs + inputs, + opts?.stepName ) } - updateRow(inputs: UpdateRowStepInputs): this { + updateRow(inputs: UpdateRowStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.UPDATE_ROW, BUILTIN_ACTION_DEFINITIONS.UPDATE_ROW, - inputs + inputs, + opts?.stepName ) } - deleteRow(inputs: DeleteRowStepInputs): this { + deleteRow(inputs: DeleteRowStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.DELETE_ROW, BUILTIN_ACTION_DEFINITIONS.DELETE_ROW, - inputs + inputs, + opts?.stepName ) } - sendSmtpEmail(inputs: SmtpEmailStepInputs): this { + sendSmtpEmail( + inputs: SmtpEmailStepInputs, + opts?: { stepName?: string } + ): this { return this.step( AutomationActionStepId.SEND_EMAIL_SMTP, BUILTIN_ACTION_DEFINITIONS.SEND_EMAIL_SMTP, - inputs + inputs, + opts?.stepName ) } - executeQuery(inputs: ExecuteQueryStepInputs): this { + executeQuery( + inputs: ExecuteQueryStepInputs, + opts?: { stepName?: string } + ): this { return this.step( AutomationActionStepId.EXECUTE_QUERY, BUILTIN_ACTION_DEFINITIONS.EXECUTE_QUERY, - inputs + inputs, + opts?.stepName ) } - queryRows(inputs: QueryRowsStepInputs): this { + queryRows(inputs: QueryRowsStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.QUERY_ROWS, BUILTIN_ACTION_DEFINITIONS.QUERY_ROWS, - inputs + inputs, + opts?.stepName ) } - loop(inputs: LoopStepInputs): this { + loop(inputs: LoopStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.LOOP, BUILTIN_ACTION_DEFINITIONS.LOOP, - inputs + inputs, + opts?.stepName ) } - serverLog(input: ServerLogStepInputs): this { + serverLog(input: ServerLogStepInputs, opts?: { stepName?: string }): this { return this.step( AutomationActionStepId.SERVER_LOG, BUILTIN_ACTION_DEFINITIONS.SERVER_LOG, - input + input, + opts?.stepName ) } } @@ -186,6 +206,7 @@ class AutomationBuilder extends BaseStepBuilder { definition: { steps: [], trigger: {} as AutomationTrigger, + stepNames: {}, }, type: "automation", appId: options.appId ?? 
setup.getConfig().getAppId(), @@ -268,6 +289,7 @@ class AutomationBuilder extends BaseStepBuilder { build(): Automation { this.automationConfig.definition.steps = this.steps + this.automationConfig.definition.stepNames = this.stepNames return this.automationConfig } diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index e84eecea51..6488e604e9 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -15,6 +15,7 @@ export interface TriggerOutput { export interface AutomationContext extends AutomationResults { steps: any[] + stepsByName?: Record env?: Record trigger: any } diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 0d766ac1ef..6012ff7789 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -330,15 +330,16 @@ export class GoogleSheetsIntegration implements DatasourcePlus { return { tables: {}, errors: {} } } await this.connect() + const sheets = this.client.sheetsByIndex const tables: Record = {} let errors: Record = {} + await utils.parallelForeach( sheets, async sheet => { - // must fetch rows to determine schema try { - await sheet.getRows() + await sheet.getRows({ limit: 1 }) } catch (err) { // We expect this to always be an Error so if it's not, rethrow it to // make sure we don't fail quietly. @@ -346,26 +347,34 @@ export class GoogleSheetsIntegration implements DatasourcePlus { throw err } - if (err.message.startsWith("No values in the header row")) { - errors[sheet.title] = err.message - } else { - // If we get an error we don't expect, rethrow to avoid failing - // quietly. - throw err + if ( + err.message.startsWith("No values in the header row") || + err.message.startsWith("All your header cells are blank") + ) { + errors[ + sheet.title + ] = `Failed to find a header row in sheet "${sheet.title}", is the first row blank?` + return } - return - } - const id = buildExternalTableId(datasourceId, sheet.title) - tables[sheet.title] = this.getTableSchema( - sheet.title, - sheet.headerValues, - datasourceId, - id - ) + // If we get an error we don't expect, rethrow to avoid failing + // quietly. + throw err + } }, 10 ) + + for (const sheet of sheets) { + const id = buildExternalTableId(datasourceId, sheet.title) + tables[sheet.title] = this.getTableSchema( + sheet.title, + sheet.headerValues, + datasourceId, + id + ) + } + let externalTables = finaliseExternalTables(tables, entities) errors = { ...errors, ...checkExternalTables(externalTables) } return { tables: externalTables, errors } @@ -551,11 +560,16 @@ export class GoogleSheetsIntegration implements DatasourcePlus { await this.connect() const hasFilters = dataFilters.hasFilters(query.filters) const limit = query.paginate?.limit || 100 - const page: number = - typeof query.paginate?.page === "number" - ? 
query.paginate.page - : parseInt(query.paginate?.page || "1") - const offset = (page - 1) * limit + let offset = query.paginate?.offset || 0 + + let page = query.paginate?.page + if (typeof page === "string") { + page = parseInt(page) + } + if (page !== undefined) { + offset = page * limit + } + const sheet = this.client.sheetsByTitle[query.sheet] let rows: GoogleSpreadsheetRow[] = [] if (query.paginate && !hasFilters) { diff --git a/packages/server/src/integrations/tests/googlesheets.spec.ts b/packages/server/src/integrations/tests/googlesheets.spec.ts index 079e418f3b..dcf4a61b50 100644 --- a/packages/server/src/integrations/tests/googlesheets.spec.ts +++ b/packages/server/src/integrations/tests/googlesheets.spec.ts @@ -208,6 +208,56 @@ describe("Google Sheets Integration", () => { expect(row2.name).toEqual("Test Contact 2") expect(row2.description).toEqual("original description 2") }) + + it("can paginate correctly", async () => { + await config.api.row.bulkImport(table._id!, { + rows: Array.from({ length: 248 }, (_, i) => ({ + name: `${i}`, + description: "", + })), + }) + + let resp = await config.api.row.search(table._id!, { + tableId: table._id!, + query: {}, + paginate: true, + limit: 10, + }) + let rows = resp.rows + + while (resp.hasNextPage) { + resp = await config.api.row.search(table._id!, { + tableId: table._id!, + query: {}, + paginate: true, + limit: 10, + bookmark: resp.bookmark, + }) + rows = rows.concat(resp.rows) + if (rows.length > 250) { + throw new Error("Too many rows returned") + } + } + + expect(rows.length).toEqual(250) + expect(rows.map(row => row.name)).toEqual( + expect.arrayContaining(Array.from({ length: 248 }, (_, i) => `${i}`)) + ) + }) + + it("can export rows", async () => { + const resp = await config.api.row.exportRows(table._id!, {}) + const parsed = JSON.parse(resp) + expect(parsed.length).toEqual(2) + expect(parsed[0]).toMatchObject({ + name: "Test Contact 1", + description: "original description 1", + }) + expect(parsed[1]).toMatchObject({ + name: "Test Contact 2", + description: "original description 2", + }) + }) }) describe("update", () => { @@ -455,4 +505,97 @@ describe("Google Sheets Integration", () => { expect(emptyRows.length).toEqual(0) }) }) + + describe("fetch schema", () => { + it("should fail to import a completely blank sheet", async () => { + mock.createSheet({ title: "Sheet1" }) + await config.api.datasource.fetchSchema( + { + datasourceId: datasource._id!, + tablesFilter: ["Sheet1"], + }, + { + status: 200, + body: { + errors: { + Sheet1: + 'Failed to find a header row in sheet "Sheet1", is the first row blank?', + }, + }, + } + ) + }) + + it("should fail to import multiple sheets with blank headers", async () => { + mock.createSheet({ title: "Sheet1" }) + mock.createSheet({ title: "Sheet2" }) + + await config.api.datasource.fetchSchema( + { + datasourceId: datasource!._id!, + tablesFilter: ["Sheet1", "Sheet2"], + }, + { + status: 200, + body: { + errors: { + Sheet1: + 'Failed to find a header row in sheet "Sheet1", is the first row blank?', + Sheet2: + 'Failed to find a header row in sheet "Sheet2", is the first row blank?', + }, + }, + } + ) + }) + + it("should only fail the sheet with missing headers", async () => { + mock.createSheet({ title: "Sheet1" }) + mock.createSheet({ title: "Sheet2" }) + mock.createSheet({ title: "Sheet3" }) + + mock.set("Sheet1!A1", "name") + mock.set("Sheet1!B1", "dob") + mock.set("Sheet2!A1", "name") + mock.set("Sheet2!B1", "dob") + + await config.api.datasource.fetchSchema( + { + datasourceId: 
datasource!._id!, + tablesFilter: ["Sheet1", "Sheet2", "Sheet3"], + }, + { + status: 200, + body: { + errors: { + Sheet3: + 'Failed to find a header row in sheet "Sheet3", is the first row blank?', + }, + }, + } + ) + }) + + it("should only succeed if sheet with missing headers is not being imported", async () => { + mock.createSheet({ title: "Sheet1" }) + mock.createSheet({ title: "Sheet2" }) + mock.createSheet({ title: "Sheet3" }) + + mock.set("Sheet1!A1", "name") + mock.set("Sheet1!B1", "dob") + mock.set("Sheet2!A1", "name") + mock.set("Sheet2!B1", "dob") + + await config.api.datasource.fetchSchema( + { + datasourceId: datasource!._id!, + tablesFilter: ["Sheet1", "Sheet2"], + }, + { + status: 200, + body: { errors: {} }, + } + ) + }) + }) }) diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index cf5b88b0fd..5a505fbb40 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -162,7 +162,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [limit, relationshipLimit], - sql: `with "paginated" as (select "brands".* from "production"."brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, + sql: `with "paginated" as (select "brands".* from "production"."brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "production"."products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, }) }) @@ -170,7 +170,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [limit, relationshipLimit], - sql: `with "paginated" as (select "brands".* from "brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('product_id',"products"."product_id",'product_name',"products"."product_name",'brand_id',"products"."brand_id")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, + sql: `with "paginated" as (select "brands".* from "brands" order by "test"."id" asc limit $1) select "brands".*, (select json_agg(json_build_object('brand_id',"products"."brand_id",'product_id',"products"."product_id",'product_name',"products"."product_name")) from (select "products".* from "products" as "products" where "products"."brand_id" = "brands"."brand_id" order by "products"."brand_id" asc limit $2) as "products") as "products" from "paginated" as "brands" order by "test"."id" asc`, }) }) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts 
b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 9548499c65..fc5af4238c 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -63,7 +63,7 @@ describe("Captures of real examples", () => { bindings: [primaryLimit, relationshipLimit, relationshipLimit], sql: expect.stringContaining( multiline( - `select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid",'executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid")` + `select json_agg(json_build_object('completed',"b"."completed",'completed',"b"."completed",'executorid',"b"."executorid",'executorid',"b"."executorid",'qaid',"b"."qaid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskid',"b"."taskid",'taskname',"b"."taskname",'taskname',"b"."taskname")` ) ), }) @@ -95,7 +95,7 @@ describe("Captures of real examples", () => { sql: expect.stringContaining( multiline( `with "paginated" as (select "a".* from "products" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc limit $1) - select "a".*, (select json_agg(json_build_object('executorid',"b"."executorid",'taskname',"b"."taskname",'taskid',"b"."taskid",'completed',"b"."completed",'qaid',"b"."qaid")) + select "a".*, (select json_agg(json_build_object('completed',"b"."completed",'executorid',"b"."executorid",'qaid',"b"."qaid",'taskid',"b"."taskid",'taskname',"b"."taskname")) from (select "b".* from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid" where "c"."productid" = "a"."productid" order by "b"."taskid" asc limit $2) as "b") as "tasks" from "paginated" as "a" order by "a"."productname" asc nulls first, "a"."productid" asc` ) @@ -113,7 +113,7 @@ describe("Captures of real examples", () => { bindings: [...filters, relationshipLimit, relationshipLimit], sql: multiline( `with "paginated" as (select "a".* from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) - select "a".*, (select json_agg(json_build_object('productname',"b"."productname",'productid',"b"."productid")) + select "a".*, (select json_agg(json_build_object('productid',"b"."productid",'productname',"b"."productname")) from (select "b".* from "products" as "b" inner join "products_tasks" as "c" on "b"."productid" = "c"."productid" where "c"."taskid" = "a"."taskid" order by "b"."productid" asc limit $4) as "b") as "products" from "paginated" as "a" order by "a"."taskid" asc` ), diff --git a/packages/server/src/integrations/tests/utils/googlesheets.ts b/packages/server/src/integrations/tests/utils/googlesheets.ts index 4b17c25b01..4747f5f9bf 100644 --- a/packages/server/src/integrations/tests/utils/googlesheets.ts +++ b/packages/server/src/integrations/tests/utils/googlesheets.ts @@ -22,6 +22,7 @@ import type { CellPadding, Color, GridRange, + DataSourceSheetProperties, } from "google-spreadsheet/src/lib/types/sheets-types" const BLACK: Color = { red: 0, green: 0, blue: 0 } @@ -91,7 +92,7 @@ interface UpdateValuesResponse { // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/request#AddSheetRequest interface AddSheetRequest { - properties: WorksheetProperties + properties: Partial } // https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets/response#AddSheetResponse @@ -236,6 +237,38 @@ export class GoogleSheetsMock { this.mockAPI() } + public cell(cell: string): Value | 
undefined { + const cellData = this.cellData(cell) + if (!cellData) { + return undefined + } + return this.cellValue(cellData) + } + + public set(cell: string, value: Value): void { + const cellData = this.cellData(cell) + if (!cellData) { + throw new Error(`Cell ${cell} not found`) + } + cellData.userEnteredValue = this.createValue(value) + } + + public sheet(name: string | number): Sheet | undefined { + if (typeof name === "number") { + return this.getSheetById(name) + } + return this.getSheetByName(name) + } + + public createSheet(opts: Partial): Sheet { + const properties = this.defaultWorksheetProperties(opts) + if (this.getSheetByName(properties.title)) { + throw new Error(`Sheet ${properties.title} already exists`) + } + const resp = this.handleAddSheet({ properties }) + return this.getSheetById(resp.properties.sheetId)! + } + private route( method: "get" | "put" | "post", path: string | RegExp, @@ -462,35 +495,39 @@ export class GoogleSheetsMock { return response } - private handleAddSheet(request: AddSheetRequest): AddSheetResponse { - const properties: Omit = { + private defaultWorksheetProperties( + opts: Partial + ): WorksheetProperties { + return { index: this.spreadsheet.sheets.length, hidden: false, rightToLeft: false, tabColor: BLACK, tabColorStyle: { rgbColor: BLACK }, sheetType: "GRID", - title: request.properties.title, + title: "Sheet", sheetId: this.spreadsheet.sheets.length, gridProperties: { rowCount: 100, columnCount: 26, - frozenRowCount: 0, - frozenColumnCount: 0, - hideGridlines: false, - rowGroupControlAfter: false, - columnGroupControlAfter: false, }, + dataSourceSheetProperties: {} as DataSourceSheetProperties, + ...opts, } + } + private handleAddSheet(request: AddSheetRequest): AddSheetResponse { + const properties = this.defaultWorksheetProperties(request.properties) this.spreadsheet.sheets.push({ - properties: properties as WorksheetProperties, - data: [this.createEmptyGrid(100, 26)], + properties, + data: [ + this.createEmptyGrid( + properties.gridProperties.rowCount, + properties.gridProperties.columnCount + ), + ], }) - - // dataSourceSheetProperties is only returned by the API if the sheet type is - // DATA_SOURCE, which we aren't using, so sadly we need to cast here. 
- return { properties: properties as WorksheetProperties } + return { properties } } private handleDeleteRange(request: DeleteRangeRequest) { @@ -767,21 +804,6 @@ export class GoogleSheetsMock { return this.getCellNumericIndexes(sheetId, startRowIndex, startColumnIndex) } - public cell(cell: string): Value | undefined { - const cellData = this.cellData(cell) - if (!cellData) { - return undefined - } - return this.cellValue(cellData) - } - - public sheet(name: string | number): Sheet | undefined { - if (typeof name === "number") { - return this.getSheetById(name) - } - return this.getSheetByName(name) - } - private getCellNumericIndexes( sheet: Sheet | number, row: number, diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 64ff392a8f..a59935c6aa 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -89,7 +89,12 @@ class Orchestrator { delete triggerOutput.appId delete triggerOutput.metadata // step zero is never used as the template string is zero indexed for customer facing - this.context = { steps: [{}], trigger: triggerOutput } + this.context = { + steps: [{}], + stepsByName: {}, + trigger: triggerOutput, + } + this.automation = automation // create an emitter which has the chain count for this automation run in it, so it can block // excessive chaining if required @@ -451,6 +456,9 @@ class Orchestrator { outputs: tempOutput, inputs: steps[stepToLoopIndex].inputs, }) + + const stepName = steps[stepToLoopIndex].name || steps[stepToLoopIndex].id + this.context.stepsByName![stepName] = tempOutput this.context.steps[this.context.steps.length] = tempOutput this.context.steps = this.context.steps.filter( item => !item.hasOwnProperty.call(item, "currentItem") @@ -555,8 +563,13 @@ class Orchestrator { loopIteration ) } + const stepFn = await this.getStepFunctionality(step.stepId) - let inputs = await processObject(originalStepInput, this.context) + let inputs = await this.addContextAndProcess( + originalStepInput, + this.context + ) + inputs = automationUtils.cleanInputValues(inputs, step.schema.inputs) const outputs = await stepFn({ @@ -583,6 +596,18 @@ class Orchestrator { return null } + private async addContextAndProcess(inputs: any, context: any) { + const processContext = { + ...context, + steps: { + ...context.steps, + ...context.stepsByName, + }, + } + + return processObject(inputs, processContext) + } + private handleStepOutput( step: AutomationStep, outputs: any, @@ -600,6 +625,8 @@ class Orchestrator { } else { this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs) this.context.steps[this.context.steps.length] = outputs + const stepName = step.name || step.id + this.context.stepsByName![stepName] = outputs } } } diff --git a/packages/types/src/documents/app/automation/automation.ts b/packages/types/src/documents/app/automation/automation.ts index 72f8a1aa7c..effe99a328 100644 --- a/packages/types/src/documents/app/automation/automation.ts +++ b/packages/types/src/documents/app/automation/automation.ts @@ -124,6 +124,8 @@ export interface Automation extends Document { definition: { steps: AutomationStep[] trigger: AutomationTrigger + // stepNames is used to lookup step names from their correspnding step ID. 
+ stepNames?: Record } screenId?: string uiTree?: any diff --git a/yarn.lock b/yarn.lock index 110cbd7a15..34c9cb15d9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2053,6 +2053,44 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@budibase/backend-core@2.32.6": + version "0.0.0" + dependencies: + "@budibase/nano" "10.1.5" + "@budibase/pouchdb-replication-stream" "1.2.11" + "@budibase/shared-core" "0.0.0" + "@budibase/types" "0.0.0" + aws-cloudfront-sign "3.0.2" + aws-sdk "2.1030.0" + bcrypt "5.1.0" + bcryptjs "2.4.3" + bull "4.10.1" + correlation-id "4.0.0" + dd-trace "5.2.0" + dotenv "16.0.1" + ioredis "5.3.2" + joi "17.6.0" + jsonwebtoken "9.0.2" + knex "2.4.2" + koa-passport "^6.0.0" + koa-pino-logger "4.0.0" + lodash "4.17.21" + node-fetch "2.6.7" + passport-google-oauth "2.0.0" + passport-local "1.0.0" + passport-oauth2-refresh "^2.1.0" + pino "8.11.0" + pino-http "8.3.3" + posthog-node "4.0.1" + pouchdb "7.3.0" + pouchdb-find "7.2.2" + redlock "4.2.0" + rotating-file-stream "3.1.0" + sanitize-s3-objectkey "0.0.1" + semver "^7.5.4" + tar-fs "2.1.1" + uuid "^8.3.2" + "@budibase/handlebars-helpers@^0.13.2": version "0.13.2" resolved "https://registry.yarnpkg.com/@budibase/handlebars-helpers/-/handlebars-helpers-0.13.2.tgz#73ab51c464e91fd955b429017648e0257060db77" @@ -2095,6 +2133,45 @@ pouchdb-promise "^6.0.4" through2 "^2.0.0" +"@budibase/pro@npm:@budibase/pro@latest": + version "2.32.6" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.32.6.tgz#02ddef737ee8f52dafd8fab8f8f277dfc89cd33f" + integrity sha512-+XEv4JtMvUKZWyllcw+iFOh44zxsoJLmUdShu4bAjj5zXWgElF6LjFpK51IrQzM6xKfQxn7N2vmxu7175u5dDQ== + dependencies: + "@budibase/backend-core" "2.32.6" + "@budibase/shared-core" "2.32.6" + "@budibase/string-templates" "2.32.6" + "@budibase/types" "2.32.6" + "@koa/router" "8.0.8" + bull "4.10.1" + dd-trace "5.2.0" + joi "17.6.0" + jsonwebtoken "9.0.2" + lru-cache "^7.14.1" + memorystream "^0.3.1" + node-fetch "2.6.7" + scim-patch "^0.8.1" + scim2-parse-filter "^0.2.8" + +"@budibase/shared-core@2.32.6": + version "0.0.0" + dependencies: + "@budibase/types" "0.0.0" + cron-validate "1.4.5" + +"@budibase/string-templates@2.32.6": + version "0.0.0" + dependencies: + "@budibase/handlebars-helpers" "^0.13.2" + dayjs "^1.10.8" + handlebars "^4.7.8" + lodash.clonedeep "^4.5.0" + +"@budibase/types@2.32.6": + version "0.0.0" + dependencies: + scim-patch "^0.8.1" + "@bull-board/api@5.10.2": version "5.10.2" resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-5.10.2.tgz#ae8ff6918b23897bf879a6ead3683f964374c4b3" @@ -12277,10 +12354,10 @@ google-p12-pem@^4.0.0: dependencies: node-forge "^1.3.1" -"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.3": - version "4.1.3" - resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.3.tgz#bcee7bd9d90f82c54b16a9aca963b87aceb050ad" - integrity sha512-03VX3/K5NXIh6+XAIDZgcHPmR76xwd8vIDL7RedMpvM2IcXK0Iq/KU7FmLY0t/mKqORAGC7+0rajd0jLFezC4w== +"google-spreadsheet@npm:@budibase/google-spreadsheet@4.1.5": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@budibase/google-spreadsheet/-/google-spreadsheet-4.1.5.tgz#c89ffcbfcb1a3538e910d9275f73efc1d7deb85f" + integrity sha512-t1uBjuRSkNLnZ89DYtYQ2GW33xVU84qOyOPbGi+M0w7cAJofs95PwlBLhVol6Pv5VbeL0I1J7M4XyVqp0nSZtQ== dependencies: axios "^1.4.0" lodash "^4.17.21"
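// Illustrative sketch (not part of the patch above): how the new relationship-field
// slicing in packages/backend-core/src/sql/sql.ts keeps primary-key and primary-display
// columns when a related table has more columns than the database's function-parameter
// limit allows. prioritisedArraySort and the SQLite limit are copied from the patch;
// the constant name and the sample field list below are made up for the example.
function prioritisedArraySort(toSort: string[], priorities: string[]) {
  return toSort.sort((a, b) => {
    const aPriority = priorities.find(field => field && a.endsWith(field))
    const bPriority = priorities.find(field => field && b.endsWith(field))
    if (aPriority && !bPriority) {
      return -1
    }
    if (!aPriority && bPriority) {
      return 1
    }
    return a.localeCompare(b)
  })
}

// SQLite caps function calls at 127 arguments, and json_build_object spends two
// arguments per field (key + value), which is why the patch slices to limit / 2.
const SQLITE_MAX_FUNCTION_PARAMETERS = 127

// A hypothetical related table with a primary key, a display column and 100 extras.
const fields = [
  "related._id",
  ...Array.from({ length: 100 }, (_, i) => `related.column${i}`),
]
const requiredFields = ["_id", "column99"] // primary key + primary display

const kept = prioritisedArraySort(fields, requiredFields).slice(
  0,
  Math.floor(SQLITE_MAX_FUNCTION_PARAMETERS / 2)
)

console.log(kept.length) // 63 — the rest are dropped from the JSON relationship column
console.log(kept.includes("related._id"), kept.includes("related.column99")) // true true — required fields sort to the front and survive the slice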
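// Illustrative sketch (not part of the patch above): what the new updateBindingsInInputs
// helper in packages/builder/src/helpers/automations/nameHelpers.js does when a step is
// renamed. The inputs and expected outputs are taken from the nameHelpers.spec.js tests
// in this diff; the "helpers/..." alias is assumed to resolve inside packages/builder,
// as it does in the automations store.
import { updateBindingsInInputs } from "helpers/automations/nameHelpers"

// Renaming the step at index 1 from "oldName" to "newName" rewrites both the
// name-based and the index-based binding to the stepsByName form:
const before = "{{ stepsByName.oldName.success }} and {{ steps.1.message }}"
const after = updateBindingsInInputs(before, "oldName", "newName", 1)
// after === "{{ stepsByName.newName.success }} and {{ stepsByName.newName.message }}"

// Nested input objects are walked recursively, keys included:
const inputs = {
  text: "{{ stepsByName.oldName.success }}",
  nested: { value: "{{ steps.1.message }}" },
}
const updated = updateBindingsInInputs(inputs, "oldName", "newName", 1)
// updated.text === "{{ stepsByName.newName.success }}"
// updated.nested.value === "{{ stepsByName.newName.message }}"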