Merge branch 'develop' of github.com:Budibase/budibase into responsive-portal
commit 6efd505118
@@ -1,5 +1,5 @@
 {
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/auth",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "description": "Authentication middlewares for budibase builder and apps",
   "main": "src/index.js",
   "author": "Budibase",
@@ -3,7 +3,27 @@ const { getTenantId, lookupTenantId, getGlobalDB } = require("../tenancy")
 
 const EXPIRY_SECONDS = 3600
 
-exports.getUser = async (userId, tenantId = null) => {
+/**
+ * The default populate user function
+ */
+const populateFromDB = (userId, tenantId) => {
+  return getGlobalDB(tenantId).get(userId)
+}
+
+/**
+ * Get the requested user by id.
+ * Use redis cache to first read the user.
+ * If not present fallback to loading the user directly and re-caching.
+ * @param {*} userId the id of the user to get
+ * @param {*} tenantId the tenant of the user to get
+ * @param {*} populateUser function to provide the user for re-caching. default to couch db
+ * @returns
+ */
+exports.getUser = async (
+  userId,
+  tenantId = null,
+  populateUser = populateFromDB
+) => {
   if (!tenantId) {
     try {
       tenantId = getTenantId()
@@ -15,7 +35,7 @@ exports.getUser = async (userId, tenantId = null) => {
   // try cache
   let user = await client.get(userId)
   if (!user) {
-    user = await getGlobalDB(tenantId).get(userId)
+    user = await populateUser(userId, tenantId)
     client.store(userId, user, EXPIRY_SECONDS)
   }
   if (user && !user.tenantId && tenantId) {
@@ -35,10 +35,6 @@ exports.APP_PREFIX = DocumentTypes.APP + SEPARATOR
 exports.APP_DEV = exports.APP_DEV_PREFIX = DocumentTypes.APP_DEV + SEPARATOR
 exports.SEPARATOR = SEPARATOR
 
-function isDevApp(app) {
-  return app.appId.startsWith(exports.APP_DEV_PREFIX)
-}
-
 /**
  * If creating DB allDocs/query params with only a single top level ID this can be used, this
  * is usually the case as most of our docs are top level e.g. tables, automations, users and so on.
@@ -62,6 +58,18 @@ function getDocParams(docType, docId = null, otherProps = {}) {
   }
 }
 
+exports.isDevAppID = appId => {
+  return appId.startsWith(exports.APP_DEV_PREFIX)
+}
+
+exports.isProdAppID = appId => {
+  return appId.startsWith(exports.APP_PREFIX) && !exports.isDevAppID(appId)
+}
+
+function isDevApp(app) {
+  return exports.isDevAppID(app.appId)
+}
+
 /**
  * Given an app ID this will attempt to retrieve the tenant ID from it.
  * @return {null|string} The tenant ID found within the app ID.
@@ -21,7 +21,10 @@ function finalise(
  * The tenancy modules should not be used here and it should be assumed that the tenancy context
  * has not yet been populated.
  */
-module.exports = (noAuthPatterns = [], opts = { publicAllowed: false }) => {
+module.exports = (
+  noAuthPatterns = [],
+  opts = { publicAllowed: false, populateUser: null }
+) => {
   const noAuthOptions = noAuthPatterns ? buildMatcherRegex(noAuthPatterns) : []
   return async (ctx, next) => {
     let publicEndpoint = false
@@ -46,7 +49,15 @@ module.exports = (noAuthPatterns = [], opts = { publicAllowed: false }) => {
         error = "No session found"
       } else {
         try {
+          if (opts && opts.populateUser) {
+            user = await getUser(
+              userId,
+              session.tenantId,
+              opts.populateUser(ctx)
+            )
+          } else {
           user = await getUser(userId, session.tenantId)
+          }
           delete user.password
           authenticated = true
         } catch (err) {
@@ -8,11 +8,13 @@ const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
 
 exports.Databases = {
   PW_RESETS: "pwReset",
   VERIFICATIONS: "verification",
   INVITATIONS: "invitation",
   DEV_LOCKS: "devLocks",
   DEBOUNCE: "debounce",
   SESSIONS: "session",
+  USER_CACHE: "users",
+  FLAGS: "flags",
 }
 
 exports.SEPARATOR = SEPARATOR
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "license": "AGPL-3.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -24,7 +24,7 @@ context("Create a automation", () => {
   })
 
   // Create action
-  cy.contains("Action").click()
+  cy.contains("Internal").click()
   cy.contains("Create Row").click()
   cy.get(".setup").within(() => {
     cy.get(".spectrum-Picker-label").click()
@@ -23,6 +23,7 @@ process.env.MINIO_SECRET_KEY = "budibase"
 process.env.COUCH_DB_USER = "budibase"
 process.env.COUCH_DB_PASSWORD = "budibase"
 process.env.INTERNAL_API_KEY = "budibase"
+process.env.ALLOW_DEV_AUTOMATIONS = 1
 
 // Stop info logs polluting test outputs
 process.env.LOG_LEVEL = "error"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "license": "AGPL-3.0",
   "private": true,
   "scripts": {
@@ -65,10 +65,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.125-alpha.2",
-    "@budibase/client": "^0.9.125-alpha.2",
+    "@budibase/bbui": "^0.9.125-alpha.5",
+    "@budibase/client": "^0.9.125-alpha.5",
     "@budibase/colorpicker": "1.1.2",
-    "@budibase/string-templates": "^0.9.125-alpha.2",
+    "@budibase/string-templates": "^0.9.125-alpha.5",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
@@ -78,8 +78,11 @@ const automationActions = store => ({
   },
   trigger: async ({ automation }) => {
     const { _id } = automation
-    const TRIGGER_AUTOMATION_URL = `/api/automations/${_id}/trigger`
-    return await api.post(TRIGGER_AUTOMATION_URL)
+    return await api.post(`/api/automations/${_id}/trigger`)
   },
+  test: async ({ automation }) => {
+    const { _id } = automation
+    return await api.post(`/api/automations/${_id}/test`)
+  },
   select: automation => {
     store.update(state => {
@@ -14,15 +14,17 @@
       disabled: hasTrigger,
     },
     {
-      label: "Action",
+      label: "Internal",
       value: "ACTION",
+      internal: true,
       icon: "Actions",
       disabled: !hasTrigger,
     },
     {
-      label: "Logic",
-      value: "LOGIC",
-      icon: "Filter",
+      label: "External",
+      value: "ACTION",
+      internal: false,
+      icon: "Extension",
       disabled: !hasTrigger,
     },
   ]
@@ -32,9 +34,13 @@
   let popover
   let webhookModal
   $: selectedTab = selectedIndex == null ? null : tabs[selectedIndex].value
+  $: selectedInternal =
+    selectedIndex == null ? null : tabs[selectedIndex].internal
   $: anchor = selectedIndex === -1 ? null : anchors[selectedIndex]
   $: blocks = sortBy(entry => entry[1].name)(
     Object.entries($automationStore.blockDefinitions[selectedTab] ?? {})
   ).filter(
     entry => selectedInternal == null || entry[1].internal === selectedInternal
   )
 
   function onChangeTab(idx) {
@@ -25,7 +25,7 @@
   }
 
   async function testAutomation() {
-    const result = await automationStore.actions.trigger({
+    const result = await automationStore.actions.test({
      automation: $automationStore.selectedAutomation.automation,
    })
    if (result.status === 200) {
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^0.9.125-alpha.2",
+    "@budibase/bbui": "^0.9.125-alpha.5",
     "@budibase/standard-components": "^0.9.124",
-    "@budibase/string-templates": "^0.9.125-alpha.2",
+    "@budibase/string-templates": "^0.9.125-alpha.5",
     "regexparam": "^1.3.0",
     "shortid": "^2.2.15",
     "svelte-spa-router": "^3.0.5"
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "0.9.125-alpha.2",
+  "version": "0.9.125-alpha.5",
   "description": "Budibase Web Server",
   "main": "src/index.js",
   "repository": {
@@ -61,9 +61,9 @@
   "author": "Budibase",
   "license": "AGPL-3.0-or-later",
   "dependencies": {
-    "@budibase/auth": "^0.9.125-alpha.2",
-    "@budibase/client": "^0.9.125-alpha.2",
-    "@budibase/string-templates": "^0.9.125-alpha.2",
+    "@budibase/auth": "^0.9.125-alpha.5",
+    "@budibase/client": "^0.9.125-alpha.5",
+    "@budibase/string-templates": "^0.9.125-alpha.5",
    "@elastic/elasticsearch": "7.10.0",
    "@koa/router": "8.0.0",
    "@sendgrid/mail": "7.1.1",
@@ -109,7 +109,6 @@
    "to-json-schema": "0.2.5",
    "uuid": "3.3.2",
    "validate.js": "0.13.1",
-    "worker-farm": "1.7.0",
    "yargs": "13.2.4",
    "zlib": "1.0.5"
  },
@ -1,12 +1,14 @@
|
|||
const CouchDB = require("../../db")
|
||||
const actions = require("../../automations/actions")
|
||||
const logic = require("../../automations/logic")
|
||||
const triggers = require("../../automations/triggers")
|
||||
const webhooks = require("./webhook")
|
||||
const { getAutomationParams, generateAutomationID } = require("../../db/utils")
|
||||
|
||||
const WH_STEP_ID = triggers.BUILTIN_DEFINITIONS.WEBHOOK.stepId
|
||||
const CRON_STEP_ID = triggers.BUILTIN_DEFINITIONS.CRON.stepId
|
||||
const {
|
||||
checkForWebhooks,
|
||||
updateTestHistory,
|
||||
} = require("../../automations/utils")
|
||||
const { deleteEntityMetadata } = require("../../utilities")
|
||||
const { MetadataTypes } = require("../../constants")
|
||||
const { setTestFlag, clearTestFlag } = require("../../utilities/redis")
|
||||
|
||||
/*************************
|
||||
* *
|
||||
|
@ -14,6 +16,19 @@ const CRON_STEP_ID = triggers.BUILTIN_DEFINITIONS.CRON.stepId
|
|||
* *
|
||||
*************************/
|
||||
|
||||
async function cleanupAutomationMetadata(appId, automationId) {
|
||||
await deleteEntityMetadata(
|
||||
appId,
|
||||
MetadataTypes.AUTOMATION_TEST_INPUT,
|
||||
automationId
|
||||
)
|
||||
await deleteEntityMetadata(
|
||||
appId,
|
||||
MetadataTypes.AUTOMATION_TEST_HISTORY,
|
||||
automationId
|
||||
)
|
||||
}
|
||||
|
||||
function cleanAutomationInputs(automation) {
|
||||
if (automation == null) {
|
||||
return automation
|
||||
|
@ -21,6 +36,10 @@ function cleanAutomationInputs(automation) {
|
|||
let steps = automation.definition.steps
|
||||
let trigger = automation.definition.trigger
|
||||
let allSteps = [...steps, trigger]
|
||||
// live is not a property used anymore
|
||||
if (automation.live != null) {
|
||||
delete automation.live
|
||||
}
|
||||
for (let step of allSteps) {
|
||||
if (step == null) {
|
||||
continue
|
||||
|
@ -34,119 +53,6 @@ function cleanAutomationInputs(automation) {
|
|||
return automation
|
||||
}
|
||||
|
||||
/**
|
||||
* This function handles checking of any cron jobs need to be created or deleted for automations.
|
||||
* @param {string} appId The ID of the app in which we are checking for webhooks
|
||||
* @param {object|undefined} oldAuto The old automation object if updating/deleting
|
||||
* @param {object|undefined} newAuto The new automation object if creating/updating
|
||||
*/
|
||||
async function checkForCronTriggers({ appId, oldAuto, newAuto }) {
|
||||
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
|
||||
const newTrigger = newAuto ? newAuto.definition.trigger : null
|
||||
function isCronTrigger(auto) {
|
||||
return (
|
||||
auto &&
|
||||
auto.definition.trigger &&
|
||||
auto.definition.trigger.stepId === CRON_STEP_ID
|
||||
)
|
||||
}
|
||||
|
||||
const isLive = auto => auto && auto.live
|
||||
|
||||
const cronTriggerRemoved =
|
||||
isCronTrigger(oldAuto) && !isCronTrigger(newAuto) && oldTrigger.cronJobId
|
||||
const cronTriggerDeactivated = !isLive(newAuto) && isLive(oldAuto)
|
||||
|
||||
const cronTriggerActivated = isLive(newAuto) && !isLive(oldAuto)
|
||||
|
||||
if (cronTriggerRemoved || (cronTriggerDeactivated && oldTrigger.cronJobId)) {
|
||||
await triggers.automationQueue.removeRepeatableByKey(oldTrigger.cronJobId)
|
||||
}
|
||||
// need to create cron job
|
||||
else if (isCronTrigger(newAuto) && cronTriggerActivated) {
|
||||
const job = await triggers.automationQueue.add(
|
||||
{
|
||||
automation: newAuto,
|
||||
event: { appId, timestamp: Date.now() },
|
||||
},
|
||||
{ repeat: { cron: newTrigger.inputs.cron } }
|
||||
)
|
||||
// Assign cron job ID from bull so we can remove it later if the cron trigger is removed
|
||||
newTrigger.cronJobId = job.id
|
||||
}
|
||||
return newAuto
|
||||
}
|
||||
|
||||
/**
|
||||
* This function handles checking if any webhooks need to be created or deleted for automations.
|
||||
* @param {string} appId The ID of the app in which we are checking for webhooks
|
||||
* @param {object|undefined} oldAuto The old automation object if updating/deleting
|
||||
* @param {object|undefined} newAuto The new automation object if creating/updating
|
||||
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
|
||||
* written to DB (this does not write to DB as it would be wasteful to repeat).
|
||||
*/
|
||||
async function checkForWebhooks({ appId, oldAuto, newAuto }) {
|
||||
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
|
||||
const newTrigger = newAuto ? newAuto.definition.trigger : null
|
||||
const triggerChanged =
|
||||
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
|
||||
function isWebhookTrigger(auto) {
|
||||
return (
|
||||
auto &&
|
||||
auto.definition.trigger &&
|
||||
auto.definition.trigger.stepId === WH_STEP_ID
|
||||
)
|
||||
}
|
||||
// need to delete webhook
|
||||
if (
|
||||
isWebhookTrigger(oldAuto) &&
|
||||
(!isWebhookTrigger(newAuto) || triggerChanged) &&
|
||||
oldTrigger.webhookId
|
||||
) {
|
||||
try {
|
||||
let db = new CouchDB(appId)
|
||||
// need to get the webhook to get the rev
|
||||
const webhook = await db.get(oldTrigger.webhookId)
|
||||
const ctx = {
|
||||
appId,
|
||||
params: { id: webhook._id, rev: webhook._rev },
|
||||
}
|
||||
// might be updating - reset the inputs to remove the URLs
|
||||
if (newTrigger) {
|
||||
delete newTrigger.webhookId
|
||||
newTrigger.inputs = {}
|
||||
}
|
||||
await webhooks.destroy(ctx)
|
||||
} catch (err) {
|
||||
// don't worry about not being able to delete, if it doesn't exist all good
|
||||
}
|
||||
}
|
||||
// need to create webhook
|
||||
if (
|
||||
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
|
||||
isWebhookTrigger(newAuto)
|
||||
) {
|
||||
const ctx = {
|
||||
appId,
|
||||
request: {
|
||||
body: new webhooks.Webhook(
|
||||
"Automation webhook",
|
||||
webhooks.WebhookType.AUTOMATION,
|
||||
newAuto._id
|
||||
),
|
||||
},
|
||||
}
|
||||
await webhooks.save(ctx)
|
||||
const id = ctx.body.webhook._id
|
||||
newTrigger.webhookId = id
|
||||
newTrigger.inputs = {
|
||||
schemaUrl: `api/webhooks/schema/${appId}/${id}`,
|
||||
triggerUrl: `api/webhooks/trigger/${appId}/${id}`,
|
||||
}
|
||||
}
|
||||
return newAuto
|
||||
}
|
||||
|
||||
exports.create = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
let automation = ctx.request.body
|
||||
|
@ -165,10 +71,6 @@ exports.create = async function (ctx) {
|
|||
appId: ctx.appId,
|
||||
newAuto: automation,
|
||||
})
|
||||
automation = await checkForCronTriggers({
|
||||
appId: ctx.appId,
|
||||
newAuto: automation,
|
||||
})
|
||||
const response = await db.put(automation)
|
||||
automation._rev = response.rev
|
||||
|
||||
|
@ -193,14 +95,26 @@ exports.update = async function (ctx) {
|
|||
oldAuto: oldAutomation,
|
||||
newAuto: automation,
|
||||
})
|
||||
automation = await checkForCronTriggers({
|
||||
appId: ctx.appId,
|
||||
oldAuto: oldAutomation,
|
||||
newAuto: automation,
|
||||
})
|
||||
const response = await db.put(automation)
|
||||
automation._rev = response.rev
|
||||
|
||||
const oldAutoTrigger =
|
||||
oldAutomation && oldAutomation.definition.trigger
|
||||
? oldAutomation.definition.trigger
|
||||
: {}
|
||||
const newAutoTrigger =
|
||||
automation && automation.definition.trigger
|
||||
? automation.definition.trigger
|
||||
: {}
|
||||
// trigger has been updated, remove the test inputs
|
||||
if (oldAutoTrigger.id !== newAutoTrigger.id) {
|
||||
await deleteEntityMetadata(
|
||||
ctx.appId,
|
||||
MetadataTypes.AUTOMATION_TEST_INPUT,
|
||||
automation._id
|
||||
)
|
||||
}
|
||||
|
||||
ctx.status = 200
|
||||
ctx.body = {
|
||||
message: `Automation ${automation._id} updated successfully.`,
|
||||
|
@ -229,35 +143,29 @@ exports.find = async function (ctx) {
|
|||
|
||||
exports.destroy = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const oldAutomation = await db.get(ctx.params.id)
|
||||
const automationId = ctx.params.id
|
||||
const oldAutomation = await db.get(automationId)
|
||||
await checkForWebhooks({
|
||||
appId: ctx.appId,
|
||||
oldAuto: oldAutomation,
|
||||
})
|
||||
await checkForCronTriggers({
|
||||
appId: ctx.appId,
|
||||
oldAuto: oldAutomation,
|
||||
})
|
||||
ctx.body = await db.remove(ctx.params.id, ctx.params.rev)
|
||||
// delete metadata first
|
||||
await cleanupAutomationMetadata(ctx.appId, automationId)
|
||||
ctx.body = await db.remove(automationId, ctx.params.rev)
|
||||
}
|
||||
|
||||
exports.getActionList = async function (ctx) {
|
||||
ctx.body = actions.DEFINITIONS
|
||||
ctx.body = actions.ACTION_DEFINITIONS
|
||||
}
|
||||
|
||||
exports.getTriggerList = async function (ctx) {
|
||||
ctx.body = triggers.BUILTIN_DEFINITIONS
|
||||
}
|
||||
|
||||
exports.getLogicList = async function (ctx) {
|
||||
ctx.body = logic.BUILTIN_DEFINITIONS
|
||||
ctx.body = triggers.TRIGGER_DEFINITIONS
|
||||
}
|
||||
|
||||
module.exports.getDefinitionList = async function (ctx) {
|
||||
ctx.body = {
|
||||
logic: logic.BUILTIN_DEFINITIONS,
|
||||
trigger: triggers.BUILTIN_DEFINITIONS,
|
||||
action: actions.DEFINITIONS,
|
||||
trigger: triggers.TRIGGER_DEFINITIONS,
|
||||
action: actions.ACTION_DEFINITIONS,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -268,15 +176,37 @@ module.exports.getDefinitionList = async function (ctx) {
|
|||
*********************/
|
||||
|
||||
exports.trigger = async function (ctx) {
|
||||
const db = new CouchDB(ctx.appId)
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
let automation = await db.get(ctx.params.id)
|
||||
await triggers.externalTrigger(automation, {
|
||||
...ctx.request.body,
|
||||
appId: ctx.appId,
|
||||
appId,
|
||||
})
|
||||
ctx.status = 200
|
||||
ctx.body = {
|
||||
message: `Automation ${automation._id} has been triggered.`,
|
||||
automation,
|
||||
}
|
||||
}
|
||||
|
||||
exports.test = async function (ctx) {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
let automation = await db.get(ctx.params.id)
|
||||
await setTestFlag(automation._id)
|
||||
const response = await triggers.externalTrigger(
|
||||
automation,
|
||||
{
|
||||
...ctx.request.body,
|
||||
appId,
|
||||
},
|
||||
{ getResponses: true }
|
||||
)
|
||||
// save a test history run
|
||||
await updateTestHistory(ctx.appId, automation, {
|
||||
...ctx.request.body,
|
||||
occurredAt: new Date().getTime(),
|
||||
})
|
||||
await clearTestFlag(automation._id)
|
||||
ctx.body = response
|
||||
}
|
||||
|
|
|
@@ -1,7 +1,11 @@
 const CouchDB = require("../../../db")
 const Deployment = require("./Deployment")
 const { Replication } = require("@budibase/auth/db")
-const { DocumentTypes } = require("../../../db/utils")
+const { DocumentTypes, getAutomationParams } = require("../../../db/utils")
+const {
+  disableAllCrons,
+  enableCronTrigger,
+} = require("../../../automations/utils")
 
 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000
@@ -58,6 +62,23 @@ async function storeDeploymentHistory(deployment) {
   return deployment
 }
 
+async function initDeployedApp(prodAppId) {
+  const db = new CouchDB(prodAppId)
+  const automations = (
+    await db.allDocs(
+      getAutomationParams(null, {
+        include_docs: true,
+      })
+    )
+  ).rows.map(row => row.doc)
+  const promises = []
+  await disableAllCrons(prodAppId)
+  for (let automation of automations) {
+    promises.push(enableCronTrigger(prodAppId, automation))
+  }
+  await Promise.all(promises)
+}
+
 async function deployApp(deployment) {
   try {
     const productionAppId = deployment.appId.replace("_dev", "")
@@ -85,6 +106,7 @@ async function deployApp(deployment) {
       },
     })
 
+    await initDeployedApp(productionAppId)
     deployment.setStatus(DeploymentStatus.SUCCESS)
     await storeDeploymentHistory(deployment)
   } catch (err) {
@@ -0,0 +1,46 @@
+const { MetadataTypes } = require("../../constants")
+const CouchDB = require("../../db")
+const { generateMetadataID } = require("../../db/utils")
+const { saveEntityMetadata, deleteEntityMetadata } = require("../../utilities")
+
+exports.getTypes = async ctx => {
+  ctx.body = {
+    types: MetadataTypes,
+  }
+}
+
+exports.saveMetadata = async ctx => {
+  const { type, entityId } = ctx.params
+  if (type === MetadataTypes.AUTOMATION_TEST_HISTORY) {
+    ctx.throw(400, "Cannot save automation history type")
+  }
+  ctx.body = await saveEntityMetadata(
+    ctx.appId,
+    type,
+    entityId,
+    ctx.request.body
+  )
+}
+
+exports.deleteMetadata = async ctx => {
+  const { type, entityId } = ctx.params
+  await deleteEntityMetadata(ctx.appId, type, entityId)
+  ctx.body = {
+    message: "Metadata deleted successfully",
+  }
+}
+
+exports.getMetadata = async ctx => {
+  const { type, entityId } = ctx.params
+  const db = new CouchDB(ctx.appId)
+  const id = generateMetadataID(type, entityId)
+  try {
+    ctx.body = await db.get(id)
+  } catch (err) {
+    if (err.status === 404) {
+      ctx.body = {}
+    } else {
+      ctx.throw(err.status, err)
+    }
+  }
+}
@@ -160,8 +160,6 @@ exports.execute = async function (ctx) {
   )
 
   const integration = new Integration(datasource.config)
-  console.log(query)
-  // ctx.body = {}
   // call the relevant CRUD method on the integration class
   ctx.body = formatResponse(await integration[query.queryVerb](enrichedQuery))
   // cleanup
@ -23,6 +23,19 @@ const CALCULATION_TYPES = {
|
|||
STATS: "stats",
|
||||
}
|
||||
|
||||
async function storeResponse(ctx, db, row, oldTable, table) {
|
||||
row.type = "row"
|
||||
const response = await db.put(row)
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
if (!isEqual(oldTable, table)) {
|
||||
await db.put(table)
|
||||
}
|
||||
row._rev = response.rev
|
||||
// process the row before return, to include relationships
|
||||
row = await outputProcessing(ctx, table, row, { squash: false })
|
||||
return { row, table }
|
||||
}
|
||||
|
||||
exports.patch = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
|
@ -77,14 +90,7 @@ exports.patch = async ctx => {
|
|||
return { row: ctx.body, table }
|
||||
}
|
||||
|
||||
const response = await db.put(row)
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
if (!isEqual(dbTable, table)) {
|
||||
await db.put(table)
|
||||
}
|
||||
row._rev = response.rev
|
||||
row.type = "row"
|
||||
return { row, table }
|
||||
return storeResponse(ctx, db, row, dbTable, table)
|
||||
}
|
||||
|
||||
exports.save = async function (ctx) {
|
||||
|
@ -118,14 +124,7 @@ exports.save = async function (ctx) {
|
|||
table,
|
||||
})
|
||||
|
||||
row.type = "row"
|
||||
const response = await db.put(row)
|
||||
// don't worry about rev, tables handle rev/lastID updates
|
||||
if (!isEqual(dbTable, table)) {
|
||||
await db.put(table)
|
||||
}
|
||||
row._rev = response.rev
|
||||
return { row, table }
|
||||
return storeResponse(ctx, db, row, dbTable, table)
|
||||
}
|
||||
|
||||
exports.fetchView = async ctx => {
|
||||
|
@ -221,34 +220,47 @@ exports.destroy = async function (ctx) {
|
|||
const appId = ctx.appId
|
||||
const db = new CouchDB(appId)
|
||||
const { _id, _rev } = ctx.request.body
|
||||
const row = await db.get(_id)
|
||||
let row = await db.get(_id)
|
||||
|
||||
if (row.tableId !== ctx.params.tableId) {
|
||||
throw "Supplied tableId doesn't match the row's tableId"
|
||||
}
|
||||
const table = await db.get(row.tableId)
|
||||
// update the row to include full relationships before deleting them
|
||||
row = await outputProcessing(ctx, table, row, { squash: false })
|
||||
// now remove the relationships
|
||||
await linkRows.updateLinks({
|
||||
appId,
|
||||
eventType: linkRows.EventType.ROW_DELETE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
})
|
||||
|
||||
let response
|
||||
if (ctx.params.tableId === InternalTables.USER_METADATA) {
|
||||
ctx.params = {
|
||||
id: _id,
|
||||
}
|
||||
await userController.destroyMetadata(ctx)
|
||||
return { response: ctx.body, row }
|
||||
response = ctx.body
|
||||
} else {
|
||||
const response = await db.remove(_id, _rev)
|
||||
return { response, row }
|
||||
response = await db.remove(_id, _rev)
|
||||
}
|
||||
return { response, row }
|
||||
}
|
||||
|
||||
exports.bulkDestroy = async ctx => {
|
||||
const appId = ctx.appId
|
||||
const { rows } = ctx.request.body
|
||||
const db = new CouchDB(appId)
|
||||
const tableId = ctx.params.tableId
|
||||
const table = await db.get(tableId)
|
||||
let { rows } = ctx.request.body
|
||||
|
||||
// before carrying out any updates, make sure the rows are ready to be returned
|
||||
// they need to be the full rows (including previous relationships) for automations
|
||||
rows = await outputProcessing(ctx, table, rows, { squash: false })
|
||||
|
||||
// remove the relationships first
|
||||
let updates = rows.map(row =>
|
||||
linkRows.updateLinks({
|
||||
appId,
|
||||
|
@ -257,8 +269,7 @@ exports.bulkDestroy = async ctx => {
|
|||
tableId: row.tableId,
|
||||
})
|
||||
)
|
||||
// TODO remove special user case in future
|
||||
if (ctx.params.tableId === InternalTables.USER_METADATA) {
|
||||
if (tableId === InternalTables.USER_METADATA) {
|
||||
updates = updates.concat(
|
||||
rows.map(row => {
|
||||
ctx.params = {
|
||||
|
|
|
@ -9,6 +9,10 @@ const {
|
|||
} = require("@budibase/auth/permissions")
|
||||
const Joi = require("joi")
|
||||
const { bodyResource, paramResource } = require("../../middleware/resourceId")
|
||||
const {
|
||||
middleware: appInfoMiddleware,
|
||||
AppType,
|
||||
} = require("../../middleware/appInfo")
|
||||
|
||||
const router = Router()
|
||||
|
||||
|
@ -22,7 +26,6 @@ function generateStepSchema(allowStepTypes) {
|
|||
tagline: Joi.string().required(),
|
||||
icon: Joi.string().required(),
|
||||
params: Joi.object(),
|
||||
// TODO: validate args a bit more deeply
|
||||
args: Joi.object(),
|
||||
type: Joi.string().required().valid(...allowStepTypes),
|
||||
}).unknown(true)
|
||||
|
@ -31,7 +34,6 @@ function generateStepSchema(allowStepTypes) {
|
|||
function generateValidator(existing = false) {
|
||||
// prettier-ignore
|
||||
return joiValidator.body(Joi.object({
|
||||
live: Joi.bool(),
|
||||
_id: existing ? Joi.string().required() : Joi.string(),
|
||||
_rev: existing ? Joi.string().required() : Joi.string(),
|
||||
name: Joi.string().required(),
|
||||
|
@ -54,11 +56,6 @@ router
|
|||
authorized(BUILDER),
|
||||
controller.getActionList
|
||||
)
|
||||
.get(
|
||||
"/api/automations/logic/list",
|
||||
authorized(BUILDER),
|
||||
controller.getLogicList
|
||||
)
|
||||
.get(
|
||||
"/api/automations/definitions/list",
|
||||
authorized(BUILDER),
|
||||
|
@ -84,17 +81,25 @@ router
|
|||
generateValidator(false),
|
||||
controller.create
|
||||
)
|
||||
.post(
|
||||
"/api/automations/:id/trigger",
|
||||
paramResource("id"),
|
||||
authorized(PermissionTypes.AUTOMATION, PermissionLevels.EXECUTE),
|
||||
controller.trigger
|
||||
)
|
||||
.delete(
|
||||
"/api/automations/:id/:rev",
|
||||
paramResource("id"),
|
||||
authorized(BUILDER),
|
||||
controller.destroy
|
||||
)
|
||||
.post(
|
||||
"/api/automations/:id/trigger",
|
||||
appInfoMiddleware({ appType: AppType.PROD }),
|
||||
paramResource("id"),
|
||||
authorized(PermissionTypes.AUTOMATION, PermissionLevels.EXECUTE),
|
||||
controller.trigger
|
||||
)
|
||||
.post(
|
||||
"/api/automations/:id/test",
|
||||
appInfoMiddleware({ appType: AppType.DEV }),
|
||||
paramResource("id"),
|
||||
authorized(PermissionTypes.AUTOMATION, PermissionLevels.EXECUTE),
|
||||
controller.test
|
||||
)
|
||||
|
||||
module.exports = router
|
||||
|
|
|
@@ -22,6 +22,7 @@ const datasourceRoutes = require("./datasource")
 const queryRoutes = require("./query")
 const hostingRoutes = require("./hosting")
 const backupRoutes = require("./backup")
+const metadataRoutes = require("./metadata")
 const devRoutes = require("./dev")
 
 exports.mainRoutes = [
@@ -46,6 +47,7 @@ exports.mainRoutes = [
   queryRoutes,
   hostingRoutes,
   backupRoutes,
+  metadataRoutes,
   devRoutes,
   // these need to be handled last as they still use /api/:tableId
   // this could be breaking as koa may recognise other routes as this
@@ -0,0 +1,38 @@
+const Router = require("@koa/router")
+const controller = require("../controllers/metadata")
+const {
+  middleware: appInfoMiddleware,
+  AppType,
+} = require("../../middleware/appInfo")
+const authorized = require("../../middleware/authorized")
+const { BUILDER } = require("@budibase/auth/permissions")
+
+const router = Router()
+
+router
+  .post(
+    "/api/metadata/:type/:entityId",
+    authorized(BUILDER),
+    appInfoMiddleware({ appType: AppType.DEV }),
+    controller.saveMetadata
+  )
+  .delete(
+    "/api/metadata/:type/:entityId",
+    authorized(BUILDER),
+    appInfoMiddleware({ appType: AppType.DEV }),
+    controller.deleteMetadata
+  )
+  .get(
+    "/api/metadata/type",
+    authorized(BUILDER),
+    appInfoMiddleware({ appType: AppType.DEV }),
+    controller.getTypes
+  )
+  .get(
+    "/api/metadata/:type/:entityId",
+    authorized(BUILDER),
+    appInfoMiddleware({ appType: AppType.DEV }),
+    controller.getMetadata
+  )
+
+module.exports = router
@ -2,6 +2,7 @@ const {
|
|||
checkBuilderEndpoint,
|
||||
getAllTableRows,
|
||||
clearAllAutomations,
|
||||
testAutomation,
|
||||
} = require("./utilities/TestFunctions")
|
||||
const setup = require("./utilities")
|
||||
const { basicAutomation } = setup.structures
|
||||
|
@ -10,7 +11,6 @@ const MAX_RETRIES = 4
|
|||
|
||||
let ACTION_DEFINITIONS = {}
|
||||
let TRIGGER_DEFINITIONS = {}
|
||||
let LOGIC_DEFINITIONS = {}
|
||||
|
||||
describe("/automations", () => {
|
||||
let request = setup.getRequest()
|
||||
|
@ -23,15 +23,6 @@ describe("/automations", () => {
|
|||
await config.init()
|
||||
})
|
||||
|
||||
const triggerWorkflow = async automation => {
|
||||
return await request
|
||||
.post(`/api/automations/${automation._id}/trigger`)
|
||||
.send({ name: "Test", description: "TEST" })
|
||||
.set(config.defaultHeaders())
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200)
|
||||
}
|
||||
|
||||
describe("get definitions", () => {
|
||||
it("returns a list of definitions for actions", async () => {
|
||||
const res = await request
|
||||
|
@ -44,7 +35,7 @@ describe("/automations", () => {
|
|||
ACTION_DEFINITIONS = res.body
|
||||
})
|
||||
|
||||
it("returns a list of definitions for triggers", async () => {
|
||||
it("returns a list of definitions for triggerInfo", async () => {
|
||||
const res = await request
|
||||
.get(`/api/automations/trigger/list`)
|
||||
.set(config.defaultHeaders())
|
||||
|
@ -55,17 +46,6 @@ describe("/automations", () => {
|
|||
TRIGGER_DEFINITIONS = res.body
|
||||
})
|
||||
|
||||
it("returns a list of definitions for actions", async () => {
|
||||
const res = await request
|
||||
.get(`/api/automations/logic/list`)
|
||||
.set(config.defaultHeaders())
|
||||
.expect('Content-Type', /json/)
|
||||
.expect(200)
|
||||
|
||||
expect(Object.keys(res.body).length).not.toEqual(0)
|
||||
LOGIC_DEFINITIONS = res.body
|
||||
})
|
||||
|
||||
it("returns all of the definitions in one", async () => {
|
||||
const res = await request
|
||||
.get(`/api/automations/definitions/list`)
|
||||
|
@ -75,7 +55,6 @@ describe("/automations", () => {
|
|||
|
||||
expect(Object.keys(res.body.action).length).toBeGreaterThanOrEqual(Object.keys(ACTION_DEFINITIONS).length)
|
||||
expect(Object.keys(res.body.trigger).length).toEqual(Object.keys(TRIGGER_DEFINITIONS).length)
|
||||
expect(Object.keys(res.body.logic).length).toEqual(Object.keys(LOGIC_DEFINITIONS).length)
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -168,14 +147,13 @@ describe("/automations", () => {
|
|||
automation.definition.steps[0].inputs.row.tableId = table._id
|
||||
automation = await config.createAutomation(automation)
|
||||
await setup.delay(500)
|
||||
const res = await triggerWorkflow(automation)
|
||||
const res = await testAutomation(config, automation)
|
||||
// this looks a bit mad but we don't actually have a way to wait for a response from the automation to
|
||||
// know that it has finished all of its actions - this is currently the best way
|
||||
// also when this runs in CI it is very temper-mental so for now trying to make run stable by repeating until it works
|
||||
// TODO: update when workflow logs are a thing
|
||||
for (let tries = 0; tries < MAX_RETRIES; tries++) {
|
||||
expect(res.body.message).toEqual(`Automation ${automation._id} has been triggered.`)
|
||||
expect(res.body.automation.name).toEqual(automation.name)
|
||||
expect(res.body).toBeDefined()
|
||||
await setup.delay(500)
|
||||
let elements = await getAllTableRows(config)
|
||||
// don't test it unless there are values to test
|
||||
|
|
|
@@ -0,0 +1,65 @@
+const { testAutomation } = require("./utilities/TestFunctions")
+const setup = require("./utilities")
+const { MetadataTypes } = require("../../../constants")
+
+describe("/metadata", () => {
+  let request = setup.getRequest()
+  let config = setup.getConfig()
+  let automation
+
+  afterAll(setup.afterAll)
+
+  beforeEach(async () => {
+    await config.init()
+    automation = await config.createAutomation()
+  })
+
+  async function createMetadata(data, type = MetadataTypes.AUTOMATION_TEST_INPUT) {
+    const res = await request
+      .post(`/api/metadata/${type}/${automation._id}`)
+      .send(data)
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    expect(res.body._rev).toBeDefined()
+  }
+
+  async function getMetadata(type) {
+    const res = await request
+      .get(`/api/metadata/${type}/${automation._id}`)
+      .set(config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(200)
+    return res.body
+  }
+
+  describe("save", () => {
+    it("should be able to save some metadata", async () => {
+      await createMetadata({ test: "a" })
+      const testInput = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
+      expect(testInput.test).toBe("a")
+    })
+
+    it("should save history metadata on automation run", async () => {
+      // this should have created some history
+      await testAutomation(config, automation)
+      const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_HISTORY)
+      expect(metadata).toBeDefined()
+      expect(metadata.history.length).toBe(1)
+      expect(typeof metadata.history[0].occurredAt).toBe("number")
+    })
+  })
+
+  describe("destroy", () => {
+    it("should be able to delete some test inputs", async () => {
+      const res = await request
+        .delete(`/api/metadata/${MetadataTypes.AUTOMATION_TEST_INPUT}/${automation._id}`)
+        .set(config.defaultHeaders())
+        .expect("Content-Type", /json/)
+        .expect(200)
+      expect(res.body.message).toBeDefined()
+      const metadata = await getMetadata(MetadataTypes.AUTOMATION_TEST_INPUT)
+      expect(metadata.test).toBeUndefined()
+    })
+  })
+})
@@ -101,3 +101,17 @@ exports.checkPermissionsEndpoint = async ({
 exports.getDB = config => {
   return new CouchDB(config.getAppId())
 }
+
+exports.testAutomation = async (config, automation) => {
+  return await config.request
+    .post(`/api/automations/${automation._id}/test`)
+    .send({
+      row: {
+        name: "Test",
+        description: "TEST",
+      },
+    })
+    .set(config.defaultHeaders())
+    .expect("Content-Type", /json/)
+    .expect(200)
+}
@@ -88,8 +88,8 @@ module.exports = server.listen(env.PORT || 0, async () => {
   env._set("PORT", server.address().port)
   eventEmitter.emitPort(env.PORT)
   fileSystem.init()
-  await automations.init()
   await redis.init()
+  await automations.init()
 })
 
 process.on("uncaughtException", err => {
@ -1,4 +1,3 @@
|
|||
const sendgridEmail = require("./steps/sendgridEmail")
|
||||
const sendSmtpEmail = require("./steps/sendSmtpEmail")
|
||||
const createRow = require("./steps/createRow")
|
||||
const updateRow = require("./steps/updateRow")
|
||||
|
@ -8,15 +7,14 @@ const bash = require("./steps/bash")
|
|||
const executeQuery = require("./steps/executeQuery")
|
||||
const outgoingWebhook = require("./steps/outgoingWebhook")
|
||||
const serverLog = require("./steps/serverLog")
|
||||
const env = require("../environment")
|
||||
const Sentry = require("@sentry/node")
|
||||
const {
|
||||
automationInit,
|
||||
getExternalAutomationStep,
|
||||
} = require("../utilities/fileSystem")
|
||||
const discord = require("./steps/discord")
|
||||
const slack = require("./steps/slack")
|
||||
const zapier = require("./steps/zapier")
|
||||
const integromat = require("./steps/integromat")
|
||||
let filter = require("./steps/filter")
|
||||
let delay = require("./steps/delay")
|
||||
|
||||
const BUILTIN_ACTIONS = {
|
||||
SEND_EMAIL: sendgridEmail.run,
|
||||
const ACTION_IMPLS = {
|
||||
SEND_EMAIL_SMTP: sendSmtpEmail.run,
|
||||
CREATE_ROW: createRow.run,
|
||||
UPDATE_ROW: updateRow.run,
|
||||
|
@ -26,9 +24,15 @@ const BUILTIN_ACTIONS = {
|
|||
EXECUTE_BASH: bash.run,
|
||||
EXECUTE_QUERY: executeQuery.run,
|
||||
SERVER_LOG: serverLog.run,
|
||||
DELAY: delay.run,
|
||||
FILTER: filter.run,
|
||||
// these used to be lowercase step IDs, maintain for backwards compat
|
||||
discord: discord.run,
|
||||
slack: slack.run,
|
||||
zapier: zapier.run,
|
||||
integromat: integromat.run,
|
||||
}
|
||||
const BUILTIN_DEFINITIONS = {
|
||||
SEND_EMAIL: sendgridEmail.definition,
|
||||
const ACTION_DEFINITIONS = {
|
||||
SEND_EMAIL_SMTP: sendSmtpEmail.definition,
|
||||
CREATE_ROW: createRow.definition,
|
||||
UPDATE_ROW: updateRow.definition,
|
||||
|
@ -38,47 +42,20 @@ const BUILTIN_DEFINITIONS = {
|
|||
EXECUTE_QUERY: executeQuery.definition,
|
||||
EXECUTE_BASH: bash.definition,
|
||||
SERVER_LOG: serverLog.definition,
|
||||
}
|
||||
|
||||
let MANIFEST = null
|
||||
|
||||
/* istanbul ignore next */
|
||||
function buildBundleName(pkgName, version) {
|
||||
return `${pkgName}@${version}.min.js`
|
||||
DELAY: delay.definition,
|
||||
FILTER: filter.definition,
|
||||
// these used to be lowercase step IDs, maintain for backwards compat
|
||||
discord: discord.definition,
|
||||
slack: slack.definition,
|
||||
zapier: zapier.definition,
|
||||
integromat: integromat.definition,
|
||||
}
|
||||
|
||||
/* istanbul ignore next */
|
||||
module.exports.getAction = async function (actionName) {
|
||||
if (BUILTIN_ACTIONS[actionName] != null) {
|
||||
return BUILTIN_ACTIONS[actionName]
|
||||
exports.getAction = async function (actionName) {
|
||||
if (ACTION_IMPLS[actionName] != null) {
|
||||
return ACTION_IMPLS[actionName]
|
||||
}
|
||||
// worker pools means that a worker may not have manifest
|
||||
if (env.isProd() && MANIFEST == null) {
|
||||
MANIFEST = await module.exports.init()
|
||||
}
|
||||
// env setup to get async packages
|
||||
if (!MANIFEST || !MANIFEST.packages || !MANIFEST.packages[actionName]) {
|
||||
return null
|
||||
}
|
||||
const pkg = MANIFEST.packages[actionName]
|
||||
const bundleName = buildBundleName(pkg.stepId, pkg.version)
|
||||
return getExternalAutomationStep(pkg.stepId, pkg.version, bundleName)
|
||||
}
|
||||
|
||||
module.exports.init = async function () {
|
||||
try {
|
||||
MANIFEST = await automationInit()
|
||||
module.exports.DEFINITIONS =
|
||||
MANIFEST && MANIFEST.packages
|
||||
? Object.assign(MANIFEST.packages, BUILTIN_DEFINITIONS)
|
||||
: BUILTIN_DEFINITIONS
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
Sentry.captureException(err)
|
||||
}
|
||||
return MANIFEST
|
||||
}
|
||||
|
||||
// definitions will have downloaded ones added to it, while builtin won't
|
||||
module.exports.DEFINITIONS = BUILTIN_DEFINITIONS
|
||||
module.exports.BUILTIN_DEFINITIONS = BUILTIN_DEFINITIONS
|
||||
exports.ACTION_DEFINITIONS = ACTION_DEFINITIONS
|
||||
|
|
|
@@ -1,14 +1,22 @@
 const { createBullBoard } = require("bull-board")
 const { BullAdapter } = require("bull-board/bullAdapter")
-const { getQueues } = require("./triggers")
 const express = require("express")
+const env = require("../environment")
+const Queue = env.isTest()
+  ? require("../utilities/queue/inMemoryQueue")
+  : require("bull")
+const { JobQueues } = require("../constants")
+const { utils } = require("@budibase/auth/redis")
+const { opts } = utils.getRedisOptions()
+
+let automationQueue = new Queue(JobQueues.AUTOMATIONS, { redis: opts })
 
 exports.pathPrefix = "/bulladmin"
 
 exports.init = () => {
   const expressApp = express()
   // Set up queues for bull board admin
-  const queues = getQueues()
+  const queues = [automationQueue]
   const adapters = []
   for (let queue of queues) {
     adapters.push(new BullAdapter(queue))
@@ -18,3 +26,5 @@ exports.init = () => {
   expressApp.use(exports.pathPrefix, router)
   return expressApp
 }
+
+exports.queue = automationQueue
@ -1,51 +1,17 @@
|
|||
const triggers = require("./triggers")
|
||||
const actions = require("./actions")
|
||||
const env = require("../environment")
|
||||
const workerFarm = require("worker-farm")
|
||||
const singleThread = require("./thread")
|
||||
const { getAPIKey, update, Properties } = require("../utilities/usageQuota")
|
||||
|
||||
let workers = workerFarm(require.resolve("./thread"))
|
||||
|
||||
function runWorker(job) {
|
||||
return new Promise((resolve, reject) => {
|
||||
workers(job, err => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
} else {
|
||||
resolve()
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async function updateQuota(automation) {
|
||||
const appId = automation.appId
|
||||
const apiObj = await getAPIKey(appId)
|
||||
// this will fail, causing automation to escape if limits reached
|
||||
await update(apiObj.apiKey, Properties.AUTOMATION, 1)
|
||||
return apiObj.apiKey
|
||||
}
|
||||
const { processEvent } = require("./utils")
|
||||
const { queue } = require("./bullboard")
|
||||
|
||||
/**
|
||||
* This module is built purely to kick off the worker farm and manage the inputs/outputs
|
||||
*/
|
||||
module.exports.init = async function () {
|
||||
await actions.init()
|
||||
triggers.automationQueue.process(async job => {
|
||||
try {
|
||||
if (env.USE_QUOTAS) {
|
||||
job.data.automation.apiKey = await updateQuota(job.data.automation)
|
||||
}
|
||||
if (env.isProd()) {
|
||||
await runWorker(job)
|
||||
} else {
|
||||
await singleThread(job)
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(
|
||||
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
|
||||
)
|
||||
}
|
||||
exports.init = function () {
|
||||
// this promise will not complete
|
||||
return queue.process(async job => {
|
||||
await processEvent(job)
|
||||
})
|
||||
}
|
||||
|
||||
exports.getQueues = () => {
|
||||
return [queue]
|
||||
}
|
||||
exports.queue = queue
|
||||
|
|
|
@@ -1,20 +0,0 @@
-let filter = require("./steps/filter")
-let delay = require("./steps/delay")
-
-let BUILTIN_LOGIC = {
-  DELAY: delay.run,
-  FILTER: filter.run,
-}
-
-let BUILTIN_DEFINITIONS = {
-  DELAY: delay.definition,
-  FILTER: filter.definition,
-}
-
-module.exports.getLogic = function (logicName) {
-  if (BUILTIN_LOGIC[logicName] != null) {
-    return BUILTIN_LOGIC[logicName]
-  }
-}
-
-module.exports.BUILTIN_DEFINITIONS = BUILTIN_DEFINITIONS
@ -1,12 +1,13 @@
|
|||
const { execSync } = require("child_process")
|
||||
const { processStringSync } = require("@budibase/string-templates")
|
||||
|
||||
module.exports.definition = {
|
||||
exports.definition = {
|
||||
name: "Bash Scripting",
|
||||
tagline: "Execute a bash command",
|
||||
icon: "ri-terminal-box-line",
|
||||
description: "Run a bash script",
|
||||
type: "ACTION",
|
||||
internal: true,
|
||||
stepId: "EXECUTE_BASH",
|
||||
inputs: {},
|
||||
schema: {
|
||||
|
@ -24,7 +25,11 @@ module.exports.definition = {
|
|||
properties: {
|
||||
stdout: {
|
||||
type: "string",
|
||||
description: "Standard output of your bash command or script.",
|
||||
description: "Standard output of your bash command or script",
|
||||
},
|
||||
success: {
|
||||
type: "boolean",
|
||||
description: "Whether the command was successful",
|
||||
},
|
||||
},
|
||||
},
|
||||
|
@ -32,7 +37,7 @@ module.exports.definition = {
|
|||
},
|
||||
}
|
||||
|
||||
module.exports.run = async function ({ inputs, context }) {
|
||||
exports.run = async function ({ inputs, context }) {
|
||||
if (inputs.code == null) {
|
||||
return {
|
||||
stdout: "Budibase bash automation failed: Invalid inputs",
|
||||
|
@ -42,18 +47,20 @@ module.exports.run = async function ({ inputs, context }) {
|
|||
try {
|
||||
const command = processStringSync(inputs.code, context)
|
||||
|
||||
let stdout
|
||||
let stdout,
|
||||
success = true
|
||||
try {
|
||||
stdout = execSync(command, { timeout: 500 })
|
||||
} catch (err) {
|
||||
stdout = err.message
|
||||
success = false
|
||||
}
|
||||
|
||||
return {
|
||||
stdout,
|
||||
success,
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return {
|
||||
success: false,
|
||||
response: err,
|
||||
|
|
|
@ -3,12 +3,13 @@ const automationUtils = require("../automationUtils")
|
|||
const env = require("../../environment")
|
||||
const usage = require("../../utilities/usageQuota")
|
||||
|
||||
module.exports.definition = {
|
||||
exports.definition = {
|
||||
name: "Create Row",
|
||||
tagline: "Create a {{inputs.enriched.table.name}} row",
|
||||
icon: "ri-save-3-line",
|
||||
description: "Add a row to your database",
|
||||
type: "ACTION",
|
||||
internal: true,
|
||||
stepId: "CREATE_ROW",
|
||||
inputs: {},
|
||||
schema: {
|
||||
|
@ -42,7 +43,7 @@ module.exports.definition = {
|
|||
},
|
||||
success: {
|
||||
type: "boolean",
|
||||
description: "Whether the action was successful",
|
||||
description: "Whether the row creation was successful",
|
||||
},
|
||||
id: {
|
||||
type: "string",
|
||||
|
@ -58,7 +59,7 @@ module.exports.definition = {
|
|||
},
|
||||
}
|
||||
|
||||
module.exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
||||
exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
||||
if (inputs.row == null || inputs.row.tableId == null) {
|
||||
return {
|
||||
success: false,
|
||||
|
@ -97,7 +98,6 @@ module.exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
|||
success: ctx.status === 200,
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return {
|
||||
success: false,
|
||||
response: err,
|
||||
|
|
|
@@ -1,11 +1,12 @@
 let { wait } = require("../../utilities")
 
-module.exports.definition = {
+exports.definition = {
   name: "Delay",
   icon: "ri-time-line",
   tagline: "Delay for {{inputs.time}} milliseconds",
   description: "Delay the automation until an amount of time has passed",
   stepId: "DELAY",
+  internal: true,
   inputs: {},
   schema: {
     inputs: {
@@ -17,10 +18,22 @@
       },
       required: ["time"],
     },
+    outputs: {
+      properties: {
+        success: {
+          type: "boolean",
+          description: "Whether the delay was successful",
+        },
+      },
+      required: ["success"],
+    },
   },
   type: "LOGIC",
 }
 
-module.exports.run = async function delay({ inputs }) {
+exports.run = async function delay({ inputs }) {
   await wait(inputs.time)
   return {
     success: true,
   }
 }
@ -2,13 +2,14 @@ const rowController = require("../../api/controllers/row")
|
|||
const env = require("../../environment")
|
||||
const usage = require("../../utilities/usageQuota")
|
||||
|
||||
module.exports.definition = {
|
||||
exports.definition = {
|
||||
description: "Delete a row from your database",
|
||||
icon: "ri-delete-bin-line",
|
||||
name: "Delete Row",
|
||||
tagline: "Delete a {{inputs.enriched.table.name}} row",
|
||||
type: "ACTION",
|
||||
stepId: "DELETE_ROW",
|
||||
internal: true,
|
||||
inputs: {},
|
||||
schema: {
|
||||
inputs: {
|
||||
|
@ -42,7 +43,7 @@ module.exports.definition = {
|
|||
},
|
||||
success: {
|
||||
type: "boolean",
|
||||
description: "Whether the action was successful",
|
||||
description: "Whether the deletion was successful",
|
||||
},
|
||||
},
|
||||
required: ["row", "success"],
|
||||
|
@ -50,7 +51,7 @@ module.exports.definition = {
|
|||
},
|
||||
}
|
||||
|
||||
module.exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
||||
exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
||||
if (inputs.id == null || inputs.revision == null) {
|
||||
return {
|
||||
success: false,
|
||||
|
@ -84,7 +85,6 @@ module.exports.run = async function ({ inputs, appId, apiKey, emitter }) {
|
|||
success: ctx.status === 200,
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return {
|
||||
success: false,
|
||||
response: err,
|
||||
|
|
|
@@ -0,0 +1,83 @@
+const fetch = require("node-fetch")
+const { getFetchResponse } = require("./utils")
+
+const DEFAULT_USERNAME = "Budibase Automate"
+const DEFAULT_AVATAR_URL = "https://i.imgur.com/a1cmTKM.png"
+
+exports.definition = {
+  name: "Discord Message",
+  tagline: "Send a message to a Discord server",
+  description: "Send a message to a Discord server",
+  icon: "ri-discord-line",
+  stepId: "discord",
+  type: "ACTION",
+  internal: false,
+  inputs: {},
+  schema: {
+    inputs: {
+      properties: {
+        url: {
+          type: "string",
+          title: "Discord Webhook URL",
+        },
+        username: {
+          type: "string",
+          title: "Bot Name",
+        },
+        avatar_url: {
+          type: "string",
+          title: "Bot Avatar URL",
+        },
+        content: {
+          type: "string",
+          title: "Message",
+        },
+      },
+      required: ["url", "content"],
+    },
+    outputs: {
+      properties: {
+        httpStatus: {
+          type: "number",
+          description: "The HTTP status code of the request",
+        },
+        response: {
+          type: "string",
+          description: "The response from the Discord Webhook",
+        },
+        success: {
+          type: "boolean",
+          description: "Whether the message sent successfully",
+        },
+      },
+    },
+  },
+}
+
+exports.run = async function ({ inputs }) {
+  let { url, username, avatar_url, content } = inputs
+  if (!username) {
+    username = DEFAULT_USERNAME
+  }
+  if (!avatar_url) {
+    avatar_url = DEFAULT_AVATAR_URL
+  }
+  const response = await fetch(url, {
+    method: "post",
+    body: JSON.stringify({
+      username,
+      avatar_url,
+      content,
+    }),
+    headers: {
+      "Content-Type": "application/json",
+    },
+  })
+
+  const { status, message } = await getFetchResponse(response)
+  return {
+    httpStatus: status,
+    success: status === 200,
+    response: message,
+  }
+}
@ -1,12 +1,14 @@
|
|||
const queryController = require("../../api/controllers/query")
|
||||
const { buildCtx } = require("./utils")
|
||||
|
||||
module.exports.definition = {
|
||||
exports.definition = {
|
||||
name: "External Data Connector",
|
||||
tagline: "Execute Data Connector",
|
||||
icon: "ri-database-2-line",
|
||||
description: "Execute a query in an external data connector",
|
||||
type: "ACTION",
|
||||
stepId: "EXECUTE_QUERY",
|
||||
internal: true,
|
||||
inputs: {},
|
||||
schema: {
|
||||
inputs: {
|
||||
|
@ -42,7 +44,7 @@ module.exports.definition = {
|
|||
},
|
||||
}
|
||||
|
||||
module.exports.run = async function ({ inputs, appId, emitter }) {
|
||||
exports.run = async function ({ inputs, appId, emitter }) {
|
||||
if (inputs.query == null) {
|
||||
return {
|
||||
success: false,
|
||||
|
@ -54,28 +56,22 @@ module.exports.run = async function ({ inputs, appId, emitter }) {
|
|||
|
||||
const { queryId, ...rest } = inputs.query
|
||||
|
||||
const ctx = {
|
||||
params: {
|
||||
queryId,
|
||||
},
|
||||
request: {
|
||||
const ctx = buildCtx(appId, emitter, {
|
||||
body: {
|
||||
parameters: rest,
|
||||
},
|
||||
params: {
|
||||
queryId,
|
||||
},
|
||||
appId,
|
||||
eventEmitter: emitter,
|
||||
}
|
||||
|
||||
await queryController.execute(ctx)
|
||||
})
|
||||
|
||||
try {
|
||||
await queryController.execute(ctx)
|
||||
return {
|
||||
response: ctx.body,
|
||||
success: ctx.status === 200,
|
||||
success: true,
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
return {
|
||||
success: false,
|
||||
response: err,
|
||||
|
|
|
@ -1,11 +1,13 @@
const scriptController = require("../../api/controllers/script")
const { buildCtx } = require("./utils")

module.exports.definition = {
exports.definition = {
name: "JS Scripting",
tagline: "Execute JavaScript Code",
icon: "ri-terminal-box-line",
description: "Run a piece of JavaScript code in your automation",
type: "ACTION",
internal: true,
stepId: "EXECUTE_SCRIPT",
inputs: {},
schema: {
@ -23,8 +25,7 @@ module.exports.definition = {
properties: {
value: {
type: "string",
description:
"The result of the last statement of the executed script.",
description: "The result of the return statement",
},
success: {
type: "boolean",
@ -36,7 +37,7 @@ module.exports.definition = {
},
}

module.exports.run = async function ({ inputs, appId, context, emitter }) {
exports.run = async function ({ inputs, appId, context, emitter }) {
if (inputs.code == null) {
return {
success: false,
@ -46,25 +47,20 @@ module.exports.run = async function ({ inputs, appId, context, emitter }) {
}
}

const ctx = {
request: {
const ctx = buildCtx(appId, emitter, {
body: {
script: inputs.code,
context,
},
},
user: { appId },
eventEmitter: emitter,
}
})

try {
await scriptController.execute(ctx)
return {
success: ctx.status === 200,
success: true,
value: ctx.body,
}
} catch (err) {
console.error(err)
return {
success: false,
response: err,
@ -1,29 +1,30 @@
const LogicConditions = {
const FilterConditions = {
EQUAL: "EQUAL",
NOT_EQUAL: "NOT_EQUAL",
GREATER_THAN: "GREATER_THAN",
LESS_THAN: "LESS_THAN",
}

const PrettyLogicConditions = {
[LogicConditions.EQUAL]: "Equals",
[LogicConditions.NOT_EQUAL]: "Not equals",
[LogicConditions.GREATER_THAN]: "Greater than",
[LogicConditions.LESS_THAN]: "Less than",
const PrettyFilterConditions = {
[FilterConditions.EQUAL]: "Equals",
[FilterConditions.NOT_EQUAL]: "Not equals",
[FilterConditions.GREATER_THAN]: "Greater than",
[FilterConditions.LESS_THAN]: "Less than",
}

module.exports.LogicConditions = LogicConditions
module.exports.PrettyLogicConditions = PrettyLogicConditions
exports.FilterConditions = FilterConditions
exports.PrettyFilterConditions = PrettyFilterConditions

module.exports.definition = {
exports.definition = {
name: "Filter",
tagline: "{{inputs.field}} {{inputs.condition}} {{inputs.value}}",
icon: "ri-git-branch-line",
description: "Filter any automations which do not meet certain conditions",
type: "LOGIC",
internal: true,
stepId: "FILTER",
inputs: {
condition: LogicConditions.EQUALS,
condition: FilterConditions.EQUALS,
},
schema: {
inputs: {
@ -35,8 +36,8 @@ module.exports.definition = {
condition: {
type: "string",
title: "Condition",
enum: Object.values(LogicConditions),
pretty: Object.values(PrettyLogicConditions),
enum: Object.values(FilterConditions),
pretty: Object.values(PrettyFilterConditions),
},
value: {
type: "string",
@ -57,7 +58,7 @@ module.exports.definition = {
},
}

module.exports.run = async function filter({ inputs }) {
exports.run = async function filter({ inputs }) {
let { field, condition, value } = inputs
// coerce types so that we can use them
if (!isNaN(value) && !isNaN(field)) {
@ -70,16 +71,16 @@ module.exports.run = async function filter({ inputs }) {
let success = false
if (typeof field !== "object" && typeof value !== "object") {
switch (condition) {
case LogicConditions.EQUAL:
case FilterConditions.EQUAL:
success = field === value
break
case LogicConditions.NOT_EQUAL:
case FilterConditions.NOT_EQUAL:
success = field !== value
break
case LogicConditions.GREATER_THAN:
case FilterConditions.GREATER_THAN:
success = field > value
break
case LogicConditions.LESS_THAN:
case FilterConditions.LESS_THAN:
success = field < value
break
}
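The renamed filter step can be exercised on its own; a minimal sketch, assuming the module path the tests later in this diff use, with everything outside the filter module itself being illustrative only:

// illustrative usage only, not part of this commit
const { run, FilterConditions } = require("./steps/filter")

async function checkGreaterThan() {
  // the step coerces numeric strings before comparing, so this resolves with success: true
  const outcome = await run({
    inputs: { field: "10", condition: FilterConditions.GREATER_THAN, value: "5" },
  })
  return outcome.success
}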
@ -0,0 +1,87 @@
const fetch = require("node-fetch")
const { getFetchResponse } = require("./utils")

exports.definition = {
name: "Integromat Integration",
tagline: "Trigger an Integromat scenario",
description:
"Performs a webhook call to Integromat and gets the response (if configured)",
icon: "ri-shut-down-line",
stepId: "integromat",
type: "ACTION",
internal: false,
inputs: {},
schema: {
inputs: {
properties: {
url: {
type: "string",
title: "Webhook URL",
},
value1: {
type: "string",
title: "Input Value 1",
},
value2: {
type: "string",
title: "Input Value 2",
},
value3: {
type: "string",
title: "Input Value 3",
},
value4: {
type: "string",
title: "Input Value 4",
},
value5: {
type: "string",
title: "Input Value 5",
},
},
required: ["url", "value1", "value2", "value3", "value4", "value5"],
},
outputs: {
properties: {
success: {
type: "boolean",
description: "Whether call was successful",
},
httpStatus: {
type: "number",
description: "The HTTP status code returned",
},
response: {
type: "object",
description: "The webhook response - this can have properties",
},
},
required: ["success", "response"],
},
},
}

exports.run = async function ({ inputs }) {
const { url, value1, value2, value3, value4, value5 } = inputs

const response = await fetch(url, {
method: "post",
body: JSON.stringify({
value1,
value2,
value3,
value4,
value5,
}),
headers: {
"Content-Type": "application/json",
},
})

const { status, message } = await getFetchResponse(response)
return {
httpStatus: status,
success: status === 200,
response: message,
}
}
@ -1,4 +1,5 @@
const fetch = require("node-fetch")
const { getFetchResponse } = require("./utils")

const RequestType = {
POST: "POST",
@ -16,12 +17,13 @@ const BODY_REQUESTS = [RequestType.POST, RequestType.PUT, RequestType.PATCH]
* GET/DELETE requests cannot handle body elements so they will not be sent if configured.
*/

module.exports.definition = {
exports.definition = {
name: "Outgoing webhook",
tagline: "Send a {{inputs.requestMethod}} request",
icon: "ri-send-plane-line",
description: "Send a request of specified method to a URL",
type: "ACTION",
internal: true,
stepId: "OUTGOING_WEBHOOK",
inputs: {
requestMethod: "POST",
@ -60,6 +62,10 @@ module.exports.definition = {
type: "object",
description: "The response from the webhook",
},
httpStatus: {
type: "number",
description: "The HTTP status code returned",
},
success: {
type: "boolean",
description: "Whether the action was successful",
@ -70,7 +76,7 @@ module.exports.definition = {
},
}

module.exports.run = async function ({ inputs }) {
exports.run = async function ({ inputs }) {
let { requestMethod, url, requestBody, headers } = inputs
if (!url.startsWith("http")) {
url = `http://${url}`
@ -107,19 +113,11 @@ module.exports.run = async function ({ inputs }) {
JSON.parse(request.body)
}
const response = await fetch(url, request)
const contentType = response.headers.get("content-type")
const success = response.status === 200
let resp
if (!success) {
resp = response.statusText
} else if (contentType && contentType.indexOf("application/json") !== -1) {
resp = await response.json()
} else {
resp = await response.text()
}
const { status, message } = await getFetchResponse(response)
return {
response: resp,
success: success,
httpStatus: status,
response: message,
success: status === 200,
}
} catch (err) {
/* istanbul ignore next */
@ -1,11 +1,12 @@
const { sendSmtpEmail } = require("../../utilities/workerRequests")

module.exports.definition = {
exports.definition = {
description: "Send an email using SMTP",
tagline: "Send SMTP email to {{inputs.to}}",
icon: "ri-mail-open-line",
name: "Send Email (SMTP)",
type: "ACTION",
internal: true,
stepId: "SEND_EMAIL_SMTP",
inputs: {},
schema: {
@ -46,7 +47,7 @@ module.exports.definition = {
},
}

module.exports.run = async function ({ inputs }) {
exports.run = async function ({ inputs }) {
let { to, from, subject, contents } = inputs
if (!contents) {
contents = "<h1>No content</h1>"
@ -58,7 +59,6 @@ module.exports.run = async function ({ inputs }) {
response,
}
} catch (err) {
console.error(err)
return {
success: false,
response: err,
@ -1,74 +0,0 @@
module.exports.definition = {
description: "Send an email using SendGrid",
tagline: "Send email to {{inputs.to}}",
icon: "ri-mail-open-line",
name: "Send Email (SendGrid)",
type: "ACTION",
stepId: "SEND_EMAIL",
inputs: {},
schema: {
inputs: {
properties: {
apiKey: {
type: "string",
title: "SendGrid API key",
},
to: {
type: "string",
title: "Send To",
},
from: {
type: "string",
title: "Send From",
},
subject: {
type: "string",
title: "Email Subject",
},
contents: {
type: "string",
title: "Email Contents",
},
},
required: ["to", "from", "subject", "contents"],
},
outputs: {
properties: {
success: {
type: "boolean",
description: "Whether the email was sent",
},
response: {
type: "object",
description: "A response from the email client, this may be an error",
},
},
required: ["success"],
},
},
}

module.exports.run = async function ({ inputs }) {
const sgMail = require("@sendgrid/mail")
sgMail.setApiKey(inputs.apiKey)
const msg = {
to: inputs.to,
from: inputs.from,
subject: inputs.subject,
text: inputs.contents ? inputs.contents : "Empty",
}

try {
let response = await sgMail.send(msg)
return {
success: true,
response,
}
} catch (err) {
console.error(err)
return {
success: false,
response: err,
}
}
}
@ -4,12 +4,13 @@
* GET/DELETE requests cannot handle body elements so they will not be sent if configured.
*/

module.exports.definition = {
exports.definition = {
name: "Backend log",
tagline: "Console log a value in the backend",
icon: "ri-server-line",
description: "Logs the given text to the server (using console.log)",
type: "ACTION",
internal: true,
stepId: "SERVER_LOG",
inputs: {
text: "",
@ -36,6 +37,9 @@ module.exports.definition = {
},
}

module.exports.run = async function ({ inputs, appId }) {
exports.run = async function ({ inputs, appId }) {
console.log(`App ${appId} - ${inputs.text}`)
return {
success: true,
}
}
@ -0,0 +1,64 @@
const fetch = require("node-fetch")
const { getFetchResponse } = require("./utils")

exports.definition = {
name: "Slack Message",
tagline: "Send a message to Slack",
description: "Send a message to Slack",
icon: "ri-slack-line",
stepId: "slack",
type: "ACTION",
internal: false,
inputs: {},
schema: {
inputs: {
properties: {
url: {
type: "string",
title: "Incoming Webhook URL",
},
text: {
type: "string",
title: "Message",
},
},
required: ["url", "text"],
},
outputs: {
properties: {
httpStatus: {
type: "number",
description: "The HTTP status code of the request",
},
success: {
type: "boolean",
description: "Whether the message sent successfully",
},
response: {
type: "string",
description: "The response from the Slack Webhook",
},
},
},
},
}

exports.run = async function ({ inputs }) {
let { url, text } = inputs
const response = await fetch(url, {
method: "post",
body: JSON.stringify({
text,
}),
headers: {
"Content-Type": "application/json",
},
})

const { status, message } = await getFetchResponse(response)
return {
httpStatus: status,
response: message,
success: status === 200,
}
}
@ -1,12 +1,13 @@
const rowController = require("../../api/controllers/row")
const automationUtils = require("../automationUtils")

module.exports.definition = {
exports.definition = {
name: "Update Row",
tagline: "Update a {{inputs.enriched.table.name}} row",
icon: "ri-refresh-line",
description: "Update a row in your database",
type: "ACTION",
internal: true,
stepId: "UPDATE_ROW",
inputs: {},
schema: {
@ -53,7 +54,7 @@ module.exports.definition = {
},
}

module.exports.run = async function ({ inputs, appId, emitter }) {
exports.run = async function ({ inputs, appId, emitter }) {
if (inputs.rowId == null || inputs.row == null) {
return {
success: false,
@ -100,7 +101,6 @@ module.exports.run = async function ({ inputs, appId, emitter }) {
success: ctx.status === 200,
}
} catch (err) {
console.error(err)
return {
success: false,
response: err,
@ -0,0 +1,36 @@
exports.getFetchResponse = async fetched => {
let status = fetched.status,
message
const contentType = fetched.headers.get("content-type")
try {
if (contentType && contentType.indexOf("application/json") !== -1) {
message = await fetched.json()
} else {
message = await fetched.text()
}
} catch (err) {
message = "Failed to retrieve response"
}
return { status, message }
}

// need to make sure all ctx structures have the
// throw added to them, so that controllers don't
// throw a ctx.throw undefined when error occurs
exports.buildCtx = (appId, emitter, { body, params } = {}) => {
const ctx = {
appId,
user: { appId },
eventEmitter: emitter,
throw: (code, error) => {
throw error
},
}
if (body) {
ctx.request = { body }
}
if (params) {
ctx.params = params
}
return ctx
}
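A minimal sketch of how a step can lean on the two helpers above, assuming the same relative paths the steps in this diff use; the wrapper function names are illustrative only:

// illustrative usage only, not part of this commit
const fetch = require("node-fetch")
const { buildCtx, getFetchResponse } = require("./steps/utils")
const queryController = require("../api/controllers/query")

async function executeQuery(appId, emitter, queryId, parameters) {
  // buildCtx supplies appId, user, eventEmitter and a throw shim so controllers can ctx.throw
  const ctx = buildCtx(appId, emitter, { body: { parameters }, params: { queryId } })
  await queryController.execute(ctx)
  return ctx.body
}

async function postJson(url, payload) {
  const response = await fetch(url, {
    method: "post",
    body: JSON.stringify(payload),
    headers: { "Content-Type": "application/json" },
  })
  // getFetchResponse parses JSON when possible and falls back to text or an error message
  const { status, message } = await getFetchResponse(response)
  return { httpStatus: status, success: status === 200, response: message }
}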
@ -0,0 +1,84 @@
const fetch = require("node-fetch")
const { getFetchResponse } = require("./utils")

exports.definition = {
name: "Zapier Webhook",
stepId: "zapier",
type: "ACTION",
internal: false,
description: "Trigger a Zapier Zap via webhooks",
tagline: "Trigger a Zapier Zap",
icon: "ri-flashlight-line",
schema: {
inputs: {
properties: {
url: {
type: "string",
title: "Webhook URL",
},
value1: {
type: "string",
title: "Payload Value 1",
},
value2: {
type: "string",
title: "Payload Value 2",
},
value3: {
type: "string",
title: "Payload Value 3",
},
value4: {
type: "string",
title: "Payload Value 4",
},
value5: {
type: "string",
title: "Payload Value 5",
},
},
required: ["url"],
},
outputs: {
properties: {
httpStatus: {
type: "number",
description: "The HTTP status code of the request",
},
response: {
type: "string",
description: "The response from Zapier",
},
},
},
},
}

exports.run = async function ({ inputs }) {
const { url, value1, value2, value3, value4, value5 } = inputs

// send the platform to make sure zaps always work, even
// if no values supplied
const response = await fetch(url, {
method: "post",
body: JSON.stringify({
platform: "budibase",
value1,
value2,
value3,
value4,
value5,
}),
headers: {
"Content-Type": "application/json",
},
})

const { status, message } = await getFetchResponse(response)

return {
success: status === 200,
httpStatus: status,
response: message,
}
}
@ -1,34 +1,18 @@
jest.mock("../../utilities/usageQuota")
jest.mock("../thread")
jest.spyOn(global.console, "error")
jest.mock("worker-farm", () => {
return () => {
const value = jest
.fn()
.mockReturnValueOnce(undefined)
.mockReturnValueOnce("Error")
return (input, callback) => {
workerJob = input
if (callback) {
callback(value())
}
}
}
})

require("../../environment")
const automation = require("../index")
const usageQuota = require("../../utilities/usageQuota")
const thread = require("../thread")
const triggers = require("../triggers")
const { basicAutomation, basicTable } = require("../../tests/utilities/structures")
const { basicAutomation } = require("../../tests/utilities/structures")
const { wait } = require("../../utilities")
const { makePartial } = require("../../tests/utilities")
const { cleanInputValues } = require("../automationUtils")
const setup = require("./utilities")

let workerJob

usageQuota.getAPIKey.mockReturnValue({ apiKey: "test" })

describe("Run through some parts of the automations system", () => {
@ -44,59 +28,12 @@ describe("Run through some parts of the automations system", () => {
it("should be able to init in builder", async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await wait(100)
expect(workerJob).toBeUndefined()
expect(thread).toHaveBeenCalled()
})

it("should be able to init in prod", async () => {
await setup.runInProd(async () => {
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await wait(100)
// haven't added a mock implementation so getAPIKey of usageQuota just returns undefined
expect(usageQuota.update).toHaveBeenCalledWith("test", "automationRuns", 1)
expect(workerJob).toBeDefined()
})
})

it("try error scenario", async () => {
await setup.runInProd(async () => {
// the second call will throw an error
await triggers.externalTrigger(basicAutomation(), { a: 1 })
await wait(100)
expect(console.error).toHaveBeenCalled()
})
})

it("should be able to check triggering row filling", async () => {
const automation = basicAutomation()
let table = basicTable()
table.schema.boolean = {
type: "boolean",
constraints: {
type: "boolean",
},
}
table.schema.number = {
type: "number",
constraints: {
type: "number",
},
}
table.schema.datetime = {
type: "datetime",
constraints: {
type: "datetime",
},
}
table = await config.createTable(table)
automation.definition.trigger.inputs.tableId = table._id
const params = await triggers.fillRowOutput(automation, { appId: config.getAppId() })
expect(params.row).toBeDefined()
const date = new Date(params.row.datetime)
expect(typeof params.row.name).toBe("string")
expect(typeof params.row.boolean).toBe("boolean")
expect(typeof params.row.number).toBe("number")
expect(date.getFullYear()).toBe(1970)
})

it("should check coercion", async () => {
@ -4,7 +4,7 @@ describe("test the delay logic", () => {
it("should be able to run the delay", async () => {
const time = 100
const before = Date.now()
await setup.runStep(setup.logic.DELAY.stepId, { time: time })
await setup.runStep(setup.actions.DELAY.stepId, { time: time })
const now = Date.now()
// divide by two just so that test will always pass as long as there was some sort of delay
expect(now - before).toBeGreaterThanOrEqual(time / 2)
@ -1,48 +1,48 @@
const setup = require("./utilities")
const { LogicConditions } = require("../steps/filter")
const { FilterConditions } = require("../steps/filter")

describe("test the filter logic", () => {
async function checkFilter(field, condition, value, pass = true) {
let res = await setup.runStep(setup.logic.FILTER.stepId,
let res = await setup.runStep(setup.actions.FILTER.stepId,
{ field, condition, value }
)
expect(res.success).toEqual(pass)
}

it("should be able test equality", async () => {
await checkFilter("hello", LogicConditions.EQUAL, "hello", true)
await checkFilter("hello", LogicConditions.EQUAL, "no", false)
await checkFilter("hello", FilterConditions.EQUAL, "hello", true)
await checkFilter("hello", FilterConditions.EQUAL, "no", false)
})

it("should be able to test greater than", async () => {
await checkFilter(10, LogicConditions.GREATER_THAN, 5, true)
await checkFilter(10, LogicConditions.GREATER_THAN, 15, false)
await checkFilter(10, FilterConditions.GREATER_THAN, 5, true)
await checkFilter(10, FilterConditions.GREATER_THAN, 15, false)
})

it("should be able to test less than", async () => {
await checkFilter(5, LogicConditions.LESS_THAN, 10, true)
await checkFilter(15, LogicConditions.LESS_THAN, 10, false)
await checkFilter(5, FilterConditions.LESS_THAN, 10, true)
await checkFilter(15, FilterConditions.LESS_THAN, 10, false)
})

it("should be able to in-equality", async () => {
await checkFilter("hello", LogicConditions.NOT_EQUAL, "no", true)
await checkFilter(10, LogicConditions.NOT_EQUAL, 10, false)
await checkFilter("hello", FilterConditions.NOT_EQUAL, "no", true)
await checkFilter(10, FilterConditions.NOT_EQUAL, 10, false)
})

it("check number coercion", async () => {
await checkFilter("10", LogicConditions.GREATER_THAN, "5", true)
await checkFilter("10", FilterConditions.GREATER_THAN, "5", true)
})

it("check date coercion", async () => {
await checkFilter(
(new Date()).toISOString(),
LogicConditions.GREATER_THAN,
FilterConditions.GREATER_THAN,
(new Date(-10000)).toISOString(),
true
)
})

it("check objects always false", async () => {
await checkFilter({}, LogicConditions.EQUAL, {}, false)
await checkFilter({}, FilterConditions.EQUAL, {}, false)
})
})
@ -1,36 +0,0 @@
const setup = require("./utilities")

jest.mock("@sendgrid/mail")

describe("test the send email action", () => {
let inputs
let config = setup.getConfig()

beforeEach(async () => {
await config.init()
inputs = {
to: "me@test.com",
from: "budibase@test.com",
subject: "Testing",
text: "Email contents",
}
})

afterAll(setup.afterAll)

it("should be able to run the action", async () => {
const res = await setup.runStep(setup.actions.SEND_EMAIL.stepId, inputs)
expect(res.success).toEqual(true)
// the mocked module throws back the input
expect(res.response.to).toEqual("me@test.com")
})

it("should return an error if input an invalid email address", async () => {
const res = await setup.runStep(setup.actions.SEND_EMAIL.stepId, {
...inputs,
to: "invalid@test.com",
})
expect(res.success).toEqual(false)
})

})
@ -1,6 +1,5 @@
const TestConfig = require("../../../tests/utilities/TestConfiguration")
const actions = require("../../actions")
const logic = require("../../logic")
const emitter = require("../../../events/index")
const env = require("../../../environment")

@ -34,16 +33,7 @@ exports.runInProd = async fn => {
}

exports.runStep = async function runStep(stepId, inputs) {
let step
if (
Object.values(exports.actions)
.map(action => action.stepId)
.includes(stepId)
) {
step = await actions.getAction(stepId)
} else {
step = logic.getLogic(stepId)
}
let step = await actions.getAction(stepId)
expect(step).toBeDefined()
return step({
inputs,
@ -56,5 +46,4 @@ exports.runStep = async function runStep(stepId, inputs) {

exports.apiKey = "test"

exports.actions = actions.BUILTIN_DEFINITIONS
exports.logic = logic.BUILTIN_DEFINITIONS
exports.actions = actions.ACTION_DEFINITIONS
@ -1,5 +1,4 @@
const actions = require("./actions")
const logic = require("./logic")
const automationUtils = require("./automationUtils")
const AutomationEmitter = require("../events/AutomationEmitter")
const { processObject } = require("@budibase/string-templates")
@ -8,7 +7,7 @@ const CouchDB = require("../db")
const { DocumentTypes } = require("../db/utils")
const { doInTenant } = require("@budibase/auth/tenancy")

const FILTER_STEP_ID = logic.BUILTIN_DEFINITIONS.FILTER.stepId
const FILTER_STEP_ID = actions.ACTION_DEFINITIONS.FILTER.stepId

/**
* The automation orchestrator is a class responsible for executing automations.
@ -30,15 +29,15 @@ class Orchestrator {
// create an emitter which has the chain count for this automation run in it, so it can block
// excessive chaining if required
this._emitter = new AutomationEmitter(this._chainCount + 1)
this.executionOutput = { trigger: {}, steps: [] }
// setup the execution output
const triggerStepId = automation.definition.trigger.stepId
const triggerId = automation.definition.trigger.id
this.updateExecutionOutput(triggerId, triggerStepId, null, triggerOutput)
}

async getStepFunctionality(type, stepId) {
let step = null
if (type === "ACTION") {
step = await actions.getAction(stepId)
} else if (type === "LOGIC") {
step = logic.getLogic(stepId)
}
async getStepFunctionality(stepId) {
let step = await actions.getAction(stepId)
if (step == null) {
throw `Cannot find automation step by name ${stepId}`
}
@ -55,11 +54,20 @@ class Orchestrator {
return this._app
}

updateExecutionOutput(id, stepId, inputs, outputs) {
const stepObj = { id, stepId, inputs, outputs }
// first entry is always the trigger (constructor)
if (this.executionOutput.steps.length === 0) {
this.executionOutput.trigger = stepObj
}
this.executionOutput.steps.push(stepObj)
}

async execute() {
let automation = this._automation
const app = await this.getApp()
for (let step of automation.definition.steps) {
let stepFn = await this.getStepFunctionality(step.type, step.stepId)
let stepFn = await this.getStepFunctionality(step.stepId)
step.inputs = await processObject(step.inputs, this._context)
step.inputs = automationUtils.cleanInputValues(
step.inputs,
@ -81,27 +89,20 @@ class Orchestrator {
break
}
this._context.steps.push(outputs)
this.updateExecutionOutput(step.id, step.stepId, step.inputs, outputs)
} catch (err) {
console.error(`Automation error - ${step.stepId} - ${err}`)
return err
}
}
return this.executionOutput
}
}

// callback is required for worker-farm to state that the worker thread has completed
module.exports = async (job, cb = null) => {
try {
module.exports = async job => {
const automationOrchestrator = new Orchestrator(
job.data.automation,
job.data.event
)
await automationOrchestrator.execute()
if (cb) {
cb()
}
} catch (err) {
if (cb) {
cb(err)
}
}
return automationOrchestrator.execute()
}
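With the worker-farm callback removed, the thread module now simply resolves with the Orchestrator's execution output; a hedged sketch of invoking it directly, where the job shape mirrors what the queue passes elsewhere in this diff and the arguments are placeholders:

// illustrative usage only, not part of this commit
const runThread = require("./automations/thread")

async function runAutomationOnce(automation, triggerOutput) {
  const job = { data: { automation, event: triggerOutput } }
  // resolves to { trigger, steps: [...] } built via updateExecutionOutput
  return runThread(job)
}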
@ -0,0 +1,31 @@
exports.definition = {
name: "App Action",
event: "app:trigger",
icon: "ri-window-fill",
tagline: "Automation fired from the frontend",
description: "Trigger an automation from an action inside your app",
stepId: "APP",
inputs: {},
schema: {
inputs: {
properties: {
fields: {
type: "object",
customType: "triggerSchema",
title: "Fields",
},
},
required: [],
},
outputs: {
properties: {
fields: {
type: "object",
description: "Fields submitted from the app frontend",
},
},
required: ["fields"],
},
},
type: "TRIGGER",
}

@ -0,0 +1,31 @@
exports.definition = {
name: "Cron Trigger",
event: "cron:trigger",
icon: "ri-timer-line",
tagline: "Cron Trigger (<b>{{inputs.cron}}</b>)",
description: "Triggers automation on a cron schedule.",
stepId: "CRON",
inputs: {},
schema: {
inputs: {
properties: {
cron: {
type: "string",
customType: "cron",
title: "Expression",
},
},
required: ["cron"],
},
outputs: {
properties: {
timestamp: {
type: "number",
description: "Timestamp the cron was executed",
},
},
required: ["timestamp"],
},
},
type: "TRIGGER",
}

@ -0,0 +1,15 @@
const app = require("./app")
const cron = require("./cron")
const rowDeleted = require("./rowDeleted")
const rowSaved = require("./rowSaved")
const rowUpdated = require("./rowUpdated")
const webhook = require("./webhook")

exports.definitions = {
ROW_SAVED: rowSaved.definition,
ROW_UPDATED: rowUpdated.definition,
ROW_DELETED: rowDeleted.definition,
WEBHOOK: webhook.definition,
APP: app.definition,
CRON: cron.definition,
}

@ -0,0 +1,32 @@
exports.definition = {
name: "Row Deleted",
event: "row:delete",
icon: "ri-delete-bin-line",
tagline: "Row is deleted from {{inputs.enriched.table.name}}",
description: "Fired when a row is deleted from your database",
stepId: "ROW_DELETED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The row that was deleted",
},
},
required: ["row"],
},
},
type: "TRIGGER",
}

@ -0,0 +1,40 @@
exports.definition = {
name: "Row Created",
event: "row:save",
icon: "ri-save-line",
tagline: "Row is added to {{inputs.enriched.table.name}}",
description: "Fired when a row is added to your database",
stepId: "ROW_SAVED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The new row that was created",
},
id: {
type: "string",
description: "Row ID - can be used for updating",
},
revision: {
type: "string",
description: "Revision of row",
},
},
required: ["row", "id"],
},
},
type: "TRIGGER",
}

@ -0,0 +1,40 @@
exports.definition = {
name: "Row Updated",
event: "row:update",
icon: "ri-refresh-line",
tagline: "Row is updated in {{inputs.enriched.table.name}}",
description: "Fired when a row is updated in your database",
stepId: "ROW_UPDATED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The row that was updated",
},
id: {
type: "string",
description: "Row ID - can be used for updating",
},
revision: {
type: "string",
description: "Revision of row",
},
},
required: ["row", "id"],
},
},
type: "TRIGGER",
}

@ -0,0 +1,36 @@
exports.definition = {
name: "Webhook",
event: "web:trigger",
icon: "ri-global-line",
tagline: "Webhook endpoint is hit",
description: "Trigger an automation when a HTTP POST webhook is hit",
stepId: "WEBHOOK",
inputs: {},
schema: {
inputs: {
properties: {
schemaUrl: {
type: "string",
customType: "webhookUrl",
title: "Schema URL",
},
triggerUrl: {
type: "string",
customType: "webhookUrl",
title: "Trigger URL",
},
},
required: ["schemaUrl", "triggerUrl"],
},
outputs: {
properties: {
body: {
type: "object",
description: "Body of the request which hit the webhook",
},
},
required: ["body"],
},
},
type: "TRIGGER",
}
@ -1,244 +1,22 @@
const CouchDB = require("../db")
const emitter = require("../events/index")
const env = require("../environment")
const Queue = env.isTest()
? require("../utilities/queue/inMemoryQueue")
: require("bull")
const { getAutomationParams } = require("../db/utils")
const { coerce } = require("../utilities/rowProcessor")
const { utils } = require("@budibase/auth/redis")
const { JobQueues } = require("../constants")
const {
isExternalTable,
breakExternalTableId,
} = require("../integrations/utils")
const { getExternalTable } = require("../api/controllers/table/utils")
const { definitions } = require("./triggerInfo")
const { isDevAppID } = require("../db/utils")
// need this to call directly, so we can get a response
const { queue } = require("./bullboard")
const { checkTestFlag } = require("../utilities/redis")
const utils = require("./utils")
const env = require("../environment")

const { opts } = utils.getRedisOptions()
let automationQueue = new Queue(JobQueues.AUTOMATIONS, { redis: opts })

const FAKE_STRING = "TEST"
const FAKE_BOOL = false
const FAKE_NUMBER = 1
const FAKE_DATETIME = "1970-01-01T00:00:00.000Z"

const BUILTIN_DEFINITIONS = {
ROW_SAVED: {
name: "Row Created",
event: "row:save",
icon: "ri-save-line",
tagline: "Row is added to {{inputs.enriched.table.name}}",
description: "Fired when a row is added to your database",
stepId: "ROW_SAVED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The new row that was created",
},
id: {
type: "string",
description: "Row ID - can be used for updating",
},
revision: {
type: "string",
description: "Revision of row",
},
},
required: ["row", "id"],
},
},
type: "TRIGGER",
},
ROW_UPDATED: {
name: "Row Updated",
event: "row:update",
icon: "ri-refresh-line",
tagline: "Row is updated in {{inputs.enriched.table.name}}",
description: "Fired when a row is updated in your database",
stepId: "ROW_UPDATED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The row that was updated",
},
id: {
type: "string",
description: "Row ID - can be used for updating",
},
revision: {
type: "string",
description: "Revision of row",
},
},
required: ["row", "id"],
},
},
type: "TRIGGER",
},
ROW_DELETED: {
name: "Row Deleted",
event: "row:delete",
icon: "ri-delete-bin-line",
tagline: "Row is deleted from {{inputs.enriched.table.name}}",
description: "Fired when a row is deleted from your database",
stepId: "ROW_DELETED",
inputs: {},
schema: {
inputs: {
properties: {
tableId: {
type: "string",
customType: "table",
title: "Table",
},
},
required: ["tableId"],
},
outputs: {
properties: {
row: {
type: "object",
customType: "row",
description: "The row that was deleted",
},
},
required: ["row"],
},
},
type: "TRIGGER",
},
WEBHOOK: {
name: "Webhook",
event: "web:trigger",
icon: "ri-global-line",
tagline: "Webhook endpoint is hit",
description: "Trigger an automation when a HTTP POST webhook is hit",
stepId: "WEBHOOK",
inputs: {},
schema: {
inputs: {
properties: {
schemaUrl: {
type: "string",
customType: "webhookUrl",
title: "Schema URL",
},
triggerUrl: {
type: "string",
customType: "webhookUrl",
title: "Trigger URL",
},
},
required: ["schemaUrl", "triggerUrl"],
},
outputs: {
properties: {
body: {
type: "object",
description: "Body of the request which hit the webhook",
},
},
required: ["body"],
},
},
type: "TRIGGER",
},
APP: {
name: "App Action",
event: "app:trigger",
icon: "ri-window-fill",
tagline: "Automation fired from the frontend",
description: "Trigger an automation from an action inside your app",
stepId: "APP",
inputs: {},
schema: {
inputs: {
properties: {
fields: {
type: "object",
customType: "triggerSchema",
title: "Fields",
},
},
required: [],
},
outputs: {
properties: {
fields: {
type: "object",
description: "Fields submitted from the app frontend",
},
},
required: ["fields"],
},
},
type: "TRIGGER",
},
CRON: {
name: "Cron Trigger",
event: "cron:trigger",
icon: "ri-timer-line",
tagline: "Cron Trigger (<b>{{inputs.cron}}</b>)",
description: "Triggers automation on a cron schedule.",
stepId: "CRON",
inputs: {},
schema: {
inputs: {
properties: {
cron: {
type: "string",
customType: "cron",
title: "Expression",
},
},
required: ["cron"],
},
outputs: {
properties: {
timestamp: {
type: "number",
description: "Timestamp the cron was executed",
},
},
required: ["timestamp"],
},
},
type: "TRIGGER",
},
}
const TRIGGER_DEFINITIONS = definitions

async function queueRelevantRowAutomations(event, eventType) {
if (event.appId == null) {
throw `No appId specified for ${eventType} - check event emitters.`
}

const db = new CouchDB(event.appId)
let automations = await db.allDocs(
getAutomationParams(null, { include_docs: true })
@ -255,14 +33,22 @@ async function queueRelevantRowAutomations(event, eventType) {
for (let automation of automations) {
let automationDef = automation.definition
let automationTrigger = automationDef ? automationDef.trigger : {}
// don't queue events which are for dev apps, only way to test automations is
// running tests on them, in production the test flag will never
// be checked due to lazy evaluation (first always false)
if (
!automation.live ||
!automationTrigger.inputs ||
automationTrigger.inputs.tableId !== event.row.tableId
!env.ALLOW_DEV_AUTOMATIONS &&
isDevAppID(event.appId) &&
!(await checkTestFlag(automation._id))
) {
continue
}
automationQueue.add({ automation, event })
if (
automationTrigger.inputs &&
automationTrigger.inputs.tableId === event.row.tableId
) {
await queue.add({ automation, event })
}
}
}

@ -290,51 +76,12 @@ emitter.on("row:delete", async function (event) {
await queueRelevantRowAutomations(event, "row:delete")
})

async function fillRowOutput(automation, params) {
let triggerSchema = automation.definition.trigger
let tableId = triggerSchema.inputs.tableId
try {
let table
if (!isExternalTable(tableId)) {
const db = new CouchDB(params.appId)
table = await db.get(tableId)
} else {
const { datasourceId, tableName } = breakExternalTableId(tableId)
table = await getExternalTable(params.appId, datasourceId, tableName)
}
let row = {}
for (let schemaKey of Object.keys(table.schema)) {
const paramValue = params[schemaKey]
let propSchema = table.schema[schemaKey]
switch (propSchema.constraints.type) {
case "string":
row[schemaKey] = paramValue || FAKE_STRING
break
case "boolean":
row[schemaKey] = paramValue || FAKE_BOOL
break
case "number":
row[schemaKey] = paramValue || FAKE_NUMBER
break
case "datetime":
row[schemaKey] = paramValue || FAKE_DATETIME
break
}
}
params.row = row
} catch (err) {
/* istanbul ignore next */
throw "Failed to find table for trigger"
}
return params
}

module.exports.externalTrigger = async function (automation, params) {
// TODO: replace this with allowing user in builder to input values in future
exports.externalTrigger = async function (
automation,
params,
{ getResponses } = {}
) {
if (automation.definition != null && automation.definition.trigger != null) {
if (automation.definition.trigger.inputs.tableId != null) {
params = await fillRowOutput(automation, params)
}
if (automation.definition.trigger.stepId === "APP") {
// values are likely to be submitted as strings, so we shall convert to correct type
const coercedFields = {}
@ -345,14 +92,12 @@ module.exports.externalTrigger = async function (automation, params) {
params.fields = coercedFields
}
}

automationQueue.add({ automation, event: params })
const data = { automation, event: params }
if (getResponses) {
return utils.processEvent({ data })
} else {
return queue.add(data)
}
}

module.exports.getQueues = () => {
return [automationQueue]
}
module.exports.fillRowOutput = fillRowOutput
module.exports.automationQueue = automationQueue

module.exports.BUILTIN_DEFINITIONS = BUILTIN_DEFINITIONS
exports.TRIGGER_DEFINITIONS = TRIGGER_DEFINITIONS
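A sketch of the new getResponses option on externalTrigger, which is what inline test runs rely on; the require path and calling function are assumptions:

// illustrative usage only, not part of this commit
const triggers = require("./automations/triggers")

async function triggerFromApp(automation, fields) {
  // without the option the event is only queued; with getResponses it is processed
  // inline via utils.processEvent and the execution output is returned
  return triggers.externalTrigger(automation, { fields }, { getResponses: true })
}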
@ -0,0 +1,159 @@
const runner = require("./thread")
const { definitions } = require("./triggerInfo")
const webhooks = require("../api/controllers/webhook")
const CouchDB = require("../db")
const { queue } = require("./bullboard")
const newid = require("../db/newid")
const { updateEntityMetadata } = require("../utilities")
const { MetadataTypes } = require("../constants")

const WH_STEP_ID = definitions.WEBHOOK.stepId
const CRON_STEP_ID = definitions.CRON.stepId

exports.processEvent = async job => {
try {
// need to actually await these so that an error can be captured properly
return await runner(job)
} catch (err) {
console.error(
`${job.data.automation.appId} automation ${job.data.automation._id} was unable to run - ${err}`
)
return { err }
}
}

exports.updateTestHistory = async (appId, automation, history) => {
return updateEntityMetadata(
appId,
MetadataTypes.AUTOMATION_TEST_HISTORY,
automation._id,
metadata => {
if (metadata && Array.isArray(metadata.history)) {
metadata.history.push(history)
} else {
metadata = {
history: [history],
}
}
return metadata
}
)
}

// end the repetition and the job itself
exports.disableAllCrons = async appId => {
const promises = []
const jobs = await queue.getRepeatableJobs()
for (let job of jobs) {
if (job.key.includes(`${appId}_cron`)) {
promises.push(queue.removeRepeatableByKey(job.key))
promises.push(queue.removeJobs(job.id))
}
}
return Promise.all(promises)
}

/**
* This function handles checking of any cron jobs that need to be enabled/updated.
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} automation The automation object to be updated.
*/
exports.enableCronTrigger = async (appId, automation) => {
const trigger = automation ? automation.definition.trigger : null
function isCronTrigger(auto) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === CRON_STEP_ID
)
}
// need to create cron job
if (isCronTrigger(automation)) {
// make a job id rather than letting Bull decide, makes it easier to handle on way out
const jobId = `${appId}_cron_${newid()}`
const job = await queue.add(
{
automation,
event: { appId, timestamp: Date.now() },
},
{ repeat: { cron: trigger.inputs.cron }, jobId }
)
// Assign cron job ID from bull so we can remove it later if the cron trigger is removed
trigger.cronJobId = job.id
const db = new CouchDB(appId)
const response = await db.put(automation)
automation._id = response.id
automation._rev = response.rev
}
return automation
}

/**
* This function handles checking if any webhooks need to be created or deleted for automations.
* @param {string} appId The ID of the app in which we are checking for webhooks
* @param {object|undefined} oldAuto The old automation object if updating/deleting
* @param {object|undefined} newAuto The new automation object if creating/updating
* @returns {Promise<object|undefined>} After this is complete the new automation object may have been updated and should be
* written to DB (this does not write to DB as it would be wasteful to repeat).
*/
exports.checkForWebhooks = async ({ appId, oldAuto, newAuto }) => {
const oldTrigger = oldAuto ? oldAuto.definition.trigger : null
const newTrigger = newAuto ? newAuto.definition.trigger : null
const triggerChanged =
oldTrigger && newTrigger && oldTrigger.id !== newTrigger.id
function isWebhookTrigger(auto) {
return (
auto &&
auto.definition.trigger &&
auto.definition.trigger.stepId === WH_STEP_ID
)
}
// need to delete webhook
if (
isWebhookTrigger(oldAuto) &&
(!isWebhookTrigger(newAuto) || triggerChanged) &&
oldTrigger.webhookId
) {
try {
let db = new CouchDB(appId)
// need to get the webhook to get the rev
const webhook = await db.get(oldTrigger.webhookId)
const ctx = {
appId,
params: { id: webhook._id, rev: webhook._rev },
}
// might be updating - reset the inputs to remove the URLs
if (newTrigger) {
delete newTrigger.webhookId
newTrigger.inputs = {}
}
await webhooks.destroy(ctx)
} catch (err) {
// don't worry about not being able to delete, if it doesn't exist all good
}
}
// need to create webhook
if (
(!isWebhookTrigger(oldAuto) || triggerChanged) &&
isWebhookTrigger(newAuto)
) {
const ctx = {
appId,
request: {
body: new webhooks.Webhook(
"Automation webhook",
webhooks.WebhookType.AUTOMATION,
newAuto._id
),
},
}
await webhooks.save(ctx)
const id = ctx.body.webhook._id
newTrigger.webhookId = id
newTrigger.inputs = {
schemaUrl: `api/webhooks/schema/${appId}/${id}`,
triggerUrl: `api/webhooks/trigger/${appId}/${id}`,
}
}
return newAuto
}
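A sketch of how a save path could combine the helpers above, with placeholder arguments; only the behaviour of the functions above is taken from this diff, the wrappers are illustrative:

// illustrative usage only, not part of this commit
const { checkForWebhooks, enableCronTrigger, disableAllCrons } = require("./automations/utils")

async function onAutomationSaved(appId, oldAuto, newAuto) {
  // may create/destroy a webhook and rewrite the trigger inputs with schema/trigger URLs
  newAuto = await checkForWebhooks({ appId, oldAuto, newAuto })
  // may register a repeatable bull job with a `${appId}_cron_<id>` jobId and persist the doc
  return enableCronTrigger(appId, newAuto)
}

async function onAppUnpublished(appId) {
  // removes every repeatable job whose key contains `${appId}_cron`
  return disableAllCrons(appId)
}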
@ -123,5 +123,10 @@ exports.BaseQueryVerbs = {
DELETE: "delete",
}

exports.MetadataTypes = {
AUTOMATION_TEST_INPUT: "automationTestInput",
AUTOMATION_TEST_HISTORY: "automationTestHistory",
}

// pass through the list from the auth/core lib
exports.ObjectStoreBuckets = ObjectStoreBuckets
@ -36,6 +36,18 @@ exports.IncludeDocs = IncludeDocs
exports.getLinkDocuments = getLinkDocuments
exports.createLinkView = createLinkView

function clearRelationshipFields(table, rows) {
for (let [key, field] of Object.entries(table.schema)) {
if (field.type === FieldTypes.LINK) {
rows = rows.map(row => {
delete row[key]
return row
})
}
}
return rows
}

async function getLinksForRows(appId, rows) {
const tableIds = [...new Set(rows.map(el => el.tableId))]
// start by getting all the link values for performance reasons
@ -126,33 +138,6 @@ exports.updateLinks = async function (args) {
}
}

/**
* Update a row with information about the links that pertain to it.
* @param {string} appId The instance in which this row has been created.
* @param {object} rows The row(s) themselves which is to be updated with info (if applicable). This can be
* a single row object or an array of rows - both will be handled.
* @returns {Promise<object>} The updated row (this may be the same if no links were found). If an array was input
* then an array will be output, object input -> object output.
*/
exports.attachLinkIDs = async (appId, rows) => {
const links = await getLinksForRows(appId, rows)
// now iterate through the rows and all field information
for (let row of rows) {
// find anything that matches the row's ID we are searching for and join it
links
.filter(el => el.thisId === row._id)
.forEach(link => {
if (row[link.fieldName] == null) {
row[link.fieldName] = []
}
row[link.fieldName].push(link.id)
})
}
// if it was an array when it came in then handle it as an array in response
// otherwise return the first element as there was only one input
return rows
}

/**
* Given a table and a list of rows this will retrieve all of the attached docs and enrich them into the row.
* This is required for formula fields, this may only be utilised internally (for now).
@ -173,6 +158,9 @@ exports.attachFullLinkedDocs = async (ctx, table, rows) => {
const links = (await getLinksForRows(appId, rows)).filter(link =>
rows.some(row => row._id === link.thisId)
)
// clear any existing links that could be dupe'd
rows = clearRelationshipFields(table, rows)
// now get the docs and combine into the rows
let linked = await getFullLinkedDocs(ctx, appId, links)
const linkedTables = []
for (let row of rows) {

@ -59,16 +59,4 @@ describe("test link functionality", () => {
expect(Array.isArray(output)).toBe(true)
})
})

describe("attachLinkIDs", () => {
it("should be able to attach linkIDs", async () => {
await config.init()
await config.createTable()
const table = await config.createLinkedTable()
const row = await config.createRow()
const linkRow = await config.createRow(basicLinkedRow(table._id, row._id))
const attached = await links.attachLinkIDs(config.getAppId(), [linkRow])
expect(attached[0].link[0]).toBe(row._id)
})
})
})

@@ -7,6 +7,8 @@ const {
  APP_PREFIX,
  SEPARATOR,
  StaticDatabases,
  isDevAppID,
  isProdAppID,
} = require("@budibase/auth/db")

const UNICODE_MAX = "\ufff0"

@@ -36,6 +38,7 @@ const DocumentTypes = {
  DATASOURCE_PLUS: "datasource_plus",
  QUERY: "query",
  DEPLOYMENTS: "deployments",
  METADATA: "metadata",
}

const ViewNames = {

@@ -63,6 +66,8 @@ const BudibaseInternalDB = {

exports.APP_PREFIX = APP_PREFIX
exports.APP_DEV_PREFIX = APP_DEV_PREFIX
exports.isDevAppID = isDevAppID
exports.isProdAppID = isProdAppID
exports.USER_METDATA_PREFIX = `${DocumentTypes.ROW}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.LINK_USER_METADATA_PREFIX = `${DocumentTypes.LINK}${SEPARATOR}${InternalTables.USER_METADATA}${SEPARATOR}`
exports.ViewNames = ViewNames

@@ -331,6 +336,18 @@ exports.getQueryParams = (datasourceId = null, otherProps = {}) => {
  )
}

exports.generateMetadataID = (type, entityId) => {
  return `${DocumentTypes.METADATA}${SEPARATOR}${type}${SEPARATOR}${entityId}`
}

exports.getMetadataParams = (type, entityId = null, otherProps = {}) => {
  let docId = `${type}${SEPARATOR}`
  if (entityId != null) {
    docId += entityId
  }
  return getDocParams(DocumentTypes.METADATA, docId, otherProps)
}
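A short sketch (not from this commit) of how these helpers might be combined with a CouchDB allDocs call; the require paths, metadata type and entity ID values are illustrative assumptions.

// inside an async function
const CouchDB = require("../../db") // path assumed
const { generateMetadataID, getMetadataParams } = require("../../db/utils") // path assumed

const db = new CouchDB(appId)
// a single metadata doc ID, e.g. for an automation's test history
const id = generateMetadataID("automationTest", "auto_123")
// or list every metadata doc of that type via allDocs
const response = await db.allDocs(
  getMetadataParams("automationTest", null, { include_docs: true })
)
const metadataDocs = response.rows.map(row => row.doc)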

/**
 * This can be used with the db.allDocs to get a list of IDs
 */

@@ -55,6 +55,7 @@ module.exports = {
  BUDIBASE_API_KEY: process.env.BUDIBASE_API_KEY,
  USERID_API_KEY: process.env.USERID_API_KEY,
  DEPLOYMENT_CREDENTIALS_URL: process.env.DEPLOYMENT_CREDENTIALS_URL,
  ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS,
  _set(key, value) {
    process.env[key] = value
    module.exports[key] = value
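A small sketch (not from the diff): tests and scripts can flip these values at runtime through _set, which keeps process.env and the exported module in sync. The flag value and require path shown are illustrative.

const env = require("../environment") // path assumed relative to the server source
env._set("ALLOW_DEV_AUTOMATIONS", 1)
// both views of the flag now agree
console.log(process.env.ALLOW_DEV_AUTOMATIONS, env.ALLOW_DEV_AUTOMATIONS)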

@@ -0,0 +1,19 @@
const { isDevAppID, isProdAppID } = require("../db/utils")

exports.AppType = {
  DEV: "dev",
  PROD: "prod",
}

exports.middleware =
  ({ appType } = {}) =>
  (ctx, next) => {
    const appId = ctx.appId
    if (appType === exports.AppType.DEV && appId && !isDevAppID(appId)) {
      ctx.throw(400, "Only apps in development support this endpoint")
    }
    if (appType === exports.AppType.PROD && appId && !isProdAppID(appId)) {
      ctx.throw(400, "Only apps in production support this endpoint")
    }
    return next()
  }
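A hedged sketch of how this middleware might guard a route; the router instance, endpoint path and require path are illustrative and not taken from the diff.

const Router = require("@koa/router")
const { middleware: appTypeMiddleware, AppType } = require("../middleware/appType") // path assumed

const router = new Router()
// reject the request with a 400 unless ctx.appId is a development app ID
router.post(
  "/api/example/dev-only",
  appTypeMiddleware({ appType: AppType.DEV }),
  ctx => {
    ctx.body = { message: "reachable only for dev app IDs" }
  }
)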

@@ -14,7 +14,7 @@ const DOMAIN_MAP = {
  views: usageQuota.Properties.VIEW,
  users: usageQuota.Properties.USER,
  // this will not be updated by endpoint calls
  // instead it will be updated by triggers
  // instead it will be updated by triggerInfo
  automationRuns: usageQuota.Properties.AUTOMATION,
}

@@ -14,18 +14,12 @@ const {
  downloadTarball,
} = require("./utilities")
const { updateClientLibrary } = require("./clientLibrary")
const download = require("download")
const env = require("../../environment")
const { homedir } = require("os")
const fetch = require("node-fetch")
const {
  USER_METDATA_PREFIX,
  LINK_USER_METADATA_PREFIX,
} = require("../../db/utils")

const DEFAULT_AUTOMATION_BUCKET =
  "https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
const DEFAULT_AUTOMATION_DIRECTORY = ".budibase-automations"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")

@@ -205,30 +199,6 @@ exports.getComponentLibraryManifest = async (appId, library) => {
  return JSON.parse(resp)
}

exports.automationInit = async () => {
  const directory =
    env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
  if (!fs.existsSync(directory)) {
    fs.mkdirSync(directory, { recursive: true })
  }
  // env setup to get async packages
  let response = await fetch(`${bucket}/manifest.json`)
  return response.json()
}

exports.getExternalAutomationStep = async (name, version, bundleName) => {
  const directory =
    env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
  const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
  try {
    return require(join(directory, bundleName))
  } catch (err) {
    await download(`${bucket}/${name}/${version}/${bundleName}`, directory)
    return require(join(directory, bundleName))
  }
}

/**
 * All file reads come through here just to make sure all of them make sense
 * allows a centralised location to check logic is all good.

@@ -1,6 +1,8 @@
const env = require("../environment")
const { OBJ_STORE_DIRECTORY } = require("../constants")
const { sanitizeKey } = require("@budibase/auth/src/objectStore")
const CouchDB = require("../db")
const { generateMetadataID } = require("../db/utils")

const BB_CDN = "https://cdn.budi.live"

@@ -55,3 +57,52 @@ exports.attachmentsRelativeURL = attachmentKey => {
    `${exports.objectStoreUrl()}/${attachmentKey}`
  )
}

exports.updateEntityMetadata = async (appId, type, entityId, updateFn) => {
  const db = new CouchDB(appId)
  const id = generateMetadataID(type, entityId)
  // read it to see if it exists, we'll overwrite it no matter what
  let rev,
    metadata = {}
  try {
    const oldMetadata = await db.get(id)
    rev = oldMetadata._rev
    metadata = updateFn(oldMetadata)
  } catch (err) {
    rev = null
    metadata = updateFn({})
  }
  metadata._id = id
  if (rev) {
    metadata._rev = rev
  }
  const response = await db.put(metadata)
  return {
    ...metadata,
    _id: id,
    _rev: response.rev,
  }
}

exports.saveEntityMetadata = async (appId, type, entityId, metadata) => {
  return exports.updateEntityMetadata(appId, type, entityId, () => {
    return metadata
  })
}

exports.deleteEntityMetadata = async (appId, type, entityId) => {
  const db = new CouchDB(appId)
  const id = generateMetadataID(type, entityId)
  let rev
  try {
    const metadata = await db.get(id)
    if (metadata) {
      rev = metadata._rev
    }
  } catch (err) {
    // don't need to error if it doesn't exist
  }
  if (id && rev) {
    await db.remove(id, rev)
  }
}
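A brief usage sketch (not part of the commit); the metadata type, entity ID and fields are illustrative, and the require path is an assumption.

const {
  saveEntityMetadata,
  updateEntityMetadata,
  deleteEntityMetadata,
} = require("../utilities") // path assumed

// inside an async function
await saveEntityMetadata(appId, "automationTest", "auto_123", { lastRun: Date.now() })
// apply an incremental change on top of whatever is stored
await updateEntityMetadata(appId, "automationTest", "auto_123", metadata => ({
  ...metadata,
  runCount: (metadata.runCount || 0) + 1,
}))
// remove the doc once it is no longer needed
await deleteEntityMetadata(appId, "automationTest", "auto_123")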

@@ -2,20 +2,24 @@ const { Client, utils } = require("@budibase/auth/redis")
const { getGlobalIDFromUserMetadataID } = require("../db/utils")

const APP_DEV_LOCK_SECONDS = 600
let devAppClient, debounceClient
const AUTOMATION_TEST_FLAG_SECONDS = 60
let devAppClient, debounceClient, flagClient

// we init this as we want to keep the connection open all the time
// reduces the performance hit
exports.init = async () => {
  devAppClient = new Client(utils.Databases.DEV_LOCKS)
  debounceClient = new Client(utils.Databases.DEBOUNCE)
  flagClient = new Client(utils.Databases.FLAGS)
  await devAppClient.init()
  await debounceClient.init()
  await flagClient.init()
}

exports.shutdown = async () => {
  if (devAppClient) await devAppClient.finish()
  if (debounceClient) await debounceClient.finish()
  if (flagClient) await flagClient.finish()
}

exports.doesUserHaveLock = async (devAppId, user) => {

@@ -67,3 +71,16 @@ exports.checkDebounce = async id => {
exports.setDebounce = async (id, seconds) => {
  await debounceClient.store(id, "debouncing", seconds)
}

exports.setTestFlag = async id => {
  await flagClient.store(id, { testing: true }, AUTOMATION_TEST_FLAG_SECONDS)
}

exports.checkTestFlag = async id => {
  const flag = await flagClient.get(id)
  return !!(flag && flag.testing)
}

exports.clearTestFlag = async id => {
  // remove the testing flag from the flag store
  await flagClient.delete(id)
}
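A sketch of how the flag helpers could bracket an automation test run; the require path and the automation object are illustrative assumptions.

const redis = require("../utilities/redis") // path assumed

// inside an async function: mark the automation as being test-run for up to 60 seconds
await redis.setTestFlag(automation._id)
// elsewhere (e.g. in the trigger handler) the flag can be checked while it is set
const isTestRun = await redis.checkTestFlag(automation._id)
// once the test completes, drop the flag rather than waiting for expiry
await redis.clearTestFlag(automation._id)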

@@ -185,10 +185,16 @@ exports.inputProcessing = (user = {}, table, row) => {
 * @param {object} ctx the request which is looking for enriched rows.
 * @param {object} table the table from which these rows originally came; this is used to determine
 * the schema of the rows and then enrich.
 * @param {object[]} rows the rows which are to be enriched.
 * @returns {object[]} the enriched rows will be returned.
 * @param {object[]|object} rows the rows which are to be enriched.
 * @param {object} opts used to set some options for the output, such as disabling relationship squashing.
 * @returns {object[]|object} the enriched rows will be returned.
 */
exports.outputProcessing = async (ctx, table, rows) => {
exports.outputProcessing = async (
  ctx,
  table,
  rows,
  opts = { squash: true }
) => {
  const appId = ctx.appId
  let wasArray = true
  if (!(rows instanceof Array)) {

@@ -214,6 +220,12 @@ exports.outputProcessing = async (ctx, table, rows) => {
      }
    }
  }
  enriched = await linkRows.squashLinksToPrimaryDisplay(appId, table, enriched)
  if (opts.squash) {
    enriched = await linkRows.squashLinksToPrimaryDisplay(
      appId,
      table,
      enriched
    )
  }
  return wasArray ? enriched : enriched[0]
}
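A usage sketch (not from the diff), assuming an async request handler where ctx, table and rows are already in scope.

// default behaviour: relationships are squashed to their primary display value
const squashed = await outputProcessing(ctx, table, rows)
// opt out of squashing when the full linked rows are needed, e.g. for an export
const full = await outputProcessing(ctx, table, rows, { squash: false })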

@@ -1,6 +1,6 @@
{
  "name": "@budibase/string-templates",
  "version": "0.9.125-alpha.2",
  "version": "0.9.125-alpha.5",
  "description": "Handlebars wrapper for Budibase templating.",
  "main": "src/index.cjs",
  "module": "dist/bundle.mjs",

@@ -1,7 +1,7 @@
{
  "name": "@budibase/worker",
  "email": "hi@budibase.com",
  "version": "0.9.125-alpha.2",
  "version": "0.9.125-alpha.5",
  "description": "Budibase background service",
  "main": "src/index.js",
  "repository": {

@@ -23,8 +23,8 @@
  "author": "Budibase",
  "license": "AGPL-3.0-or-later",
  "dependencies": {
    "@budibase/auth": "^0.9.125-alpha.2",
    "@budibase/string-templates": "^0.9.125-alpha.2",
    "@budibase/auth": "^0.9.125-alpha.5",
    "@budibase/string-templates": "^0.9.125-alpha.5",
    "@koa/router": "^8.0.0",
    "@techpass/passport-openidconnect": "^0.3.0",
    "aws-sdk": "^2.811.0",

@@ -33,7 +33,7 @@ async function allUsers() {
  return response.rows.map(row => row.doc)
}

async function saveUser(user, tenantId) {
async function saveUser(user, tenantId, hashPassword = true) {
  if (!tenantId) {
    throw "No tenancy specified."
  }

@@ -56,7 +56,7 @@ async function saveUser(user, tenantId) {
  // get the password, make sure one is defined
  let hashedPassword
  if (password) {
    hashedPassword = await hash(password)
    hashedPassword = hashPassword ? await hash(password) : password
  } else if (dbUser) {
    hashedPassword = dbUser.password
  } else {

@@ -110,6 +110,15 @@ exports.save = async ctx => {

exports.adminUser = async ctx => {
  const { email, password, tenantId } = ctx.request.body

  // account portal sends a pre-hashed password - honour param to prevent double hashing
  let hashPassword = ctx.request.query.hashPassword
  if (hashPassword && hashPassword == "false") {
    hashPassword = false
  } else {
    hashPassword = true
  }

  if (await doesTenantExist(tenantId)) {
    ctx.throw(403, "Organisation already exists.")
  }

@@ -141,7 +150,7 @@ exports.adminUser = async ctx => {
    tenantId,
  }
  try {
    ctx.body = await saveUser(user, tenantId)
    ctx.body = await saveUser(user, tenantId, hashPassword)
  } catch (err) {
    ctx.throw(err.status || 400, err)
  }
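A hedged sketch of the pre-hashed flow this enables; the endpoint URL, base URL variable and payload names are assumptions for illustration, not taken from the diff.

const fetch = require("node-fetch")

// inside an async function: pass hashPassword=false so the already-hashed
// password is stored as-is instead of being hashed a second time
const response = await fetch(`${workerUrl}/api/global/users/init?hashPassword=false`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ email, password: preHashedPassword, tenantId }),
})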