Getting rid of automation step download system.

This commit is contained in:
mike12345567 2021-09-03 17:36:00 +01:00
parent 866f4187e0
commit 26d7cb3b9b
6 changed files with 255 additions and 70 deletions

View File

@ -8,12 +8,10 @@ const bash = require("./steps/bash")
const executeQuery = require("./steps/executeQuery") const executeQuery = require("./steps/executeQuery")
const outgoingWebhook = require("./steps/outgoingWebhook") const outgoingWebhook = require("./steps/outgoingWebhook")
const serverLog = require("./steps/serverLog") const serverLog = require("./steps/serverLog")
const env = require("../environment") const discord = require("./steps/discord")
const Sentry = require("@sentry/node") // TODO: remove zapier/integromat some time in the future/deprecate them
const { const zapier = require("./steps/zapier")
automationInit, const integromat = require("./steps/integromat")
getExternalAutomationStep,
} = require("../utilities/fileSystem")
const BUILTIN_ACTIONS = { const BUILTIN_ACTIONS = {
SEND_EMAIL: sendgridEmail.run, SEND_EMAIL: sendgridEmail.run,
@ -26,6 +24,10 @@ const BUILTIN_ACTIONS = {
EXECUTE_BASH: bash.run, EXECUTE_BASH: bash.run,
EXECUTE_QUERY: executeQuery.run, EXECUTE_QUERY: executeQuery.run,
SERVER_LOG: serverLog.run, SERVER_LOG: serverLog.run,
// these used to be lowercase step IDs, maintain for backwards compat
discord: discord.run,
zapier: zapier.run,
integromat: integromat.run,
} }
const BUILTIN_DEFINITIONS = { const BUILTIN_DEFINITIONS = {
SEND_EMAIL: sendgridEmail.definition, SEND_EMAIL: sendgridEmail.definition,
@ -38,13 +40,10 @@ const BUILTIN_DEFINITIONS = {
EXECUTE_QUERY: executeQuery.definition, EXECUTE_QUERY: executeQuery.definition,
EXECUTE_BASH: bash.definition, EXECUTE_BASH: bash.definition,
SERVER_LOG: serverLog.definition, SERVER_LOG: serverLog.definition,
} // these used to be lowercase step IDs, maintain for backwards compat
discord: discord.definition,
let MANIFEST = null zapier: zapier.definition,
integromat: integromat.definition,
/* istanbul ignore next */
function buildBundleName(pkgName, version) {
return `${pkgName}@${version}.min.js`
} }
/* istanbul ignore next */ /* istanbul ignore next */
@ -52,31 +51,6 @@ module.exports.getAction = async function (actionName) {
if (BUILTIN_ACTIONS[actionName] != null) { if (BUILTIN_ACTIONS[actionName] != null) {
return BUILTIN_ACTIONS[actionName] return BUILTIN_ACTIONS[actionName]
} }
// worker pools means that a worker may not have manifest
if (env.isProd() && MANIFEST == null) {
MANIFEST = await module.exports.init()
}
// env setup to get async packages
if (!MANIFEST || !MANIFEST.packages || !MANIFEST.packages[actionName]) {
return null
}
const pkg = MANIFEST.packages[actionName]
const bundleName = buildBundleName(pkg.stepId, pkg.version)
return getExternalAutomationStep(pkg.stepId, pkg.version, bundleName)
}
module.exports.init = async function () {
try {
MANIFEST = await automationInit()
module.exports.DEFINITIONS =
MANIFEST && MANIFEST.packages
? Object.assign(MANIFEST.packages, BUILTIN_DEFINITIONS)
: BUILTIN_DEFINITIONS
} catch (err) {
console.error(err)
Sentry.captureException(err)
}
return MANIFEST
} }
// definitions will have downloaded ones added to it, while builtin won't // definitions will have downloaded ones added to it, while builtin won't

View File

@ -1,5 +1,4 @@
const triggers = require("./triggers") const triggers = require("./triggers")
const actions = require("./actions")
const env = require("../environment") const env = require("../environment")
const workerFarm = require("worker-farm") const workerFarm = require("worker-farm")
const singleThread = require("./thread") const singleThread = require("./thread")
@ -31,7 +30,6 @@ async function updateQuota(automation) {
* This module is built purely to kick off the worker farm and manage the inputs/outputs * This module is built purely to kick off the worker farm and manage the inputs/outputs
*/ */
module.exports.init = async function () { module.exports.init = async function () {
await actions.init()
triggers.automationQueue.process(async job => { triggers.automationQueue.process(async job => {
try { try {
if (env.USE_QUOTAS) { if (env.USE_QUOTAS) {

View File

@ -0,0 +1,65 @@
const fetch = require("node-fetch")
// Built-in "Discord Message" automation step.
// `definition` is the metadata + JSON-schema the builder UI uses to render
// the step's configuration form; the matching `run` function performs the
// actual webhook call.
module.exports.definition = {
  name: "Discord Message",
  tagline: "Send a message to a Discord server",
  description: "Send a message to a Discord server",
  icon: "ri-discord-line",
  // Lowercase step ID — kept lowercase for backwards compatibility with the
  // old downloadable-step system this step replaces.
  stepId: "discord",
  type: "ACTION",
  // Default input values used when the user leaves these fields blank.
  inputs: {
    username: "Budibase Automate",
    avatar_url: "https://i.imgur.com/a1cmTKM.png",
  },
  schema: {
    // User-configurable inputs; only `url` and `content` are mandatory.
    inputs: {
      properties: {
        url: {
          type: "string",
          title: "Discord Webhook URL",
        },
        username: {
          type: "string",
          title: "Bot Name",
        },
        avatar_url: {
          type: "string",
          title: "Bot Avatar URL",
        },
        content: {
          type: "string",
          title: "Message",
        },
      },
      required: ["url", "content"],
    },
    // Values the step exposes to later steps in the automation.
    outputs: {
      properties: {
        httpStatus: {
          type: "number",
          description: "The HTTP status code of the request",
        },
      },
    },
  },
}
/**
 * Executes the Discord step: POSTs the configured message to the given
 * Discord webhook URL.
 * @param {object} step - automation step context
 * @param {object} step.inputs - user-configured inputs
 *   (url, username, avatar_url, content)
 * @returns {Promise<{httpStatus: number}>} HTTP status of the webhook call
 */
module.exports.run = async function ({ inputs }) {
  // Shape the payload the Discord webhook API expects.
  const payload = {
    username: inputs.username,
    avatar_url: inputs.avatar_url,
    content: inputs.content,
  }
  const response = await fetch(inputs.url, {
    method: "post",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  })
  // Surface the raw status code so the automation can branch on it.
  return {
    httpStatus: response.status,
  }
}

View File

@ -0,0 +1,90 @@
const fetch = require("node-fetch")
// Built-in "Integromat Integration" automation step.
// `definition` is the metadata + JSON-schema the builder UI uses to render
// the step's configuration form; the matching `run` function performs the
// actual webhook call.
module.exports.definition = {
  name: "Integromat Integration",
  tagline: "Trigger an Integromat scenario",
  description:
    "Performs a webhook call to Integromat and gets the response (if configured)",
  icon: "ri-shut-down-line",
  // Lowercase step ID — kept lowercase for backwards compatibility with the
  // old downloadable-step system this step replaces.
  stepId: "integromat",
  type: "ACTION",
  // No default inputs for this step.
  inputs: {},
  schema: {
    // All five payload values plus the URL are declared required here —
    // the run function itself does not enforce this.
    inputs: {
      properties: {
        url: {
          type: "string",
          title: "Webhook URL",
        },
        value1: {
          type: "string",
          title: "Input Value 1",
        },
        value2: {
          type: "string",
          title: "Input Value 2",
        },
        value3: {
          type: "string",
          title: "Input Value 3",
        },
        value4: {
          type: "string",
          title: "Input Value 4",
        },
        value5: {
          type: "string",
          title: "Input Value 5",
        },
      },
      required: ["url", "value1", "value2", "value3", "value4", "value5"],
    },
    // Values the step exposes to later steps in the automation.
    outputs: {
      properties: {
        success: {
          type: "boolean",
          description: "Whether call was successful",
        },
        response: {
          type: "object",
          description: "The webhook response - this can have properties",
        },
      },
      required: ["success", "response"],
    },
  },
}
/**
 * Executes the Integromat step: POSTs the five configured values to the
 * scenario's webhook URL and relays the response back to the automation.
 * @param {object} step - automation step context
 * @param {object} step.inputs - user-configured inputs
 *   (url, value1..value5)
 * @returns {Promise<{success: boolean, response: object|string}>}
 *   success flag plus the parsed JSON body (or raw text on failure)
 */
module.exports.run = async function ({ inputs }) {
  const { url, value1, value2, value3, value4, value5 } = inputs
  const response = await fetch(url, {
    method: "post",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      value1,
      value2,
      value3,
      value4,
      value5,
    }),
  })
  // NOTE(review): only an exact 200 counts as success — other 2xx codes
  // (e.g. 204) are treated as failure; confirm this matches Integromat.
  const succeeded = response.status === 200
  let body
  if (!succeeded) {
    // Non-200: pass the raw text through so the user can see the error.
    body = await response.text()
  } else {
    try {
      body = await response.json()
    } catch (err) {
      // The scenario may legitimately reply with an empty/non-JSON body.
      body = {}
    }
  }
  return {
    success: succeeded,
    response: body,
  }
}

View File

@ -0,0 +1,88 @@
const fetch = require("node-fetch")
// Built-in "Zapier Webhook" automation step.
// `definition` is the metadata + JSON-schema the builder UI uses to render
// the step's configuration form; the matching `run` function performs the
// actual webhook call.
module.exports.definition = {
  name: "Zapier Webhook",
  // Lowercase step ID — kept lowercase for backwards compatibility with the
  // old downloadable-step system this step replaces.
  stepId: "zapier",
  type: "ACTION",
  description: "Trigger a Zapier Zap via webhooks",
  tagline: "Trigger a Zapier Zap",
  icon: "ri-flashlight-line",
  schema: {
    // Unlike the Integromat step, only the URL is required — the five
    // payload values are optional.
    inputs: {
      properties: {
        url: {
          type: "string",
          title: "Webhook URL",
        },
        value1: {
          type: "string",
          title: "Payload Value 1",
        },
        value2: {
          type: "string",
          title: "Payload Value 2",
        },
        value3: {
          type: "string",
          title: "Payload Value 3",
        },
        value4: {
          type: "string",
          title: "Payload Value 4",
        },
        value5: {
          type: "string",
          title: "Payload Value 5",
        },
      },
      required: ["url"],
    },
    // Values the step exposes to later steps in the automation.
    outputs: {
      properties: {
        httpStatus: {
          type: "number",
          description: "The HTTP status code of the request",
        },
        zapierStatus: {
          type: "string",
          description: "The result status from Zapier",
        },
      },
    },
  },
}
/**
 * Executes the Zapier step: POSTs the configured values to the Zap's
 * webhook URL and reports Zapier's status back to the automation.
 * @param {object} step - automation step context
 * @param {object} step.inputs - user-configured inputs
 *   (url, value1..value5)
 * @returns {Promise<{httpStatus: number, zapierStatus: string|null}>}
 *   HTTP status plus Zapier's reported status (or the raw parsed body,
 *   or null when the body wasn't parseable / status wasn't 200)
 */
module.exports.run = async function ({ inputs }) {
  const { url, value1, value2, value3, value4, value5 } = inputs
  // send the platform to make sure zaps always work, even
  // if no values supplied
  const payload = {
    platform: "budibase",
    value1,
    value2,
    value3,
    value4,
    value5,
  }
  const response = await fetch(url, {
    method: "post",
    headers: {
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  })
  let data = null
  if (response.status === 200) {
    try {
      data = await response.json()
    } catch (err) {
      // Zapier normally replies with JSON; fall back to null if it doesn't.
      data = null
    }
  }
  // Prefer Zapier's explicit status field, otherwise pass the body through.
  const zapierStatus = data && data.status ? data.status : data
  return {
    httpStatus: response.status,
    zapierStatus,
  }
}

View File

@ -14,18 +14,12 @@ const {
downloadTarball, downloadTarball,
} = require("./utilities") } = require("./utilities")
const { updateClientLibrary } = require("./clientLibrary") const { updateClientLibrary } = require("./clientLibrary")
const download = require("download")
const env = require("../../environment") const env = require("../../environment")
const { homedir } = require("os")
const fetch = require("node-fetch")
const { const {
USER_METDATA_PREFIX, USER_METDATA_PREFIX,
LINK_USER_METADATA_PREFIX, LINK_USER_METADATA_PREFIX,
} = require("../../db/utils") } = require("../../db/utils")
const DEFAULT_AUTOMATION_BUCKET =
"https://prod-budi-automations.s3-eu-west-1.amazonaws.com"
const DEFAULT_AUTOMATION_DIRECTORY = ".budibase-automations"
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..") const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules") const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@ -209,30 +203,6 @@ exports.getComponentLibraryManifest = async (appId, library) => {
return JSON.parse(resp) return JSON.parse(resp)
} }
exports.automationInit = async () => {
const directory =
env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
if (!fs.existsSync(directory)) {
fs.mkdirSync(directory, { recursive: true })
}
// env setup to get async packages
let response = await fetch(`${bucket}/manifest.json`)
return response.json()
}
exports.getExternalAutomationStep = async (name, version, bundleName) => {
const directory =
env.AUTOMATION_DIRECTORY || join(homedir(), DEFAULT_AUTOMATION_DIRECTORY)
const bucket = env.AUTOMATION_BUCKET || DEFAULT_AUTOMATION_BUCKET
try {
return require(join(directory, bundleName))
} catch (err) {
await download(`${bucket}/${name}/${version}/${bundleName}`, directory)
return require(join(directory, bundleName))
}
}
/** /**
* All file reads come through here just to make sure all of them make sense * All file reads come through here just to make sure all of them make sense
* allows a centralised location to check logic is all good. * allows a centralised location to check logic is all good.