From d063d4fa672ea98b1e6880a5482dde8d19a8d695 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 18 Aug 2022 12:58:43 +0100 Subject: [PATCH 1/4] Fixing issue with custom datasource section appearing when none exist. --- .../modals/CreateDatasourceModal.svelte | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte index a500c6a5b3..8454f3b3d5 100644 --- a/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte +++ b/packages/builder/src/components/backend/DatasourceNavigator/modals/CreateDatasourceModal.svelte @@ -28,6 +28,9 @@ let importModal $: showImportButton = false + $: customIntegrations = Object.entries(integrations).filter( + entry => entry[1].custom + ) checkShowImport() @@ -163,17 +166,19 @@ /> {/each} - Custom data source -
- {#each Object.entries(integrations).filter(entry => entry[1].custom) as [integrationType, schema]}
- selectIntegration(evt.detail)}
- {schema}
- bind:integrationType
- {integration}
- />
- {/each}
-
+ {#if customIntegrations.length > 0}
+ Custom data source
+
+ {#each customIntegrations as [integrationType, schema]}
+ selectIntegration(evt.detail)}
+ {schema}
+ bind:integrationType
+ {integration}
+ />
+ {/each}
+
+ {/if} From 9d22f83e4e2a3b3b4fe9c26d3a584ae1aab56bfb Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 18 Aug 2022 13:29:49 +0100 Subject: [PATCH 2/4] Adding check to make sure build/watch occurs inside a plugin directory. --- packages/cli/src/plugins/index.js | 15 +++++++++++++ packages/cli/src/plugins/validate.js | 4 ++++ packages/server/src/app.ts | 28 ++---------------------- packages/server/src/watch.ts | 32 ++++++++++++++++++++++++++++ 4 files changed, 53 insertions(+), 26 deletions(-) create mode 100644 packages/server/src/watch.ts diff --git a/packages/cli/src/plugins/index.js b/packages/cli/src/plugins/index.js index 3ee2463a31..ac931e0d22 100644 --- a/packages/cli/src/plugins/index.js +++ b/packages/cli/src/plugins/index.js @@ -9,6 +9,19 @@ const { runPkgCommand } = require("../exec") const { join } = require("path") const { success, error, info } = require("../utils") +function checkInPlugin() { + if (!fs.existsSync("package.json")) { + throw new Error( + "Please run in a plugin directory - must contain package.json" + ) + } + if (!fs.existsSync("schema.json")) { + throw new Error( + "Please run in a plugin directory - must contain schema.json" + ) + } +} + async function init(opts) { const type = opts["init"] || opts if (!type || !PLUGIN_TYPES_ARR.includes(type)) { @@ -42,6 +55,8 @@ async function init(opts) { } async function verify() { + // will throw errors if not acceptable + checkInPlugin() console.log(info("Verifying plugin...")) const schema = fs.readFileSync("schema.json", "utf8") const pkg = fs.readFileSync("package.json", "utf8") diff --git a/packages/cli/src/plugins/validate.js b/packages/cli/src/plugins/validate.js index 4c2fa6ba53..a6b4555cbd 100644 --- a/packages/cli/src/plugins/validate.js +++ b/packages/cli/src/plugins/validate.js @@ -22,6 +22,8 @@ function validateComponent(schema) { const validator = joi.object({ type: joi.string().allow("component").required(), metadata: joi.object().unknown(true).required(), + hash: joi.string().optional(), + version: joi.string().optional(), schema: joi .object({ name: joi.string().required(), @@ -53,6 +55,8 @@ function validateDatasource(schema) { const validator = joi.object({ type: joi.string().allow("datasource").required(), metadata: joi.object().unknown(true).required(), + hash: joi.string().optional(), + version: joi.string().optional(), schema: joi.object({ docs: joi.string(), friendlyName: joi.string().required(), diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts index a830fd5518..8a4a412bc6 100644 --- a/packages/server/src/app.ts +++ b/packages/server/src/app.ts @@ -17,15 +17,12 @@ const bullboard = require("./automations/bullboard") const { logAlert } = require("@budibase/backend-core/logging") const { pinoSettings } = require("@budibase/backend-core") const { Thread } = require("./threads") -const chokidar = require("chokidar") const fs = require("fs") -const path = require("path") import redis from "./utilities/redis" import * as migrations from "./migrations" import { events, installation, tenancy } from "@budibase/backend-core" import { createAdminUser, getChecklist } from "./utilities/workerRequests" -import { processPlugin } from "./api/controllers/plugin" -import { DEFAULT_TENANT_ID } from "@budibase/backend-core/constants" +import { watch } from "./watch" const app = new Koa() @@ -144,28 +141,7 @@ module.exports = server.listen(env.PORT || 0, async () => { env.PLUGINS_DIR && fs.existsSync(env.PLUGINS_DIR) ) { - const watchPath = path.join(env.PLUGINS_DIR, "./**/*.tar.gz") - 
chokidar - .watch(watchPath, { - ignored: "**/node_modules", - awaitWriteFinish: true, - }) - .on("all", async (event: string, path: string) => { - // Sanity checks - if (!path?.endsWith(".tar.gz") || !fs.existsSync(path)) { - return - } - await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => { - try { - const split = path.split("/") - const name = split[split.length - 1] - console.log("Importing plugin:", path) - await processPlugin({ name, path }) - } catch (err) { - console.log("Failed to import plugin:", err) - } - }) - }) + watch() } // check for version updates diff --git a/packages/server/src/watch.ts b/packages/server/src/watch.ts new file mode 100644 index 0000000000..3d3f4280da --- /dev/null +++ b/packages/server/src/watch.ts @@ -0,0 +1,32 @@ +import path from "path" +import * as env from "./environment" +import chokidar from "chokidar" +import fs from "fs" +import { tenancy } from "@budibase/backend-core" +import { DEFAULT_TENANT_ID } from "@budibase/backend-core/constants" +import { processPlugin } from "./api/controllers/plugin" + +export function watch() { + const watchPath = path.join(env.PLUGINS_DIR, "./**/*.tar.gz") + chokidar + .watch(watchPath, { + ignored: "**/node_modules", + awaitWriteFinish: true, + }) + .on("all", async (event: string, path: string) => { + // Sanity checks + if (!path?.endsWith(".tar.gz") || !fs.existsSync(path)) { + return + } + await tenancy.doInTenant(DEFAULT_TENANT_ID, async () => { + try { + const split = path.split("/") + const name = split[split.length - 1] + console.log("Importing plugin:", path) + await processPlugin({ name, path }) + } catch (err) { + console.log("Failed to import plugin:", err) + } + }) + }) +} From e53f86c0b2fecd24c12e27ab8c6a5bfbccaf3604 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 18 Aug 2022 15:21:55 +0100 Subject: [PATCH 3/4] Adding in hash handling for datasource plugins. 
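Some context on this change: getDatasourcePlugin() already caches a downloaded plugin bundle on disk, and this patch writes the plugin's schema hash to a "<name>.bbmetadata" sidecar file next to that bundle. On the next load the stored hash is compared with the plugin.schema?.hash value passed in from the integrations index; if they differ, the cached bundle is deleted and fetched again from the object store. A minimal sketch of that compare-and-invalidate flow, where cacheDir and fetchPluginBundle are placeholders invented for the example and only the hash logic mirrors the diff below:

    // Sketch only: cacheDir and fetchPluginBundle are stand-ins, not code from
    // this patch. The hash comparison is the behaviour the patch introduces.
    const fs = require("fs")
    const { join } = require("path")

    async function loadCachedPlugin(cacheDir, name, url, hash, fetchPluginBundle) {
      const bundlePath = join(cacheDir, name)
      const metadataPath = `${bundlePath}.bbmetadata`
      if (fs.existsSync(bundlePath) && fs.existsSync(metadataPath)) {
        const cachedHash = fs.readFileSync(metadataPath, "utf8")
        if (cachedHash === hash) {
          return require(bundlePath) // hash unchanged, reuse the cached bundle
        }
        fs.unlinkSync(bundlePath) // hash changed, force a re-download
      }
      const content = await fetchPluginBundle(url) // e.g. a fetch from the object store
      fs.writeFileSync(bundlePath, content)
      fs.writeFileSync(metadataPath, hash)
      return require(bundlePath)
    }

Unlike the patched code, the sketch also checks that the .bbmetadata file exists before reading it, which covers bundles that were cached before this change.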
--- packages/server/src/integrations/index.ts | 6 +++++- packages/server/src/utilities/fileSystem/index.js | 12 ++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index b9523061a4..7b730a1764 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -93,7 +93,11 @@ module.exports = { for (let plugin of plugins) { if (plugin.name === integration) { // need to use commonJS require due to its dynamic runtime nature - return getDatasourcePlugin(plugin.name, plugin.jsUrl) + return getDatasourcePlugin( + plugin.name, + plugin.jsUrl, + plugin.schema?.hash + ) } } }, diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index f937f172b5..096212864c 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -351,13 +351,20 @@ exports.extractPluginTarball = async file => { return { metadata, directory: path } } -exports.getDatasourcePlugin = async (name, url) => { +exports.getDatasourcePlugin = async (name, url, hash) => { if (!fs.existsSync(DATASOURCE_PATH)) { fs.mkdirSync(DATASOURCE_PATH) } const filename = join(DATASOURCE_PATH, name) + const metadataName = `${filename}.bbmetadata` if (fs.existsSync(filename)) { - return require(filename) + const currentHash = fs.readFileSync(metadataName, "utf8") + // if hash is the same return the file, otherwise remove it and re-download + if (currentHash === hash) { + return require(filename) + } else { + fs.unlinkSync(filename) + } } const fullUrl = checkSlashesInUrl( `${env.MINIO_URL}/${ObjectStoreBuckets.PLUGINS}/${url}` @@ -366,6 +373,7 @@ exports.getDatasourcePlugin = async (name, url) => { if (response.status === 200) { const content = await response.text() fs.writeFileSync(filename, content) + fs.writeFileSync(metadataName, hash) require(filename) } else { throw new Error( From 65659225e160b0d36b82b18cdfd6cad94726b245 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Thu, 18 Aug 2022 18:23:07 +0100 Subject: [PATCH 4/4] Adding management of the new README.md file that is retrieved with the plugin skeletons. 
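In practical terms: README.md.hbs joins HBS_FILES, so the README retrieved with the skeleton tarball goes through the same Handlebars pass as package.json.hbs and schema.json.hbs, and fleshOutSkeleton() now takes the plugin type as its first argument. A rough sketch of how one of those templates gets rendered and renamed, in the spirit of fleshOutSkeleton(); the rendering call itself is not shown in the hunks below, so the { name, description, version } context is an assumption:

    // Sketch only: renders one skeleton template and swaps it for the
    // generated file. The context fields are assumed, not taken from the diff.
    const fs = require("fs")
    const { join } = require("path")
    const { processStringSync } = require("@budibase/string-templates")

    function renderSkeletonFile(pluginDir, hbsFile, context) {
      const templatePath = join(pluginDir, hbsFile) // e.g. README.md.hbs
      const outputPath = join(pluginDir, hbsFile.substring(0, hbsFile.length - 4)) // README.md
      const rendered = processStringSync(fs.readFileSync(templatePath, "utf8"), context)
      fs.writeFileSync(outputPath, rendered)
      fs.unlinkSync(templatePath) // keep only the generated file
    }

    // Hypothetical usage once getSkeleton() has unpacked the tarball:
    // renderSkeletonFile("my-plugin", "README.md.hbs", { name: "my-plugin", description: "Example plugin", version: "1.0.0" })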
--- packages/cli/src/plugins/index.js | 2 +- packages/cli/src/plugins/skeleton.js | 4 ++-- packages/server/src/utilities/fileSystem/index.js | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/plugins/index.js b/packages/cli/src/plugins/index.js index ac931e0d22..643406bc4d 100644 --- a/packages/cli/src/plugins/index.js +++ b/packages/cli/src/plugins/index.js @@ -48,7 +48,7 @@ async function init(opts) { // get the skeleton console.log(info("Retrieving project...")) await getSkeleton(type, name) - await fleshOutSkeleton(name, desc, version) + await fleshOutSkeleton(type, name, desc, version) console.log(info("Installing dependencies...")) await runPkgCommand("install", join(process.cwd(), name)) console.log(info(`Plugin created in directory "${name}"`)) diff --git a/packages/cli/src/plugins/skeleton.js b/packages/cli/src/plugins/skeleton.js index a1d9101c6f..76b9aa2d8a 100644 --- a/packages/cli/src/plugins/skeleton.js +++ b/packages/cli/src/plugins/skeleton.js @@ -6,7 +6,7 @@ const { join } = require("path") const tar = require("tar") const { processStringSync } = require("@budibase/string-templates") -const HBS_FILES = ["package.json.hbs", "schema.json.hbs"] +const HBS_FILES = ["package.json.hbs", "schema.json.hbs", "README.md.hbs"] async function getSkeletonUrl(type) { const resp = await fetch( @@ -40,7 +40,7 @@ exports.getSkeleton = async (type, name) => { fs.rmSync(tarballFile) } -exports.fleshOutSkeleton = async (name, description, version) => { +exports.fleshOutSkeleton = async (type, name, description, version) => { for (let file of HBS_FILES) { const oldFile = join(name, file), newFile = join(name, file.substring(0, file.length - 4)) diff --git a/packages/server/src/utilities/fileSystem/index.js b/packages/server/src/utilities/fileSystem/index.js index 096212864c..622c989cb8 100644 --- a/packages/server/src/utilities/fileSystem/index.js +++ b/packages/server/src/utilities/fileSystem/index.js @@ -363,6 +363,7 @@ exports.getDatasourcePlugin = async (name, url, hash) => { if (currentHash === hash) { return require(filename) } else { + console.log(`Updating plugin: ${name}`) fs.unlinkSync(filename) } }
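A closing note on how the series fits together: the PATCH 2/4 message says build and watch should only run inside a plugin directory, and the hunk shown there wires checkInPlugin() into verify(); presumably the build/watch commands lead with the same guard, although those hunks are not included above. An illustrative sketch of that pattern, where buildPlugin and runBuild are invented for the example:

    // Sketch only: reuse the package.json/schema.json guard before a build.
    const fs = require("fs")

    function checkInPlugin() {
      // condensed version of the guard added in packages/cli/src/plugins/index.js
      for (let required of ["package.json", "schema.json"]) {
        if (!fs.existsSync(required)) {
          throw new Error(`Please run in a plugin directory - must contain ${required}`)
        }
      }
    }

    async function buildPlugin(runBuild) {
      checkInPlugin() // fail fast with a clear message outside a plugin directory
      console.log("Building plugin...")
      await runBuild() // e.g. the CLI's runPkgCommand("build", process.cwd())
    }

Once a built .tar.gz lands in PLUGINS_DIR, the watch() module added in PATCH 2/4 picks it up via chokidar and imports it through processPlugin(), and the hash handling from PATCH 3/4 decides whether a datasource plugin's cached bundle needs to be refreshed.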