Update plugins to be versionless — only one version of each plugin exists per name — make plugins self-host only, and add error checking for datasource plugin implementations.

This commit is contained in:
mike12345567 2022-09-02 19:32:15 +01:00
parent 3ba2aaf567
commit a5f475872d
6 changed files with 74 additions and 43 deletions

View File

@ -2,6 +2,7 @@ const { DocumentType, getPluginParams } = require("../../db/utils")
const { getComponentLibraryManifest } = require("../../utilities/fileSystem")
const { getAppDB } = require("@budibase/backend-core/context")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const env = require("../../environment")
exports.fetchAppComponentDefinitions = async function (ctx) {
try {
@ -32,6 +33,8 @@ exports.fetchAppComponentDefinitions = async function (ctx) {
}
}
// for now custom components only supported in self-host
if (env.SELF_HOSTED) {
// Add custom components
const globalDB = getGlobalDB()
const response = await globalDB.allDocs(
@ -49,6 +52,7 @@ exports.fetchAppComponentDefinitions = async function (ctx) {
...plugin.schema.schema,
}
})
}
ctx.body = definitions
} catch (err) {

View File

@ -1,9 +1,10 @@
import { ObjectStoreBuckets } from "../../constants"
import { extractPluginTarball } from "../../utilities/fileSystem"
import { extractPluginTarball, loadJSFile } from "../../utilities/fileSystem"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { generatePluginID, getPluginParams } from "../../db/utils"
import { uploadDirectory } from "@budibase/backend-core/objectStore"
import { PluginType, FileType } from "@budibase/types"
import env from "../../environment"
export async function getPlugins(type?: PluginType) {
const db = getGlobalDB()
@ -49,6 +50,9 @@ export async function fetch(ctx: any) {
// TODO: unimplemented stub — deleting a plugin is not yet supported; does nothing with ctx
export async function destroy(ctx: any) {}
export async function processPlugin(plugin: FileType) {
if (!env.SELF_HOSTED) {
throw new Error("Plugins not supported outside of self-host.")
}
const db = getGlobalDB()
const { metadata, directory } = await extractPluginTarball(plugin)
const version = metadata.package.version,
@ -56,7 +60,7 @@ export async function processPlugin(plugin: FileType) {
description = metadata.package.description
// first open the tarball into tmp directory
const bucketPath = `${name}/${version}/`
const bucketPath = `${name}/`
const files = await uploadDirectory(
ObjectStoreBuckets.PLUGINS,
directory,
@ -66,8 +70,20 @@ export async function processPlugin(plugin: FileType) {
if (!jsFile) {
throw new Error(`Plugin missing .js file.`)
}
// validate the JS for a datasource
if (metadata.schema.type === PluginType.DATASOURCE) {
const js = loadJSFile(directory, jsFile.name)
// TODO: this isn't safe - but we need full node environment
// in future we should do this in a thread for safety
try {
eval(js)
} catch (err: any) {
const message = err?.message ? err.message : JSON.stringify(err)
throw new Error(`JS invalid: ${message}`)
}
}
const jsFileName = jsFile.name
const pluginId = generatePluginID(name, version)
const pluginId = generatePluginID(name)
// overwrite existing docs entirely if they exist
let rev
@ -80,10 +96,10 @@ export async function processPlugin(plugin: FileType) {
const doc = {
_id: pluginId,
_rev: rev,
...metadata,
name,
version,
description,
...metadata,
jsUrl: `${bucketPath}${jsFileName}`,
}
const response = await db.put(doc)

View File

@ -371,8 +371,8 @@ exports.getMemoryViewParams = (otherProps = {}) => {
return getDocParams(DocumentType.MEM_VIEW, null, otherProps)
}
exports.generatePluginID = (name, version) => {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}${SEPARATOR}${version}`
/**
 * Build the database document ID for a plugin from its name.
 * Plugins are versionless, so the name alone identifies the document.
 */
exports.generatePluginID = name => [DocumentType.PLUGIN, name].join(SEPARATOR)
/**

View File

@ -66,13 +66,12 @@ if (environment.SELF_HOSTED) {
DEFINITIONS[SourceName.GOOGLE_SHEETS] = googlesheets.schema
}
// TODO: unimplemented stub — currently ignores its argument and returns undefined for every integration
function isIntegrationAvailable(integration: string) {}
module.exports = {
getDefinitions: async () => {
const pluginSchemas: { [key: string]: Integration } = {}
if (environment.SELF_HOSTED) {
const plugins = await getPlugins(PluginType.DATASOURCE)
// extract the actual schema from each custom
const pluginSchemas: { [key: string]: Integration } = {}
for (let plugin of plugins) {
const sourceId = plugin.name
pluginSchemas[sourceId] = {
@ -80,6 +79,7 @@ module.exports = {
custom: true,
}
}
}
return {
...cloneDeep(DEFINITIONS),
...pluginSchemas,
@ -89,6 +89,7 @@ module.exports = {
if (INTEGRATIONS[integration]) {
return INTEGRATIONS[integration]
}
if (environment.SELF_HOSTED) {
const plugins = await getPlugins(PluginType.DATASOURCE)
for (let plugin of plugins) {
if (plugin.name === integration) {
@ -100,5 +101,7 @@ module.exports = {
)
}
}
}
throw new Error("No datasource implementation found.")
},
}

View File

@ -103,6 +103,13 @@ exports.loadHandlebarsFile = path => {
return fs.readFileSync(path, "utf8")
}
/**
 * Read a JS file from disk and return its contents as a UTF-8 string.
 * Same behaviour as loadHandlebarsFile above, but takes the directory and
 * file name separately and joins them into the full path.
 * @param directory the directory containing the file
 * @param name the file name within that directory
 * @returns the file contents decoded as UTF-8
 */
exports.loadJSFile = (directory, name) => {
return fs.readFileSync(join(directory, name), "utf8")
}
/**
* When return a file from the API need to write the file to the system temporarily so we
* can create a read stream to send.

View File

@ -29,8 +29,9 @@ export function watch() {
const name = split[split.length - 1]
console.log("Importing plugin:", path)
await processPlugin({ name, path })
} catch (err) {
console.log("Failed to import plugin:", err)
} catch (err: any) {
const message = err?.message ? err?.message : err
console.error("Failed to import plugin:", message)
}
})
})