Adding plugin upload API - takes a file via form-data, extracts it, uploads the contents to MinIO and stores metadata about the plugin in CouchDB.
parent bab20e5744
commit b7116ccc18
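For context before the diff: a minimal sketch of how the new endpoint could be exercised from a Node 18+ script. Only the POST /api/plugin/upload route and the "file" form field come from this commit; the host, port and auth cookie below are placeholders, not part of the change.

import fs from "fs"
import path from "path"

// Hypothetical caller - host, port and cookie values are assumptions.
async function uploadPlugin(tarballPath: string) {
  const form = new FormData()
  // the controller reads ctx.request.files.file, so the field name must be "file"
  const tarball = new Blob([fs.readFileSync(tarballPath)])
  form.append("file", tarball, path.basename(tarballPath))
  const res = await fetch("http://localhost:4001/api/plugin/upload", {
    method: "POST",
    headers: { cookie: "<builder session cookie>" },
    body: form,
  })
  if (!res.ok) {
    throw new Error(`Upload failed: ${await res.text()}`)
  }
}

uploadPlugin("./my-plugin.tar.gz").catch(console.error)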
@@ -50,6 +50,7 @@ const env = {
   GLOBAL_BUCKET_NAME: process.env.GLOBAL_BUCKET_NAME || "global",
   GLOBAL_CLOUD_BUCKET_NAME:
     process.env.GLOBAL_CLOUD_BUCKET_NAME || "prod-budi-tenant-uploads",
+  PLUGIN_BUCKET_NAME: process.env.PLUGIN_BUCKET_NAME || "plugins",
   USE_COUCH: process.env.USE_COUCH || true,
   DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
   DEFAULT_LICENSE: process.env.DEFAULT_LICENSE,
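Note: the plugin bucket name defaults to "plugins"; a deployment that needs a different bucket can override it by setting PLUGIN_BUCKET_NAME in the service environment.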
@@ -57,7 +57,11 @@ function publicPolicy(bucketName: any) {
   }
 }
 
-const PUBLIC_BUCKETS = [ObjectStoreBuckets.APPS, ObjectStoreBuckets.GLOBAL]
+const PUBLIC_BUCKETS = [
+  ObjectStoreBuckets.APPS,
+  ObjectStoreBuckets.GLOBAL,
+  ObjectStoreBuckets.PLUGINS,
+]
 
 /**
  * Gets a connection to the object store using the S3 SDK.
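Note: adding ObjectStoreBuckets.PLUGINS to PUBLIC_BUCKETS means the public read policy created by publicPolicy is also applied to the plugin bucket, presumably so that uploaded plugin bundles can later be fetched directly via their stored jsUrl.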
@@ -8,6 +8,7 @@ exports.ObjectStoreBuckets = {
   TEMPLATES: env.TEMPLATES_BUCKET_NAME,
   GLOBAL: env.GLOBAL_BUCKET_NAME,
   GLOBAL_CLOUD: env.GLOBAL_CLOUD_BUCKET_NAME,
+  PLUGINS: env.PLUGIN_BUCKET_NAME,
 }
 
 exports.budibaseTempDir = function () {
@@ -139,6 +139,7 @@
     "snowflake-promise": "^4.5.0",
     "svelte": "3.49.0",
     "swagger-parser": "10.0.3",
+    "tar": "^6.1.11",
     "to-json-schema": "0.2.5",
     "uuid": "3.3.2",
     "validate.js": "0.13.1",
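Note: the new "tar" dependency backs the extractPluginTarball helper added to the file system utilities further down in this commit.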
@@ -0,0 +1,62 @@
+import { ObjectStoreBuckets } from "../../constants"
+import { extractPluginTarball } from "../../utilities/fileSystem"
+import { getGlobalDB } from "@budibase/backend-core/tenancy"
+import { generatePluginID } from "../../db/utils"
+import { uploadDirectory } from "@budibase/backend-core/objectStore"
+
+export async function upload(ctx: any) {
+  const plugins =
+    ctx.request.files.file.length > 1
+      ? Array.from(ctx.request.files.file)
+      : [ctx.request.files.file]
+  const db = getGlobalDB()
+  try {
+    // can do single or multiple plugins
+    for (let plugin of plugins) {
+      const { metadata, directory } = await extractPluginTarball(plugin)
+      const version = metadata.package.version,
+        name = metadata.package.name,
+        description = metadata.package.description
+
+      // first open the tarball into tmp directory
+      const bucketPath = `${name}/${version}/`
+      const files = await uploadDirectory(
+        ObjectStoreBuckets.PLUGINS,
+        directory,
+        bucketPath
+      )
+      const jsFile = files.find((file: any) => file.name.endsWith(".js"))
+      if (!jsFile) {
+        throw new Error(`Plugin missing .js file.`)
+      }
+      const jsFileName = jsFile.name
+      const pluginId = generatePluginID(name, version)
+      let existing
+      try {
+        existing = await db.get(pluginId)
+      } catch (err) {
+        existing = null
+      }
+      if (existing) {
+        throw new Error(
+          `Plugin already exists: name: ${name}, version: ${version}`
+        )
+      }
+      await db.put({
+        _id: pluginId,
+        name,
+        version,
+        description,
+        ...metadata,
+        jsUrl: `${bucketPath}${jsFileName}`,
+      })
+    }
+  } catch (err: any) {
+    const errMsg = err?.message ? err?.message : err
+    ctx.throw(400, `Failed to import plugin: ${errMsg}`)
+  }
+}
+
+export async function fetch(ctx: any) {}
+
+export async function destroy(ctx: any) {}
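To make the db.put call above concrete, here is a sketch of the document it would store for a plugin named "comment-box" at version 1.0.0. All values are illustrative, and the underscore separator assumes SEPARATOR in db/utils is the usual "_":

// Hypothetical resulting CouchDB document - field values are examples only
const storedPluginDoc = {
  _id: "plg_comment-box_1.0.0", // generatePluginID(name, version)
  name: "comment-box",
  version: "1.0.0",
  description: "Example plugin description",
  package: { /* parsed package.json from the tarball */ },
  schema: { /* parsed schema.json from the tarball */ },
  jsUrl: "comment-box/1.0.0/plugin.min.js", // bucketPath + jsFileName
}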
@@ -24,6 +24,7 @@ import metadataRoutes from "./metadata"
 import devRoutes from "./dev"
 import cloudRoutes from "./cloud"
 import migrationRoutes from "./migrations"
+import pluginRoutes from "./plugin"
 
 export { default as staticRoutes } from "./static"
 export { default as publicRoutes } from "./public"
@@ -57,4 +58,5 @@ export const mainRoutes = [
   tableRoutes,
   rowRoutes,
   migrationRoutes,
+  pluginRoutes,
 ]
@@ -0,0 +1,13 @@
+import Router from "@koa/router"
+import * as controller from "../controllers/plugin"
+import authorized from "../../middleware/authorized"
+import { BUILDER } from "@budibase/backend-core/permissions"
+
+const router = new Router()
+
+router
+  .post("/api/plugin/upload", authorized(BUILDER), controller.upload)
+  .get("/api/plugin", authorized(BUILDER), controller.fetch)
+  .delete("/api/plugin/:pluginId", authorized(BUILDER), controller.destroy)
+
+export default router
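All three routes sit behind the BUILDER permission, so only builders can upload, list or delete plugins; only upload does real work in this commit, while fetch and destroy are wired to empty stubs for now.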
@@ -42,6 +42,7 @@ const DocumentTypes = {
   MEM_VIEW: "view",
   USER_FLAG: "flag",
   AUTOMATION_METADATA: "meta_au",
+  PLUGIN: "plg",
 }
 
 const InternalTables = {
@@ -370,6 +371,10 @@ exports.getMemoryViewParams = (otherProps = {}) => {
   return getDocParams(DocumentTypes.MEM_VIEW, null, otherProps)
 }
 
+exports.generatePluginID = (name, version) => {
+  return `${DocumentTypes.PLUGIN}${SEPARATOR}${name}${SEPARATOR}${version}`
+}
+
 /**
  * This can be used with the db.allDocs to get a list of IDs
  */
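As an example, generatePluginID("comment-box", "1.0.0") would produce "plg_comment-box_1.0.0" (assuming SEPARATOR is the underscore used for the other document IDs in this file), so each plugin version gets its own document - which is what the duplicate check in the upload controller relies on.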
@@ -25,6 +25,7 @@ const {
 } = require("../../db/utils")
 const MemoryStream = require("memorystream")
 const { getAppId } = require("@budibase/backend-core/context")
+const tar = require("tar")
 
 const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
 const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@@ -321,6 +322,32 @@ exports.cleanup = appIds => {
   }
 }
 
+exports.extractPluginTarball = async file => {
+  if (!file.name.endsWith(".tar.gz")) {
+    throw new Error("Plugin must be compressed into a gzipped tarball.")
+  }
+  const path = join(budibaseTempDir(), file.name.split(".tar.gz")[0])
+  // remove old tmp directories automatically - don't combine
+  if (fs.existsSync(path)) {
+    fs.rmSync(path, { recursive: true, force: true })
+  }
+  fs.mkdirSync(path)
+  await tar.extract({
+    file: file.path,
+    C: path,
+  })
+  let metadata = {}
+  try {
+    const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
+    const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
+    metadata.schema = JSON.parse(schema)
+    metadata.package = JSON.parse(pkg)
+  } catch (err) {
+    throw new Error("Unable to process schema.json/package.json in plugin.")
+  }
+  return { metadata, directory: path }
+}
+
 /**
  * Full function definition for below can be found in the utilities.
  */
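Putting this helper and the upload controller together, a valid plugin archive would need to look roughly like the following; file names other than package.json and schema.json are illustrative:

my-plugin.tar.gz
  package.json   - name, version and description are read from here
  schema.json    - parsed into metadata.schema
  plugin.min.js  - at least one .js file is required; it becomes the stored jsUrl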
File diff suppressed because it is too large