plugin create npm

NEOLPAR 2022-08-30 21:37:08 +01:00
parent 700614f3e6
commit 7bd7c7a868
6 changed files with 209 additions and 12 deletions

View File

@@ -18,14 +18,51 @@
Upload: ["Upload"],
}
let file
-let sourceValue = "Upload"
+let sourceValue = "NPM"
let typeValue = "Datasource"
let nameValue
let dynamicValues = {}
let verificationSuccessful = false
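// dynamicValues collects the per-source inputs (URL plus any token or header), keyed by their field labels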
async function save() {
const source = sourceValue.toLocaleLowerCase()
const url = dynamicValues["URL"]
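// File uploads go straight to uploadPlugin; Github, URL and NPM sources call the new createPlugin store action with their source-specific auth value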
switch (source) {
case "upload":
if (file) {
await plugins.uploadPlugin(file, sourceValue)
}
break
case "github":
await plugins.createPlugin(
typeValue,
source,
nameValue,
url,
dynamicValues["Github Token"]
)
break
case "url":
await plugins.createPlugin(
typeValue,
source,
nameValue,
url,
dynamicValues["Header"]
)
break
case "npm":
await plugins.createPlugin(
typeValue,
source,
nameValue,
url,
dynamicValues["NPM Token"]
)
break
}
}
function verify() {
@@ -45,7 +82,7 @@
<div class="form-row">
<Label size="M">Type</Label>
<Select
-value="Datasource"
+bind:value={typeValue}
placeholder={null}
options={["Component", "Datasource"]}
/>
@@ -61,7 +98,7 @@
<div class="form-row">
<Label size="M">Name</Label>
-<Input />
+<Input bind:value={nameValue} />
</div>
{#each authOptions[sourceValue] as option}
{#if option === "Upload"}
@@ -82,7 +119,7 @@
{:else}
<div class="form-row">
<Label size="M">{option}</Label>
-<Input />
+<Input bind:value={dynamicValues[option]} />
</div>
{/if}
{/each}

View File

@@ -17,6 +17,41 @@ export function createPluginsStore() {
})
}
async function createPlugin(type, source, name, url, auth) {
let pluginData = {
type,
source,
name,
url,
}
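// Attach the auth value under the field name the server expects for this source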
switch (source) {
case "github":
pluginData.githubToken = auth
break
case "url":
pluginData.header = auth
break
case "npm":
pluginData.npmToken = auth
break
}
let resp = await API.createPlugin(pluginData)
console.log(resp)
// TODO_RIC
// let newPlugin = resp.plugins[0]
// update(state => {
// const currentIdx = state.findIndex(plugin => plugin._id === newPlugin._id)
// if (currentIdx >= 0) {
// state.splice(currentIdx, 1, newPlugin)
// } else {
// state.push(newPlugin)
// }
// return state
// })
}
async function uploadPlugin(file, source) {
let data = new FormData()
data.append("file", file)
@@ -35,6 +70,7 @@ export function createPluginsStore() {
return {
subscribe,
load,
createPlugin,
deletePlugin,
uploadPlugin,
}

View File

@@ -11,6 +11,16 @@ export const buildPluginEndpoints = API => ({
})
},
/**
* Creates a plugin from URL, Github or NPM
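* by POSTing the plugin details (type, source, name, url and any auth) to the server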
*/
createPlugin: async data => {
return await API.post({
url: `/api/plugin`,
body: data,
})
},
/**
* Gets a list of all plugins
*/

View File

@@ -1,5 +1,9 @@
import { ObjectStoreBuckets } from "../../constants"
-import { extractPluginTarball } from "../../utilities/fileSystem"
+import {
+extractPluginTarball,
+npmPlugin,
+getPluginMetadata,
+} from "../../utilities/fileSystem"
import { getGlobalDB } from "@budibase/backend-core/tenancy"
import { generatePluginID, getPluginParams } from "../../db/utils"
import { uploadDirectory } from "@budibase/backend-core/objectStore"
@@ -39,10 +43,48 @@ export async function upload(ctx: any) {
}
} catch (err: any) {
const errMsg = err?.message ? err?.message : err
ctx.throw(400, `Failed to import plugin: ${errMsg}`)
}
}
export async function create(ctx: any) {
const { type, source, name, url, header, githubToken, npmToken } =
ctx.request.body
let metadata
let directory
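// Work in progress: only the npm source is resolved so far; the github and url branches are placeholders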
switch (source) {
case "npm":
// const { metadata: metadataNpm, directory: directoryNpm } = await npmPlugin(url, name)
// metadata = metadataNpm
// directory = directoryNpm
console.log(22222, await getPluginMetadata(await npmPlugin(url, name)))
break
case "github":
console.log("github")
break
case "url":
console.log("url")
break
}
// try {
// const doc = storePlugin(metadata, directory, source)
//
// ctx.body = {
// message: "Plugin uploaded successfully",
// plugins: doc,
// }
// } catch (err: any) {
// const errMsg = err?.message ? err?.message : err
//
// ctx.throw(400, `Failed to import plugin: ${errMsg}`)
// }
ctx.status = 200
}
export async function fetch(ctx: any) {
ctx.body = await getPlugins()
}
@@ -54,9 +96,12 @@ export async function destroy(ctx: any) {
ctx.status = 200
}
-export async function processPlugin(plugin: FileType, source?: string) {
+export async function storePlugin(
+metadata: any,
+directory: any,
+source?: string
+) {
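// storePlugin now takes pre-extracted metadata and a directory so sources other than a direct file upload can reuse it (see processPlugin below)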
const db = getGlobalDB()
-const { metadata, directory } = await extractPluginTarball(plugin)
const version = metadata.package.version,
name = metadata.package.name,
description = metadata.package.description
@@ -99,3 +144,8 @@ export async function processPlugin(plugin: FileType, source?: string) {
_rev: response.rev,
}
}
export async function processPlugin(plugin: FileType, source?: string) {
const { metadata, directory } = await extractPluginTarball(plugin)
return await storePlugin(metadata, directory, source)
}

View File

@@ -7,6 +7,7 @@ const router = new Router()
router
.post("/api/plugin/upload/:source", authorized(BUILDER), controller.upload)
.post("/api/plugin", authorized(BUILDER), controller.create)
.get("/api/plugin", authorized(BUILDER), controller.fetch)
.delete(
"/api/plugin/:pluginId/:pluginRev",

View File

@@ -1,6 +1,9 @@
const { budibaseTempDir } = require("../budibaseDir")
const fs = require("fs")
const { join } = require("path")
const { promisify } = require("util")
// const exec = promisify(require("child_process").exec)
const streamPipeline = promisify(require("stream").pipeline)
const uuid = require("uuid/v4")
const {
doWithDB,
@@ -29,6 +32,7 @@ const MemoryStream = require("memorystream")
const { getAppId } = require("@budibase/backend-core/context")
const tar = require("tar")
const fetch = require("node-fetch")
// const fileType = require("file-type")
const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@@ -326,11 +330,11 @@ exports.cleanup = appIds => {
}
}
-exports.extractPluginTarball = async file => {
-if (!file.name.endsWith(".tar.gz")) {
+const extractPluginTarball = async (file, ext = ".tar.gz") => {
+if (!file.name.endsWith(ext)) {
throw new Error("Plugin must be compressed into a gzipped tarball.")
}
-const path = join(budibaseTempDir(), file.name.split(".tar.gz")[0])
+const path = join(budibaseTempDir(), file.name.split(ext)[0])
// remove old tmp directories automatically - don't combine
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
@@ -340,6 +344,63 @@ exports.extractPluginTarball = async file => {
file: file.path,
C: path,
})
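// Metadata parsing now lives in getPluginMetadata so the npm flow can reuse it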
return await getPluginMetadata(path)
}
exports.extractPluginTarball = extractPluginTarball
exports.npmPlugin = async (url, name = "") => {
let npmTarball = url
let filename = name
let path = join(budibaseTempDir(), name)
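// If the URL isn't a direct tarball link, resolve the package through the npm registry and use the latest version's tarball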
if (!npmTarball.includes(".tgz")) {
const npmPackageURl = url.replace(
"https://www.npmjs.com/package/",
"https://registry.npmjs.org/"
)
const response = await fetch(npmPackageURl)
if (response.status === 200) {
let npmDetails = await response.json()
filename = npmDetails.name
path = join(budibaseTempDir(), filename)
const npmVersion = npmDetails["dist-tags"].latest
npmTarball = npmDetails.versions[npmVersion].dist.tarball
} else {
throw "Cannot get package details"
}
}
try {
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true, force: true })
}
fs.mkdirSync(path)
const response = await fetch(npmTarball)
if (!response.ok)
throw new Error(`Loading NPM plugin failed ${response.statusText}`)
// Stream the tarball straight into the temp directory and wait for the
// extraction to finish before returning the path
await streamPipeline(
response.body,
tar.x({
strip: 1,
C: path,
})
)
} catch (e) {
throw new Error(`Cannot store package locally: ${e.message}`)
}
return path
}
const getPluginMetadata = async path => {
let metadata = {}
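// Load the plugin's package.json (and schema.json) from the extracted directory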
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
@@ -349,8 +410,10 @@ exports.extractPluginTarball = async file => {
} catch (err) {
throw new Error("Unable to process schema.json/package.json in plugin.")
}
return { metadata, directory: path }
}
exports.getPluginMetadata = getPluginMetadata
exports.getDatasourcePlugin = async (name, url, hash) => {
if (!fs.existsSync(DATASOURCE_PATH)) {