uploading npm and url plugins
parent 7bd7c7a868
commit ac33190ff0
@@ -11,10 +11,10 @@
 import { plugins } from "stores/portal"

 let authOptions = {
-  NPM: ["NPM Token", "URL"],
+  NPM: ["URL"],
   Github: ["Github Token", "URL"],
-  URL: ["Header", "URL"],
-  File: ["Path", "Header"],
+  URL: ["Headers", "URL"],
+  File: ["Path", "Headers"],
   Upload: ["Upload"],
 }
 let file
@@ -50,17 +50,11 @@
         source,
         nameValue,
         url,
-        dynamicValues["Header"]
+        dynamicValues["Headers"]
       )
       break
     case "npm":
-      await plugins.createPlugin(
-        typeValue,
-        source,
-        nameValue,
-        url,
-        dynamicValues["NPM Token"]
-      )
+      await plugins.createPlugin(typeValue, source, nameValue, url)
       break
   }
 }
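For reference, a minimal sketch of how the builder now calls the store action after this change; the concrete values (plugin type, name, URL, header) are illustrative and not taken from this commit:

// Stand-ins for the modal's form state
const typeValue = "component" // assumed type value, for illustration only
const nameValue = "my-plugin"
const url = "https://example.com/my-plugin.tar.gz"
const dynamicValues = { Headers: { Authorization: "Bearer <token>" } }

// "url" source: the auth argument is the Headers field collected by the modal
await plugins.createPlugin(typeValue, "url", nameValue, url, dynamicValues["Headers"])

// "npm" source: no token is sent any more, only the package URL
await plugins.createPlugin(typeValue, "npm", nameValue, url)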
@@ -17,7 +17,7 @@ export function createPluginsStore() {
     })
   }

-  async function createPlugin(type, source, name, url, auth) {
+  async function createPlugin(type, source, name, url, auth = null) {
     let pluginData = {
       type,
       source,
@@ -26,19 +26,16 @@ export function createPluginsStore() {
     }

     switch (source) {
-      case "github":
-        pluginData.githubToken = auth
-        break
       case "url":
-        pluginData.header = auth
+        pluginData.headers = auth
         break
       case "npm":
         pluginData.npmToken = auth
         break
     }

-    let resp = await API.createPlugin(pluginData)
-    console.log(resp)
+    let res = await API.createPlugin(pluginData)
+    console.log("RESP", res)
     // TODO_RIC
     // let newPlugin = resp.plugins[0]
     // update(state => {
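For reference, the payloads this now produces for the two remaining auth-carrying sources, assuming name and url are also included in pluginData (the object literal is truncated in the hunk above); all values are illustrative:

// source "url": the auth argument ends up on pluginData.headers
const urlPluginData = {
  type: "component", // assumed type value, for illustration only
  source: "url",
  name: "my-plugin",
  url: "https://example.com/my-plugin.tar.gz",
  headers: { Authorization: "Bearer <token>" },
}

// source "npm": auth now defaults to null, so npmToken is sent as null
const npmPluginData = {
  type: "component",
  source: "npm",
  name: "my-plugin",
  url: "https://www.npmjs.com/package/my-plugin",
  npmToken: null,
}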
@@ -1,8 +1,8 @@
 import { ObjectStoreBuckets } from "../../constants"
 import {
   extractPluginTarball,
-  npmPlugin,
-  getPluginMetadata,
+  createNpmPlugin,
+  createUrlPlugin,
 } from "../../utilities/fileSystem"
 import { getGlobalDB } from "@budibase/backend-core/tenancy"
 import { generatePluginID, getPluginParams } from "../../db/utils"
@@ -49,39 +49,40 @@ export async function upload(ctx: any) {
 }

 export async function create(ctx: any) {
-  const { type, source, name, url, header, githubToken, npmToken } =
-    ctx.request.body
+  const { type, source, name, url, headers, githubToken } = ctx.request.body
   let metadata
   let directory

   switch (source) {
     case "npm":
-      // const { metadata: metadataNpm, directory: directoryNpm } = await npmPlugin(url, name)
-      // metadata = metadataNpm
-      // directory = directoryNpm
-
-      console.log(22222, await getPluginMetadata(await npmPlugin(url, name)))
+      const { metadata: metadataNpm, directory: directoryNpm } =
+        await createNpmPlugin(url, name)
+      metadata = metadataNpm
+      directory = directoryNpm
       break
     case "github":
       console.log("github")
       break
     case "url":
-      console.log("url")
+      const { metadata: metadataUrl, directory: directoryUrl } =
+        await createUrlPlugin(url, name, headers)
+      metadata = metadataUrl
+      directory = directoryUrl
       break
   }

-  // try {
-  // const doc = storePlugin(metadata, directory, source)
-  //
-  // ctx.body = {
-  // message: "Plugin uploaded successfully",
-  // plugins: doc,
-  // }
-  // } catch (err: any) {
-  // const errMsg = err?.message ? err?.message : err
-  //
-  // ctx.throw(400, `Failed to import plugin: ${errMsg}`)
-  // }
+  try {
+    const doc = storePlugin(metadata, directory, source)
+
+    ctx.body = {
+      message: "Plugin uploaded successfully",
+      plugins: doc,
+    }
+  } catch (err: any) {
+    const errMsg = err?.message ? err?.message : err
+
+    ctx.throw(400, `Failed to import plugin: ${errMsg}`)
+  }
   ctx.status = 200
 }
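For reference, a sketch of the request body the rewritten create handler reads; the route wiring is not part of this diff and the field values are illustrative:

const body = {
  type: "component", // assumed plugin type, for illustration only
  source: "url", // one of "npm" | "github" | "url"
  name: "my-plugin",
  url: "https://example.com/my-plugin.tar.gz",
  headers: { Authorization: "Bearer <token>" }, // only consumed by the "url" branch
}
// On success the handler responds with:
// { message: "Plugin uploaded successfully", plugins: doc }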
@@ -1,9 +1,8 @@
 const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
-// const { promisify } = require("util")
-// const exec = promisify(require("child_process").exec)
-// const streamPipeline = promisify(require("stream"))
+const { promisify } = require("util")
+const streamPipeline = promisify(require("stream").pipeline)
 const uuid = require("uuid/v4")
 const {
   doWithDB,
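For context on the swap above: promisify(stream.pipeline) gives an awaitable call that connects a chain of streams and rejects if any stage fails, which a bare .pipe() chain does not guarantee. A standalone sketch with an unrelated stream chain (file paths are illustrative):

const { promisify } = require("util")
const streamPipeline = promisify(require("stream").pipeline)
const fs = require("fs")
const zlib = require("zlib")

async function gunzipFile(src, dest) {
  // Any error in the read, gunzip, or write stage rejects this single awaited promise
  await streamPipeline(
    fs.createReadStream(src),
    zlib.createGunzip(),
    fs.createWriteStream(dest)
  )
}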
@@ -32,7 +31,6 @@ const MemoryStream = require("memorystream")
 const { getAppId } = require("@budibase/backend-core/context")
 const tar = require("tar")
 const fetch = require("node-fetch")
-// const fileType = require("file-type")

 const TOP_LEVEL_PATH = join(__dirname, "..", "..", "..")
 const NODE_MODULES_PATH = join(TOP_LEVEL_PATH, "node_modules")
@@ -349,10 +347,9 @@ const extractPluginTarball = async (file, ext = ".tar.gz") => {
 }
 exports.extractPluginTarball = extractPluginTarball

-exports.npmPlugin = async (url, name = "") => {
+exports.createNpmPlugin = async (url, name = "") => {
   let npmTarball = url
-  let filename = name
-  let path = join(budibaseTempDir(), name)
+  let pluginName = name

   if (!npmTarball.includes(".tgz")) {
     const npmPackageURl = url.replace(
@@ -362,8 +359,7 @@ exports.npmPlugin = async (url, name = "") => {
     const response = await fetch(npmPackageURl)
     if (response.status === 200) {
       let npmDetails = await response.json()
-      filename = npmDetails.name
-      path = join(budibaseTempDir(), filename)
+      pluginName = npmDetails.name
       const npmVersion = npmDetails["dist-tags"].latest
       npmTarball = npmDetails.versions[npmVersion].dist.tarball
     } else {
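For context: when the supplied URL is not a direct .tgz link, the code above resolves the latest tarball through npm registry metadata. A standalone sketch of that lookup against the public registry, which serves package metadata at https://registry.npmjs.org/<package-name>:

const fetch = require("node-fetch")

async function latestTarballUrl(packageName) {
  const response = await fetch(`https://registry.npmjs.org/${packageName}`)
  if (response.status !== 200) throw new Error(`Package ${packageName} not found`)
  const npmDetails = await response.json()
  const npmVersion = npmDetails["dist-tags"].latest // e.g. "1.0.3"
  return npmDetails.versions[npmVersion].dist.tarball // direct .tgz download URL
}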
@@ -371,36 +367,47 @@ exports.npmPlugin = async (url, name = "") => {
     }
   }

+  return await downloadUnzipPlugin(pluginName, npmTarball)
+}
+
+exports.createUrlPlugin = async (url, name = "", headers = {}) => {
+  if (!url.includes(".tgz") && !url.includes(".tar.gz")) {
+    throw new Error("Plugin must be compressed into a gzipped tarball.")
+  }
+
+  return await downloadUnzipPlugin(name, url, headers)
+}
+
+const downloadUnzipPlugin = async (name, url, headers = {}) => {
+  console.log(name, url, headers)
+  const path = join(budibaseTempDir(), name)
   try {
+    // Remove first if exists
     if (fs.existsSync(path)) {
       fs.rmSync(path, { recursive: true, force: true })
     }
     fs.mkdirSync(path)

-    const response = await fetch(npmTarball)
+    const response = await fetch(url, { headers })
     if (!response.ok)
       throw new Error(`Loading NPM plugin failed ${response.statusText}`)

-    // const dest = fs.createWriteStream(`${path}/${filename}.tgz`)
-    await response.body.pipe(
-      await tar.x({
+    await streamPipeline(
+      response.body,
+      tar.x({
         strip: 1,
         C: path,
       })
     )
-    // const readStream = fs.createReadStream(`${path}/${filename}.tgz`)
-    // readStream.pipe(
-
-    // )
+    return await getPluginMetadata(path)
   } catch (e) {
-    throw `Cannot store package locally: ${e.message}`
+    throw `Cannot store plugin locally: ${e.message}`
   }
-
-  return path
 }
+exports.downloadUnzipPlugin = downloadUnzipPlugin

 const getPluginMetadata = async path => {
+  console.log(path)
   let metadata = {}
   try {
     const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
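For context on downloadUnzipPlugin above: node-tar's x() returns a writable stream when no file option is given, so the HTTP response body can be piped straight into it and unpacked on the fly. A standalone sketch of that download-and-extract step (URL, directory, and headers are illustrative):

const { promisify } = require("util")
const streamPipeline = promisify(require("stream").pipeline)
const fs = require("fs")
const fetch = require("node-fetch")
const tar = require("tar")

async function fetchAndExtract(url, dir, headers = {}) {
  fs.mkdirSync(dir, { recursive: true }) // tar needs the target directory to exist
  const response = await fetch(url, { headers })
  if (!response.ok) throw new Error(`Download failed: ${response.statusText}`)
  await streamPipeline(
    response.body,
    tar.x({ strip: 1, C: dir }) // strip the tarball's top-level folder, extract into dir
  )
}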